├── .editorconfig ├── .gitattributes ├── .github └── workflows │ ├── cdp.yml │ ├── ci.yml │ ├── sca.yml │ └── stale.yml ├── .gitignore ├── .husky └── pre-commit ├── .npmignore ├── .nvmrc ├── .prettierignore ├── .prettierrc.json ├── LICENSE ├── README.md ├── contribute.md ├── eslint.config.mjs ├── package-lock.json ├── package.json ├── src ├── data │ ├── index.ts │ └── profane-words.ts ├── index.ts ├── models │ ├── censor-type.ts │ ├── index.ts │ └── list.ts ├── profanity-options.ts ├── profanity.ts ├── tools │ ├── benchmark │ │ ├── Dockerfile │ │ ├── benchmark-interfaces.ts │ │ ├── benchmark.ts │ │ ├── docker-compose.yml │ │ ├── results.md │ │ └── test-data.json │ └── translate │ │ ├── docker-compose.yml │ │ └── translate.ts └── utils │ ├── index.ts │ └── misc.ts ├── supported-languages.md ├── tests ├── import.spec.ts ├── profanity-censor.spec.ts ├── profanity-exists.spec.ts ├── profanity-languages.spec.ts ├── profanity-options.spec.ts ├── profanity.spec.ts └── require.spec.ts └── tsconfig.json /.editorconfig: -------------------------------------------------------------------------------- 1 | # Editor configuration, see http://editorconfig.org 2 | root = true 3 | 4 | [*] 5 | charset = utf-8 6 | indent_style = space 7 | indent_size = 2 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | end_of_line = lf 11 | 12 | [*.md] 13 | max_line_length = off 14 | trim_trailing_whitespace = false 15 | insert_final_newline = false 16 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /.github/workflows/cdp.yml: -------------------------------------------------------------------------------- 1 | # This Continuous Deployment (CDP) workflow publishes a package to NPM when a release is created 2 | 3 | name: CDP 4 | 5 | on: 6 | 
release: 7 | types: [created] 8 | 9 | jobs: 10 | continuous-deployment: 11 | name: Continuous Deployment 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout Repository 15 | uses: actions/checkout@v4 16 | 17 | - name: Get Node Version 18 | run: echo "NODE_VERSION=$(cat .nvmrc)" >> $GITHUB_ENV 19 | 20 | - name: Set up Node 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version: ${{ env.NODE_VERSION }} 24 | registry-url: https://registry.npmjs.org/ 25 | 26 | - name: Install Dependencies 27 | run: npm ci 28 | 29 | - name: Publish Package 30 | run: npm publish 31 | env: 32 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 33 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This Continuous Integration (CI) pipeline lints, builds, and tests the code when a Pull Request is 2 | # created or a commit is pushed to a Prod branch 3 | 4 | name: CI 5 | on: 6 | push: 7 | branches: ["main", "1.x.x"] 8 | pull_request: 9 | branches: ["main", "1.x.x"] 10 | 11 | jobs: 12 | continuous-integration: 13 | name: Continuous Integration 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - name: Checkout Repository 18 | uses: actions/checkout@v4 19 | 20 | - name: Get Node Version 21 | run: echo "NODE_VERSION=$(cat .nvmrc)" >> $GITHUB_ENV 22 | 23 | - name: Set up Node 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: ${{ env.NODE_VERSION }} 27 | 28 | - name: Install Dependencies 29 | run: npm ci 30 | 31 | - name: Lint 32 | run: npm run lint 33 | 34 | - name: Unit Tests 35 | run: npm test 36 | -------------------------------------------------------------------------------- /.github/workflows/sca.yml: -------------------------------------------------------------------------------- 1 | # This workflow performs an SCA (i.e., dependency review) scan on pull requests 2 | 3 | name: "Dependency Review" 4 | 5 | on: [pull_request] 6 | 7 | 
permissions: 8 | contents: read 9 | 10 | jobs: 11 | dependency-review: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: "Checkout Repository" 15 | uses: actions/checkout@v4 16 | 17 | - name: "Dependency Review" 18 | uses: actions/dependency-review-action@v4 19 | with: 20 | fail-on-severity: high 21 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: "Close stale issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "0 0 * * *" # midnight 5 | 6 | permissions: 7 | issues: write 8 | pull-requests: write 9 | 10 | jobs: 11 | stale: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/stale@v9 15 | with: 16 | stale-issue-message: "This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 5 days." 17 | stale-pr-message: "This PR is stale because it has been open 45 days with no activity. Remove stale label or comment or this will be closed in 10 days." 18 | close-issue-message: "This issue was automatically closed because it has been stale for 5 days." 19 | close-pr-message: "This PR was automatically closed because it has been stale for 10 days." 
20 | days-before-issue-stale: 30 21 | days-before-pr-stale: 45 22 | days-before-issue-close: 5 23 | days-before-pr-close: 10 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # compiled output 2 | /dist 3 | .eslintcache 4 | 5 | # dependencies 6 | /node_modules 7 | 8 | # IDE - VSCode 9 | .vscode/* 10 | !.vscode/settings.json 11 | !.vscode/tasks.json 12 | !.vscode/launch.json 13 | !.vscode/extensions.json 14 | 15 | # System Files 16 | .DS_Store 17 | Thumbs.db 18 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | npx lint-staged 2 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .github/ 2 | .husky/ 3 | src/ 4 | tests/ 5 | .editorconfig 6 | .eslintcache 7 | .gitattributes 8 | .gitignore 9 | .nvmrc 10 | .prettierignore 11 | .prettierrc.json 12 | eslint.config.mjs 13 | tsconfig.json 14 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 20.17.0 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | .husky 3 | dist 4 | node_modules 5 | .eslintcache 6 | .gitattributes 7 | .gitignore 8 | .npmignore 9 | .nvmrc 10 | .prettierignore 11 | *.md 12 | Dockerfile 13 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 150, 3 | "proseWrap": "never", 4 | "tabWidth": 2 5 | 
} 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 2Toad 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Profanity 🧼 2 | 3 | ![GitHub Release](https://img.shields.io/github/v/release/2Toad/Profanity) 4 | [![Downloads](https://img.shields.io/npm/dm/@2toad/profanity.svg)](https://www.npmjs.com/package/@2toad/profanity) 5 | [![Build status](https://github.com/2toad/profanity/actions/workflows/ci.yml/badge.svg)](https://github.com/2Toad/Profanity/actions/workflows/nodejs.yml) 6 | 7 | A multi-language profanity filter with full TypeScript support 8 | 9 | ## Getting Started 10 | 11 | Install the package 12 | 13 | ```Shell 14 | npm i @2toad/profanity 15 | ``` 16 | 17 | >If you're using Node 11.x or older, you'll need to install [Profanity 1.x](https://github.com/2Toad/Profanity/releases) 18 | 19 | ## Usage 20 | 21 | ```JavaScript 22 | import { profanity, CensorType } from '@2toad/profanity'; 23 | // or 24 | const { profanity, CensorType } = require('@2toad/profanity'); 25 | ``` 26 | 27 | ```JavaScript 28 | profanity.exists('I like big butts and I cannot lie'); 29 | // true 30 | 31 | profanity.exists('I like big glutes and I cannot lie'); 32 | // false 33 | 34 | profanity.censor('I like big butts (aka arses) and I cannot lie'); 35 | // I like big @#$%&! (aka @#$%&!) and I cannot lie 36 | 37 | profanity.censor('I like big butts (aka arses) and I cannot lie', CensorType.FirstChar); 38 | // I like big *utts (aka *rses) and I cannot lie 39 | ``` 40 | 41 | ## Options 42 | Create an instance of the Profanity class to change the default options: 43 | 44 | ```JavaScript 45 | import { Profanity } from '@2toad/profanity'; 46 | 47 | const profanity = new Profanity({ 48 | languages: ['de'], 49 | wholeWord: false, 50 | grawlix: '*****', 51 | grawlixChar: '$', 52 | }); 53 | ``` 54 | 55 | ### languages 56 | 57 | By default, this is set to `['en']` (English). 
You can change the default to any [supported language](./supported-languages.md), including multiple languages: 58 | 59 | ```JavaScript 60 | const profanity = new Profanity({ 61 | languages: ['en', 'de'], 62 | }); 63 | ``` 64 | 65 | You can override this option by specifying the languages in `exists` or `censor`: 66 | 67 | ```JavaScript 68 | profanity.exists('Je suis un connard', ['fr']); 69 | // true 70 | 71 | profanity.censor('I like big butts and je suis un connard', CensorType.Word, ['en', 'de', 'fr']); 72 | // I like big @#$%&! and je suis un @#$%&! 73 | ``` 74 | 75 | If no languages are specified in the method call, it will use the languages specified in the options. 76 | 77 | ### wholeWord 78 | 79 | By default, this is set to `true` so profanity only matches on whole words: 80 | ```JavaScript 81 | profanity.exists('Arsenic is poisonous but not profane'); 82 | // false 83 | ``` 84 | 85 | Setting this to `false`, results in partial word matches: 86 | ```JavaScript 87 | profanity.exists('Arsenic is poisonous but not profane'); 88 | // true (matched on arse) 89 | ``` 90 | 91 | #### Compound Words 92 | Profanity detection works on parts of compound words, rather than treating hyphenated or underscore-separated words as indivisible. 93 | 94 | When `wholeWord` is `true`, each portion of a compound word is analyzed for a match: 95 | ```JavaScript 96 | profanity.exists("Don't be an arsenic-monster"); 97 | // false 98 | 99 | profanity.exists("Don't be an arse-monster"); 100 | // true (matched on arse) 101 | ``` 102 | Setting `wholeWord` to `false`, results in partial word matches on each portion of a compound word: 103 | ```JavaScript 104 | profanity.exists("Don't be an arsenic-monster"); 105 | // true (matched on arse) 106 | ``` 107 | 108 | ### grawlix 109 | 110 | By default this is set to `@#$%&!`: 111 | ```JavaScript 112 | profanity.censor('I like big butts and I cannot lie'); 113 | // I like big @#$%&! 
and I cannot lie 114 | ``` 115 | 116 | Setting this to `****`, results in: 117 | ```JavaScript 118 | profanity.censor('I like big butts and I cannot lie'); 119 | // I like big **** and I cannot lie 120 | ``` 121 | 122 | ### grawlixChar 123 | 124 | When specifying a `CensorType` other than `CensorType.Word`, this is the character used by the `censor` function. 125 | 126 | By default this is set to `*`: 127 | ```JavaScript 128 | profanity.censor('I like big butts and I cannot lie', CensorType.AllVowels); 129 | // I like big b*tts and I cannot lie 130 | ``` 131 | 132 | Setting this to `$`, results in: 133 | ```JavaScript 134 | profanity.censor('I like big butts and I cannot lie', CensorType.AllVowels); 135 | // I like big b$tts and I cannot lie 136 | ``` 137 | 138 | ## Customize the word list 139 | 140 | Add words: 141 | ```JavaScript 142 | profanity.addWords(['aardvark', 'zebra']); 143 | ``` 144 | 145 | Remove words: 146 | ```JavaScript 147 | profanity.removeWords(['butt', 'arse']); 148 | ``` 149 | 150 | ## Whitelist 151 | The whitelist allows you to specify words that are always ignored by the profanity filter. 152 | 153 | >This can be useful if you want to enable partial word matching (`wholeWord = false`), so combined words are caught (e.g., arselicker), while specific words you add to the whitelist are ignored (e.g., arsenic). 154 | 155 | Add words to the whitelist: 156 | ```JavaScript 157 | profanity.whitelist.addWords(['arsenic', 'buttress']); 158 | ``` 159 | 160 | Remove words from the whitelist: 161 | ```JavaScript 162 | profanity.whitelist.removeWords(['arsenic', 'buttress']); 163 | ``` 164 | 165 | ## Benchmarking ⏱️ 166 | 167 | To see how Profanity performs, check out our [benchmark results](./src/tools/benchmark/results.md). 168 | 169 | ## Contributing 🤝 170 | 171 | So you want to contribute to the Profanity project? Fantastic! Please read the [Contribute](./contribute.md) doc to get started. 
-------------------------------------------------------------------------------- /contribute.md: -------------------------------------------------------------------------------- 1 | # Contribute to the Profanity project 🤝 2 | 3 | Thank you for wanting to contribute to the Profanity project. With your contributions we can ensure Profanity remains a leading solution for filtering profanity within JavaScript projects. 4 | 5 | ## Steps for success 6 | 7 | 1. [Issues](https://github.com/2Toad/Profanity/issues): 8 | 1. Always work off of an Issue. Please do not submit a Pull Request that is not associated with an Issue (create the Issue if necessary). 9 | 2. If you are beginning work on an Issue, please leave a comment on the issue letting us know, and we'll assign the Issue to you. This way somebody else won't start working on the same Issue. 10 | 2. [Branches](https://github.com/2Toad/Profanity/branches): 11 | 1. We support two versions of Profanity: `1.x.x` and `main`. The `main` branch has features that require Node 12+, but we maintain the `1.x.x` branch for projects that require older versions of Node. 12 | 2. If the Issue you are working on has a `1.x.x` label on it, you must branch off of the `1.x.x` branch. Otherwise, please branch off of `main`. 13 | 3. [Pull Request](https://github.com/2Toad/Profanity/pulls) (PR): 14 | 1. Make sure you run the following scripts in local, and that all of them pass, before submitting a PR: 15 | 1. `npm run lint:fix` 16 | 2. `npm run format` 17 | 3. `npm test` 18 | 2. Make sure your PR is targeting the correct branch (see Step 2.ii) 19 | 3. At the top of your PR description write: "Fixes #_n_". Where _n_ is the number of the Issue your PR is fixing (e.g., `Fixes #33`). This will tell GitHub to associate your PR with the Issue. 
20 | 21 | ## Development 22 | 23 | ### Prerequisites 24 | 25 | - `main` branch: [Node 20+](https://nodejs.org) 26 | - `1.x.x` branch: [Node 10.23.0](https://nodejs.org) 27 | 28 | ### Source Code 29 | 30 | 1. Clone the repo 31 | 2. Change directories: `cd Profanity` 32 | 3. Install dependencies: `npm i` 33 | 34 | ### Workflow 35 | 36 | Start app in watch mode: `npm run local` 37 | 38 | > When file changes are detected, the app will automatically rebuild/restart 39 | 40 | #### Linting 41 | 42 | - Check lint rules: `npm run lint` 43 | - Fix lint errors: `npm run lint:fix` 44 | - Fix formatting errors: `npm run format` 45 | 46 | ## Appendix 47 | 48 | ### Dev Tools 49 | 50 | The following section includes optional dev tools that enhance the Profanity development experience, but are not necessary. 51 | 52 | #### NVM 53 | 54 | The Profanity project includes an .nvmrc file, so you can run `nvm use` to switch to the required version of Node. 55 | 56 | ##### Setup 57 | 58 | 1. Install nvm: https://github.com/nvm-sh/nvm 59 | 2. Install the Node.js version required by this app: `nvm install` 60 | 1. NVM will determine the required Node version using the .nvmrc file 61 | 2. NVM will install the required Node version if it isn't installed 62 | 3. NVM will set your current Node version to the one required 63 | 64 | #### Git Hooks 65 | 66 | The Profanity project includes Husky for running Git Hooks. Running `git commit` will trigger `lint-staged` which will lint all files currently staged in Git. If linting fails, the commit will be cancelled 67 | 68 | ### Dependencies 69 | 70 | - `chai`: we must use v4.x because v5.x is pure ESM, and we require CommonJS modules 71 | 72 | ### Translations 73 | 74 | We utilize a self-hosted instance of the Open Source [LibreTranslate](https://github.com/LibreTranslate/LibreTranslate) lib to translate the core English list of profane words. 75 | 76 | #### Steps to Run Translations 77 | 78 | 1. Open a terminal. 79 | 2. 
Generate translations: `npm run translate`. 80 | 81 | #### Available Languages 82 | 83 | By default, the LibreTranslate service is configured to include all available target languages from [argos-translate](https://github.com/argosopentech/argos-translate). This configuration affects: 84 | 85 | - **Startup Time**: Initial service startup. Depending on your system, this can take ~5 minutes. 86 | - **Translation Time**: Translating across all languages increases processing time. 87 | - **Library Size**: The final size of the Profanity library. 88 | 89 | To optimize performance, we limit the target languages by configuring the `LT_LOAD_ONLY` environment variable: 90 | 91 | ##### Configure Target Languages 92 | 1. Open the [docker-compose.yml](./src/tools/translate/docker-compose.yml) file. 93 | 2. Add a comma-separated list of the [supported language codes](https://github.com/argosopentech/argos-translate/blob/master/argostranslate/languages.csv) you wish to include. Ensure English (`en`) is included, as it serves as the source language. 94 | 95 | **Example Configuration:** 96 | ```yaml 97 | environment: 98 | LT_LOAD_ONLY: "en,es,fr,de" 99 | ``` 100 | 101 | > **Note:** 102 | > - To add a new language, remove existing language codes from `LT_LOAD_ONLY` 103 | > - To update existing languages after changes to the core English list, include their language codes in `LT_LOAD_ONLY` 104 | 105 | ### Deployment 106 | 107 | Deployments to Prod consist of building and publishing the Profanity lib to NPM, and are automated through our Continuous Deployment workflow. 108 | 109 | #### 1. Create New Version 110 | 1. Checkout `main`. 111 | 2. Increment the version in package.json, using semantic versioning (e.g., `1.1.0`). 112 | 3. Perform benchmarking: 113 | 1. Run the script: `npm run benchmark`. 114 | 2. Record the results in [benchmark/results.md](./src/tools/benchmark/results.md), for the new version. 115 | 4. 
Rebuild package-lock, to pick up the new version number: `npm i --package-lock-only`. 116 | 5. Create local NPM package: `npm pack` 117 | - Examine generated tar file to ensure it looks healthy 118 | 6. Push changes: 119 | ``` 120 | git add . 121 | git commit -m "Bump version to 1.1.0" 122 | git push 123 | ``` 124 | 125 | #### 2. Verify Checks 126 | 1. Navigate to the [CI](https://github.com/2Toad/Profanity/actions/workflows/ci.yml) workflow. 127 | 2. Ensure the build checks for this push succeed. 128 | 129 | #### 3. Publish GitHub Release 130 | 1. Navigate to [Profanity's releases](https://github.com/2Toad/Profanity/releases). 131 | 2. Click "Draft a new release": 132 | - **Choose a tag**: enter version (e.g., `v1.1.0`) and click "Create new tag" 133 | - **Target**: `main` 134 | - **Previous tag**: `auto` 135 | - **Release title**: (e.g., `1.1.0`) 136 | - **Description**: click the "Generate release notes" 137 | - [x] **Set as the latest release** 138 | 3. Click "Publish release". 139 | 140 | > This will trigger the [CDP](https://github.com/2Toad/Profanity/actions/workflows/cdp.yml) workflow, which will build and deploy the package to NPM: https://www.npmjs.com/package/@2toad/profanity -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import globals from "globals"; 2 | import js from "@eslint/js"; 3 | import ts from "typescript-eslint"; 4 | import security from "eslint-plugin-security"; 5 | import prettier from "eslint-config-prettier"; 6 | 7 | export default [ 8 | // ESLint processes configurations in order, with the last setting taking precedence 9 | { languageOptions: { globals: { ...globals.node } } }, 10 | { ignores: [".husky", "dist", "node_modules", "tests", ".eslintcache"] }, 11 | js.configs.recommended, 12 | ...ts.configs.recommended, 13 | security.configs.recommended, 14 | { 15 | // These file-matching rules will be 
processed after the above configs 16 | files: ["**/*.{js,ts}"], 17 | }, 18 | prettier, // placed last to ensure Prettier formatting wins 19 | ]; 20 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@2toad/profanity", 3 | "version": "3.1.1", 4 | "description": "A multi-language profanity filter with full TypeScript support", 5 | "homepage": "https://github.com/2Toad/Profanity", 6 | "author": "2Toad", 7 | "license": "MIT", 8 | "engines": { 9 | "node": ">=12" 10 | }, 11 | "main": "dist/index.js", 12 | "types": "dist/index.d.ts", 13 | "scripts": { 14 | "clean": "npx rimraf dist", 15 | "build": "npm run clean && npx tsc", 16 | "local": "npm run clean && nodemon src/index.ts", 17 | "pretest": "npm run build", 18 | "test": "mocha -r ts-node/register tests/**/*.spec.ts", 19 | "test:watch": "npm run test -- --watch", 20 | "lint": "eslint . --cache", 21 | "lint:fix": "eslint . --fix", 22 | "format": "prettier . 
--write", 23 | "prepublishOnly": "npm run lint && npm test", 24 | "prepare": "husky", 25 | "translate": "docker-compose -f ./src/tools/translate/docker-compose.yml up -d && ts-node ./src/tools/translate/translate.ts && docker-compose -f ./src/tools/translate/docker-compose.yml down", 26 | "benchmark": "docker-compose -f ./src/tools/benchmark/docker-compose.yml up --build" 27 | }, 28 | "repository": { 29 | "type": "git", 30 | "url": "git+ssh://git@github.com/2Toad/Profanity.git" 31 | }, 32 | "keywords": [ 33 | "profanity", 34 | "profane", 35 | "obscenity", 36 | "obscene", 37 | "cussing", 38 | "curse", 39 | "cursing", 40 | "swearing", 41 | "swearwords", 42 | "swear-words", 43 | "vulgarity", 44 | "badwords", 45 | "bad-words", 46 | "badlanguage", 47 | "bad-language", 48 | "dirtywords", 49 | "dirty-words", 50 | "censor", 51 | "filter" 52 | ], 53 | "devDependencies": { 54 | "@eslint/js": "^9.9.1", 55 | "@types/benchmark": "^2.1.5", 56 | "@types/chai": "^4.3.19", 57 | "@types/eslint__js": "^8.42.3", 58 | "@types/eslint-config-prettier": "^6.11.3", 59 | "@types/eslint-plugin-security": "^3.0.0", 60 | "@types/mocha": "^10.0.7", 61 | "@types/node": "^22.5.2", 62 | "@typescript-eslint/eslint-plugin": "^8.4.0", 63 | "@typescript-eslint/parser": "^8.4.0", 64 | "axios": "^1.7.9", 65 | "benchmark": "^2.1.4", 66 | "chai": "^4.5.0", 67 | "eslint": "^9.9.1", 68 | "eslint-config-prettier": "^9.1.0", 69 | "eslint-plugin-prettier": "^5.2.1", 70 | "eslint-plugin-security": "^3.0.1", 71 | "globals": "^15.9.0", 72 | "husky": "^9.1.5", 73 | "lint-staged": "^15.2.10", 74 | "mocha": "^10.7.3", 75 | "nodemon": "^3.1.4", 76 | "prettier": "^3.3.3", 77 | "rimraf": "^6.0.1", 78 | "ts-node": "^10.9.2", 79 | "typescript": "^5.5.4", 80 | "typescript-eslint": "^8.4.0" 81 | }, 82 | "overrides": { 83 | "inflight": "^2.0.0", 84 | "glob": "^9.0.0", 85 | "rimraf": "^6.0.1" 86 | }, 87 | "lint-staged": { 88 | "*.ts": "eslint --cache --fix", 89 | "*": "prettier --write" 90 | } 91 | } 92 | 
-------------------------------------------------------------------------------- /src/data/index.ts: -------------------------------------------------------------------------------- 1 | export { profaneWords } from "./profane-words"; 2 | -------------------------------------------------------------------------------- /src/data/profane-words.ts: -------------------------------------------------------------------------------- 1 | // WARNING: this file contains profanity. The below list of profane words is necessary for this tool to function properly. 2 | // Do not read below this line if you do not wish to be exposed to lots of profane words 3 | 4 | export const profaneWords: Map = new Map([ 5 | [ 6 | "en", 7 | [ 8 | "4r5e", 9 | "5h1t", 10 | "5hit", 11 | "a55", 12 | "anal", 13 | "anus", 14 | "ar5e", 15 | "arrse", 16 | "arse", 17 | "arses", 18 | "ass", 19 | "ass-fucker", 20 | "asses", 21 | "assfucker", 22 | "assfukka", 23 | "asshole", 24 | "assholes", 25 | "asswhole", 26 | "a_s_s", 27 | "a$$", 28 | "as$", 29 | "a$s", 30 | "b!tch", 31 | "b00bs", 32 | "b17ch", 33 | "b1tch", 34 | "ballbag", 35 | "balls", 36 | "ballsack", 37 | "bastard", 38 | "beastial", 39 | "beastiality", 40 | "bellend", 41 | "bestial", 42 | "bestiality", 43 | "bi+ch", 44 | "biatch", 45 | "bitch", 46 | "bitchboy", 47 | "bitcher", 48 | "bitchers", 49 | "bitches", 50 | "bitchin", 51 | "bitching", 52 | "bloody", 53 | "blow job", 54 | "blowjob", 55 | "blowjobs", 56 | "boiolas", 57 | "bollock", 58 | "bollok", 59 | "boner", 60 | "boob", 61 | "boobs", 62 | "booobs", 63 | "boooobs", 64 | "booooobs", 65 | "booooooobs", 66 | "breasts", 67 | "buceta", 68 | "bugger", 69 | "bullshit", 70 | "bum", 71 | "butt", 72 | "butts", 73 | "butthole", 74 | "buttmuch", 75 | "buttplug", 76 | "c0ck", 77 | "c0cksucker", 78 | "carpet muncher", 79 | "cawk", 80 | "chink", 81 | "cipa", 82 | "cl1t", 83 | "clit", 84 | "clitoris", 85 | "clits", 86 | "cnut", 87 | "cock", 88 | "cock-sucker", 89 | "cockface", 90 | "cockhead", 91 | 
"cockmunch", 92 | "cockmuncher", 93 | "cocks", 94 | "cocksuck", 95 | "cocksucked", 96 | "cocksucker", 97 | "cocksucking", 98 | "cocksucks", 99 | "cocksuka", 100 | "cocksukka", 101 | "cok", 102 | "cokmuncher", 103 | "coksucka", 104 | "coon", 105 | "cox", 106 | "crap", 107 | "cum", 108 | "cummer", 109 | "cumming", 110 | "cums", 111 | "cumshot", 112 | "cunilingus", 113 | "cunillingus", 114 | "cunnilingus", 115 | "cunt", 116 | "cuntlick", 117 | "cuntlicker", 118 | "cuntlicking", 119 | "cunts", 120 | "cyalis", 121 | "cyberfuc", 122 | "cyberfuck", 123 | "cyberfucked", 124 | "cyberfucker", 125 | "cyberfuckers", 126 | "cyberfucking", 127 | "d1ck", 128 | "damn", 129 | "dick", 130 | "dickhead", 131 | "dildo", 132 | "dildos", 133 | "dink", 134 | "dinks", 135 | "dirsa", 136 | "dlck", 137 | "dog-fucker", 138 | "doggin", 139 | "dogging", 140 | "donkeyribber", 141 | "doosh", 142 | "duche", 143 | "dyke", 144 | "ejaculate", 145 | "ejaculated", 146 | "ejaculates", 147 | "ejaculating", 148 | "ejaculatings", 149 | "ejaculation", 150 | "ejakulate", 151 | "f u c k", 152 | "f u c k e r", 153 | "f4nny", 154 | "fag", 155 | "fagging", 156 | "faggitt", 157 | "faggot", 158 | "faggs", 159 | "fagot", 160 | "fagots", 161 | "fags", 162 | "fanny", 163 | "fannyflaps", 164 | "fannyfucker", 165 | "fanyy", 166 | "fatass", 167 | "fcuk", 168 | "fcuker", 169 | "fcuking", 170 | "feck", 171 | "fecker", 172 | "felching", 173 | "fellate", 174 | "fellatio", 175 | "fingerfuck", 176 | "fingerfucked", 177 | "fingerfucker", 178 | "fingerfuckers", 179 | "fingerfucking", 180 | "fingerfucks", 181 | "fistfuck", 182 | "fistfucked", 183 | "fistfucker", 184 | "fistfuckers", 185 | "fistfucking", 186 | "fistfuckings", 187 | "fistfucks", 188 | "flange", 189 | "fook", 190 | "fooker", 191 | "fuck", 192 | "fucka", 193 | "fucked", 194 | "fucker", 195 | "fuckers", 196 | "fuckhead", 197 | "fuckheads", 198 | "fuckin", 199 | "fucking", 200 | "fuckings", 201 | "fuckingshitmotherfucker", 202 | "fuckme", 203 | "fucks", 204 | 
"fuckwhit", 205 | "fuckwit", 206 | "fudge packer", 207 | "fudgepacker", 208 | "fuk", 209 | "fuker", 210 | "fukker", 211 | "fukkin", 212 | "fuks", 213 | "fukwhit", 214 | "fukwit", 215 | "fux", 216 | "fux0r", 217 | "f_u_c_k", 218 | "gangbang", 219 | "gangbanged", 220 | "gangbangs", 221 | "gaylord", 222 | "gaysex", 223 | "goatse", 224 | "god-dam", 225 | "god-damned", 226 | "goddamn", 227 | "goddamned", 228 | "hardcoresex", 229 | "headass", 230 | "hoar", 231 | "hoare", 232 | "hoer", 233 | "hoes", 234 | "homo", 235 | "hore", 236 | "horniest", 237 | "horny", 238 | "hotsex", 239 | "jack-off", 240 | "jackoff", 241 | "jap", 242 | "jerk-off", 243 | "jism", 244 | "jiz", 245 | "jizm", 246 | "jizz", 247 | "kawk", 248 | "knobead", 249 | "knobed", 250 | "knobend", 251 | "knobhead", 252 | "knobjocky", 253 | "knobjokey", 254 | "kock", 255 | "kondum", 256 | "kondums", 257 | "kum", 258 | "kummer", 259 | "kumming", 260 | "kums", 261 | "kunilingus", 262 | "l3i+ch", 263 | "l3itch", 264 | "labia", 265 | "lust", 266 | "lusting", 267 | "m0f0", 268 | "m0fo", 269 | "m45terbate", 270 | "ma5terb8", 271 | "ma5terbate", 272 | "masochist", 273 | "master-bate", 274 | "masterb8", 275 | "masterbat*", 276 | "masterbat3", 277 | "masterbate", 278 | "masterbation", 279 | "masterbations", 280 | "masturbate", 281 | "mo-fo", 282 | "mof0", 283 | "mofo", 284 | "mothafuck", 285 | "mothafucka", 286 | "mothafuckas", 287 | "mothafuckaz", 288 | "mothafucked", 289 | "mothafucker", 290 | "mothafuckers", 291 | "mothafuckin", 292 | "mothafucking", 293 | "mothafuckings", 294 | "mothafucks", 295 | "motherfuck", 296 | "motherfucked", 297 | "motherfucker", 298 | "motherfuckers", 299 | "motherfuckin", 300 | "motherfucking", 301 | "motherfuckings", 302 | "motherfuckka", 303 | "motherfucks", 304 | "muff", 305 | "muthafecker", 306 | "muthafuckker", 307 | "mutherfucker", 308 | "n1gga", 309 | "n1gger", 310 | "nazi", 311 | "nigg3r", 312 | "nigg4h", 313 | "nigga", 314 | "niggah", 315 | "niggas", 316 | "niggaz", 317 | "nigger", 
318 | "niggers", 319 | "nob", 320 | "nob jokey", 321 | "nobhead", 322 | "nobjocky", 323 | "nobjokey", 324 | "numbnuts", 325 | "nutsack", 326 | "orgasim", 327 | "orgasims", 328 | "orgasm", 329 | "orgasms", 330 | "p0rn", 331 | "pawn", 332 | "pecker", 333 | "penis", 334 | "penisfucker", 335 | "phonesex", 336 | "phuck", 337 | "phuk", 338 | "phuked", 339 | "phuking", 340 | "phukked", 341 | "phukking", 342 | "phuks", 343 | "phuq", 344 | "pigfucker", 345 | "pimpis", 346 | "piss", 347 | "pissed", 348 | "pisser", 349 | "pissers", 350 | "pisses", 351 | "pissflaps", 352 | "pissin", 353 | "pissing", 354 | "pissoff", 355 | "poop", 356 | "porn", 357 | "porno", 358 | "pornography", 359 | "pornos", 360 | "prick", 361 | "pricks", 362 | "pron", 363 | "pube", 364 | "pusse", 365 | "pussi", 366 | "pussies", 367 | "pussy", 368 | "pussys", 369 | "rectum", 370 | "retard", 371 | "rimjaw", 372 | "rimming", 373 | "s hit", 374 | "s.o.b.", 375 | "sadist", 376 | "schlong", 377 | "screwing", 378 | "scroat", 379 | "scrote", 380 | "scrotum", 381 | "semen", 382 | "sex", 383 | "sh!+", 384 | "sh!t", 385 | "sh1t", 386 | "shag", 387 | "shagger", 388 | "shaggin", 389 | "shagging", 390 | "shemale", 391 | "shi+", 392 | "shit", 393 | "shitdick", 394 | "shite", 395 | "shited", 396 | "shitey", 397 | "shitfuck", 398 | "shitfull", 399 | "shithead", 400 | "shiting", 401 | "shitings", 402 | "shits", 403 | "shitted", 404 | "shitter", 405 | "shitters", 406 | "shitting", 407 | "shittings", 408 | "shitty", 409 | "skank", 410 | "slut", 411 | "sluts", 412 | "smegma", 413 | "smut", 414 | "snatch", 415 | "son-of-a-bitch", 416 | "spac", 417 | "spunk", 418 | "s_h_i_t", 419 | "t1tt1e5", 420 | "t1tties", 421 | "teets", 422 | "teez", 423 | "testical", 424 | "testicle", 425 | "tit", 426 | "titfuck", 427 | "tits", 428 | "titt", 429 | "tittie5", 430 | "tittiefucker", 431 | "titties", 432 | "tittyfuck", 433 | "tittywank", 434 | "titwank", 435 | "tosser", 436 | "turd", 437 | "tw4t", 438 | "twat", 439 | "twathead", 440 | "twatty", 
441 | "twunt", 442 | "twunter", 443 | "v14gra", 444 | "v1gra", 445 | "vagina", 446 | "viagra", 447 | "vulva", 448 | "w00se", 449 | "wang", 450 | "wank", 451 | "wanker", 452 | "wanky", 453 | "whoar", 454 | "whore", 455 | "willies", 456 | "willy", 457 | ], 458 | ], 459 | [ 460 | "ar", 461 | [ 462 | "4r5e", 463 | "5h1t", 464 | "5hit", 465 | "a55", 466 | "anal", 467 | "anus", 468 | "ar5e", 469 | "الحصان", 470 | "المؤخرات", 471 | "مؤخرة", 472 | "الوغد", 473 | "الحمير", 474 | "assfukka", 475 | "أحمق", 476 | "حمقى", 477 | "الأحمق", 478 | "دولار", 479 | "كدولارات", 480 | "(ب)", 481 | "b00bs", 482 | "b17ch", 483 | "b1tch", 484 | "الكرة", 485 | "الكرات", 486 | "كرات", 487 | "الوحشية", 488 | "الجرس", 489 | "الأفضلية", 490 | "b+ch", 491 | "biatch", 492 | "الكلبة", 493 | "الكلبات", 494 | "العاهرات", 495 | "اللعنة", 496 | "blow job", 497 | "عمل فاسد", 498 | "أعمال تفجيرية", 499 | "boiolas", 500 | "هراء", 501 | "bollok", 502 | "العظام", 503 | "صدر", 504 | "الثدي", 505 | "البوق", 506 | "عصيان", 507 | "البوووب", 508 | "buceta", 509 | "حشرة", 510 | "bum", 511 | "buttplug", 512 | "c0ck", 513 | "c0cksucker", 514 | "carpet muncher", 515 | "مقهى", 516 | "cipa", 517 | "cl1t", 518 | "clit", 519 | "clitoris", 520 | "clits", 521 | "cnut", 522 | "قضيب", 523 | "وغد", 524 | "وجهك", 525 | "القضيب", 526 | "كوكر", 527 | "أيها الوغد", 528 | "سحقًا", 529 | "صراخ", 530 | "cocksuka", 531 | "cocksukka", 532 | "كوك", 533 | "cokmuncher", 534 | "كوكاكوتا", 535 | "coon", 536 | "كوكس", 537 | "حثالة", 538 | "cummer", 539 | "الطهي", 540 | "cumshot", 541 | "cunilingus", 542 | "cunillingus", 543 | "cunnilingus", 544 | "cuntlick", 545 | "النقر", 546 | "العبث", 547 | "الحمقى", 548 | "cyalis", 549 | "الإنترنت(ج)", 550 | "الفضاء الإلكتروني", 551 | "الملاعين السيبرانيين", 552 | "تسلل عبر الإنترنت", 553 | "d1ck", 554 | "dildo", 555 | "dildos", 556 | "دينك", 557 | "dirsa", 558 | "♪", 559 | "الكلب اللعين", 560 | "الكلب", 561 | "حبوب منع الحمل", 562 | "doosh", 563 | "duche", 564 | "dyke", 565 | "الإيجاز", 566 | 
"ejaculated", 567 | "الجزيئات", 568 | "ejaculating", 569 | "الهرجات", 570 | "القذف", 571 | "ejakulate", 572 | "f u c k", 573 | "f u c k e r", 574 | "f4nny", 575 | "fag", 576 | "زائفة", 577 | "الشاذ", 578 | "الشواذ", 579 | "fagot", 580 | "fagots", 581 | "الكلاب", 582 | "فاني", 583 | "fannyflaps", 584 | "الملاعين", 585 | "الخيال", 586 | "الدهون", 587 | "fcuk", 588 | "متعهد", 589 | "الحضانات", 590 | "feck", 591 | "فاسد", 592 | "المهرجان", 593 | "سقط", 594 | "fellatio", 595 | "قبضة لعينة", 596 | "flange", 597 | "fook", 598 | "fooker", 599 | "مارس الجنس مع", 600 | "اللعين", 601 | "سخيف", 602 | "أيها اللعين", 603 | "fudge packer", 604 | "حمأة", 605 | "فوك", 606 | "فوكر", 607 | "fukkin", 608 | "صمامات", 609 | "فوكوهي", 610 | "فوكويت", 611 | "الثعلب", 612 | "fux0r", 613 | "gangbang", 614 | "العصابات", 615 | "مثلي الجنس", 616 | "الماعز", 617 | "يا إلهي", 618 | "صلبة", 619 | "الرأس", 620 | "hoar", 621 | "hoare", 622 | "hoer", 623 | "hoes", 624 | "homo", 625 | "hore", 626 | "الأشقر", 627 | "مثارة", 628 | "مثير", 629 | "(جاك أوف)", 630 | "(جاك)", 631 | "jap", 632 | "jiz", 633 | "jizm", 634 | "jizz", 635 | "kawk", 636 | "knobead", 637 | "حبس", 638 | "knobend", 639 | "knobhead", 640 | "knobjocky", 641 | "knobjokey", 642 | "kondum", 643 | "الكوندوم", 644 | "kum", 645 | "kummer", 646 | "الكم", 647 | "kums", 648 | "kunilingus", 649 | "l3i+ch", 650 | "l3itch", 651 | "المختبرات", 652 | "شهوة", 653 | "الإغراء", 654 | "m0f", 655 | "m0fo", 656 | "m45terbate", 657 | "ma5terb8", 658 | "الماجستير", 659 | "مسوخ", 660 | "درجة الماجستير", 661 | "الماجستير(ب)", 662 | "المناقشة العامة*", 663 | "المناقشة الرئيسية 3", 664 | "سيدي", 665 | "التحفة", 666 | "الاستمناء", 667 | "mo-fo", 668 | "mof0", 669 | "mofo", 670 | "موثا", 671 | "mothafucka", 672 | "mothafuckas", 673 | "mothafuckaz", 674 | "مضاجعة", 675 | "mothafuckin", 676 | "الأوغاد", 677 | "لعين", 678 | "muff", 679 | "muthafecker", 680 | "ملاعين", 681 | "n1gga", 682 | "n1gger", 683 | "نازي", 684 | "nigg3r", 685 | "nigg4h", 686 | "زنجي", 687 | 
"الزنوج", 688 | "nob", 689 | "nob jokey", 690 | "nobhead", 691 | "نابوكي", 692 | "nobjokey", 693 | "الراهبات", 694 | "مجنون", 695 | "النشوة", 696 | "الأورام", 697 | "p0rn", 698 | "الرهن", 699 | "بيكر", 700 | "الهواتف الجنسية", 701 | "phuck", 702 | "phuk", 703 | "رشاش", 704 | "رهبان", 705 | "phukked", 706 | "الفوكة", 707 | "phuks", 708 | "phuq", 709 | "القواد", 710 | "بول", 711 | "سكران غاضب", 712 | "البول", 713 | "تبول", 714 | "poop", 715 | "porn", 716 | "porno", 717 | "المواد الإباحية", 718 | "pornos", 719 | "الخوخ", 720 | "pron", 721 | "حانة", 722 | "نقية", 723 | "pussi", 724 | "الجبناء", 725 | "جبان", 726 | "إعادة التشكيل", 727 | "المتخلفون", 728 | "rimjaw", 729 | "قذف", 730 | "s hit", 731 | "s.o.b.", 732 | "سادي", 733 | "schlong", 734 | "scroat", 735 | "scrote", 736 | "scrotum", 737 | "semen", 738 | "الجنس", 739 | "ش", 740 | "sh1t", 741 | "shag", 742 | "شجر", 743 | "shaggin", 744 | "shemale", 745 | "shi+", 746 | "القرف", 747 | "skank", 748 | "عاهرة", 749 | "عاهرات", 750 | "smegma", 751 | "smut", 752 | "snatch", 753 | "ابن العاهرة", 754 | "spac", 755 | "سافل", 756 | "t1tt1e5", 757 | "t1tties", 758 | "teets", 759 | "teez", 760 | "اختباري", 761 | "خصية", 762 | "tit", 763 | "أيها الحقير", 764 | "tittie5", 765 | "tittywank", 766 | "titwank", 767 | "toser", 768 | "tw4t", 769 | "twat", 770 | "twathead", 771 | "twatty", 772 | "توان", 773 | "v14gra", 774 | "v1gra", 775 | "vagina", 776 | "viagra", 777 | "vulva", 778 | "w00se", 779 | "وانغ", 780 | "wank", 781 | "wanker", 782 | "wanky", 783 | "الويلات", 784 | "ويلي", 785 | ], 786 | ], 787 | [ 788 | "de", 789 | [ 790 | "4r5e", 791 | "5h1t", 792 | "5hit", 793 | "a55", 794 | "anal", 795 | "anus", 796 | "ar5e", 797 | "arrse", 798 | "arse", 799 | "arses", 800 | "arsch", 801 | "arschficker", 802 | "esel", 803 | "assfukka", 804 | "arschloch", 805 | "arschlöcher", 806 | "arschganz", 807 | "a$$", 808 | "in %", 809 | "a$s", 810 | "b.tch", 811 | "b00bs", 812 | "b17ch", 813 | "b1tch", 814 | "kugelbeutel", 815 | "eier", 816 | "bälle", 
817 | "bastard", 818 | "beastial", 819 | "beastialität", 820 | "glocke", 821 | "bestial", 822 | "bestialität", 823 | "bi+ch", 824 | "biatch", 825 | "schlampe", 826 | "hündchen", 827 | "metzger", 828 | "schlampen", 829 | "bitchin", 830 | "bitching", 831 | "verdammt", 832 | "blow job", 833 | "blasen", 834 | "boiolas", 835 | "bollock", 836 | "bollok", 837 | "knochen", 838 | "titten", 839 | "brüste", 840 | "buhn", 841 | "boooobs", 842 | "boooooobs", 843 | "buceta", 844 | "fehler", 845 | "schwachsinn", 846 | "bum", 847 | "hintern", 848 | "buttmuch", 849 | "buttplug", 850 | "c0ck", 851 | "c0cksucker", 852 | "carpet muncher", 853 | "kacke", 854 | "chink", 855 | "cipa", 856 | "cl1t", 857 | "klitoris", 858 | "cnut", 859 | "schwanz", 860 | "schwanzsauger", 861 | "schwanzkopf", 862 | "schwanzlutscher", 863 | "hahnmacher", 864 | "schwänze", 865 | "kokos", 866 | "kokmuncher", 867 | "koksucka", 868 | "coon", 869 | "cox", 870 | "mist", 871 | "cum", 872 | "kümmel", 873 | "cumming", 874 | "cums", 875 | "cumshot", 876 | "cunilingus", 877 | "cunillingus", 878 | "cunnilingus", 879 | "fotze", 880 | "cuntlicker", 881 | "cuntlicking", 882 | "stiche", 883 | "cyalis", 884 | "cyberfuc", 885 | "cyberfuck", 886 | "cyberfickt", 887 | "cyberfucker", 888 | "cyberficker", 889 | "cyberficken", 890 | "d1ck", 891 | "dildo", 892 | "dildos", 893 | "dink", 894 | "dinks", 895 | "dirsa", 896 | "dlck", 897 | "hundeficker", 898 | "doggin", 899 | "hunde", 900 | "eselibber", 901 | "doosh", 902 | "duche", 903 | "dyke", 904 | "ejakulat", 905 | "ejakuliert", 906 | "ejaculate", 907 | "ejakulation", 908 | "f u c k", 909 | "f u c k e r", 910 | "f4nny", 911 | "schwuchtel", 912 | "schwuchteln", 913 | "fagot", 914 | "fanny", 915 | "fannyflaps", 916 | "fannyfucker", 917 | "phantasievoll", 918 | "fettass", 919 | "fcuk", 920 | "fcuker", 921 | "fcuking", 922 | "feck", 923 | "fecker", 924 | "felching", 925 | "flat", 926 | "fellatio", 927 | "fingerfick", 928 | "fingerfickt", 929 | "fingerficker", 930 | "fingerficken", 931 
| "fistfuck", 932 | "fistfickt", 933 | "fistfucker", 934 | "fistficken", 935 | "scheiße", 936 | "flansch", 937 | "fook", 938 | "fooker", 939 | "fick", 940 | "fucka", 941 | "gefickt", 942 | "wichser", 943 | "scheißkerle", 944 | "scheißkerl", 945 | "fick mich", 946 | "ficks", 947 | "fuckwit", 948 | "fudge packer", 949 | "fudgepacker", 950 | "fuk", 951 | "fuker", 952 | "fukker", 953 | "fokkin", 954 | "fuks", 955 | "fukwhit", 956 | "fukwit", 957 | "fux", 958 | "fux0r", 959 | "gangbang", 960 | "gangbanged", 961 | "gangbangs", 962 | "gaylord", 963 | "gaysex", 964 | "ziegen", 965 | "gott-dam", 966 | "hardcoresex", 967 | "headass", 968 | "hor", 969 | "hoare", 970 | "hoer", 971 | "huhn", 972 | "homo", 973 | "hore", 974 | "horniest", 975 | "geil", 976 | "hotsex", 977 | "ausleger", 978 | "knüppel", 979 | "jap", 980 | "idiot", 981 | "jisma", 982 | "jiz", 983 | "jizm", 984 | "jizz", 985 | "kawk", 986 | "knospead", 987 | "geknallt", 988 | "knaufende", 989 | "knopfkopf", 990 | "knospen", 991 | "knowjokey", 992 | "kock", 993 | "kondum", 994 | "kum", 995 | "kummer", 996 | "kumming", 997 | "kums", 998 | "kunilingus", 999 | "l3i+ch", 1000 | "l3itch", 1001 | "labia", 1002 | "lust", 1003 | "lustig", 1004 | "m0f0", 1005 | "m0fo", 1006 | "m45terbat", 1007 | "ma5terb8", 1008 | "ma5terbat", 1009 | "masochist", 1010 | "master-bate", 1011 | "masterb8", 1012 | "masterbat*", 1013 | "masterbat3", 1014 | "masterbat", 1015 | "masterbation", 1016 | "masterbationen", 1017 | "masturbieren", 1018 | "mo-fo", 1019 | "mof0", 1020 | "mofo", 1021 | "mothafuck", 1022 | "mothafucka", 1023 | "mothafuckas", 1024 | "mothafuckaz", 1025 | "mottafucked", 1026 | "mothafucker", 1027 | "mothafuckin", 1028 | "mothafucking", 1029 | "mothafuckings", 1030 | "mothafucks", 1031 | "verfickt", 1032 | "muff", 1033 | "muthafecker", 1034 | "muthafuckker", 1035 | "mutherfucker", 1036 | "n1gga", 1037 | "n1gger", 1038 | "nazi", 1039 | "nigg3r", 1040 | "nigg4h", 1041 | "nigga", 1042 | "niggas", 1043 | "niggaz", 1044 | "nigger", 
1045 | "nob", 1046 | "nob jokey", 1047 | "nobhead", 1048 | "nobjocky", 1049 | "nobjokey", 1050 | "nüsse", 1051 | "nussack", 1052 | "orgasim", 1053 | "orgasime", 1054 | "orgasmus", 1055 | "orgasmen", 1056 | "p0rn", 1057 | "pfoten", 1058 | "pecker", 1059 | "penis", 1060 | "penisfucker", 1061 | "telefonsex", 1062 | "phuck", 1063 | "phuk", 1064 | "phuking", 1065 | "phukked", 1066 | "phukking", 1067 | "phuks", 1068 | "phuq", 1069 | "schweinefucker", 1070 | "pimpis", 1071 | "pisse", 1072 | "pissed", 1073 | "pisser", 1074 | "pissflaps", 1075 | "pissin", 1076 | "pissen", 1077 | "pisseff", 1078 | "poop", 1079 | "porno", 1080 | "pornografie", 1081 | "pornos", 1082 | "pricks", 1083 | "pron", 1084 | "pube", 1085 | "pusse", 1086 | "pussi", 1087 | "muschis", 1088 | "muschi", 1089 | "pussys", 1090 | "rektum", 1091 | "verzögert", 1092 | "rimjaw", 1093 | "felgen", 1094 | "s hit", 1095 | "s.o.b.", 1096 | "sadist", 1097 | "schlong", 1098 | "schrauben", 1099 | "skroat", 1100 | "skrote", 1101 | "skrotum", 1102 | "samen", 1103 | "geschlecht", 1104 | "sh!+", 1105 | "sh!t", 1106 | "sh1t", 1107 | "shag", 1108 | "shagger", 1109 | "shaggin", 1110 | "transen", 1111 | "shi+", 1112 | "shit", 1113 | "shited", 1114 | "schlitten", 1115 | "scheißkopf", 1116 | "shitter", 1117 | "splitter", 1118 | "scherben", 1119 | "shank", 1120 | "smegma", 1121 | "smut", 1122 | "snatch", 1123 | "spac", 1124 | "spunk", 1125 | "t1tt1e5", 1126 | "t1tties", 1127 | "teets", 1128 | "teez", 1129 | "hoden", 1130 | "tit", 1131 | "titfuck", 1132 | "titt", 1133 | "tittie5", 1134 | "tittiefucker", 1135 | "tittyfuck", 1136 | "tittywank", 1137 | "titwank", 1138 | "tosser", 1139 | "dord", 1140 | "tw4t", 1141 | "twat", 1142 | "twathead", 1143 | "twatty", 1144 | "twunt", 1145 | "twunter", 1146 | "v14gra", 1147 | "v1gra", 1148 | "vagina", 1149 | "viagra", 1150 | "vulva", 1151 | "w00se", 1152 | "wang", 1153 | "wank", 1154 | "wahnsinn", 1155 | "hure", 1156 | "willies", 1157 | "willy", 1158 | ], 1159 | ], 1160 | [ 1161 | "es", 1162 | [ 
1163 | "4r5e", 1164 | "5h1t", 1165 | "5hit", 1166 | "a55", 1167 | "anal", 1168 | "aus", 1169 | "ar5e", 1170 | "culo", 1171 | "culos", 1172 | "culo de mierda", 1173 | "idiota", 1174 | "assfukka", 1175 | "imbécil", 1176 | "idiotas", 1177 | "culo completo", 1178 | "a dólares", 1179 | "a)", 1180 | "b!tch", 1181 | "b00bs", 1182 | "b17ch", 1183 | "b1tch", 1184 | "bolsa de bolas", 1185 | "bolas", 1186 | "bastardo", 1187 | "bestia", 1188 | "bellend", 1189 | "bestial", 1190 | "bestialidad", 1191 | "bi+ch", 1192 | "biatch", 1193 | "perra", 1194 | "puta", 1195 | "putas", 1196 | "perras", 1197 | "¡maldita sea", 1198 | "blow job", 1199 | "mamada", 1200 | "mamadas", 1201 | "boiolas", 1202 | "bollock", 1203 | "bollok", 1204 | "hueso", 1205 | "tetas", 1206 | "booobs", 1207 | "boooobs", 1208 | "booooobs", 1209 | "booooooobs", 1210 | "senos", 1211 | "buceta", 1212 | "negro", 1213 | "mierda", 1214 | "bum", 1215 | "butt", 1216 | "buttmuch", 1217 | "buttplug", 1218 | "c0ck", 1219 | "c0cksucker", 1220 | "carpet muncher", 1221 | "cawk", 1222 | "chink", 1223 | "cipa", 1224 | "cl1t", 1225 | "entendido", 1226 | "clítoris", 1227 | "clits", 1228 | "cnut", 1229 | "polla", 1230 | "polla-sucker", 1231 | "masturbación", 1232 | "pene", 1233 | "gallos", 1234 | "pollas", 1235 | "polla grande", 1236 | "pollasucked", 1237 | "chupavergas", 1238 | "chupa pollas", 1239 | "penes negros grandes", 1240 | "cocksuka", 1241 | "cocksukka", 1242 | "cok", 1243 | "cokmuncher", 1244 | "coksucka", 1245 | "coon", 1246 | "cox", 1247 | "cum", 1248 | "cummer", 1249 | "colibrí", 1250 | "semen", 1251 | "corrida", 1252 | "cunilingus", 1253 | "cunillingus", 1254 | "cunnilingus", 1255 | "cuñada", 1256 | "cuntlick", 1257 | "cuntlicker", 1258 | "cuntlicking", 1259 | "cuñas", 1260 | "cyalis", 1261 | "cyberfuc", 1262 | "ciberpolvo", 1263 | "cibernético", 1264 | "¡mierda", 1265 | "los ciberdelincuentes", 1266 | "ciberdelincuente", 1267 | "d1ck", 1268 | "maldita sea", 1269 | "dildo", 1270 | "consoladores", 1271 | "dink", 1272 | 
"dinks", 1273 | "dirsa", 1274 | "♪", 1275 | "perro mierda", 1276 | "perro", 1277 | "donkeyribber", 1278 | "doosh", 1279 | "duche", 1280 | "dyke", 1281 | "eyaculado", 1282 | "eyaculada", 1283 | "eyaculados", 1284 | "eyaculando", 1285 | "eyaculaciones", 1286 | "eyaculación", 1287 | "eyakulate", 1288 | "f u c k", 1289 | "f u c k e r", 1290 | "f4nny", 1291 | "maricón", 1292 | "maricas", 1293 | "faggit", 1294 | "faggs", 1295 | "fanny", 1296 | "fannyflaps", 1297 | "fannyfucker", 1298 | "fanyy", 1299 | "grasa", 1300 | "fcuk", 1301 | "fcuker", 1302 | "fcuting", 1303 | "feck", 1304 | "fecker", 1305 | "felching", 1306 | "caído", 1307 | "♪♪", 1308 | "dedo", 1309 | "¡dedo", 1310 | "dedos de mierda", 1311 | "dedo de mierda", 1312 | "dedos", 1313 | "fistfuck", 1314 | "fistfucking", 1315 | "fistfucker", 1316 | "fistfuckers", 1317 | "fistfuckings", 1318 | "fistfucks", 1319 | "brida", 1320 | "fook", 1321 | "fooker", 1322 | "follada", 1323 | "hijo de puta", 1324 | "mierdas", 1325 | "puta mierda", 1326 | "mierda mierda mierda madre mierda mierda mierda", 1327 | "maldición", 1328 | "fudge packer", 1329 | "fudgepacker", 1330 | "fuk", 1331 | "fuker", 1332 | "fukker", 1333 | "fukkin", 1334 | "fusibles", 1335 | "fukwhit", 1336 | "fukwit", 1337 | "fux", 1338 | "fux0r", 1339 | "gangbang", 1340 | "gangbanged", 1341 | "gangbangs", 1342 | "gaylord", 1343 | "gaysex", 1344 | "cabra", 1345 | "maldito", 1346 | "hardcoresex", 1347 | "headass", 1348 | "hoar", 1349 | "hoare", 1350 | "hoer", 1351 | "hoes", 1352 | "homo", 1353 | "hore", 1354 | "más caliente", 1355 | "caliente", 1356 | "hotsex", 1357 | "jack-off", 1358 | "jackoff", 1359 | "jap", 1360 | "jism", 1361 | "jiz", 1362 | "jizm", 1363 | "jizz", 1364 | "kawk", 1365 | "kno", 1366 | "knobed", 1367 | "knobend", 1368 | "knobhead", 1369 | "knobjocky", 1370 | "knobjokey", 1371 | "kock", 1372 | "kondum", 1373 | "kondums", 1374 | "kum", 1375 | "kummer", 1376 | "kumming", 1377 | "kums", 1378 | "kunilingus", 1379 | "l3i+ch", 1380 | "l3itch", 1381 | 
"labia", 1382 | "lujuria", 1383 | "m0f0", 1384 | "m0fo", 1385 | "m45terbate", 1386 | "ma5terb8", 1387 | "ma5terbate", 1388 | "masoquista", 1389 | "master-bate", 1390 | "masterb8", 1391 | "masterbat*", 1392 | "masterbat3", 1393 | "masterbate", 1394 | "masterbation", 1395 | "masterbations", 1396 | "mo-fo", 1397 | "mof0", 1398 | "mofo", 1399 | "mothafuck", 1400 | "mothafucka", 1401 | "mothafuckas", 1402 | "mothafuckaz", 1403 | "mothafucking", 1404 | "mothafucker", 1405 | "bigotes", 1406 | "mothafuckin", 1407 | "mothafucks", 1408 | "hijos de puta", 1409 | "madre mierda", 1410 | "muff", 1411 | "muthafecker", 1412 | "muthafuckker", 1413 | "mutherfucker", 1414 | "n1gga", 1415 | "n1gger", 1416 | "nazi", 1417 | "nigg3r", 1418 | "nigg4h", 1419 | "niggah", 1420 | "niggas", 1421 | "niggaz", 1422 | "negros", 1423 | "nob", 1424 | "nob jokey", 1425 | "nobhead", 1426 | "nobjocky", 1427 | "nobjokey", 1428 | "nueces", 1429 | "orgasim", 1430 | "orgasims", 1431 | "orgasmo", 1432 | "orgasmos", 1433 | "p0rn", 1434 | "peón", 1435 | "pecker", 1436 | "chupa penes", 1437 | "teléfonosex", 1438 | "phuk", 1439 | "phuked", 1440 | "phuking", 1441 | "phukked", 1442 | "phukking", 1443 | "phuks", 1444 | "phuq", 1445 | "cabrón", 1446 | "chulos", 1447 | "orinar", 1448 | "enojada", 1449 | "meandos", 1450 | "meadas", 1451 | "pispa", 1452 | "orina", 1453 | "meando", 1454 | "poop", 1455 | "porno", 1456 | "pornografía", 1457 | "pornos", 1458 | "pron", 1459 | "pube", 1460 | "pusse", 1461 | "pussi", 1462 | "pussies", 1463 | "vagina", 1464 | "vaginas", 1465 | "recto", 1466 | "retardado", 1467 | "rimjaw", 1468 | "rimming", 1469 | "s hit", 1470 | "s.o.b.", 1471 | "sadista", 1472 | "schlong", 1473 | "tornillo", 1474 | "scroat", 1475 | "scrote", 1476 | "escroto", 1477 | "sexo", 1478 | "sh!+", 1479 | "sh", 1480 | "sh1t", 1481 | "shag", 1482 | "shagger", 1483 | "shaggin", 1484 | "temblando", 1485 | "transexual", 1486 | "shi+", 1487 | "skank", 1488 | "smegma", 1489 | "smut", 1490 | "snatch", 1491 | "spac", 1492 | 
"sunk", 1493 | "t1t1e5", 1494 | "t1tties", 1495 | "teets", 1496 | "teez", 1497 | "testículo", 1498 | "titular", 1499 | "titfuck", 1500 | "tit", 1501 | "tittie5", 1502 | "tittie", 1503 | "tittyfuck", 1504 | "tittywank", 1505 | "titwank", 1506 | "tosser", 1507 | "turd", 1508 | "tw4t", 1509 | "twat", 1510 | "twathead", 1511 | "twatty", 1512 | "twunt", 1513 | "twunter", 1514 | "v14gra", 1515 | "v1gra", 1516 | "viagra", 1517 | "vulva", 1518 | "w00se", 1519 | "wang", 1520 | "wank", 1521 | "wanker", 1522 | "wanky", 1523 | "whoar", 1524 | "willies", 1525 | "willy", 1526 | ], 1527 | ], 1528 | [ 1529 | "fr", 1530 | [ 1531 | "4r5e", 1532 | "5h1t", 1533 | "5 heures", 1534 | "a55", 1535 | "anal", 1536 | "anus", 1537 | "ar5e", 1538 | "arrs", 1539 | "cul", 1540 | "ânes", 1541 | "enculé de cul", 1542 | "culs", 1543 | "enfoiré", 1544 | "assfukka", 1545 | "trou du cul", 1546 | "gros cul", 1547 | "a$", 1548 | "en dollars", 1549 | "c'est ça", 1550 | "b00bs", 1551 | "b17ch", 1552 | "b1tch", 1553 | "sac à bille", 1554 | "boules", 1555 | "sac à billes", 1556 | "bâtard", 1557 | "bête", 1558 | "bêteté", 1559 | "clocher", 1560 | "bestial", 1561 | "bestialité", 1562 | "bi+ch", 1563 | "biatch", 1564 | "salope", 1565 | "les salopes", 1566 | "salopes", 1567 | "sanglant", 1568 | "blow job", 1569 | "pipe", 1570 | "pipes", 1571 | "boiolas", 1572 | "cliquetis", 1573 | "bollok", 1574 | "osseux", 1575 | "sein", 1576 | "seins", 1577 | "bouchons", 1578 | "les nichons", 1579 | "boucliers", 1580 | "booooooobs", 1581 | "poitrines", 1582 | "buceta", 1583 | "enculé", 1584 | "des conneries", 1585 | "bum", 1586 | "fesses", 1587 | "mégots", 1588 | "gros plan", 1589 | "plug anal", 1590 | "c0ck", 1591 | "c0cksucker", 1592 | "carpet muncher", 1593 | "cawk", 1594 | "chine", 1595 | "cipa", 1596 | "cl1t", 1597 | "clito", 1598 | "clitoris", 1599 | "clits", 1600 | "nut", 1601 | "coq", 1602 | "suceur de bite", 1603 | "visage de coq", 1604 | "tête de coq", 1605 | "munche de coq", 1606 | "coq-muncher", 1607 | "coqs", 
1608 | "sucre de bite", 1609 | "bites sucées", 1610 | "coqsucks", 1611 | "cocksuka", 1612 | "cocksukka", 1613 | "cok", 1614 | "cokmuncher", 1615 | "coksucka", 1616 | "coon", 1617 | "cox", 1618 | "sperme", 1619 | "pomme", 1620 | "cumming", 1621 | "spermes", 1622 | "éjaculation", 1623 | "cunilingus", 1624 | "cunillingus", 1625 | "cunnilingus", 1626 | "pâte", 1627 | "lèche-cul", 1628 | "chattes", 1629 | "cyalis", 1630 | "cyberfuc", 1631 | "cyberfuck", 1632 | "cyberfucked", 1633 | "les cyber-fuckers", 1634 | "cyberdépannage", 1635 | "d1ck", 1636 | "putain", 1637 | "bite", 1638 | "tête de bite", 1639 | "gode", 1640 | "godes", 1641 | "évier", 1642 | "éviers", 1643 | "dirsa", 1644 | "dlck", 1645 | "enculé de chien", 1646 | "chienne", 1647 | "levrette", 1648 | "d'ânes", 1649 | "doux", 1650 | "duché", 1651 | "dyke", 1652 | "éjaculations", 1653 | "éjaculate", 1654 | "f u c k", 1655 | "f u c k e r", 1656 | "pour", 1657 | "pédé", 1658 | "signalisation", 1659 | "foggitt", 1660 | "peaux", 1661 | "fagot", 1662 | "fagots", 1663 | "pédés", 1664 | "fanny", 1665 | "les fannyflaps", 1666 | "fany", 1667 | "gras", 1668 | "fcuk", 1669 | "fcuker", 1670 | "fcuking", 1671 | "feu", 1672 | "fecker", 1673 | "feuillage", 1674 | "infesté", 1675 | "taux de chute", 1676 | "culotte", 1677 | "avec les doigts", 1678 | "enfoirés", 1679 | "fucking des doigts", 1680 | "bouchées de doigts", 1681 | "fuck", 1682 | "farcis", 1683 | "frappe de poing", 1684 | "frappes de poing", 1685 | "fusils", 1686 | "bride", 1687 | "fook", 1688 | "fooker", 1689 | "merde", 1690 | "baisée", 1691 | "enculés", 1692 | "espèce d'enfoiré", 1693 | "crétins", 1694 | "putains", 1695 | "fils de pute", 1696 | "baise-moi", 1697 | "connard", 1698 | "fudge packer", 1699 | "fudgepacker", 1700 | "fuk", 1701 | "fuker", 1702 | "fukker", 1703 | "fukkin", 1704 | "fuks", 1705 | "fukwhit", 1706 | "fukwit", 1707 | "fux", 1708 | "fox0r", 1709 | "un gangbang", 1710 | "bandes", 1711 | "des gangs", 1712 | "gaylord", 1713 | "gaysexe", 1714 | 
"caprins", 1715 | "seigneur", 1716 | "dieu maudit", 1717 | "nom de dieu", 1718 | "hardcoresexe", 1719 | "tête de cul", 1720 | "arnaque", 1721 | "hoare", 1722 | "attaque", 1723 | "sabots", 1724 | "homo", 1725 | "thorax", 1726 | "les plus excités", 1727 | "excité", 1728 | "sexe chaud", 1729 | "décollage", 1730 | "jap", 1731 | "branleur", 1732 | "le jisme", 1733 | "jiz", 1734 | "jizm", 1735 | "jizz", 1736 | "kawk", 1737 | "boutons", 1738 | "boutonné", 1739 | "bouton", 1740 | "tête de bouton", 1741 | "jocky", 1742 | "boutonjokey", 1743 | "kock", 1744 | "kondum", 1745 | "kondums", 1746 | "kum", 1747 | "kummer", 1748 | "le kumming", 1749 | "des kums", 1750 | "kunilingus", 1751 | "l3i+ch", 1752 | "3 points", 1753 | "labia", 1754 | "luxure", 1755 | "lustre", 1756 | "m0f0", 1757 | "m0fo", 1758 | "m45terbate", 1759 | "m5terb8", 1760 | "ma5terbate", 1761 | "masochiste", 1762 | "maître-bateau", 1763 | "maîtreb8", 1764 | "maître-bat*", 1765 | "maîtrise3", 1766 | "maître", 1767 | "masterbation", 1768 | "masterbations", 1769 | "masturbe", 1770 | "les", 1771 | "mof0", 1772 | "mofo", 1773 | "mothafuck", 1774 | "mothafucka", 1775 | "mothafuckas", 1776 | "mothafuckaz", 1777 | "mothafucked", 1778 | "mothafucker", 1779 | "mothafucks", 1780 | "mothafuckin", 1781 | "mothafucking", 1782 | "mothafuckings", 1783 | "merdeux", 1784 | "muff", 1785 | "le muthafecker", 1786 | "muthafuckker", 1787 | "n1gga", 1788 | "n1gger", 1789 | "nazi", 1790 | "4 h", 1791 | "negga", 1792 | "niggué", 1793 | "niggas", 1794 | "niggaz", 1795 | "nègre", 1796 | "nègres", 1797 | "nob", 1798 | "nob jokey", 1799 | "tête de noeud", 1800 | "nobjocky", 1801 | "nobjokey", 1802 | "engourdissements", 1803 | "sac à noix", 1804 | "orgasim", 1805 | "orgasims", 1806 | "orgasme", 1807 | "orgasmes", 1808 | "p0rn", 1809 | "sur gage", 1810 | "pic", 1811 | "pénis", 1812 | "suceur de pénis", 1813 | "téléphonex", 1814 | "purée", 1815 | "phuk", 1816 | "ouvrés", 1817 | "le phuking", 1818 | "phukked", 1819 | "ouvrage", 1820 | "les phuks", 
1821 | "phuq", 1822 | "maquereau", 1823 | "pisse", 1824 | "en colère", 1825 | "pisser", 1826 | "les pisseurs", 1827 | "pousses de pisse", 1828 | "caca", 1829 | "porno", 1830 | "pornographie mettant en scène des enfants", 1831 | "pornos", 1832 | "pique", 1833 | "les briques", 1834 | "tablier", 1835 | "pube", 1836 | "pusse", 1837 | "pousse", 1838 | "chatte", 1839 | "rectum", 1840 | "retard", 1841 | "rimjaw", 1842 | "jante", 1843 | "s hit", 1844 | "s.o.b.", 1845 | "sadique", 1846 | "schlong", 1847 | "vissage", 1848 | "scroat", 1849 | "scrotte", 1850 | "scrotum", 1851 | "sexe", 1852 | "sh!+", 1853 | "- oui", 1854 | "s'il vous plaît", 1855 | "shagger", 1856 | "shaggin", 1857 | "sciages", 1858 | "transexuelle", 1859 | "shi+", 1860 | "merdique", 1861 | "crétin", 1862 | "des merdes", 1863 | "sankk", 1864 | "smegma", 1865 | "smut", 1866 | "snatch", 1867 | "spac", 1868 | "croûte", 1869 | "t1tt1e5", 1870 | "t1tiétés", 1871 | "tétons", 1872 | "et toi", 1873 | "testique", 1874 | "testicule", 1875 | "tit", 1876 | "titfuck", 1877 | "titre", 1878 | "titre5", 1879 | "tittyfuck", 1880 | "tittywank", 1881 | "titwank", 1882 | "lancer", 1883 | "tuyau", 1884 | "tw4t", 1885 | "twat", 1886 | "twathead", 1887 | "twatty", 1888 | "twunt", 1889 | "détecteur", 1890 | "v14gra", 1891 | "v1gra", 1892 | "vagin", 1893 | "viagra", 1894 | "vulve", 1895 | "w00se", 1896 | "wang", 1897 | "gland", 1898 | "branlé", 1899 | "souris", 1900 | "pute", 1901 | "walies", 1902 | "willy", 1903 | ], 1904 | ], 1905 | [ 1906 | "it", 1907 | [ 1908 | "allupato", 1909 | "ammucchiata", 1910 | "anale", 1911 | "arrapato", 1912 | "arrusa", 1913 | "arruso", 1914 | "assatanato", 1915 | "bagascia", 1916 | "bagassa", 1917 | "bagnarsi", 1918 | "baldracca", 1919 | "balle", 1920 | "battere", 1921 | "battona", 1922 | "belino", 1923 | "biga", 1924 | "bocchinara", 1925 | "bocchino", 1926 | "bofilo", 1927 | "boiata", 1928 | "bordello", 1929 | "brinca", 1930 | "bucaiolo", 1931 | "budiùlo", 1932 | "busone", 1933 | "cacca", 1934 | 
"caciocappella", 1935 | "cadavere", 1936 | "cagare", 1937 | "cagata", 1938 | "cagna", 1939 | "casci", 1940 | "cazzata", 1941 | "cazzimma", 1942 | "cazzo", 1943 | "cesso", 1944 | "cazzone", 1945 | "checca", 1946 | "chiappa", 1947 | "chiavare", 1948 | "chiavata", 1949 | "ciospo", 1950 | "ciucciami il cazzo", 1951 | "coglione", 1952 | "coglioni", 1953 | "cornuto", 1954 | "cozza", 1955 | "culattina", 1956 | "culattone", 1957 | "culo", 1958 | "ditalino", 1959 | "fava", 1960 | "femminuccia", 1961 | "fica", 1962 | "figa", 1963 | "figlio di buona donna", 1964 | "figlio di puttana", 1965 | "figone", 1966 | "finocchio", 1967 | "fottere", 1968 | "fottersi", 1969 | "fracicone", 1970 | "fregna", 1971 | "frocio", 1972 | "froscio", 1973 | "goldone", 1974 | "guardone", 1975 | "imbecille", 1976 | "incazzarsi", 1977 | "incoglionirsi", 1978 | "ingoio", 1979 | "leccaculo", 1980 | "lecchino", 1981 | "lofare", 1982 | "loffa", 1983 | "loffare", 1984 | "mannaggia", 1985 | "merda", 1986 | "merdata", 1987 | "merdoso", 1988 | "mignotta", 1989 | "minchia", 1990 | "minchione", 1991 | "mona", 1992 | "monta", 1993 | "montare", 1994 | "mussa", 1995 | "nave scuola", 1996 | "nerchia", 1997 | "padulo", 1998 | "palle", 1999 | "palloso", 2000 | "patacca", 2001 | "patonza", 2002 | "pecorina", 2003 | "pesce", 2004 | "picio", 2005 | "pincare", 2006 | "pippa", 2007 | "pinnolone", 2008 | "pipì", 2009 | "pippone", 2010 | "pirla", 2011 | "pisciare", 2012 | "piscio", 2013 | "pisello", 2014 | "pistolotto", 2015 | "pomiciare", 2016 | "pompa", 2017 | "pompino", 2018 | "porca", 2019 | "porca madonna", 2020 | "porca miseria", 2021 | "porca puttana", 2022 | "porco", 2023 | "porco due", 2024 | "porco zio", 2025 | "potta", 2026 | "puppami", 2027 | "puttana", 2028 | "quaglia", 2029 | "recchione", 2030 | "regina", 2031 | "rincoglionire", 2032 | "rizzarsi", 2033 | "rompiballe", 2034 | "rompipalle", 2035 | "ruffiano", 2036 | "sbattere", 2037 | "sbattersi", 2038 | "sborra", 2039 | "sborrata", 2040 | "sborrone", 2041 | 
"sbrodolata", 2042 | "scopare", 2043 | "scopata", 2044 | "scorreggiare", 2045 | "sega", 2046 | "slinguare", 2047 | "slinguata", 2048 | "smandrappata", 2049 | "soccia", 2050 | "socmel", 2051 | "sorca", 2052 | "spagnola", 2053 | "spompinare", 2054 | "sticchio", 2055 | "stronza", 2056 | "stronzata", 2057 | "stronzo", 2058 | "succhiami", 2059 | "succhione", 2060 | "sveltina", 2061 | "sverginare", 2062 | "tarzanello", 2063 | "terrone", 2064 | "testa di cazzo", 2065 | "tette", 2066 | "tirare", 2067 | "topa", 2068 | "troia", 2069 | "trombare", 2070 | "vacca", 2071 | "vaffanculo", 2072 | "vangare", 2073 | "zinne", 2074 | "zio cantante", 2075 | "zoccola", 2076 | ], 2077 | ], 2078 | [ 2079 | "hi", 2080 | [ 2081 | "4r5e", 2082 | "5h1t", 2083 | "5hit", 2084 | "a55", 2085 | "गुदामैथुन", 2086 | "एनस", 2087 | "ar5e", 2088 | "arrse", 2089 | "आर्सेना", 2090 | "शस्त्र", 2091 | "ass", 2092 | "ass-fucker", 2093 | "assfucker", 2094 | "assfukka", 2095 | "asshole", 2096 | "assholes", 2097 | "asswhole", 2098 | "$$", 2099 | "$", 2100 | "b", 2101 | "b00b", 2102 | "b17ch", 2103 | "b1tch", 2104 | "बॉलबैग", 2105 | "गेंद", 2106 | "गेंदों", 2107 | "बस्टर्ड", 2108 | "जानवर", 2109 | "समानता", 2110 | "घंटी", 2111 | "श्रेष्ठ", 2112 | "श्रेष्ठता", 2113 | "bi+ch", 2114 | "बिच", 2115 | "बिटच", 2116 | "चूंचियां", 2117 | "बिटचर", 2118 | "बिट्च", 2119 | "बिटचिन", 2120 | "बिटिंग", 2121 | "खूनी", 2122 | "blow job", 2123 | "blowjob", 2124 | "बियोला", 2125 | "बोल्क", 2126 | "बोललोक", 2127 | "हड्डी", 2128 | "शराब", 2129 | "स्तन", 2130 | "booooobs", 2131 | "buceta", 2132 | "बगीचा", 2133 | "हिन्दी", 2134 | "bum", 2135 | "बट", 2136 | "बटहोल", 2137 | "बटमच", 2138 | "buttplug", 2139 | "c0ck", 2140 | "c0cksucker", 2141 | "carpet muncher", 2142 | "चिंक", 2143 | "cipa", 2144 | "cl1t", 2145 | "क्लिट", 2146 | "क्लिटोरिस", 2147 | "अखरोट", 2148 | "मुर्गा", 2149 | "मुर्गा चूसने वाला", 2150 | "कॉकफेस", 2151 | "कॉक", 2152 | "मुर्गा चूसने", 2153 | "कोक", 2154 | "cokmuncher", 2155 | "coksucka", 2156 | "कोन", 2157 | "केकड़ा", 
2158 | "सह", 2159 | "cummer", 2160 | "cumming", 2161 | "कमशॉट", 2162 | "गुलगुला", 2163 | "cunillingus", 2164 | "हिंदी", 2165 | "चाचा", 2166 | "cuntlick", 2167 | "cuntlicker", 2168 | "cuntlicking", 2169 | "cunts", 2170 | "cyalis", 2171 | "साइबरफ़ुक", 2172 | "साइबर बकवास", 2173 | "d1ck", 2174 | "damn", 2175 | "डिक", 2176 | "dickhead", 2177 | "डिल्डो", 2178 | "dildos", 2179 | "गुत्थी", 2180 | "डंक", 2181 | "कुत्ते-fucker", 2182 | "कुत्ते", 2183 | "dogging", 2184 | "गधा", 2185 | "डोष", 2186 | "डच", 2187 | "डाइक", 2188 | "ejaculate", 2189 | "ejaculated", 2190 | "ejaculation", 2191 | "ejaculations", 2192 | "ejakulate", 2193 | "f u c k", 2194 | "f u c k e r", 2195 | "f4nny", 2196 | "फ़ैग", 2197 | "छूत", 2198 | "चुभोना", 2199 | "fagot", 2200 | "फैनी", 2201 | "फैनीफ्लैप", 2202 | "कट्टर", 2203 | "वसा", 2204 | "fcuk", 2205 | "fcuker", 2206 | "fcuking", 2207 | "fecker", 2208 | "गिरना", 2209 | "गिरती", 2210 | "उंगली करना", 2211 | "fingerfucked", 2212 | "उंगलियों", 2213 | "मुट्ठीकरना", 2214 | "fistfucked", 2215 | "fistfucker", 2216 | "fistfuckers", 2217 | "fistfucking", 2218 | "fistfuck", 2219 | "फुहार", 2220 | "चॉकर", 2221 | "भाड़ में जाओ", 2222 | "बकवास", 2223 | "गड़बड़", 2224 | "मुख-मैथुन", 2225 | "fuckheads", 2226 | "कमबख्त", 2227 | "fuckingshitmotherfucker", 2228 | "fudge packer", 2229 | "fudgepacker", 2230 | "fuk", 2231 | "fucker", 2232 | "fukkin", 2233 | "फक", 2234 | "फूहड़", 2235 | "fux0r", 2236 | "गिरोह बैंग", 2237 | "gangbanged", 2238 | "समलैंगिक", 2239 | "बकरी", 2240 | "देवता", 2241 | "देवी", 2242 | "कट्टरसेक्स", 2243 | "सिर", 2244 | "होर", 2245 | "हो", 2246 | "होम", 2247 | "सींग का", 2248 | "सेक्सी", 2249 | "जैक ऑफ", 2250 | "जैकपॉट", 2251 | "जैप", 2252 | "मरोड़ते", 2253 | "jism", 2254 | "जिज़", 2255 | "जिज़्म", 2256 | "कावा", 2257 | "घुंडी", 2258 | "knobjocky", 2259 | "kock", 2260 | "कोंडोम", 2261 | "कोन्डम", 2262 | "कुम्भ", 2263 | "kumming", 2264 | "गुनगुना", 2265 | "l3i+ch", 2266 | "l3itch", 2267 | "लैबिया", 2268 | "चमकना", 2269 | "m0f0", 2270 | "m0fo", 2271 | 
"m45terbate", 2272 | "ma5terb8", 2273 | "ma5terbate", 2274 | "मैसोचिस्ट", 2275 | "मास्टर-बेट", 2276 | "मास्टरब8", 2277 | "मास्टरबैट*", 2278 | "मास्टरबैट3", 2279 | "मास्टरबेट", 2280 | "masterbation", 2281 | "हस्तमैथुन", 2282 | "mo-fo", 2283 | "mof0", 2284 | "साइटमैप", 2285 | "mothafucka", 2286 | "mothafuckaz", 2287 | "mothafucked", 2288 | "mothafucker", 2289 | "mothafuckin", 2290 | "mothafucking", 2291 | "माँ", 2292 | "motherfucked", 2293 | "motherfucker", 2294 | "motherfuckers", 2295 | "motherfuckin", 2296 | "motherfucking", 2297 | "motherfuckka", 2298 | "माताओं", 2299 | "माफ", 2300 | "muthafecker", 2301 | "muthafuckker", 2302 | "mutherfucker", 2303 | "n1gga", 2304 | "n1gger", 2305 | "नाज़ी", 2306 | "nigg3r", 2307 | "nigg4h", 2308 | "निगा", 2309 | "निगाह", 2310 | "नागा", 2311 | "nigger", 2312 | "niggers", 2313 | "नोब", 2314 | "nob jokey", 2315 | "नोबहेड", 2316 | "nobjocky", 2317 | "nobjokey", 2318 | "नट्सैक", 2319 | "orgasim", 2320 | "संभोग सुख", 2321 | "p0", 2322 | "मोहन", 2323 | "पेकर", 2324 | "पेनिस", 2325 | "penisfucker", 2326 | "फोनसेक्स", 2327 | "चक", 2328 | "फुक", 2329 | "फुकेत", 2330 | "phuq", 2331 | "pigfucker", 2332 | "पिम्पिस", 2333 | "पेशाब", 2334 | "pissed", 2335 | "pissing", 2336 | "पेशाब करना", 2337 | "पोप", 2338 | "अश्लील", 2339 | "पोर्नोग्राफी", 2340 | "ट्रिक", 2341 | "pricks", 2342 | "प्रोन", 2343 | "पुसी", 2344 | "बिल्ली", 2345 | "राशि", 2346 | "मंदिर", 2347 | "रिमजॉ", 2348 | "rimming", 2349 | "s hit", 2350 | "s.o.b.", 2351 | "sadist", 2352 | "schlong", 2353 | "पेंच", 2354 | "झरना", 2355 | "घूंघट", 2356 | "अंडकोष", 2357 | "वीर्य", 2358 | "सेक्स", 2359 | "sh!+", 2360 | "sh!t", 2361 | "sh1t", 2362 | "shag", 2363 | "shagger", 2364 | "shaggin", 2365 | "shagging", 2366 | "shemale", 2367 | "शि +", 2368 | "shitfuck", 2369 | "shitful", 2370 | "शिटिंग", 2371 | "पतला", 2372 | "शिथिल", 2373 | "शिटर", 2374 | "शिट्टी", 2375 | "skank", 2376 | "गुदगुदी", 2377 | "smegma", 2378 | "smut", 2379 | "छीनना", 2380 | "बेटा", 2381 | "स्पेक", 2382 | "टट्टू", 2383 | 
"t1t1e5", 2384 | "टी1टी", 2385 | "टी", 2386 | "टीज़", 2387 | "वृषण", 2388 | "तैसा", 2389 | "टिट्स", 2390 | "टिट", 2391 | "tittie5", 2392 | "tittiefucker", 2393 | "titties", 2394 | "tittyfuck", 2395 | "tittywank", 2396 | "titwank", 2397 | "toser", 2398 | "turd", 2399 | "tw4t", 2400 | "twat", 2401 | "twathead", 2402 | "twatty", 2403 | "ट्वंट", 2404 | "ट्वंटर", 2405 | "v14gra", 2406 | "v1gra", 2407 | "योनि", 2408 | "वियाग्रा", 2409 | "vulva", 2410 | "w00se", 2411 | "वांग", 2412 | "wank", 2413 | "wanker", 2414 | "wanky", 2415 | "whoar", 2416 | "whore", 2417 | "इच्छा", 2418 | ], 2419 | ], 2420 | [ 2421 | "ja", 2422 | [ 2423 | "4r5eの", 2424 | "5h1t", 2425 | "5ヒット", 2426 | "の55", 2427 | "アナル", 2428 | "ログイン", 2429 | "ar5eの", 2430 | "アルス", 2431 | "トピックス", 2432 | "ass fucker", 2433 | "アコース", 2434 | "assfucker", 2435 | "アスフッカ", 2436 | "アスホール", 2437 | "$$$$", 2438 | "アスドル", 2439 | "a$s", 2440 | "b!パッチ", 2441 | "b00bsの", 2442 | "b17chの", 2443 | "b1tchの", 2444 | "ボールバッグ", 2445 | "ボールボール", 2446 | "ボールバック", 2447 | "バスタード", 2448 | "ビーストリアル", 2449 | "獣性", 2450 | "ベルン", 2451 | "ベスト", 2452 | "人気カテゴリー", 2453 | "bi+chの", 2454 | "バイアッチ", 2455 | "ビットチェボーイ", 2456 | "ビットチャー", 2457 | "ビットチャーズ", 2458 | "ビットチェス", 2459 | "ビットキャッシュ", 2460 | "blow job", 2461 | "ブロージョブ", 2462 | "ボヨラス", 2463 | "ボルロック", 2464 | "ボロク", 2465 | "ボブ", 2466 | "ブーブ", 2467 | "ブーブス", 2468 | "ブーツ", 2469 | "ブームーブ", 2470 | "ブームーブス", 2471 | "ブラストス", 2472 | "ボセタ", 2473 | "バッガー", 2474 | "ブルシット", 2475 | "バム", 2476 | "バッツ", 2477 | "バトール", 2478 | "バッチュ", 2479 | "パスワード", 2480 | "c0cksuckerの", 2481 | "carpet muncher", 2482 | "チンク", 2483 | "プリカ", 2484 | "クラス1t", 2485 | "クライストリ", 2486 | "クルミ", 2487 | "コック", 2488 | "コックサッカー", 2489 | "コックフェイス", 2490 | "コックヘッド", 2491 | "コックムンチャー", 2492 | "コックサック", 2493 | "コック吸う", 2494 | "コックキャッキング", 2495 | "コックスカ", 2496 | "コックスッカ", 2497 | "コクンチャー", 2498 | "コクサッカ", 2499 | "コックス", 2500 | "クムマー", 2501 | "つぶやき", 2502 | "カムス", 2503 | "cuntlicking", 2504 | "シアリス", 2505 | "サイバーフック", 2506 | "サイバーfuck", 2507 | 
"サイバーfucked", 2508 | "サイバーファッカー", 2509 | "サイバーファッキング", 2510 | "d1ckの", 2511 | "ダムン", 2512 | "dickheadの", 2513 | "ディルドー", 2514 | "ディルドス", 2515 | "ダイニング", 2516 | "ディルサ", 2517 | "ダック", 2518 | "犬 fucker", 2519 | "ドッグギン", 2520 | "ドッギング", 2521 | "ドオッシュ", 2522 | "デュチェ", 2523 | "ダイケ", 2524 | "エジャキュレート", 2525 | "ejaculated(エジャキュレーション)", 2526 | "電子メール", 2527 | "ejaculating(エジャカルト)", 2528 | "ejaculating(エジャキュレーション)", 2529 | "ejakulateの", 2530 | "f u c k", 2531 | "f u c k e r", 2532 | "おばあさん", 2533 | "ファグ", 2534 | "ファッギング", 2535 | "ファゴット", 2536 | "ファグス", 2537 | "ファンニー", 2538 | "ファンニーフラップ", 2539 | "ファンニーfucker", 2540 | "脂肪マッサージ", 2541 | "フォーク", 2542 | "フィードバック", 2543 | "フェッカー", 2544 | "落ち着き", 2545 | "フィンガーfuck", 2546 | "指fucked", 2547 | "フィンガーファイヤー", 2548 | "フィンガーファッカー", 2549 | "指fucking", 2550 | "フィンガーfucks", 2551 | "フィストfuck", 2552 | "fistfucked", 2553 | "fistfucker", 2554 | "フィストfuckers", 2555 | "fistfucking", 2556 | "fistfuckings", 2557 | "fistfucks", 2558 | "フランジ", 2559 | "フーカー", 2560 | "クソ", 2561 | "フラッシャー", 2562 | "クソヘッド", 2563 | "fuckingshitmotherfucker", 2564 | "fuckwhitさん", 2565 | "fuckwitの", 2566 | "fudge packer", 2567 | "ファッジパック", 2568 | "フューカー", 2569 | "フクカー", 2570 | "フクキン", 2571 | "ファクシミリ", 2572 | "fukwitの", 2573 | "ギャングバン", 2574 | "ギャング", 2575 | "ギャングバング", 2576 | "ガールフレンド", 2577 | "ゲイsexsex", 2578 | "ヤギ", 2579 | "ゴッドダム", 2580 | "神秘的な被害を受けた", 2581 | "ゴダメン", 2582 | "ハードコアsexsex", 2583 | "ヘッドサス", 2584 | "ふりがな", 2585 | "ホーンシーズ", 2586 | "ホーンシー", 2587 | "ホットsex", 2588 | "ジャックオフ", 2589 | "ジャークオフ", 2590 | "ジャーミー", 2591 | "カウク", 2592 | "ノビーズ", 2593 | "ノベット", 2594 | "ノベンド", 2595 | "ノブヘッド", 2596 | "ノブジョキー", 2597 | "コンドム", 2598 | "コンダム", 2599 | "クマー", 2600 | "クムス", 2601 | "l3i+chの", 2602 | "l3itchの", 2603 | "ラボリア", 2604 | "プロフィール", 2605 | "m0f0", 2606 | "m0foの", 2607 | "m45terbateの", 2608 | "マ5terb8", 2609 | "ma5terbateの", 2610 | "マゾキスト", 2611 | "マスター・リベート", 2612 | "マスターb8", 2613 | "マスターバット*", 2614 | "マスターバット3", 2615 | "マスターリベート", 2616 | "マスタービング", 2617 | "マスターブレーション", 2618 
| "masturbate", 2619 | "モフォ", 2620 | "モフ0", 2621 | "mothafuck", 2622 | "モタクソファ", 2623 | "mothafuckas", 2624 | "mothafuckaz", 2625 | "mothafucked", 2626 | "mothafucker", 2627 | "モタクソファー", 2628 | "mothafuckin", 2629 | "mothafucking", 2630 | "mothafuckings", 2631 | "mothafucks", 2632 | "母fuck", 2633 | "母fucked", 2634 | "母fucker", 2635 | "母fuckers", 2636 | "母fuckin", 2637 | "母fucking", 2638 | "母fuckings", 2639 | "母fuckka", 2640 | "母fucks", 2641 | "マフ", 2642 | "n1ggaの", 2643 | "n1ガー", 2644 | "ナジ", 2645 | "ニグ3r", 2646 | "ニグ4h", 2647 | "ニグガ", 2648 | "ナイガス", 2649 | "ニガー", 2650 | "ノブ", 2651 | "nob jokey", 2652 | "ノブジョーキー", 2653 | "ナムナッツ", 2654 | "ナッツマック", 2655 | "オーガスム", 2656 | "オルガスム", 2657 | "p0rnの", 2658 | "プーン", 2659 | "ペッカー", 2660 | "ペニス", 2661 | "penisfucker", 2662 | "電話sex", 2663 | "ファック", 2664 | "フクウク", 2665 | "フィッシング", 2666 | "ピッキング", 2667 | "プーク", 2668 | "ピクチャー", 2669 | "ピンピス", 2670 | "小便", 2671 | "放尿", 2672 | "小便小便", 2673 | "pissflaps", 2674 | "プッシー", 2675 | "ポップ", 2676 | "恋物癖", 2677 | "パトリック", 2678 | "プロン", 2679 | "パブ", 2680 | "パッシー", 2681 | "パスシス", 2682 | "リクルート", 2683 | "リタード", 2684 | "リムジャウ", 2685 | "リミング", 2686 | "s hit", 2687 | "s.o.b.", 2688 | "サディスト", 2689 | "シュロン", 2690 | "スクリュー", 2691 | "セメン", 2692 | "性別", 2693 | "お問い合わせ", 2694 | "ツイート", 2695 | "sh1tの", 2696 | "シェーグ", 2697 | "シャガール", 2698 | "シャギン", 2699 | "シェーディング", 2700 | "シーメール", 2701 | "シープラス", 2702 | "サイトマップ", 2703 | "インタビュー", 2704 | "シッター", 2705 | "インフォメーション", 2706 | "スカンク", 2707 | "ソリューション", 2708 | "スメグマ", 2709 | "スムート", 2710 | "スナッチ", 2711 | "息子-of-a-bitch", 2712 | "スパク", 2713 | "t1tt1e5の特長", 2714 | "t1タイ", 2715 | "テッツ", 2716 | "テエズ", 2717 | "リファレンス", 2718 | "アクティシクル", 2719 | "タイ5", 2720 | "tittiefucker", 2721 | "ティワンク", 2722 | "titwankの", 2723 | "トッサー", 2724 | "タード", 2725 | "tw4tの", 2726 | "タワトヘッド", 2727 | "トワット", 2728 | "トウランター", 2729 | "v14graの", 2730 | "v1graの", 2731 | "バギーナ", 2732 | "ビアグラ", 2733 | "w00se", 2734 | "ワンク", 2735 | "ワンカー", 2736 | "ワンキー", 2737 | "スタッフ", 2738 | "パーティー", 2739 | 
"メニュー", 2740 | ], 2741 | ], 2742 | [ 2743 | "ko", 2744 | [ 2745 | "4r5e를", 2746 | "1시간", 2747 | "5열", 2748 | "a55's 수", 2749 | "뚱 베어", 2750 | "한국어", 2751 | "사이트맵", 2752 | "카테고리", 2753 | "장바구니", 2754 | "₢ 킹", 2755 | "회사 소개", 2756 | "제품 정보", 2757 | "볼랙", 2758 | "배틀그라운드", 2759 | "제품정보", 2760 | "채용 정보", 2761 | "인기있는", 2762 | "blow job", 2763 | "채용정보", 2764 | "스낵 바", 2765 | "뼈", 2766 | "carpet muncher", 2767 | "프로젝트", 2768 | "바퀴 슈커", 2769 | "큰 가슴", 2770 | "채용공고", 2771 | "인기 카테고리", 2772 | "cyalis의", 2773 | "사이버후크", 2774 | "사이버감사", 2775 | "사이버fucker", 2776 | "사이버fuckers", 2777 | "이름 *", 2778 | "언어: 한국어", 2779 | "개 게이", 2780 | "개진", 2781 | "관련 기사", 2782 | "f u c k", 2783 | "f u c k e r", 2784 | "팟캐스트", 2785 | "모든 새", 2786 | "뚱뚱뚱", 2787 | "· fcuk", 2788 | "털이 많은", 2789 | "주먹질", 2790 | "중년부인", 2791 | "옵션 정보", 2792 | "빌어 먹을hitmotherfucker", 2793 | "fudge packer", 2794 | "펌웨어", 2795 | "갱뱅", 2796 | "게이주", 2797 | "게이sex", 2798 | "신담", 2799 | "뉴스 레터", 2800 | "신담n", 2801 | "하드코어sex", 2802 | "맨 위로", 2803 | "가장 핫한", 2804 | "핫 성별", 2805 | "잭 오프", 2806 | "잭오프", 2807 | "주 메뉴", 2808 | "칼 머리", 2809 | "칼조키", 2810 | "엄마5terb8", 2811 | "마스터베이트", 2812 | "마스터b8", 2813 | "마스터 배트*", 2814 | "마스터배트3", 2815 | "모thafuck", 2816 | "모thafucka", 2817 | "모thafuckas", 2818 | "모thafuckaz", 2819 | "모thafucked", 2820 | "모thafucker", 2821 | "모thafuckers", 2822 | "모thafuckin", 2823 | "모thafucking", 2824 | "모thafucks", 2825 | "엄마", 2826 | "엄마fuckka", 2827 | "모든 인기있는", 2828 | "노브", 2829 | "nob jokey", 2830 | "노브조키", 2831 | "메뉴 닫기", 2832 | "견과류", 2833 | "오르가즘", 2834 | "페커", 2835 | "페니스", 2836 | "오줌싸기", 2837 | "pussys 소개", 2838 | "핥기", 2839 | "s hit", 2840 | "사이트맵.", 2841 | "관련 제품", 2842 | "쉬메일", 2843 | "·", 2844 | "전체장편", 2845 | "스카크", 2846 | "스크랩", 2847 | "아들의 a-bitch", 2848 | "스펀지", 2849 | "t1티", 2850 | "시험대", 2851 | "가슴5", 2852 | "tittiefucker의", 2853 | "tittyfuck의", 2854 | "tittywank, 영국", 2855 | "titwank의", 2856 | "v14그라", 2857 | "v1그라", 2858 | "뱅커", 2859 | ], 2860 | ], 2861 | [ 2862 | "pt", 2863 | [ 2864 | 
"4r5e", 2865 | "5h1t", 2866 | "5", 2867 | "a55", 2868 | "anal", 2869 | "anus", 2870 | "ar5e", 2871 | "arrumos", 2872 | "arse", 2873 | "arses", 2874 | "bunda", 2875 | "filho da puta", 2876 | "julgo", 2877 | "o quê", 2878 | "idiota", 2879 | "idiotas", 2880 | "- o quê", 2881 | "$$", 2882 | "como $", 2883 | "a $s", 2884 | "b!tch", 2885 | "b00", 2886 | "b17ch", 2887 | "b1", 2888 | "saco de bola", 2889 | "bolas", 2890 | "sacana", 2891 | "bestial", 2892 | "bestialidade", 2893 | "amigos", 2894 | "bi-ch", 2895 | "biatch", 2896 | "cabrão", 2897 | "putas", 2898 | "cadelas", 2899 | "sangrento", 2900 | "blow job", 2901 | "broche", 2902 | "broches", 2903 | "boa sorte", 2904 | "bolo", 2905 | "osso", 2906 | "mamas", 2907 | "peitos", 2908 | "buceta", 2909 | "bugger", 2910 | "merda", 2911 | "bum", 2912 | "mas..", 2913 | "bundas", 2914 | "mastplug", 2915 | "c0m", 2916 | "c0cksucker", 2917 | "carpet muncher", 2918 | "cawk", 2919 | "chink", 2920 | "cipa", 2921 | "não", 2922 | "claire", 2923 | "clitoris", 2924 | "clérigos", 2925 | "amendoim", 2926 | "caralho", 2927 | "cara de pau", 2928 | "cabeça de pau", 2929 | "galão", 2930 | "carpinteiro", 2931 | "paus", 2932 | "pausucked", 2933 | "pausucking", 2934 | "pausuka", 2935 | "cok", 2936 | "colheitadeira", 2937 | "o que é que se passa", 2938 | "cláudia", 2939 | "cox", 2940 | "porra", 2941 | "de verão", 2942 | "cumming", 2943 | "cums", 2944 | "com licença", 2945 | "cunilingus", 2946 | "cunillingus", 2947 | "cúpula", 2948 | "não sei", 2949 | "o que é isso", 2950 | "incômodo", 2951 | "cúmplices", 2952 | "cialis", 2953 | "cyberfuc", 2954 | "cyberfuck", 2955 | "cyberfucked", 2956 | "cibercriminoso", 2957 | "os cibercriminosos", 2958 | "cyberfucking", 2959 | "maldição", 2960 | "pauzinho", 2961 | "dildo", 2962 | "dildos", 2963 | "dink", 2964 | "dinks", 2965 | "dirsança", 2966 | "dl", 2967 | "cãozinho", 2968 | "- não", 2969 | "duquesa", 2970 | "dyke", 2971 | "ejaculações", 2972 | "ejaculada", 2973 | "ejaculando", 2974 | "ejaculação", 2975 | 
"ejaculação interna", 2976 | "f u c k", 2977 | "f u c k e r", 2978 | "f4b", 2979 | "fagão", 2980 | "fagging", 2981 | "faggit", 2982 | "faggs", 2983 | "fagot", 2984 | "fagots", 2985 | "fags", 2986 | "fantoche", 2987 | "fannyflaps", 2988 | "fany", 2989 | "gorda", 2990 | "fcuk", 2991 | "fcuker", 2992 | "fcuking", 2993 | "pateta", 2994 | "fecker", 2995 | "felching", 2996 | "caído", 2997 | "fodido", 2998 | "filhos da puta", 2999 | "fistfucked", 3000 | "fistfucker", 3001 | "fistfuckers", 3002 | "fistfucking", 3003 | "fistfuckings", 3004 | "foda-se", 3005 | "flange", 3006 | "fook", 3007 | "fooker", 3008 | "fodida", 3009 | "cabrões", 3010 | "caralhos", 3011 | "fodas", 3012 | "fudge packer", 3013 | "fudgepacker", 3014 | "fuk", 3015 | "fuker", 3016 | "fukker", 3017 | "cortina", 3018 | "fuks", 3019 | "eu sei", 3020 | "fux", 3021 | "gangbang", 3022 | "gangbanged", 3023 | "jogos de vestir", 3024 | "gay", 3025 | "gaysex", 3026 | "cabras", 3027 | "raios", 3028 | "maldito", 3029 | "hardcoresex", 3030 | "headass", 3031 | "hoar", 3032 | "hoare", 3033 | "hoer", 3034 | "hoes", 3035 | "homo", 3036 | "hore", 3037 | "excitado", 3038 | "sexo quente", 3039 | "jack-off", 3040 | "jackoff", 3041 | "tudo bem", 3042 | "jismo", 3043 | "júnior", 3044 | "jizm", 3045 | "jizz", 3046 | "kawk", 3047 | "botão", 3048 | "cabeça de cabeça", 3049 | "- sim", 3050 | "kum", 3051 | "kummer", 3052 | "kums", 3053 | "i3i+ch", 3054 | "i3", 3055 | "laboratório", 3056 | "brilho", 3057 | "lustrando", 3058 | "m0f0", 3059 | "m0fo", 3060 | "m45terbate", 3061 | "o que fazer", 3062 | "ma5terbate", 3063 | "masoquista", 3064 | "mestre-bate", 3065 | "masterb8", 3066 | "masterbat", 3067 | "masterbat3", 3068 | "masterbate", 3069 | "masterbação", 3070 | "masterbations", 3071 | "masturbar-se", 3072 | "mo-fo", 3073 | "o que se passa", 3074 | "mofo", 3075 | "mothafuckers", 3076 | "que se lixe", 3077 | "fodendo", 3078 | "mothafuckings", 3079 | "merdas", 3080 | "filho da mãe", 3081 | "muthafecer", 3082 | "fode-se", 3083 | "n1gga", 
3084 | "nazi", 3085 | "o que foi", 3086 | "nigga", 3087 | "os pretos", 3088 | "o que é", 3089 | "preto", 3090 | "pretos", 3091 | "no", 3092 | "nob jokey", 3093 | "nobjocky", 3094 | "boa noite", 3095 | "nozes", 3096 | "orgasim", 3097 | "orgasims", 3098 | "orgasmo", 3099 | "orgasmos", 3100 | "p0rn", 3101 | "pão", 3102 | "pecker", 3103 | "pênis", 3104 | "pênisfucker", 3105 | "telefonesex", 3106 | "phuck", 3107 | "phuk", 3108 | "phuked", 3109 | "a tremer", 3110 | "phukked", 3111 | "phukking", 3112 | "phuras", 3113 | "phuq", 3114 | "pimpos", 3115 | "miúda", 3116 | "miúdo", 3117 | "pissers", 3118 | "pias", 3119 | "pissflaps", 3120 | "imposição", 3121 | "mijando", 3122 | "cocó", 3123 | "pornô", 3124 | "por favor", 3125 | "pornografia", 3126 | "pimba", 3127 | "cricks", 3128 | "pron", 3129 | "bar", 3130 | "puxa", 3131 | "puxas", 3132 | "cona", 3133 | "conas", 3134 | "recto", 3135 | "retirada", 3136 | "bordas", 3137 | "s hit", 3138 | "s.o.b.", 3139 | "sádico", 3140 | "schlong", 3141 | "parafusos", 3142 | "scroat", 3143 | "scrote", 3144 | "escroto", 3145 | "sémen", 3146 | "sexo", 3147 | "mais", 3148 | "shag", 3149 | "shagger", 3150 | "shaggin", 3151 | "shagging", 3152 | "shema", 3153 | "brilhante", 3154 | "skank", 3155 | "puta", 3156 | "smegma", 3157 | "snatch", 3158 | "spac", 3159 | "t1t1e5", 3160 | "t1t", 3161 | "tetas", 3162 | "testículo", 3163 | "t", 3164 | "não te metas", 3165 | "titã", 3166 | "titãs", 3167 | "que merda", 3168 | "turva", 3169 | "tw4t", 3170 | "twat", 3171 | "twathead", 3172 | "twatty", 3173 | "twunt", 3174 | "twunter", 3175 | "v14g", 3176 | "v1gra", 3177 | "vagina", 3178 | "via satélite", 3179 | "vulva", 3180 | "wang", 3181 | "vamos", 3182 | "wanker", 3183 | "wanky", 3184 | "willies", 3185 | "willy", 3186 | ], 3187 | ], 3188 | [ 3189 | "ru", 3190 | [ 3191 | "4r5e", 3192 | "5h1t", 3193 | "5хит", 3194 | "55", 3195 | "анальный", 3196 | "анус", 3197 | "ar5e", 3198 | "аррс", 3199 | "задница", 3200 | "задницы", 3201 | "ублюдок", 3202 | "асфукка", 3203 | 
"придурок", 3204 | "придурки", 3205 | "полный", 3206 | "$$", 3207 | "как $", 3208 | "$", 3209 | "б/ч", 3210 | "00бс", 3211 | "17ч", 3212 | "b1tch", 3213 | "мешок с мячом", 3214 | "шары", 3215 | "мешок", 3216 | "звериный", 3217 | "звериность", 3218 | "колокольчик", 3219 | "скотство", 3220 | "би+ч", 3221 | "двустворчатый", 3222 | "сука", 3223 | "сукин сын", 3224 | "шлюха", 3225 | "шлюхи", 3226 | "стервы", 3227 | "стерва", 3228 | "кровавый", 3229 | "blow job", 3230 | "минет", 3231 | "буйвол", 3232 | "боллок", 3233 | "буллок", 3234 | "костлявый", 3235 | "сиська", 3236 | "сиськи", 3237 | "болваны", 3238 | "грудь", 3239 | "буцета", 3240 | "говнюк", 3241 | "дерьмо", 3242 | "бродяга", 3243 | "болван", 3244 | "затвор", 3245 | "вес", 3246 | "мудак", 3247 | "carpet muncher", 3248 | "топор", 3249 | "ерунда", 3250 | "шипа", 3251 | "cl1t", 3252 | "клит", 3253 | "клитор", 3254 | "клиты", 3255 | "кнут", 3256 | "член", 3257 | "петух", 3258 | "лицо члена", 3259 | "членоголовый", 3260 | "членосос", 3261 | "петухи", 3262 | "хулиган", 3263 | "сосать член", 3264 | "членсука", 3265 | "кокс", 3266 | "кумунчер", 3267 | "кексука", 3268 | "кун", 3269 | "кум", 3270 | "колоть", 3271 | "кумминг", 3272 | "кумс", 3273 | "кумшот", 3274 | "кунилингус", 3275 | "куниллингус", 3276 | "куннилингус", 3277 | "пизда", 3278 | "щекотать", 3279 | "щекотливый", 3280 | "пиздец", 3281 | "циалис", 3282 | "киберфук", 3283 | "кибер-трах", 3284 | "киберпроклятый", 3285 | "киберпреступник", 3286 | "киберпреступники", 3287 | "кибер-ебать", 3288 | "1кк", 3289 | "проклятый", 3290 | "фаллоимитатор", 3291 | "ворчать", 3292 | "помойка", 3293 | "дирша", 3294 | "собачий", 3295 | "собака", 3296 | "оселовоз", 3297 | "душ", 3298 | "герцог", 3299 | "дамба", 3300 | "эякулировать", 3301 | "эякулированный", 3302 | "эякулирует", 3303 | "эякулирующий", 3304 | "эякуляция", 3305 | "f u c k", 3306 | "f u c k e r", 3307 | "4nny", 3308 | "педик", 3309 | "педикюр", 3310 | "педики", 3311 | "фагот", 3312 | "фаготы", 3313 | "фанатичный", 
3314 | "фаннифлапс", 3315 | "толстый", 3316 | "фук", 3317 | "засранец", 3318 | "грязь", 3319 | "хуй", 3320 | "сволочь", 3321 | "откидывание", 3322 | "сорняк", 3323 | "фелляция", 3324 | "сраный пальцем", 3325 | "ублюдки", 3326 | "гребаный палец", 3327 | "пальчики", 3328 | "сраный", 3329 | "долбаный", 3330 | "фланцеобразный", 3331 | "обман", 3332 | "фукер", 3333 | "ебать", 3334 | "трахаться", 3335 | "ебаный", 3336 | "гребаный", 3337 | "чертов ублюдок", 3338 | "блядь", 3339 | "fudge packer", 3340 | "фуккин", 3341 | "фуквист", 3342 | "приправа", 3343 | "fux0r", 3344 | "бандаж", 3345 | "бандитизм", 3346 | "бандиты", 3347 | "гей-лорд", 3348 | "гейсекс", 3349 | "козел", 3350 | "черт возьми", 3351 | "хардкорекс", 3352 | "голова", 3353 | "хриплый", 3354 | "кабак", 3355 | "горшок", 3356 | "мотыга", 3357 | "гомо", 3358 | "обнимать", 3359 | "самый роговой", 3360 | "роговой", 3361 | "секс", 3362 | "отсрочка", 3363 | "рывок", 3364 | "яблочный", 3365 | "джизм", 3366 | "джиза", 3367 | "бегство", 3368 | "шипение", 3369 | "лаять", 3370 | "бездельник", 3371 | "ручка", 3372 | "тупица", 3373 | "неряшливый", 3374 | "кнопка", 3375 | "стучать", 3376 | "кондум", 3377 | "кондумы", 3378 | "куммер", 3379 | "l3i+ch", 3380 | "3ч", 3381 | "лабиринт", 3382 | "похоть", 3383 | "m0f0", 3384 | "m0fo", 3385 | "m45terterate", 3386 | "ma5terb8", 3387 | "ма5тербат", 3388 | "мазохист", 3389 | "мастер-бит", 3390 | "мастер8", 3391 | "мастербат", 3392 | "мастерить", 3393 | "мастербирование", 3394 | "мастербации", 3395 | "мастурбировать", 3396 | "мофо", 3397 | "мафия", 3398 | "фото", 3399 | "молоток", 3400 | "мотафук", 3401 | "мотафак", 3402 | "мотылек", 3403 | "усатый", 3404 | "мотафакер", 3405 | "мотафакеры", 3406 | "мотафакин", 3407 | "молохвост", 3408 | "кекс", 3409 | "мутхафекер", 3410 | "n1gga", 3411 | "n1gger", 3412 | "нацистский", 3413 | "nigg3r", 3414 | "nigg4h", 3415 | "нигга", 3416 | "ниггер", 3417 | "ниггеры", 3418 | "ниггаз", 3419 | "негр", 3420 | "ноб", 3421 | "nob jokey", 3422 | "нобджоки", 
3423 | "бежокей", 3424 | "обезболивающие", 3425 | "гайка", 3426 | "оргазм", 3427 | "рог", 3428 | "пешка", 3429 | "пенис", 3430 | "пенисовец", 3431 | "телефоны", 3432 | "фак", 3433 | "обожженный", 3434 | "пыхтение", 3435 | "пьяный", 3436 | "пьянство", 3437 | "паук", 3438 | "сутенер", 3439 | "моча", 3440 | "разозленный", 3441 | "порох", 3442 | "поршни", 3443 | "сосульки", 3444 | "пессина", 3445 | "отстой", 3446 | "какашка", 3447 | "порно", 3448 | "порнография", 3449 | "уколы", 3450 | "наклон", 3451 | "лоб", 3452 | "гной", 3453 | "пусси", 3454 | "киска", 3455 | "прямая кишка", 3456 | "отсталый", 3457 | "римжа", 3458 | "обрамление", 3459 | "s hit", 3460 | "с.о.б.", 3461 | "садист", 3462 | "шлонг", 3463 | "облажаться", 3464 | "горло", 3465 | "писать", 3466 | "мошонка", 3467 | "сперма", 3468 | "ше! +", 3469 | "блин", 3470 | "шт", 3471 | "дрожать", 3472 | "шаггин", 3473 | "женщина", 3474 | "ши+", 3475 | "дерьмовый", 3476 | "говнюки", 3477 | "смегма", 3478 | "смута", 3479 | "похищать", 3480 | "пак", 3481 | "отрыжка", 3482 | "t1tt1e5", 3483 | "т1тис", 3484 | "трость", 3485 | "тез", 3486 | "тестовый", 3487 | "яичко", 3488 | "сиськи5", 3489 | "чокнутый", 3490 | "титванк", 3491 | "тоссер", 3492 | "tw4t", 3493 | "v14гра", 3494 | "v1гра", 3495 | "влагалище", 3496 | "виагра", 3497 | "вульва", 3498 | "00се", 3499 | "ван", 3500 | "дрочить", 3501 | "вялый", 3502 | "вор", 3503 | "мухи", 3504 | "волей", 3505 | ], 3506 | ], 3507 | [ 3508 | "zh", 3509 | [ 3510 | "4r5e (4r5e) (韩语)", 3511 | "5小时1小时", 3512 | "5发", 3513 | "a55个", 3514 | "肛门", 3515 | "轨道5e", 3516 | "皮肤", 3517 | "屁股", 3518 | "他妈的", 3519 | "驴", 3520 | "阿斯福卡语name", 3521 | "混帐", 3522 | "屁股整齐", 3523 | "(单位:美元)", 3524 | "作为美元", 3525 | "ch", 3526 | "页:1", 3527 | "b17ch (英语)", 3528 | "b1吨级", 3529 | "球囊", 3530 | "球头", 3531 | "鸡巴", 3532 | "杂种", 3533 | "兽形", 3534 | "兽性", 3535 | "铃声", 3536 | "最佳状态", 3537 | "质量", 3538 | "双倍径", 3539 | "双节", 3540 | "贱人", 3541 | "婊子们", 3542 | "骂人", 3543 | "该死的", 3544 | "blow job", 3545 | "吹箫", 3546 | "bo", 
3547 | "胡说", 3548 | "波罗克", 3549 | "骨头", 3550 | "嘘声", 3551 | "乳头", 3552 | "嘘嘘", 3553 | "布鲁塞塔", 3554 | "可恶", 3555 | "废话", 3556 | "流浪汉", 3557 | "臀部", 3558 | "屁眼", 3559 | "枪托", 3560 | "c0ck (英语)", 3561 | "carpet muncher", 3562 | "aw", 3563 | "中国", 3564 | "齐巴", 3565 | "cl1t 键", 3566 | "阴蒂", 3567 | "果实", 3568 | "鸡巴脸", 3569 | "鸡头", 3570 | "鸡尾酒", 3571 | "公鸡", 3572 | "鸡鸡", 3573 | "库克", 3574 | "库克门彻", 3575 | "库克萨", 3576 | "铜", 3577 | "缩写", 3578 | "积分", 3579 | "弯曲", 3580 | "累积射击", 3581 | "阴囊", 3582 | "库尼灵格斯", 3583 | "阴间", 3584 | "贱货", 3585 | "低调", 3586 | "ya", 3587 | "网络福克", 3588 | "网络操", 3589 | "网络他妈的", 3590 | "网络混蛋", 3591 | "d1ck (英语)", 3592 | "蠢货", 3593 | "迪尔多", 3594 | "二极管", 3595 | "叮当", 3596 | "日数a", 3597 | "烂透了", 3598 | "狗娘养的", 3599 | "狗语", 3600 | "养狗", 3601 | "驴肋架", 3602 | "杜许", 3603 | "杜彻", 3604 | "堤坝", 3605 | "射线", 3606 | "射精术", 3607 | "标记", 3608 | "f u c k", 3609 | "f u c k e r", 3610 | "f4ny (英语)", 3611 | "同性恋", 3612 | "发牢骚", 3613 | "粪便", 3614 | "风扇", 3615 | "扇形叶片", 3616 | "你个混蛋", 3617 | "迷恋", 3618 | "肥猪", 3619 | "fcuk (英语)", 3620 | "鸡肉", 3621 | "欢呼", 3622 | "呕吐", 3623 | "堕落", 3624 | "口交时", 3625 | "手指他妈的", 3626 | "指头", 3627 | "拳头操", 3628 | "拳头他妈的", 3629 | "拳头", 3630 | "他妈的拳头", 3631 | "花纹", 3632 | "恶棍", 3633 | "他妈的混蛋", 3634 | "他妈的我", 3635 | "妈的", 3636 | "fudge packer", 3637 | "软糖包装机", 3638 | "乌克", 3639 | "福尔克", 3640 | "乌克金", 3641 | "烟花", 3642 | "乌克维特", 3643 | "阴茎", 3644 | "furx0r (英语)", 3645 | "黑帮", 3646 | "帮派", 3647 | "同性恋者", 3648 | "山羊队", 3649 | "天杀的", 3650 | "硬核性行为", 3651 | "头部", 3652 | "桨", 3653 | "爱", 3654 | "吼声", 3655 | "鹅", 3656 | "高尔", 3657 | "角最强", 3658 | "角质", 3659 | "热性", 3660 | "骗局", 3661 | "贾宝玉", 3662 | "jism 语句", 3663 | "吉兹", 3664 | "坐标", 3665 | "键", 3666 | "针叶", 3667 | "已锁定", 3668 | "旋钮", 3669 | "旋钮头", 3670 | "键盘", 3671 | "鸡", 3672 | "宽度", 3673 | "库姆", 3674 | "ku", 3675 | "单位", 3676 | "库林斯", 3677 | "l3i+ch 键", 3678 | "ưμ㼯a", 3679 | "阴唇", 3680 | "欲望", 3681 | "淫荡", 3682 | "m0f0 时", 3683 | "m0fo 数据", 3684 | "m45 位点", 3685 | "ma5terb8 键", 3686 | "半径", 
3687 | "虐恋狂", 3688 | "主减法", 3689 | "主机b8", 3690 | "高级项目*", 3691 | "高级bat3", 3692 | "高级", 3693 | "掌握", 3694 | "手淫", 3695 | "mo", 3696 | "摩尔福", 3697 | "调味料", 3698 | "摩塔他妈的", 3699 | "灭鼠机", 3700 | "马夫", 3701 | "变种人", 3702 | "无", 3703 | "n1gger (英语)", 3704 | "纳粹", 3705 | "零点3r", 3706 | "ni4h", 3707 | "黑鬼", 3708 | "唉哟", 3709 | "黑鬼们", 3710 | "纳吉", 3711 | "黑头", 3712 | "nob jokey", 3713 | "头", 3714 | "无趣", 3715 | "无名", 3716 | "麻核", 3717 | "疯子", 3718 | "质子", 3719 | "矫形", 3720 | "高潮", 3721 | "p0rn 语录", 3722 | "啄木鸟", 3723 | "电话", 3724 | "发球", 3725 | "福尔", 3726 | "划开", 3727 | "摇摆", 3728 | "发声", 3729 | "p", 3730 | "猪头", 3731 | "皮条客", 3732 | "撒尿", 3733 | "愤怒", 3734 | "尿裤子", 3735 | "撒尿器", 3736 | "小便便便便便时", 3737 | "细小的叶片", 3738 | "神经病", 3739 | "便便", 3740 | "色情电影", 3741 | "色情制品", 3742 | "刺头", 3743 | "专业", 3744 | "普丝", 3745 | "猫咪", 3746 | "娘们", 3747 | "阴道", 3748 | "直肠", 3749 | "迟钝", 3750 | "连环画", 3751 | "旋转", 3752 | "s hit", 3753 | "绍乙.", 3754 | "虐待狂", 3755 | "长", 3756 | "划伤", 3757 | "切开", 3758 | "精液", 3759 | "性别", 3760 | "嘘 +", 3761 | "嘘", 3762 | "小时", 3763 | "萨格", 3764 | "抖动", 3765 | "变形", 3766 | "做爱", 3767 | "女同性恋", 3768 | "shi+ 键", 3769 | "拉屎了", 3770 | "屎东西", 3771 | "乱七八糟", 3772 | "大便师", 3773 | "乱七八糟的", 3774 | "荡妇", 3775 | "斯迈格马", 3776 | "闪烁", 3777 | "抢", 3778 | "沉积", 3779 | "t1tt1e5 键", 3780 | "吨数", 3781 | "垫子", 3782 | "泰兹语name", 3783 | "测试", 3784 | "睾丸", 3785 | "奶头", 3786 | "奶头5", 3787 | "奶妈", 3788 | "掷掷器", 3789 | "tw4吨级", 3790 | "wa", 3791 | "wa头", 3792 | "watt", 3793 | "v14gra (英语)", 3794 | "v1 颜色", 3795 | "via草", 3796 | "转数", 3797 | "黄", 3798 | "闲着", 3799 | "虚无", 3800 | "妇人", 3801 | "威尔", 3802 | ], 3803 | ], 3804 | ]); 3805 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { Profanity, profanity } from "./profanity"; 2 | export { ProfanityOptions } from "./profanity-options"; 3 | export { CensorType } from "./models"; 4 | export { 
profaneWords } from "./data";
--------------------------------------------------------------------------------
/src/models/censor-type.ts:
--------------------------------------------------------------------------------
// Strategies for replacing a detected profane word.
export enum CensorType {
  // Replace the entire word with the grawlix string (e.g. "@#$%&!").
  Word,
  // Replace only the first character with the grawlix character.
  FirstChar,
  // Replace only the first vowel with the grawlix character.
  FirstVowel,
  // Replace every vowel with the grawlix character.
  AllVowels,
}
--------------------------------------------------------------------------------
/src/models/index.ts:
--------------------------------------------------------------------------------
export { CensorType } from "./censor-type";
export { List } from "./list";
--------------------------------------------------------------------------------
/src/models/list.ts:
--------------------------------------------------------------------------------
/**
 * A mutable word list (whitelist/blacklist) that notifies its owner whenever
 * it changes, so cached state derived from it can be invalidated.
 */
export class List {
  // Entries are stored lowercased; a Set gives O(1) membership checks.
  words: Set<string>;

  // Callback invoked after every mutation.
  onListChanged: () => void;

  get empty(): boolean {
    return this.words.size === 0;
  }

  constructor(onListChanged: () => void) {
    this.onListChanged = onListChanged;
    this.words = new Set<string>();
  }

  /**
   * Removes the given words from the list.
   * @param words - Words to remove; compared case-insensitively.
   */
  removeWords(words: string[]): void {
    // Lowercase before deleting so removal matches how addWords stores
    // entries (previously mixed-case input was silently never removed).
    words.forEach((word) => this.words.delete(word.toLowerCase()));
    this.onListChanged();
  }

  /**
   * Adds the given words to the list (stored lowercased).
   * @param words - Words to add.
   */
  addWords(words: readonly string[] | string[]): void {
    words.forEach((word) => this.words.add(word.toLowerCase()));
    this.onListChanged();
  }
}
--------------------------------------------------------------------------------
/src/profanity-options.ts:
--------------------------------------------------------------------------------
/**
 * Configuration for the Profanity detector/censor.
 */
export class ProfanityOptions {
  // When true, only whole-word matches are detected; substrings are ignored.
  wholeWord: boolean;

  // Replacement string used by CensorType.Word.
  grawlix: string;

  // Replacement character used by the FirstChar/FirstVowel/AllVowels types.
  grawlixChar: string;

  // Language codes whose built-in word lists are active by default.
  languages: string[];

  constructor(options: Partial<ProfanityOptions> = {}) {
    this.wholeWord = options.wholeWord ?? true;
    this.grawlix = options.grawlix ?? "@#$%&!";
    this.grawlixChar = options.grawlixChar ?? "*";
    this.languages = options.languages ??
["en"]; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/profanity.ts: -------------------------------------------------------------------------------- 1 | import { ProfanityOptions } from "./profanity-options"; 2 | import { List, CensorType } from "./models"; 3 | import { escapeRegExp } from "./utils"; 4 | import { profaneWords } from "./data"; 5 | 6 | export class Profanity { 7 | options: ProfanityOptions; 8 | whitelist: List; 9 | 10 | private blacklist: List; 11 | private removed: List; 12 | private regexes: Map; 13 | 14 | constructor(options?: ProfanityOptions | Partial) { 15 | this.options = options ? { ...new ProfanityOptions(), ...options } : new ProfanityOptions(); 16 | 17 | this.whitelist = new List(() => this.clearRegexes()); 18 | this.blacklist = new List(() => this.clearRegexes()); 19 | this.removed = new List(() => this.clearRegexes()); 20 | this.regexes = new Map(); 21 | } 22 | 23 | /** 24 | * Checks if the given text contains any profanity. 25 | * @param text - The text to check for profanity. 26 | * @param languages - Optional array of language codes to use for profanity detection. 27 | * If not provided, uses the languages specified in the options. 28 | * @returns True if profanity is found, false otherwise. 29 | */ 30 | exists(text: string, languages?: string[]): boolean { 31 | if (typeof text !== "string") { 32 | return false; 33 | } 34 | 35 | const regex = this.getRegex(this.resolveLanguages(languages)); 36 | regex.lastIndex = 0; 37 | 38 | const lowercaseText = text.toLowerCase(); 39 | 40 | let match: RegExpExecArray | null; 41 | while ((match = regex.exec(lowercaseText)) !== null) { 42 | const matchStart = match.index; 43 | const matchEnd = matchStart + match[0].length; 44 | 45 | if (!this.isWhitelisted(matchStart, matchEnd, lowercaseText)) { 46 | return true; 47 | } 48 | } 49 | 50 | return false; 51 | } 52 | 53 | /** 54 | * Censors profanity in the given text. 
55 | * @param text - The text to censor. 56 | * @param censorType - The type of censoring to apply. Defaults to CensorType.Word. 57 | * @param languages - Optional array of language codes to use for profanity detection. 58 | * If not provided, uses the languages specified in the options. 59 | * @returns The censored text. 60 | */ 61 | censor(text: string, censorType: CensorType = CensorType.Word, languages?: string[]): string { 62 | if (typeof text !== "string") { 63 | return text; 64 | } 65 | 66 | const regex = this.getRegex(this.resolveLanguages(languages)); 67 | regex.lastIndex = 0; 68 | 69 | const lowercaseText = text.toLowerCase(); 70 | 71 | return this.replaceProfanity( 72 | text, 73 | lowercaseText, 74 | (word, start, end) => { 75 | if (this.isWhitelisted(start, end, lowercaseText)) { 76 | return word; 77 | } 78 | switch (censorType) { 79 | case CensorType.Word: { 80 | const underscore = word.includes("_") ? "_" : ""; 81 | return this.options.grawlix + underscore; 82 | } 83 | case CensorType.FirstChar: 84 | return this.options.grawlixChar + word.slice(1); 85 | case CensorType.FirstVowel: 86 | case CensorType.AllVowels: { 87 | const vowelRegex = new RegExp("[aeiou]", censorType === CensorType.FirstVowel ? "i" : "ig"); 88 | return word.replace(vowelRegex, this.options.grawlixChar); 89 | } 90 | default: 91 | throw new Error(`Invalid replacement type: "${censorType}"`); 92 | } 93 | }, 94 | regex, 95 | ); 96 | } 97 | 98 | /** 99 | * Adds words to the profanity blacklist. 100 | * @param words - An array of words to add to the blacklist. 
101 | */ 102 | addWords(words: string[]): void { 103 | const removedWords: string[] = []; 104 | const blacklistWords: string[] = []; 105 | 106 | words.forEach((word) => { 107 | const lowerCaseWord = word.toLowerCase(); 108 | if (this.removed.words.has(lowerCaseWord)) { 109 | removedWords.push(lowerCaseWord); 110 | } else { 111 | blacklistWords.push(lowerCaseWord); 112 | } 113 | }); 114 | 115 | if (removedWords.length) { 116 | this.removed.removeWords(removedWords); 117 | } 118 | if (blacklistWords.length) { 119 | this.blacklist.addWords(blacklistWords); 120 | } 121 | } 122 | 123 | /** 124 | * Removes words from the profanity blacklist. 125 | * @param words - An array of words to remove from the blacklist. 126 | */ 127 | removeWords(words: string[]): void { 128 | const blacklistedWords: string[] = []; 129 | const removeWords: string[] = []; 130 | 131 | words.forEach((word) => { 132 | const lowerCaseWord = word.toLowerCase(); 133 | if (this.blacklist.words.has(lowerCaseWord)) { 134 | blacklistedWords.push(lowerCaseWord); 135 | } else { 136 | removeWords.push(lowerCaseWord); 137 | } 138 | }); 139 | 140 | if (blacklistedWords.length) { 141 | this.blacklist.removeWords(blacklistedWords); 142 | } 143 | if (removeWords.length) { 144 | this.removed.addWords(removeWords); 145 | } 146 | } 147 | 148 | /** 149 | * Checks if a given match is whitelisted. 150 | * @param matchStart - The starting index of the match in the text. 151 | * @param matchEnd - The ending index of the match in the text. 152 | * @param text - The lowercase text being checked. 153 | * @returns True if the match is whitelisted, false otherwise. 
154 | */ 155 | private isWhitelisted(matchStart: number, matchEnd: number, text: string): boolean { 156 | for (const whitelistedWord of this.whitelist.words) { 157 | const whitelistedIndex = text.indexOf(whitelistedWord, Math.max(0, matchStart - whitelistedWord.length + 1)); 158 | if (whitelistedIndex !== -1) { 159 | const whitelistedEnd = whitelistedIndex + whitelistedWord.length; 160 | 161 | if (this.options.wholeWord) { 162 | if ( 163 | matchStart === whitelistedIndex && 164 | matchEnd === whitelistedEnd && 165 | (matchStart === 0 || !/[\w-_]/.test(text[matchStart - 1])) && 166 | (matchEnd === text.length || !/[\w-_]/.test(text[matchEnd])) 167 | ) { 168 | return true; 169 | } 170 | } else { 171 | if ( 172 | (matchStart >= whitelistedIndex && matchStart < whitelistedEnd) || 173 | (matchEnd > whitelistedIndex && matchEnd <= whitelistedEnd) || 174 | (whitelistedIndex >= matchStart && whitelistedEnd <= matchEnd) 175 | ) { 176 | return true; 177 | } 178 | } 179 | } 180 | } 181 | return false; 182 | } 183 | 184 | /** 185 | * Replaces profanity in the text using the provided replacer function. 186 | * @param text - The original text. 187 | * @param lowercaseText - The lowercase version of the text. 188 | * @param replacer - A function that determines how to replace profane words. 189 | * @param regex - The regular expression used to find profane words. 190 | * @returns The text with profanity replaced. 
191 | */ 192 | private replaceProfanity( 193 | text: string, 194 | lowercaseText: string, 195 | replacer: (word: string, start: number, end: number) => string, 196 | regex: RegExp, 197 | ): string { 198 | let result = text; 199 | let offset = 0; 200 | 201 | let match: RegExpExecArray | null; 202 | while ((match = regex.exec(lowercaseText)) !== null) { 203 | const matchStart = match.index; 204 | const matchEnd = matchStart + match[0].length; 205 | const originalWord = text.slice(matchStart + offset, matchEnd + offset); 206 | const censoredWord = replacer(originalWord, matchStart, matchEnd); 207 | result = result.slice(0, matchStart + offset) + censoredWord + result.slice(matchEnd + offset); 208 | offset += censoredWord.length - originalWord.length; 209 | } 210 | 211 | return result; 212 | } 213 | 214 | /** 215 | * Determines the list of languages to use, either from the provided list or falling back to default languages. 216 | * @param languages - An optional list of languages to use. 217 | * @returns The list of languages to be used. 218 | */ 219 | private resolveLanguages(languages?: string[]): string[] { 220 | return languages?.length ? languages : this.options.languages; 221 | } 222 | 223 | /** 224 | * Retrieves or constructs a regular expression for detecting profanity in the specified languages. 225 | * This method first checks if a regex for the given combination of languages already exists in the cache. 226 | * 227 | * @param languages - An array of languages to include in the regex. 228 | * @throws {Error} If no languages are provided. 229 | * @returns A RegExp object for detecting profanity in the specified languages. 
230 | */ 231 | private getRegex(languages: string[]): RegExp { 232 | if (!languages.length) { 233 | throw new Error("At least one language must be provided"); 234 | } 235 | 236 | const uniqueLanguages = [...new Set(languages.map((language) => language.trim().toLowerCase()))]; 237 | 238 | const regexKey = uniqueLanguages.sort().join(","); 239 | if (this.regexes.has(regexKey)) { 240 | return this.regexes.get(regexKey)!; 241 | } 242 | 243 | const allWords = uniqueLanguages.flatMap((language) => { 244 | const words = profaneWords.get(language); 245 | if (!words) { 246 | throw new Error(`Invalid language: "${language}"`); 247 | } 248 | return words.filter((word) => !this.removed.words.has(word)); 249 | }); 250 | 251 | const regex = this.buildRegex(allWords); 252 | this.regexes.set(regexKey, regex); 253 | return regex; 254 | } 255 | 256 | /** 257 | * Constructs a regular expression for detecting profane words. 258 | * 259 | * @param words - An array of profane words to be included in the regex. 260 | * @returns A RegExp that matches any of the profane or blacklisted words. 261 | */ 262 | private buildRegex(words: string[]): RegExp { 263 | const allProfaneWords = [...words, ...this.blacklist.words]; 264 | const escapedProfaneWords = allProfaneWords.map(escapeRegExp); 265 | const profanityPattern = `${this.options.wholeWord ? "(?:\\b|_)" : ""}(${escapedProfaneWords.join("|")})${this.options.wholeWord ? "(?:\\b|_)" : ""}`; 266 | // eslint-disable-next-line security/detect-non-literal-regexp 267 | return new RegExp(profanityPattern, "gi"); 268 | } 269 | 270 | /** 271 | * Clear the cached regexes. 
272 | */ 273 | private clearRegexes(): void { 274 | this.regexes.clear(); 275 | } 276 | } 277 | 278 | export const profanity = new Profanity(); 279 | -------------------------------------------------------------------------------- /src/tools/benchmark/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:20.17-alpine3.19 2 | 3 | WORKDIR /app 4 | 5 | COPY ../../../package*.json ./ 6 | 7 | RUN npm install 8 | 9 | COPY ../../../ ./ 10 | 11 | CMD ["npx", "ts-node", "src/tools/benchmark/benchmark.ts"] 12 | -------------------------------------------------------------------------------- /src/tools/benchmark/benchmark-interfaces.ts: -------------------------------------------------------------------------------- 1 | export interface VersionData { 2 | version: number; 3 | smallCleanText: string; 4 | smallProfaneText: string; 5 | largeCleanText: string; 6 | largeProfaneText: string; 7 | } 8 | 9 | export interface TestData { 10 | comment1: string; 11 | comment2: string; 12 | versions: VersionData[]; 13 | } 14 | -------------------------------------------------------------------------------- /src/tools/benchmark/benchmark.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This script benchmarks the performance of the Profanity filter. 3 | * 4 | * Usage: 5 | * - To run the benchmark, execute: `npm run benchmark` 6 | * 7 | * Benchmarking: 8 | * - The benchmark script uses test data stored in `test-data.json`. 9 | * - The data is versioned so we can use the same test data across multiple benchmarking sessions. 10 | * - If you want to generate new random test data, increment TEST_VERSION before running the benchmark. 
 */

import * as fs from "fs";
import * as path from "path";
import { Suite, Event } from "benchmark";

import { VersionData, TestData } from "./benchmark-interfaces";
import { Profanity, CensorType } from "../../";

// Increment to force generation of fresh random test data (see header above).
const TEST_VERSION: number = 1;

const suite: Suite = new Suite();
const testDataFile: string = path.join(__dirname, "test-data.json");

// Builds a space-separated string of `size` random words; `profanity` selects
// a vocabulary that contains profane words.
const createLargeString = (size: number, profanity: boolean): string => {
  const words = profanity ? ["hello", "world", "arse", "shite", "damn", "bugger"] : ["hello", "world", "foo", "bar", "baz", "qux"];
  return Array.from({ length: size }, () => words[Math.floor(Math.random() * words.length)]).join(" ");
};

const generateTestData = () => ({
  smallCleanText: "Hello world, this is a clean text.",
  smallProfaneText: "Hello world, this is a damn profane text.",
  largeCleanText: createLargeString(1000, false),
  largeProfaneText: createLargeString(1000, true),
});

// The test-data file must already exist (it ships with the repo); a read or
// parse failure aborts the benchmark.
let fileData: TestData;
try {
  fileData = JSON.parse(fs.readFileSync(testDataFile, "utf-8"));
} catch (error) {
  console.error("Error reading test data file:", error);
  process.exit(1);
}

// Use the stored data for TEST_VERSION if present; otherwise generate new data
// and persist it so future sessions benchmark against identical inputs.
const testData =
  fileData.versions.find((data: VersionData) => data.version === TEST_VERSION) ||
  (() => {
    const newData = generateTestData();
    fileData.versions.push({ version: TEST_VERSION, ...newData });
    try {
      fs.writeFileSync(testDataFile, JSON.stringify(fileData, null, 2));
      console.log("Generated new test data");
    } catch (error) {
      console.error("Error writing test data file:", error);
      process.exit(1);
    }
    return newData;
  })();

console.log(`Using test data: v${TEST_VERSION}`);
const { smallCleanText, smallProfaneText, largeCleanText, largeProfaneText } = testData;

// Create Profanity instances for different scenarios
const defaultProfanity = new Profanity();
const partialMatchProfanity = new Profanity({ wholeWord: false });

// Pre-cache regexes (so regex construction cost is excluded from the timings)
defaultProfanity.exists("foo");
partialMatchProfanity.exists("bar");

// Benchmark exists() function
suite
  .add("exists() - small clean text", () => {
    defaultProfanity.exists(smallCleanText);
  })
  .add("exists() - small profane text", () => {
    defaultProfanity.exists(smallProfaneText);
  })
  .add("exists() - large clean text", () => {
    defaultProfanity.exists(largeCleanText);
  })
  .add("exists() - large profane text", () => {
    defaultProfanity.exists(largeProfaneText);
  })
  .add("exists() - partial match, small profane text", () => {
    partialMatchProfanity.exists(smallProfaneText);
  })

  // Benchmark censor() function
  .add("censor() - Word, small profane text", () => {
    defaultProfanity.censor(smallProfaneText, CensorType.Word);
  })
  .add("censor() - FirstChar, small profane text", () => {
    defaultProfanity.censor(smallProfaneText, CensorType.FirstChar);
  })
  .add("censor() - FirstVowel, small profane text", () => {
    defaultProfanity.censor(smallProfaneText, CensorType.FirstVowel);
  })
  .add("censor() - AllVowels, small profane text", () => {
    defaultProfanity.censor(smallProfaneText, CensorType.AllVowels);
  })
  .add("censor() - Word, large profane text", () => {
    defaultProfanity.censor(largeProfaneText, CensorType.Word);
  })
  .add("censor() - partial match, Word, small profane text", () => {
    partialMatchProfanity.censor(smallProfaneText, CensorType.Word);
  })

  // Run the benchmark
  .on("cycle", (event: Event) => {
    console.log(String(event.target));
  })
  // `function` (not an arrow) so Benchmark.js can bind `this` to the suite.
  .on("complete", function () {
    console.log(`Fastest: ${this.filter("fastest").map("name")[0]}`);
  })
  .run({ async: true });
-------------------------------------------------------------------------------- /src/tools/benchmark/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | benchmark: 3 | build: 4 | context: ../../../ 5 | dockerfile: ./src/tools/benchmark/Dockerfile 6 | deploy: 7 | resources: 8 | limits: 9 | cpus: "1.0" 10 | memory: 512M 11 | reservations: 12 | cpus: "1.0" 13 | memory: 512M 14 | -------------------------------------------------------------------------------- /src/tools/benchmark/results.md: -------------------------------------------------------------------------------- 1 | # Benchmark Results ⏱️ 2 | 3 | ## Test Environment 4 | 5 | - **OS**: Windows 11 - WSL2 (Ubuntu 22.04.4 LTS) 6 | - **CPU**: AMD Ryzen 9 5900HX 3.30 GHz (Benchmark constrained to 1 CPU core) 7 | - **RAM**: 64 GB (Benchmark constrained to 512 MB) 8 | 9 | ### Benchmarks 10 | 11 | #### v3.1.1 12 | ``` 13 | benchmark-1 | Using test data: v1 14 | benchmark-1 | exists() - small clean text x 2,559,368 ops/sec ±2.95% (92 runs sampled) 15 | benchmark-1 | exists() - small profane text x 1,941,051 ops/sec ±5.64% (79 runs sampled) 16 | benchmark-1 | exists() - large clean text x 44,904 ops/sec ±5.60% (90 runs sampled) 17 | benchmark-1 | exists() - large profane text x 816,615 ops/sec ±0.74% (96 runs sampled) 18 | benchmark-1 | exists() - partial match, small profane text x 1,853,445 ops/sec ±28.20% (92 runs sampled) 19 | benchmark-1 | censor() - Word, small profane text x 1,559,873 ops/sec ±2.07% (94 runs sampled) 20 | benchmark-1 | censor() - FirstChar, small profane text x 1,548,998 ops/sec ±6.01% (96 runs sampled) 21 | benchmark-1 | censor() - FirstVowel, small profane text x 1,093,426 ops/sec ±5.54% (89 runs sampled) 22 | benchmark-1 | censor() - AllVowels, small profane text x 1,098,984 ops/sec ±0.64% (91 runs sampled) 23 | benchmark-1 | censor() - Word, large profane text x 1,633 ops/sec ±6.12% (91 runs sampled) 24 | benchmark-1 | censor() 
- partial match, Word, small profane text x 1,334,764 ops/sec ±5.43% (91 runs sampled) 25 | benchmark-1 | Fastest: exists() - small clean text 26 | ``` 27 | 28 | #### v3.0.1 29 | ``` 30 | benchmark-1 | Using test data: v1 31 | benchmark-1 | exists() - small clean text x 2,357,978 ops/sec ±3.50% (86 runs sampled) 32 | benchmark-1 | exists() - small profane text x 1,705,114 ops/sec ±6.80% (77 runs sampled) 33 | benchmark-1 | exists() - large clean text x 47,741 ops/sec ±1.74% (94 runs sampled) 34 | benchmark-1 | exists() - large profane text x 817,113 ops/sec ±0.64% (94 runs sampled) 35 | benchmark-1 | exists() - partial match, small profane text x 2,144,550 ops/sec ±5.92% (94 runs sampled) 36 | benchmark-1 | censor() - Word, small profane text x 1,506,507 ops/sec ±6.02% (91 runs sampled) 37 | benchmark-1 | censor() - FirstChar, small profane text x 1,507,623 ops/sec ±5.90% (92 runs sampled) 38 | benchmark-1 | censor() - FirstVowel, small profane text x 1,105,023 ops/sec ±0.79% (88 runs sampled) 39 | benchmark-1 | censor() - AllVowels, small profane text x 1,054,991 ops/sec ±5.46% (89 runs sampled) 40 | benchmark-1 | censor() - Word, large profane text x 1,659 ops/sec ±5.81% (89 runs sampled) 41 | benchmark-1 | censor() - partial match, Word, small profane text x 1,503,000 ops/sec ±0.55% (94 runs sampled) 42 | benchmark-1 | Fastest: exists() - small clean text 43 | ``` 44 | 45 | #### v3.0.0 46 | ``` 47 | Using test data: v1 48 | exists() - small clean text x 2,263,763 ops/sec ±3.96% (83 runs sampled) 49 | exists() - small profane text x 1,831,670 ops/sec ±3.09% (86 runs sampled) 50 | exists() - large clean text x 38,185 ops/sec ±2.82% (84 runs sampled) 51 | exists() - large profane text x 686,951 ops/sec ±2.11% (87 runs sampled) 52 | exists() - partial match, small profane text x 1,624,503 ops/sec ±8.02% (78 runs sampled) 53 | censor() - Word, small profane text x 915,620 ops/sec ±6.16% (83 runs sampled) 54 | censor() - FirstChar, small profane text x 1,275,945 
ops/sec ±2.68% (77 runs sampled) 55 | censor() - FirstVowel, small profane text x 902,065 ops/sec ±3.43% (81 runs sampled) 56 | censor() - AllVowels, small profane text x 942,445 ops/sec ±2.94% (84 runs sampled) 57 | censor() - Word, large profane text x 5,578 ops/sec ±2.17% (86 runs sampled) 58 | censor() - partial match, Word, small profane text x 869,941 ops/sec ±7.91% (82 runs sampled) 59 | Fastest: exists() - small clean text 60 | ``` 61 | 62 | #### v2.4.0 63 | ``` 64 | Using test data: v1 65 | exists() - small clean text x 3,838,466 ops/sec ±3.34% (81 runs sampled) 66 | exists() - small profane text x 2,557,317 ops/sec ±7.47% (74 runs sampled) 67 | exists() - large clean text x 41,031 ops/sec ±2.82% (83 runs sampled) 68 | exists() - large profane text x 799,283 ops/sec ±2.16% (83 runs sampled) 69 | exists() - partial match, small profane text x 3,013,455 ops/sec ±5.68% (88 runs sampled) 70 | censor() - Word, small profane text x 1,328,481 ops/sec ±2.17% (86 runs sampled) 71 | censor() - FirstChar, small profane text x 2,197,796 ops/sec ±5.86% (84 runs sampled) 72 | censor() - FirstVowel, small profane text x 1,184,065 ops/sec ±4.31% (75 runs sampled) 73 | censor() - AllVowels, small profane text x 1,105,599 ops/sec ±7.69% (77 runs sampled) 74 | censor() - Word, large profane text x 5,594 ops/sec ±6.02% (85 runs sampled) 75 | censor() - partial match, Word, small profane text x 1,031,901 ops/sec ±2.86% (81 runs sampled) 76 | Fastest: exists() - small clean text 77 | ``` 78 | -------------------------------------------------------------------------------- /src/tools/benchmark/test-data.json: -------------------------------------------------------------------------------- 1 | { 2 | "comment1": "WARNING: this file contains profanity. 
The below list of profane words is necessary for this tool to function properly.", 3 | "comment2": "Do not read below this line if you do not wish to be exposed to lots of profane words", 4 | "versions": [ 5 | { 6 | "version": 1, 7 | "smallCleanText": "Hello world, this is a clean text.", 8 | "smallProfaneText": "Hello world, this is a damn profane text.", 9 | "largeCleanText": "foo bar hello qux foo world bar hello bar bar foo foo bar bar world hello foo baz foo qux baz world foo world bar bar baz world world bar world hello hello bar qux foo foo world baz world bar world bar bar hello qux bar baz bar bar qux bar baz bar world baz world qux bar hello hello baz bar qux hello world hello qux hello bar baz baz foo bar foo qux world qux foo baz world baz foo qux foo foo bar hello baz baz qux world baz world baz qux baz world baz foo baz world hello bar bar world qux baz world baz bar hello bar hello qux hello foo baz bar bar qux world bar bar qux qux bar hello bar qux hello world hello world bar world foo baz baz world world foo qux qux bar foo bar world foo baz baz foo baz foo bar baz qux bar bar hello hello qux bar baz foo qux hello bar world foo hello qux world bar bar hello qux foo world qux world baz hello baz baz qux world baz bar qux foo baz world bar qux baz hello world foo baz baz bar foo qux world baz bar hello qux baz world hello hello hello hello world hello world qux foo world foo qux foo baz world qux qux foo qux baz qux hello world foo bar bar foo world bar foo foo bar baz hello baz foo hello baz hello hello qux world hello qux hello foo hello hello foo baz baz baz foo hello foo qux qux world qux bar baz hello baz baz hello bar hello hello world foo baz world qux baz bar baz world world baz bar baz world hello world baz world hello baz world baz bar hello world bar hello baz bar foo baz qux qux hello foo foo foo qux hello bar foo hello world bar foo baz world hello qux hello hello bar qux baz world world baz qux bar hello qux world baz bar foo bar world 
bar bar hello qux hello hello baz bar bar hello foo qux hello baz foo world qux world baz world qux world foo bar foo world foo world qux bar bar qux foo world bar hello qux hello hello world baz hello bar baz foo qux baz foo qux qux hello hello world world foo bar world foo qux bar hello foo world baz baz hello world baz foo qux qux foo bar bar foo world qux qux baz bar bar world qux world hello baz hello qux hello hello world foo bar hello baz foo bar foo baz bar world bar qux hello bar hello hello foo hello foo bar bar hello foo world world baz baz hello foo bar qux bar baz foo hello foo baz hello foo hello hello qux world baz baz qux bar bar hello foo hello bar bar world baz qux baz world hello foo bar qux hello foo baz foo foo bar hello foo world world hello world foo bar hello baz qux world qux foo qux qux qux bar hello bar qux bar world foo qux baz qux qux qux hello world world world bar bar qux bar hello hello bar bar world hello hello foo world qux bar baz bar bar qux bar baz qux foo qux bar qux world world baz baz hello hello qux world world baz baz world baz world bar hello hello bar hello baz bar bar baz baz bar baz qux foo bar baz foo baz foo qux world baz qux world foo baz foo hello hello qux bar world bar foo world bar world baz qux baz world foo qux foo bar bar bar hello baz foo hello bar foo world world qux qux qux qux foo bar foo hello bar world qux hello world bar foo foo bar hello foo baz bar hello qux bar baz hello baz baz foo baz world qux bar bar hello qux world baz qux foo world baz foo baz world hello world foo bar world hello foo foo hello world hello foo bar baz hello baz foo hello baz bar foo hello qux qux qux hello baz world world bar baz qux bar hello baz foo bar qux bar hello hello world foo foo world hello world hello foo bar qux foo hello bar bar baz baz baz hello hello foo baz world qux baz baz bar foo hello foo qux world hello baz baz world world world world baz bar qux bar hello foo hello hello baz bar world hello bar hello bar 
foo hello qux baz baz world foo hello world baz foo foo qux foo bar qux baz baz qux foo qux qux foo baz world bar qux bar qux baz hello hello hello baz bar qux hello foo foo baz bar baz baz baz hello baz world bar foo baz bar baz world world bar hello baz baz baz hello world bar qux world bar foo qux baz bar foo bar baz world qux qux hello world bar hello baz qux qux qux bar bar foo foo world baz bar bar hello qux qux bar hello world world foo hello hello qux bar hello qux hello world bar bar foo foo bar qux baz baz world world foo qux foo hello world world hello world hello hello qux qux hello qux bar world bar baz baz qux baz world world foo world world foo foo baz bar qux hello foo world bar foo bar qux baz world baz bar hello world qux foo qux foo foo qux foo foo baz bar baz bar bar baz hello foo qux foo foo world foo qux baz hello qux bar hello baz baz foo foo hello hello bar baz qux baz qux foo foo hello hello hello foo bar foo world bar baz bar world baz baz bar bar bar world hello bar foo hello baz foo qux hello bar foo", 10 | "largeProfaneText": "bugger shite shite bugger shite arse arse damn arse hello shite arse damn hello damn shite hello bugger world bugger hello bugger bugger bugger hello hello bugger shite arse damn arse bugger arse hello world hello hello hello arse shite damn world damn world damn hello shite shite bugger world bugger world bugger shite damn world shite damn world bugger hello damn shite bugger world damn hello arse hello world bugger shite arse world arse shite shite hello arse damn shite world hello world damn world bugger shite world bugger hello damn world arse hello bugger arse damn hello arse damn hello bugger hello arse damn world bugger world world arse bugger arse hello arse hello shite bugger world shite world arse shite bugger bugger bugger world damn world bugger world bugger world hello damn damn arse world arse bugger hello hello damn damn damn bugger bugger shite bugger damn world hello arse shite bugger world arse 
world world bugger damn hello world arse hello shite hello damn hello hello shite arse damn hello world hello world arse hello world hello shite world damn hello world damn shite world world world damn shite arse shite hello arse bugger arse damn world shite arse shite arse damn arse damn bugger damn damn damn shite arse shite bugger damn world shite world bugger arse world shite bugger hello arse damn arse bugger bugger shite bugger arse shite arse shite arse arse damn shite shite damn damn world bugger hello bugger shite bugger bugger damn shite damn shite hello shite arse damn bugger damn damn bugger arse hello shite damn hello shite bugger shite world hello damn hello shite bugger damn damn shite world damn bugger bugger damn damn shite damn hello damn shite world arse arse bugger shite world arse bugger damn bugger bugger arse bugger arse world shite shite world shite hello shite shite damn arse damn damn bugger world hello shite arse damn hello bugger arse world world hello damn bugger arse hello world shite arse hello arse hello shite bugger arse bugger arse arse world hello damn bugger shite world arse world hello arse bugger arse world shite hello shite damn hello hello damn world shite world bugger arse hello damn shite world arse damn arse hello shite world hello hello hello shite shite arse world world bugger world hello bugger hello hello damn arse shite hello world bugger arse arse shite arse damn hello world shite damn arse hello bugger damn world arse hello bugger damn bugger arse shite world damn hello damn shite shite bugger bugger damn bugger shite shite damn damn bugger shite hello arse shite shite bugger world damn bugger world world damn world hello arse shite bugger arse world hello bugger hello shite world shite hello arse bugger damn shite shite world damn arse damn hello arse hello shite damn shite world arse arse bugger bugger shite arse world damn world bugger hello world hello arse hello shite shite bugger damn arse damn shite bugger 
bugger arse shite world hello world world hello bugger damn damn damn damn shite damn world shite world arse bugger damn bugger bugger world hello bugger shite damn bugger arse arse damn arse damn world bugger hello arse damn world world shite hello damn damn arse bugger damn arse hello world bugger arse shite damn damn arse bugger damn hello bugger world arse arse shite world damn hello shite shite damn shite hello hello shite arse hello arse world arse damn damn damn shite shite bugger shite bugger world bugger hello hello bugger hello shite world world shite damn bugger hello bugger hello shite bugger shite bugger bugger damn damn hello shite hello arse arse bugger arse world shite damn damn shite arse shite bugger bugger arse hello damn shite damn hello arse arse world world arse shite bugger world world hello damn damn bugger bugger hello arse arse arse bugger shite world shite arse arse damn hello arse hello arse damn world world damn world damn hello damn shite hello shite bugger hello bugger hello world world shite damn bugger damn shite damn shite damn damn arse bugger damn damn hello bugger bugger world bugger world arse world shite world arse arse shite damn shite arse damn bugger hello arse arse shite damn bugger bugger world damn hello world arse bugger shite bugger shite bugger world hello damn damn damn arse damn damn damn damn hello world damn shite bugger shite damn shite bugger hello arse damn hello arse hello arse bugger damn damn hello hello world arse damn world damn damn world damn damn world arse arse bugger bugger bugger arse world hello damn arse shite damn world world damn arse world arse shite world damn hello damn bugger hello damn bugger arse arse arse shite shite bugger hello damn shite damn world shite damn hello arse arse hello bugger world damn world damn world hello damn bugger bugger damn damn shite arse arse world hello bugger arse world shite world world hello bugger world hello damn shite world bugger shite world damn bugger 
shite arse bugger arse bugger world bugger damn damn damn damn shite hello damn bugger arse arse world world bugger bugger arse hello hello world bugger shite bugger arse world damn hello bugger damn world arse hello damn damn arse damn shite world hello damn bugger world hello arse arse hello hello hello damn arse damn world damn damn world damn world world bugger shite damn damn shite world world hello world world world arse shite shite bugger shite bugger hello world bugger world shite damn world hello hello arse hello bugger shite arse hello world shite world arse world bugger world shite world shite bugger hello hello bugger shite bugger world shite hello arse hello arse damn arse damn damn damn shite shite shite hello hello arse hello arse hello bugger bugger arse hello world world shite shite damn arse arse bugger damn world arse bugger hello damn shite arse world hello" 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /src/tools/translate/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | libretranslate: 3 | image: libretranslate/libretranslate:latest-cuda 4 | ports: 5 | - "5000:5000" 6 | environment: 7 | - LT_LOAD_ONLY=en,ar,zh,fr,de,hi,ja,ko,pt,ru,es 8 | - NVIDIA_VISIBLE_DEVICES=all 9 | - CUDA_LAUNCH_BLOCKING=0 10 | deploy: 11 | resources: 12 | reservations: 13 | devices: 14 | - driver: nvidia 15 | count: 1 16 | capabilities: [gpu] 17 | limits: 18 | memory: 5G 19 | healthcheck: 20 | test: ["CMD", "curl", "-f", "http://localhost:5000/languages"] 21 | interval: 30s 22 | timeout: 10s 23 | retries: 3 24 | -------------------------------------------------------------------------------- /src/tools/translate/translate.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import * as fs from "fs"; 3 | import * as path from "path"; 4 | import { profaneWords } from 
"../../data/profane-words"; 5 | 6 | interface TranslationResponse { 7 | translatedText: string; 8 | } 9 | 10 | interface LanguageInfo { 11 | code: string; 12 | name: string; 13 | targets: string[]; 14 | } 15 | 16 | const LIBRE_TRANSLATE_URL = "http://localhost:5000"; 17 | const BATCH_SIZE = 50; 18 | 19 | async function getTargetLanguages(): Promise { 20 | let attempts = 0; 21 | 22 | while (true) { 23 | attempts++; 24 | try { 25 | const response = await axios.get(`${LIBRE_TRANSLATE_URL}/languages`); 26 | const englishInfo = response.data.find((lang) => lang.code === "en"); 27 | 28 | if (!englishInfo) { 29 | throw new Error("English language support not found in API"); 30 | } 31 | 32 | process.stdout.write("\n"); // Clear the retry line 33 | // Filter out 'en' from targets 34 | return englishInfo.targets.filter((lang) => lang !== "en"); 35 | } catch (error) { 36 | // Check if it's a connection error 37 | if (error.code === "ECONNREFUSED" || error.code === "ECONNRESET" || error.message.includes("socket hang up")) { 38 | process.stdout.write(`\rWaiting for LibreTranslate API to come online... 
(Attempt ${attempts})`); 39 | await delay(5000); // Wait 5 seconds before retry 40 | continue; 41 | } 42 | 43 | // If it's not a connection error, rethrow 44 | console.error("Error fetching supported languages:", error.message); 45 | throw error; 46 | } 47 | } 48 | } 49 | 50 | function normalizeWord(word: string): { normalized: string; hasSpaces: boolean } { 51 | const hasSpaces = word.includes(" "); 52 | // Remove spaces and underscores from words like "f u c k" or "f_u_c_k" 53 | const normalized = word.replace(/[\s_]+/g, ""); 54 | return { normalized, hasSpaces }; 55 | } 56 | 57 | async function translateWord(word: string, targetLang: string): Promise { 58 | const { normalized, hasSpaces } = normalizeWord(word); 59 | 60 | // If the word had spaces, and removing them makes it match another word in our list, 61 | // just use the spaced version of that word in the target language 62 | if (hasSpaces) { 63 | // Skip translation for spaced words, keep original format 64 | return word; 65 | } 66 | 67 | try { 68 | const response = await axios.post(`${LIBRE_TRANSLATE_URL}/translate`, { 69 | q: normalized, 70 | source: "en", 71 | target: targetLang, 72 | format: "text", 73 | }); 74 | 75 | const translatedText = response.data.translatedText.toLowerCase(); 76 | 77 | // Check if the translated text is just asterisks and spaces 78 | if (/^[\s*]+$/.test(translatedText)) { 79 | return word; // Return original word if translation is just asterisks and spaces 80 | } 81 | 82 | // Check for unexpected content that might indicate an error 83 | if (["error", "not found", "invalid", "translation"].some((keyword) => translatedText.includes(keyword))) { 84 | console.warn(`Unexpected translation result for "${word}" to ${targetLang}: "${translatedText}"`); 85 | return word; // Return original word if translation seems to be an error message 86 | } 87 | 88 | return translatedText; 89 | } catch (error) { 90 | console.error(`Error translating "${word}" to ${targetLang}:`, error.message); 91 
| return word; // Return original word on error 92 | } 93 | } 94 | 95 | /** Pauses execution for the given number of milliseconds (used to back off between retries). */ async function delay(ms: number): Promise<void> { 96 | return new Promise((resolve) => setTimeout(resolve, ms)); 97 | } 98 | 99 | /** Translates every word into targetLang in batches of BATCH_SIZE, retrying a failed batch up to 3 times and falling back to the untranslated words on total failure. Writes progress to stdout; returns one translation per input word, in order. */ async function translateBatch(words: string[], targetLang: string): Promise<string[]> { 100 | const translations: string[] = []; 101 | const totalBatches = Math.ceil(words.length / BATCH_SIZE); 102 | 103 | for (let i = 0; i < words.length; i += BATCH_SIZE) { 104 | const batch = words.slice(i, i + BATCH_SIZE); 105 | 106 | // Add retry logic for failed batches 107 | let retries = 3; 108 | let batchTranslations: string[] = []; 109 | 110 | while (retries > 0) { 111 | try { 112 | batchTranslations = await Promise.all(batch.map((word) => translateWord(word, targetLang))); 113 | break; // Success, exit retry loop 114 | } catch (error) { 115 | retries--; 116 | if (retries === 0) { 117 | console.error(`\nBatch translation failed after 3 attempts, using original words`, error); 118 | batchTranslations = batch; // Use original words on complete failure 119 | } else { 120 | console.error(`\nRetrying batch translation (${retries} attempts remaining)`, error); 121 | await delay(1000); // Wait 1 second before retry 122 | } 123 | } 124 | } 125 | 126 | translations.push(...batchTranslations); 127 | 128 | const currentBatch = Math.floor(i / BATCH_SIZE) + 1; 129 | const percentage = ((currentBatch / totalBatches) * 100).toFixed(1); 130 | const progress = `[${currentBatch}/${totalBatches}]`; 131 | const wordRange = `[${i + 1}-${Math.min(i + BATCH_SIZE, words.length)}/${words.length}]`; 132 | process.stdout.write(`\r${progress} English to ${targetLang} ${percentage}% ${wordRange}`); 133 | } 134 | 135 | process.stdout.write("\n"); 136 | return translations; 137 | } 138 | 139 | /** Entry point: dedupes the English word list, translates it into every supported target language, and regenerates src/data/profane-words.ts. */ async function main() { 140 | // Fetch supported target languages 141 | console.log("Fetching supported languages..."); 142 | const targetLanguages = await getTargetLanguages(); 143 | console.log(`Found ${targetLanguages.length}
supported target languages\n${targetLanguages}\n`); 144 | 145 | const englishWords = profaneWords.get("en") || []; 146 | 147 | if (englishWords.length === 0) { 148 | throw new Error("No English words found in profaneWords map"); 149 | } 150 | 151 | // Check for duplicates in English list 152 | const uniqueEnglishWords = [...new Set(englishWords)]; 153 | const duplicateCount = englishWords.length - uniqueEnglishWords.length; 154 | 155 | if (duplicateCount > 0) { 156 | console.log(`Found ${duplicateCount} duplicates in English word list`); 157 | console.log(`Original count: ${englishWords.length}, Unique count: ${uniqueEnglishWords.length}`); 158 | } 159 | 160 | console.log(`${uniqueEnglishWords.length} English words to translate`); 161 | 162 | // Translate to each target language 163 | const translations = new Map<string, string[]>(); 164 | 165 | for (let i = 0; i < targetLanguages.length; i++) { 166 | const lang = targetLanguages[i]; 167 | const langProgress = `[${i + 1}/${targetLanguages.length}]`; 168 | console.log(`\n${langProgress} Translating ${lang}...`); 169 | const translatedWords = await translateBatch(uniqueEnglishWords, lang); 170 | // Clean up translations - remove quotes, invalid characters, and duplicates 171 | const cleanedWords = [...new Set(translatedWords.map((word) => word.replace(/["""]/g, "").trim()).filter((word) => word.length > 0))]; 172 | translations.set(lang, cleanedWords); 173 | console.log(`Removed ${translatedWords.length - cleanedWords.length} duplicates`); 174 | } 175 | 176 | // Combine new translations with existing ones 177 | const combinedTranslations = new Map([...profaneWords, ...translations]); 178 | combinedTranslations.delete("en"); 179 | 180 | const sortedTranslations = Array.from(combinedTranslations.entries()).sort(([langA], [langB]) => langA.localeCompare(langB)); 181 | 182 | // Generate new content 183 | let newContent = "// WARNING: this file contains profanity. 
The below list of profane words is necessary for this tool to function properly.\n"; 184 | newContent += "// Do not read below this line if you do not wish to be exposed to lots of profane words\n\n"; 185 | newContent += "export const profaneWords: Map<string, string[]> = new Map([\n"; 186 | 187 | // Add English words first 188 | newContent += ` ["en", [\n "${uniqueEnglishWords.join('",\n "')}"\n ]],\n`; 189 | 190 | // Add sorted translations 191 | for (const [lang, words] of sortedTranslations) { 192 | newContent += ` ["${lang}", [\n "${words.join('",\n "')}"\n ]],\n`; 193 | } 194 | 195 | newContent += "]);\n"; 196 | 197 | // Write back to file 198 | const filePath = path.join(__dirname, "../../data/profane-words.ts"); 199 | fs.writeFileSync(filePath, newContent); 200 | console.log("\nTranslation complete! Updated profane-words.ts"); 201 | } 202 | 203 | /* Surface failures with a non-zero exit code so CI does not mistake a failed run for success. */ main().catch((error) => { console.error(error); process.exitCode = 1; }); 204 | -------------------------------------------------------------------------------- /src/utils/index.ts: -------------------------------------------------------------------------------- 1 | export { escapeRegExp } from "./misc"; 2 | -------------------------------------------------------------------------------- /src/utils/misc.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Escapes all Regular Expression characters in a string 3 | * @param text the string to escape 4 | * @returns an escaped string 5 | */ 6 | export const escapeRegExp = (text: string) => { 7 | return text.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); 8 | }; 9 | -------------------------------------------------------------------------------- /supported-languages.md: -------------------------------------------------------------------------------- 1 | # Profanity Supported Languages 2 | 3 | There are many more [languages](https://github.com/argosopentech/argos-translate/blob/master/argostranslate/languages.csv) that Profanity _can_ support; however, each additional language incorporated into
the Profanity library increases the overall size of the library. To maintain performance and usability, we've limited translations to the following languages: 4 | 5 | | **Language** | **Locale** | 6 | |-----------------------|------------| 7 | | Arabic | `ar` | 8 | | Chinese | `zh` | 9 | | English | `en` | 10 | | French | `fr` | 11 | | German | `de` | 12 | | Hindi | `hi` | 13 | | Italian | `it` | 14 | | Japanese | `ja` | 15 | | Korean | `ko` | 16 | | Portuguese | `pt` | 17 | | Russian | `ru` | 18 | | Spanish | `es` | 19 | 20 | If you'd like to request a language not currently supported by Profanity, please submit your suggestion via our [GitHub issues page](https://github.com/2Toad/Profanity/issues). Keep in mind that Profanity's architecture already supports all languages through its [Customize Wordlist](https://github.com/2Toad/Profanity?tab=readme-ov-file#customize-the-word-list) feature, allowing you to add as many words or translations as you like during runtime. 21 | -------------------------------------------------------------------------------- /tests/import.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | import { profanity, Profanity, CensorType, ProfanityOptions } from "../dist"; 3 | 4 | describe("ES Module Import", () => { 5 | it("should import profanity correctly", () => { 6 | expect(profanity).to.be.an.instanceOf(Profanity); 7 | }); 8 | 9 | it("should import Profanity class correctly", () => { 10 | expect(Profanity).to.be.a("function"); 11 | const instance = new Profanity(); 12 | expect(instance).to.be.an.instanceOf(Profanity); 13 | }); 14 | 15 | it("should import CensorType enum correctly", () => { 16 | expect(CensorType).to.be.an("object"); 17 | expect(CensorType.Word).to.exist; 18 | expect(CensorType.FirstChar).to.exist; 19 | expect(CensorType.FirstVowel).to.exist; 20 | expect(CensorType.AllVowels).to.exist; 21 | }); 22 | 23 | it("should import ProfanityOptions class 
correctly", () => { 24 | expect(ProfanityOptions).to.be.a("function"); 25 | const options = new ProfanityOptions(); 26 | expect(options).to.be.an.instanceOf(ProfanityOptions); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /tests/profanity-censor.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | 3 | import { profanity, CensorType } from "../src"; 4 | 5 | describe("censor", () => { 6 | describe("Default censoring (CensorType.Word)", () => { 7 | it("should replace multiple profane words within a sentence with grawlix", () => { 8 | const censored = profanity.censor("I like big butts (aka arses) and I cannot lie"); 9 | expect(censored).to.equal(`I like big ${profanity.options.grawlix} (aka ${profanity.options.grawlix}) and I cannot lie`); 10 | }); 11 | 12 | it("should replace profane words within a multi-line sentence with grawlix", () => { 13 | const censored = profanity.censor(` 14 | Nothing profane on line 1. 15 | Censoring butt on line 2. 16 | Nothing profane on line 3. 17 | `); 18 | expect(censored).to.equal(` 19 | Nothing profane on line 1. 20 | Censoring ${profanity.options.grawlix} on line 2. 21 | Nothing profane on line 3. 
22 | `); 23 | }); 24 | 25 | it("sentences without profanity should not be altered", () => { 26 | const original = "I like big glutes and I cannot lie"; 27 | expect(profanity.censor(original)).to.equal(original); 28 | }); 29 | 30 | it("should censor profanity at the beginning of a sentence", () => { 31 | expect(profanity.censor("Butt is a profane word")).to.equal(`${profanity.options.grawlix} is a profane word`); 32 | }); 33 | 34 | it("should censor profanity at the end of a sentence", () => { 35 | expect(profanity.censor("Don't be a butt")).to.equal(`Don't be a ${profanity.options.grawlix}`); 36 | }); 37 | it("should censor multiple occurrences of the same profane word", () => { 38 | expect(profanity.censor("Butt, butt, butt!")).to.equal( 39 | `${profanity.options.grawlix}, ${profanity.options.grawlix}, ${profanity.options.grawlix}!`, 40 | ); 41 | }); 42 | 43 | it("should not censor parts of words that contain profanity", () => { 44 | expect(profanity.censor("I need to assess the situation")).to.equal("I need to assess the situation"); 45 | }); 46 | 47 | it("should censor profanity separated by hyphens", () => { 48 | expect(profanity.censor("Don't be a butt-head")).to.equal(`Don't be a ${profanity.options.grawlix}-head`); 49 | }); 50 | 51 | it("should censor profanity separated by underscores", () => { 52 | expect(profanity.censor("Don't be a butt_head")).to.equal(`Don't be a ${profanity.options.grawlix}_head`); 53 | }); 54 | 55 | it("should censor profanity surrounded by emoji", () => { 56 | expect(profanity.censor("That's 💩butt💩")).to.equal(`That's 💩${profanity.options.grawlix}💩`); 57 | }); 58 | }); 59 | 60 | describe("CensorType.FirstChar", () => { 61 | it("should replace first character of each profane word with grawlix character", () => { 62 | const censored = profanity.censor("I like big butts (aka arses) and I cannot lie", CensorType.FirstChar); 63 | expect(censored).to.equal(`I like big ${profanity.options.grawlixChar}utts (aka 
${profanity.options.grawlixChar}rses) and I cannot lie`); 64 | }); 65 | 66 | it("should preserve case when censoring first character", () => { 67 | expect(profanity.censor("Don't be a BuTt", CensorType.FirstChar)).to.equal(`Don't be a ${profanity.options.grawlixChar}uTt`); 68 | }); 69 | 70 | it("should censor first character of profanity at the beginning of a sentence", () => { 71 | expect(profanity.censor("Butt is a profane word", CensorType.FirstChar)).to.equal(`${profanity.options.grawlixChar}utt is a profane word`); 72 | }); 73 | 74 | it("should censor first character of profanity at the end of a sentence", () => { 75 | expect(profanity.censor("Don't be a butt.", CensorType.FirstChar)).to.equal(`Don't be a ${profanity.options.grawlixChar}utt.`); 76 | }); 77 | 78 | it("should censor first character of profanity separated by hyphens", () => { 79 | expect(profanity.censor("Don't be a butt-head", CensorType.FirstChar)).to.equal(`Don't be a ${profanity.options.grawlixChar}utt-head`); 80 | }); 81 | 82 | it("should censor first character of profanity separated by underscores", () => { 83 | expect(profanity.censor("Don't be a butt_head", CensorType.FirstChar)).to.equal(`Don't be a ${profanity.options.grawlixChar}utt_head`); 84 | }); 85 | }); 86 | describe("CensorType.FirstVowel", () => { 87 | it("should replace first vowel of each profane word with grawlix character", () => { 88 | const censored = profanity.censor("I like big butts (aka arses) and I cannot lie", CensorType.FirstVowel); 89 | expect(censored).to.equal(`I like big b${profanity.options.grawlixChar}tts (aka ${profanity.options.grawlixChar}rses) and I cannot lie`); 90 | }); 91 | 92 | it("should not censor if no vowels are present", () => { 93 | expect(profanity.censor("tsk tsk", CensorType.FirstVowel)).to.equal("tsk tsk"); 94 | }); 95 | 96 | it("should censor first vowel of profanity at the beginning of a sentence", () => { 97 | expect(profanity.censor("Butt is a profane word", 
CensorType.FirstVowel)).to.equal(`B${profanity.options.grawlixChar}tt is a profane word`); 98 | }); 99 | 100 | it("should censor first vowel of profanity at the end of a sentence", () => { 101 | expect(profanity.censor("Don't be a butt.", CensorType.FirstVowel)).to.equal(`Don't be a b${profanity.options.grawlixChar}tt.`); 102 | }); 103 | 104 | it("should handle profane words with no vowels", () => { 105 | expect(profanity.censor("Don't say tsk", CensorType.FirstVowel)).to.equal("Don't say tsk"); 106 | }); 107 | 108 | it("should censor first vowel of profanity separated by hyphens", () => { 109 | expect(profanity.censor("Don't be a butt-head", CensorType.FirstVowel)).to.equal(`Don't be a b${profanity.options.grawlixChar}tt-head`); 110 | }); 111 | it("should censor first vowel of profanity separated by underscores", () => { 112 | expect(profanity.censor("Don't be a butt_head", CensorType.FirstVowel)).to.equal(`Don't be a b${profanity.options.grawlixChar}tt_head`); 113 | }); 114 | }); 115 | 116 | describe("CensorType.AllVowels", () => { 117 | it("should replace all vowels within each profane word with grawlix character", () => { 118 | const censored = profanity.censor("I like big butts (aka arses) and I cannot lie", CensorType.AllVowels); 119 | expect(censored).to.equal( 120 | `I like big b${profanity.options.grawlixChar}tts (aka ${profanity.options.grawlixChar}rs${profanity.options.grawlixChar}s) and I cannot lie`, 121 | ); 122 | }); 123 | 124 | it("should preserve case when censoring all vowels", () => { 125 | expect(profanity.censor("BuTt", CensorType.AllVowels)).to.equal(`B${profanity.options.grawlixChar}Tt`); 126 | }); 127 | 128 | it("should censor all vowels of profanity at the beginning of a sentence", () => { 129 | expect(profanity.censor("Butt is a profane word", CensorType.AllVowels)).to.equal(`B${profanity.options.grawlixChar}tt is a profane word`); 130 | }); 131 | 132 | it("should censor all vowels of profanity at the end of a sentence", () => { 133 | 
expect(profanity.censor("Don't be a butt.", CensorType.AllVowels)).to.equal(`Don't be a b${profanity.options.grawlixChar}tt.`); 134 | }); 135 | 136 | it("should handle profane words with no vowels", () => { 137 | expect(profanity.censor("Don't say tsk", CensorType.AllVowels)).to.equal("Don't say tsk"); 138 | }); 139 | }); 140 | describe("Case sensitivity", () => { 141 | it("should censor while preserving case", () => { 142 | expect(profanity.censor("Don't be a BuTt")).to.equal("Don't be a @#$%&!"); 143 | }); 144 | 145 | it("should censor all uppercase profanity", () => { 146 | expect(profanity.censor("DON'T BE A BUTT")).to.equal("DON'T BE A @#$%&!"); 147 | }); 148 | 149 | it("should censor mixed case profanity", () => { 150 | expect(profanity.censor("Don't Be A bUtT")).to.equal("Don't Be A @#$%&!"); 151 | }); 152 | 153 | it("should censor profanity with alternating case", () => { 154 | expect(profanity.censor("dOn'T bE a BuTt")).to.equal("dOn'T bE a @#$%&!"); 155 | }); 156 | }); 157 | 158 | describe("Multi-word profanities", () => { 159 | it("should censor multi-word profanities", () => { 160 | expect(profanity.censor("He's a fudge packer")).to.equal(`He's a ${profanity.options.grawlix}`); 161 | expect(profanity.censor("That's a blow job")).to.equal(`That's a ${profanity.options.grawlix}`); 162 | expect(profanity.censor("Don't be a son-of-a-bitch")).to.equal(`Don't be a ${profanity.options.grawlix}`); 163 | }); 164 | 165 | it("should handle multi-word profanities with different censor types", () => { 166 | expect(profanity.censor("He's a fudge packer", CensorType.FirstChar)).to.equal(`He's a ${profanity.options.grawlixChar}udge packer`); 167 | expect(profanity.censor("That's a blow job", CensorType.FirstVowel)).to.equal(`That's a bl${profanity.options.grawlixChar}w job`); 168 | expect(profanity.censor("Don't be a son-of-a-bitch", CensorType.AllVowels)).to.equal( 169 | `Don't be a 
s${profanity.options.grawlixChar}n-${profanity.options.grawlixChar}f-${profanity.options.grawlixChar}-b${profanity.options.grawlixChar}tch`, 170 | ); 171 | }); 172 | }); 173 | 174 | describe("Input type handling", () => { 175 | it("should return original input for non-string input", () => { 176 | expect(profanity.censor(null as any)).to.be.null; 177 | expect(profanity.censor(undefined as any)).to.be.undefined; 178 | expect(profanity.censor(123 as any)).to.equal(123); 179 | expect(profanity.censor(true as any)).to.be.true; 180 | expect(profanity.censor({} as any)).to.deep.equal({}); 181 | expect(profanity.censor([] as any)).to.deep.equal([]); 182 | }); 183 | }); 184 | }); 185 | -------------------------------------------------------------------------------- /tests/profanity-exists.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | 3 | import { profanity, Profanity } from "../src"; 4 | 5 | describe("exists", () => { 6 | describe("wholeWord = true", () => { 7 | it("should return true when profanity exists in a sentence", () => { 8 | expect(profanity.exists("I like big butts and I cannot lie")).to.be.true; 9 | }); 10 | 11 | it("should return false when profanity is not a whole word in a sentence", () => { 12 | expect(profanity.exists("Should we censor the word arsenic?")).to.be.false; 13 | }); 14 | 15 | it("should return true when profanity exists within multiple lines", () => { 16 | expect( 17 | profanity.exists(` 18 | Nothing profane on line 1. 19 | Censoring butt on line 2. 20 | Nothing profane on line 3. 
21 | `), 22 | ).to.be.true; 23 | }); 24 | 25 | it("should return false when profanity does not exist", () => { 26 | expect(profanity.exists("I like big glutes and I cannot lie")).to.be.false; 27 | }); 28 | 29 | it("should return true when profanity is surrounded by punctuation", () => { 30 | expect(profanity.exists("What the (butt)!")).to.be.true; 31 | }); 32 | 33 | it("should return false when profanity is part of a larger word", () => { 34 | expect(profanity.exists("I'm feeling passionate today")).to.be.false; 35 | }); 36 | 37 | it("should return true when profanity is at the beginning of a sentence", () => { 38 | expect(profanity.exists("Butt is a profane word")).to.be.true; 39 | }); 40 | it("should return true when profanity is at the end of a sentence", () => { 41 | expect(profanity.exists("Don't be a butt.")).to.be.true; 42 | }); 43 | 44 | it("should return false for words that are substrings of profane words", () => { 45 | expect(profanity.exists("I need to assess the situation")).to.be.false; 46 | }); 47 | 48 | it("should return true when profanity is separated by hyphens", () => { 49 | expect(profanity.exists("Don't be a butt-head")).to.be.true; 50 | expect(profanity.exists("Don't be a head-butt")).to.be.true; 51 | }); 52 | 53 | it("should return true when profanity is separated by underscores", () => { 54 | expect(profanity.exists("Don't be a butt_face")).to.be.true; 55 | expect(profanity.exists("Don't be a face_butt")).to.be.true; 56 | }); 57 | 58 | it("should return false when profanity is part of a word separated by a hyphen", () => { 59 | expect(profanity.exists("Don't be an arsenic-head")).to.be.false; 60 | expect(profanity.exists("Don't be a head-arsenic")).to.be.false; 61 | }); 62 | 63 | it("should return false when profanity is part of a word separated by an underscore", () => { 64 | expect(profanity.exists("Don't be an arsenic_head")).to.be.false; 65 | expect(profanity.exists("Don't be a head_arsenic")).to.be.false; 66 | }); 67 | it("should 
return false when profanity is part of a URL", () => { 68 | expect(profanity.exists("Visit https://www.example.com/assets/image.jpg")).to.be.false; 69 | }); 70 | 71 | it("should return true when profanity is surrounded by emoji", () => { 72 | expect(profanity.exists("That's 💩butt💩")).to.be.true; 73 | }); 74 | }); 75 | 76 | describe("wholeWord = false", () => { 77 | let customProfanity: Profanity; 78 | 79 | before(() => { 80 | customProfanity = new Profanity({ wholeWord: false }); 81 | }); 82 | 83 | it("should return true when profanity is part of a word in a sentence", () => { 84 | expect(customProfanity.exists("Should we censor the word arsenic?")).to.be.true; 85 | }); 86 | 87 | it("should return false when profanity does not exist", () => { 88 | expect(customProfanity.exists("I like big glutes and I cannot lie")).to.be.false; 89 | }); 90 | 91 | it("Should return false when the last character is an 'A' with no profanity (A$$ edge case)", () => { 92 | expect(customProfanity.exists("FUNTIMESA")).to.be.false; 93 | }); 94 | 95 | it("Should return true when the last character is an 'A' and there is profanity (A$$ edge case)", () => { 96 | expect(customProfanity.exists("BUTTSA")).to.be.true; 97 | }); 98 | 99 | it("Should return true when some regex characters are present as profanity", () => { 100 | expect(customProfanity.exists("lovea$$")).to.be.true; 101 | }); 102 | 103 | it("should return true when profanity is at the beginning of a word", () => { 104 | expect(customProfanity.exists("buttress the wall")).to.be.true; 105 | }); 106 | 107 | it("should return true when profanity is at the end of a word", () => { 108 | expect(customProfanity.exists("kickbutt performance")).to.be.true; 109 | }); 110 | it("should return true when profanity is in the middle of a word", () => { 111 | expect(customProfanity.exists("Massachusetts")).to.be.true; 112 | }); 113 | 114 | it("should return true for words that are substrings of profane words", () => { 115 | 
expect(customProfanity.exists("I need to assess the situation")).to.be.true; 116 | }); 117 | 118 | it("should return true when profanity is part of a URL", () => { 119 | expect(customProfanity.exists("Visit https://www.example.com/assets/image.jpg")).to.be.true; 120 | }); 121 | 122 | it("should return true when profanity is separated by hyphens", () => { 123 | expect(customProfanity.exists("Don't be a butt-head")).to.be.true; 124 | expect(customProfanity.exists("Don't be a head-butt")).to.be.true; 125 | expect(customProfanity.exists("Don't be an arsenic-head")).to.be.true; 126 | expect(customProfanity.exists("Don't be a head-arsenic")).to.be.true; 127 | }); 128 | 129 | it("should return true when profanity is separated by underscores", () => { 130 | expect(customProfanity.exists("Don't be a butt_head")).to.be.true; 131 | expect(customProfanity.exists("Don't be a head_butt")).to.be.true; 132 | expect(customProfanity.exists("Don't be an arsenic_head")).to.be.true; 133 | expect(customProfanity.exists("Don't be a head_arsenic")).to.be.true; 134 | }); 135 | it("should return true when profanity is surrounded by emoji", () => { 136 | expect(customProfanity.exists("That's 💩butt💩")).to.be.true; 137 | }); 138 | }); 139 | 140 | describe("Case sensitivity", () => { 141 | it("should detect mixed case profanity", () => { 142 | expect(profanity.exists("Don't be a BuTt")).to.be.true; 143 | }); 144 | 145 | it("should detect all uppercase profanity", () => { 146 | expect(profanity.exists("DON'T BE A BUTT")).to.be.true; 147 | }); 148 | 149 | it("should detect all lowercase profanity", () => { 150 | expect(profanity.exists("don't be a butt")).to.be.true; 151 | }); 152 | 153 | it("should detect profanity with alternating case", () => { 154 | expect(profanity.exists("dOn'T bE a BuTt")).to.be.true; 155 | }); 156 | 157 | it("should detect profanity with random casing", () => { 158 | expect(profanity.exists("DoN't Be A bUtT")).to.be.true; 159 | }); 160 | }); 161 | 162 | 
describe("Multi-word profanities", () => { 163 | it("should detect multi-word profanities", () => { 164 | expect(profanity.exists("He's a fudge packer")).to.be.true; 165 | expect(profanity.exists("That's a blow job")).to.be.true; 166 | expect(profanity.exists("Don't be a son-of-a-bitch")).to.be.true; 167 | }); 168 | 169 | it("should not detect partial matches of multi-word profanities", () => { 170 | expect(profanity.exists("I like to pack fudge for desserts")).to.be.false; 171 | expect(profanity.exists("The wind blew jobs away")).to.be.false; 172 | expect(profanity.exists("He's the son of a businessman")).to.be.false; 173 | }); 174 | }); 175 | 176 | describe("Input type handling", () => { 177 | it("should return false for non-string input", () => { 178 | expect(profanity.exists(null as any)).to.be.false; 179 | expect(profanity.exists(undefined as any)).to.be.false; 180 | expect(profanity.exists(123 as any)).to.be.false; 181 | expect(profanity.exists(true as any)).to.be.false; 182 | expect(profanity.exists({} as any)).to.be.false; 183 | expect(profanity.exists([] as any)).to.be.false; 184 | }); 185 | }); 186 | }); 187 | -------------------------------------------------------------------------------- /tests/profanity-languages.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | import { profanity, Profanity } from "../src"; 3 | 4 | describe("Languages", () => { 5 | describe("Multi-language support", () => { 6 | it("should detect and censor profanity in specified languages", () => { 7 | expect(profanity.exists("I like big butts and I cannot lie", ["en"])).to.be.true; 8 | expect(profanity.exists("Ich bin ein arschloch", ["de"])).to.be.true; 9 | expect(profanity.exists("I like big butts and ich bin ein arschloch", ["en", "de"])).to.be.true; 10 | 11 | expect(profanity.censor("I like big butts and I cannot lie", undefined, ["en"])).to.equal( 12 | `I like big ${profanity.options.grawlix} and I 
cannot lie`, 13 | ); 14 | expect(profanity.censor("Ich bin ein arschloch", undefined, ["de"])).to.equal(`Ich bin ein ${profanity.options.grawlix}`); 15 | expect(profanity.censor("I like big butts and ich bin ein arschloch", undefined, ["en", "de"])).to.equal( 16 | `I like big ${profanity.options.grawlix} and ich bin ein ${profanity.options.grawlix}`, 17 | ); 18 | }); 19 | 20 | it("should detect and censor profanity in a sentence with multiple languages", () => { 21 | expect(profanity.exists("I like big butts and ich bin ein arschloch", ["en", "de"])).to.be.true; 22 | expect(profanity.exists("Je suis un arschloch and I like big butts", ["en", "de"])).to.be.true; 23 | 24 | expect(profanity.censor("I like big butts and ich bin ein arschloch", undefined, ["en", "de"])).to.equal( 25 | `I like big ${profanity.options.grawlix} and ich bin ein ${profanity.options.grawlix}`, 26 | ); 27 | expect(profanity.censor("Je suis un arschloch and I like big butts", undefined, ["en", "de"])).to.equal( 28 | `Je suis un ${profanity.options.grawlix} and I like big ${profanity.options.grawlix}`, 29 | ); 30 | }); 31 | 32 | it("should throw an error when an invalid language is specified", () => { 33 | expect(() => profanity.exists("I like big butts and I cannot lie", ["en", "invalid"])).to.throw('Invalid language: "invalid"'); 34 | expect(() => profanity.censor("I like big butts and I cannot lie", undefined, ["en", "invalid"])).to.throw('Invalid language: "invalid"'); 35 | }); 36 | 37 | it("should handle language codes case-insensitively", () => { 38 | expect(profanity.exists("I like big butts", ["EN"])).to.be.true; 39 | expect(profanity.exists("ich bin ein arschloch", ["De"])).to.be.true; 40 | 41 | expect(profanity.censor("I like big butts", undefined, ["EN"])).to.equal(`I like big ${profanity.options.grawlix}`); 42 | expect(profanity.censor("ich bin ein arschloch", undefined, ["De"])).to.equal(`ich bin ein ${profanity.options.grawlix}`); 43 | }); 44 | }); 45 | 46 | describe("Language 
options", () => { 47 | it("should use languages specified in options when an empty language array is provided", () => { 48 | const customProfanity = new Profanity({ languages: ["de"] }); 49 | expect(customProfanity.exists("I like big butts", [])).to.be.false; 50 | expect(customProfanity.exists("ich bin ein arschloch", [])).to.be.true; 51 | 52 | expect(customProfanity.censor("I like big butts", undefined, [])).to.equal("I like big butts"); 53 | expect(customProfanity.censor("ich bin ein arschloch", undefined, [])).to.equal(`ich bin ein ${customProfanity.options.grawlix}`); 54 | }); 55 | 56 | it("should use default language (en) when no languages are specified in options or method call", () => { 57 | const customProfanity = new Profanity(); 58 | expect(customProfanity.exists("I like big butts")).to.be.true; 59 | expect(customProfanity.exists("ich bin ein arschloch")).to.be.false; 60 | 61 | expect(customProfanity.censor("I like big butts")).to.equal(`I like big ${customProfanity.options.grawlix}`); 62 | expect(customProfanity.censor("ich bin ein arschloch")).to.equal("ich bin ein arschloch"); 63 | }); 64 | 65 | it("should use provided languages even if options.languages is set", () => { 66 | const customProfanity = new Profanity({ languages: ["de"] }); 67 | expect(customProfanity.exists("I like big butts", ["en"])).to.be.true; 68 | expect(customProfanity.exists("ich bin ein arschloch", ["en"])).to.be.false; 69 | 70 | expect(customProfanity.censor("I like big butts", undefined, ["en"])).to.equal(`I like big ${customProfanity.options.grawlix}`); 71 | expect(customProfanity.censor("ich bin ein arschloch", undefined, ["en"])).to.equal("ich bin ein arschloch"); 72 | }); 73 | }); 74 | 75 | describe("Word list management across languages", () => { 76 | it("should detect and censor added words across all languages", () => { 77 | const customProfanity = new Profanity(); 78 | customProfanity.addWords(["testword"]); 79 | expect(customProfanity.exists("this is a testword", 
["en"])).to.be.true; 80 | expect(customProfanity.exists("this is a testword", ["de"])).to.be.true; 81 | 82 | expect(customProfanity.censor("this is a testword", undefined, ["en"])).to.equal(`this is a ${customProfanity.options.grawlix}`); 83 | expect(customProfanity.censor("this is a testword", undefined, ["de"])).to.equal(`this is a ${customProfanity.options.grawlix}`); 84 | }); 85 | 86 | it("should not detect or censor removed words across all languages", () => { 87 | const customProfanity = new Profanity(); 88 | customProfanity.removeWords(["butts", "arschloch"]); 89 | expect(customProfanity.exists("I like big butts", ["en"])).to.be.false; 90 | expect(customProfanity.exists("ich bin ein arschloch", ["de"])).to.be.false; 91 | 92 | expect(customProfanity.censor("I like big butts", undefined, ["en"])).to.equal("I like big butts"); 93 | expect(customProfanity.censor("ich bin ein arschloch", undefined, ["de"])).to.equal("ich bin ein arschloch"); 94 | }); 95 | }); 96 | }); 97 | -------------------------------------------------------------------------------- /tests/profanity-options.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | 3 | import { ProfanityOptions } from "../src"; 4 | 5 | describe("ProfanityOptions", () => { 6 | it("should create ProfanityOptions with default values", () => { 7 | const options = new ProfanityOptions(); 8 | expect(options.wholeWord).to.be.true; 9 | expect(options.grawlix).to.equal("@#$%&!"); 10 | expect(options.grawlixChar).to.equal("*"); 11 | }); 12 | 13 | it("should create ProfanityOptions with custom values", () => { 14 | const options = new ProfanityOptions(); 15 | options.wholeWord = false; 16 | options.grawlix = "***"; 17 | options.grawlixChar = "#"; 18 | expect(options.wholeWord).to.be.false; 19 | expect(options.grawlix).to.equal("***"); 20 | expect(options.grawlixChar).to.equal("#"); 21 | }); 22 | 23 | it("should create ProfanityOptions with all partial 
custom values", () => { 24 | const options = new ProfanityOptions({ 25 | wholeWord: false, 26 | grawlix: "***", 27 | grawlixChar: "#", 28 | }); 29 | expect(options.wholeWord).to.be.false; 30 | expect(options.grawlix).to.equal("***"); 31 | expect(options.grawlixChar).to.equal("#"); 32 | }); 33 | 34 | it("should create ProfanityOptions with some partial custom values", () => { 35 | const options = new ProfanityOptions({ 36 | wholeWord: false, 37 | }); 38 | expect(options.wholeWord).to.be.false; 39 | expect(options.grawlix).to.equal("@#$%&!"); 40 | expect(options.grawlixChar).to.equal("*"); 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /tests/profanity.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from "chai"; 2 | 3 | import { Profanity, CensorType, ProfanityOptions } from "../src"; 4 | 5 | describe("Profanity", () => { 6 | describe("Class instantiation", () => { 7 | describe("Profanity", () => { 8 | it("should create Profanity instance with default options", () => { 9 | const profanityInstance = new Profanity(); 10 | expect(profanityInstance.options.wholeWord).to.be.true; 11 | expect(profanityInstance.options.grawlix).to.equal("@#$%&!"); 12 | expect(profanityInstance.options.grawlixChar).to.equal("*"); 13 | }); 14 | 15 | it("should create Profanity instance with custom options", () => { 16 | const options = new ProfanityOptions({ 17 | wholeWord: false, 18 | grawlix: "***", 19 | grawlixChar: "#", 20 | }); 21 | const profanityInstance = new Profanity(options); 22 | expect(profanityInstance.options.wholeWord).to.be.false; 23 | expect(profanityInstance.options.grawlix).to.equal("***"); 24 | expect(profanityInstance.options.grawlixChar).to.equal("#"); 25 | }); 26 | 27 | it("should create Profanity instance with all partial custom options", () => { 28 | const profanityInstance = new Profanity({ 29 | wholeWord: false, 30 | grawlix: "***", 31 | 
grawlixChar: "#", 32 | }); 33 | expect(profanityInstance.options.wholeWord).to.be.false; 34 | expect(profanityInstance.options.grawlix).to.equal("***"); 35 | expect(profanityInstance.options.grawlixChar).to.equal("#"); 36 | }); 37 | 38 | it("should create Profanity instance with some partial custom options", () => { 39 | const profanityInstance = new Profanity({ 40 | wholeWord: false, 41 | }); 42 | expect(profanityInstance.options.wholeWord).to.be.false; 43 | expect(profanityInstance.options.grawlix).to.equal("@#$%&!"); 44 | expect(profanityInstance.options.grawlixChar).to.equal("*"); 45 | }); 46 | }); 47 | }); 48 | 49 | describe("Word list management", () => { 50 | let customProfanity: Profanity; 51 | 52 | beforeEach(() => { 53 | customProfanity = new Profanity(); 54 | }); 55 | 56 | describe("addWords", () => { 57 | it("should add multiple words to the list of profane words", () => { 58 | customProfanity.addWords(["aardvark", "zebra"]); 59 | expect(customProfanity.exists("Should we censor the word aardvark and zebra?")).to.be.true; 60 | }); 61 | 62 | it("should handle adding duplicate words", () => { 63 | customProfanity.addWords(["test", "test"]); 64 | expect(customProfanity.exists("test")).to.be.true; 65 | }); 66 | }); 67 | 68 | describe("removeWords", () => { 69 | it("should remove multiple words from the list of profane words", () => { 70 | customProfanity.removeWords(["butts", "arses"]); 71 | expect(customProfanity.exists("I like big butts (aka arses) and I cannot lie")).to.be.false; 72 | }); 73 | 74 | it("should handle removing non-existent words", () => { 75 | customProfanity.removeWords(["nonexistent"]); 76 | expect(customProfanity.exists("nonexistent")).to.be.false; 77 | }); 78 | }); 79 | 80 | describe("Custom word list", () => { 81 | it("should detect custom added words (wholeWord = true)", () => { 82 | customProfanity.addWords(["cucumber", "banana"]); 83 | expect(customProfanity.exists("I love cucumbers")).to.be.false; 84 | 
expect(customProfanity.censor("I love cucumbers")).to.equal("I love cucumbers"); 85 | expect(customProfanity.exists("I love cucumber")).to.be.true; 86 | expect(customProfanity.censor("I love cucumber")).to.equal(`I love ${customProfanity.options.grawlix}`); 87 | expect(customProfanity.exists("Bananas are yellow")).to.be.false; 88 | expect(customProfanity.censor("Bananas are yellow")).to.equal("Bananas are yellow"); 89 | expect(customProfanity.exists("This banana is yellow")).to.be.true; 90 | expect(customProfanity.censor("This banana is yellow")).to.equal(`This ${customProfanity.options.grawlix} is yellow`); 91 | }); 92 | 93 | it("should detect custom added words (wholeWord = false)", () => { 94 | const customProfanityPartial = new Profanity({ wholeWord: false }); 95 | customProfanityPartial.addWords(["cucumber", "banana"]); 96 | expect(customProfanityPartial.exists("I love cucumbers")).to.be.true; 97 | expect(customProfanityPartial.censor("I love cucumbers")).to.equal(`I love ${customProfanityPartial.options.grawlix}s`); 98 | expect(customProfanityPartial.exists("Bananas are yellow")).to.be.true; 99 | expect(customProfanityPartial.censor("Bananas are yellow")).to.equal(`${customProfanityPartial.options.grawlix}s are yellow`); 100 | }); 101 | 102 | it("should not detect removed words", () => { 103 | customProfanity.removeWords(["butt", "arse"]); 104 | expect(customProfanity.exists("Don't be a butt")).to.be.false; 105 | expect(customProfanity.censor("Don't be a butt")).to.equal("Don't be a butt"); 106 | expect(customProfanity.exists("You're an arse")).to.be.false; 107 | expect(customProfanity.censor("You're an arse")).to.equal("You're an arse"); 108 | }); 109 | 110 | it("should handle adding and removing words in sequence", () => { 111 | customProfanity.addWords(["test"]); 112 | expect(customProfanity.exists("test")).to.be.true; 113 | expect(customProfanity.censor("test")).to.equal(customProfanity.options.grawlix); 114 | customProfanity.removeWords(["test"]); 115 | 
expect(customProfanity.exists("test")).to.be.false; 116 | expect(customProfanity.censor("test")).to.equal("test"); 117 | }); 118 | }); 119 | }); 120 | 121 | describe("Whitelist functionality", () => { 122 | let customProfanity: Profanity; 123 | 124 | beforeEach(() => { 125 | customProfanity = new Profanity(); 126 | }); 127 | 128 | describe("wholeWord = true", () => { 129 | it("should whitelist a word", () => { 130 | customProfanity.whitelist.addWords(["butt"]); 131 | expect(customProfanity.exists("Don't be a butt")).to.be.false; 132 | expect(customProfanity.censor("Don't be a butt")).to.equal("Don't be a butt"); 133 | }); 134 | 135 | it("should whitelist multiple words", () => { 136 | customProfanity.whitelist.addWords(["butt", "arse"]); 137 | expect(customProfanity.exists("Should we censor the word butt or arse?")).to.be.false; 138 | expect(customProfanity.censor("Should we censor the word butt or arse?")).to.equal("Should we censor the word butt or arse?"); 139 | }); 140 | 141 | it("should only whitelist exact whole words", () => { 142 | customProfanity.whitelist.addWords(["but"]); 143 | expect(customProfanity.exists("Don't be a but")).to.be.false; 144 | expect(customProfanity.censor("Don't be a but")).to.equal("Don't be a but"); 145 | expect(customProfanity.exists("Don't be a butt")).to.be.true; 146 | expect(customProfanity.censor("Don't be a butt")).to.equal("Don't be a @#$%&!"); 147 | }); 148 | 149 | describe("Hyphenated and underscore-separated words", () => { 150 | beforeEach(() => { 151 | customProfanity.whitelist.addWords(["butt"]); 152 | }); 153 | 154 | it("should detect profanity in hyphenated words when part is whitelisted", () => { 155 | expect(customProfanity.exists("Don't be a butt-head")).to.be.true; 156 | expect(customProfanity.censor("Don't be a butt-head")).to.equal(`Don't be a ${customProfanity.options.grawlix}-head`); 157 | }); 158 | 159 | it("should detect profanity in underscore-separated words when part is whitelisted", () => { 160 | 
expect(customProfanity.exists("Don't be a butt_head")).to.be.true; 161 | expect(customProfanity.censor("Don't be a butt_head")).to.equal(`Don't be a ${customProfanity.options.grawlix}_head`); 162 | }); 163 | }); 164 | }); 165 | 166 | describe("wholeWord = false", () => { 167 | let customProfanityPartial: Profanity; 168 | 169 | before(() => { 170 | customProfanityPartial = new Profanity({ wholeWord: false }); 171 | }); 172 | 173 | it("should whitelist multiple words", () => { 174 | customProfanityPartial.whitelist.addWords(["buttocks", "arsenic"]); 175 | expect(customProfanityPartial.exists("Should we censor the word buttocks or arsenic?")).to.be.false; 176 | }); 177 | 178 | describe("Edge cases", () => { 179 | before(() => { 180 | customProfanityPartial.whitelist.addWords(["arsenic", "class", "password", "classic"]); 181 | }); 182 | 183 | it("should detect 'arse' as profanity", () => { 184 | expect(customProfanityPartial.exists("what an arse")).to.be.true; 185 | expect(customProfanityPartial.censor("what an arse")).to.equal(`what an ${customProfanityPartial.options.grawlix}`); 186 | }); 187 | 188 | it("should not detect 'arsenic' as profanity due to whitelist", () => { 189 | expect(customProfanityPartial.exists("dedicated arsenic")).to.be.false; 190 | expect(customProfanityPartial.censor("dedicated arsenic")).to.equal("dedicated arsenic"); 191 | }); 192 | 193 | it("should not detect 'class' as profanity due to whitelist", () => { 194 | expect(customProfanityPartial.exists("dedicated class person")).to.be.false; 195 | expect(customProfanityPartial.censor("dedicated class person")).to.equal("dedicated class person"); 196 | }); 197 | 198 | it("should not detect 'classic' as profanity due to whitelist", () => { 199 | expect(customProfanityPartial.exists("dedicated classic")).to.be.false; 200 | expect(customProfanityPartial.censor("dedicated classic")).to.equal("dedicated classic"); 201 | }); 202 | 203 | it("should not detect 'password' as profanity due to whitelist", 
() => { 204 | expect(customProfanityPartial.exists("dedicated password")).to.be.false; 205 | expect(customProfanityPartial.censor("dedicated password")).to.equal("dedicated password"); 206 | }); 207 | }); 208 | }); 209 | 210 | describe("removeWords", () => { 211 | it("should remove multiple words from the whitelist", () => { 212 | customProfanity.whitelist.addWords(["butts", "arses"]); 213 | expect(customProfanity.exists("I like big butts (aka arses) and I cannot lie")).to.be.false; 214 | 215 | customProfanity.whitelist.removeWords(["butts"]); 216 | expect(customProfanity.exists("I like big butts (aka arses) and I cannot lie")).to.be.true; 217 | }); 218 | 219 | it("should handle removing non-existent words from whitelist", () => { 220 | customProfanity.whitelist.removeWords(["nonexistent"]); 221 | expect(customProfanity.exists("nonexistent")).to.be.false; 222 | }); 223 | }); 224 | 225 | it("should not detect whitelisted words", () => { 226 | customProfanity.whitelist.addWords(["classic", "assembly"]); 227 | expect(customProfanity.exists("That's a classic movie")).to.be.false; 228 | expect(customProfanity.censor("That's a classic movie")).to.equal("That's a classic movie"); 229 | expect(customProfanity.exists("The assembly line is efficient")).to.be.false; 230 | expect(customProfanity.censor("The assembly line is efficient")).to.equal("The assembly line is efficient"); 231 | }); 232 | 233 | it("should detect profanity after removing from whitelist", () => { 234 | customProfanity.whitelist.addWords(["classic"]); 235 | customProfanity.whitelist.removeWords(["classic"]); 236 | expect(customProfanity.exists("That's a classic butt movie")).to.be.true; 237 | expect(customProfanity.censor("That's a classic butt movie")).to.equal(`That's a classic ${customProfanity.options.grawlix} movie`); 238 | }); 239 | 240 | it("should handle adding and removing words from whitelist in sequence", () => { 241 | customProfanity.whitelist.addWords(["test"]); 242 | 
customProfanity.addWords(["test"]); 243 | expect(customProfanity.exists("test")).to.be.false; 244 | expect(customProfanity.censor("test")).to.equal("test"); 245 | }); 246 | }); 247 | 248 | describe("Custom options", () => { 249 | describe("Custom grawlix", () => { 250 | it("should use custom grawlix string", () => { 251 | const customProfanity = new Profanity({ grawlix: "!@#" }); 252 | expect(customProfanity.censor("Don't be a butt")).to.equal("Don't be a !@#"); 253 | }); 254 | 255 | it("should use custom grawlix character", () => { 256 | const customProfanity = new Profanity({ grawlixChar: "X" }); 257 | expect(customProfanity.censor("You're a butt", CensorType.FirstChar)).to.equal("You're a Xutt"); 258 | }); 259 | }); 260 | }); 261 | }); 262 | -------------------------------------------------------------------------------- /tests/require.spec.ts: -------------------------------------------------------------------------------- 1 | const { expect } = require("chai"); 2 | const { profanity, Profanity, CensorType, ProfanityOptions } = require("../dist"); 3 | 4 | describe("CommonJS Require", () => { 5 | it("should require profanity correctly", () => { 6 | expect(profanity).to.be.an.instanceOf(Profanity); 7 | }); 8 | 9 | it("should require Profanity class correctly", () => { 10 | expect(Profanity).to.be.a("function"); 11 | const instance = new Profanity(); 12 | expect(instance).to.be.an.instanceOf(Profanity); 13 | }); 14 | 15 | it("should require CensorType enum correctly", () => { 16 | expect(CensorType).to.be.an("object"); 17 | expect(CensorType.Word).to.exist; 18 | expect(CensorType.FirstChar).to.exist; 19 | expect(CensorType.FirstVowel).to.exist; 20 | expect(CensorType.AllVowels).to.exist; 21 | }); 22 | 23 | it("should require ProfanityOptions class correctly", () => { 24 | expect(ProfanityOptions).to.be.a("function"); 25 | const options = new ProfanityOptions(); 26 | expect(options).to.be.an.instanceOf(ProfanityOptions); 27 | }); 28 | }); 29 | 
-------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": false, 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "sourceMap": true, 6 | "declaration": true, 7 | "module": "CommonJS", 8 | "moduleResolution": "node", 9 | "emitDecoratorMetadata": true, 10 | "experimentalDecorators": true, 11 | "target": "es2019", 12 | "typeRoots": ["node_modules/@types"], 13 | "lib": ["dom", "es2019"] 14 | }, 15 | "include": ["src/**/*"], 16 | "exclude": ["src/tools/**/*"] 17 | } 18 | --------------------------------------------------------------------------------