├── model ├── __init__.py ├── model_data │ ├── model.h5 │ └── model.json ├── parser.py ├── furry_detector.py └── parse_data │ └── word_list.json ├── twitter ├── __init__.py └── wrapper.py ├── requirements.txt ├── twitter_key.json ├── assets ├── no_user.png └── paw_icon.ico ├── LICENSE ├── README.md ├── .gitignore └── main.py /model/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /twitter/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | tensorflow 3 | Pillow 4 | -------------------------------------------------------------------------------- /twitter_key.json: -------------------------------------------------------------------------------- 1 | { 2 | "bearer_token": "", 3 | "v1.1": false 4 | } -------------------------------------------------------------------------------- /assets/no_user.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ZenithO-o/NLP-furry-detector/HEAD/assets/no_user.png -------------------------------------------------------------------------------- /assets/paw_icon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ZenithO-o/NLP-furry-detector/HEAD/assets/paw_icon.ico -------------------------------------------------------------------------------- /model/model_data/model.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ZenithO-o/NLP-furry-detector/HEAD/model/model_data/model.h5 -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 ZenithO-o 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /model/model_data/model.json: -------------------------------------------------------------------------------- 1 | {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "batch_input_shape": [null, 1, 1000], "dtype": "float32", "data_format": "channels_last"}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 64, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "dtype": "float32", "units": 1, "activation": "sigmoid", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}], "build_input_shape": [null, 1, 1000]}, "keras_version": "2.3.0-tf", "backend": "tensorflow"} -------------------------------------------------------------------------------- /model/parser.py: -------------------------------------------------------------------------------- 1 | from 
pathlib import Path 2 | from collections import Counter 3 | import json 4 | import re 5 | 6 | PARSE_PATH = Path(__file__).parent / 'parse_data' 7 | 8 | def _load_word_list() -> dict: 9 | """Helper function to get word list json 10 | 11 | Returns: 12 | dict: 0 populated dictionary of word list 13 | """ 14 | with open(PARSE_PATH / 'word_list.json') as json_file: 15 | data = json.load(json_file) 16 | 17 | return {k:0 for k in data['set']} 18 | 19 | def parse_tweets(tweets: list[str]) -> list[float]: 20 | """Helper function to convert a list of tweets into something usable by 21 | `parse_text`. Returns what `parse_text` would return. 22 | 23 | Args: 24 | tweets (list[str]): The list of tweets to convert 25 | 26 | Returns: 27 | list[float]: output word vector 28 | """ 29 | text = ' '.join(map(str, tweets)) 30 | return parse_text(text) 31 | 32 | 33 | def parse_text(text: str) -> list[float]: 34 | """Convert text into a word vector usable by `FurryDetector`. 35 | 36 | Args: 37 | text (str): input text to convert 38 | 39 | Returns: 40 | list[float]: output word vector 41 | """ 42 | word_list = _load_word_list() 43 | 44 | lines = text.lower().splitlines(keepends=False) 45 | words = [] 46 | for line in lines: 47 | words.extend(line.split(' ')) 48 | 49 | # O(1000*n) 50 | # misses many values, but the overall trend should cut through noise. Other 51 | # parsing tricks, like checking if re.sub(r'\W', '', word) would take too 52 | # long, so this will suffice. 53 | highest = 1 54 | for key in word_list: 55 | for word in words: 56 | if key == word: 57 | word_list[key] += 1 58 | highest = max(word_list[key], highest) 59 | 60 | return [word_list[word]/highest for word in word_list] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # NLP-furry-detector 2 | This is the Furry Detector program as seen on my [YouTube](https://youtube.com/c/FurryMemes)! 
It is normalized between [0-1], 1 being the most commonly occurring word, and 0 being not used at all.
:D 26 | 27 | ## Other 28 | 29 | **Please be sure to read `requirements.txt` for the modules used** 30 | 31 | #### Contributions 32 | Contributions are welcome! 33 | 34 | I am always willing to look at PR's and issues if you think there is something that can be fixed or added :) 35 | 36 | ----- 37 | #### *2022-11-22 Update* 38 | This repository has gotten a long overdue makeover. I have refactored the project so that the code is much cleaner (and does not look like it was written by someone who only had 6 months experience with Python), and also so that it runs somewhat faster. 39 | 40 | ## License 41 | 42 | [MIT](https://choosealicense.com/licenses/mit/) 43 | -------------------------------------------------------------------------------- /model/furry_detector.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import tensorflow as tf 3 | from tensorflow.keras.models import model_from_json 4 | from tensorflow.keras import Model 5 | from typing import Any 6 | 7 | class FurryDetector: 8 | """Keras model wrapper class for the Furry Detector model""" 9 | 10 | def __init__(self) -> None: 11 | """Create new model instance of Furry Detector model 12 | 13 | The Furry Detector model works by first recieving an input of the 14 | frequency of "furry words" used in text. While this class does not 15 | innately have this function. The file `parser.py` contains helper 16 | functions that you can use to make input easier. 17 | 18 | Once given this vector space of words used, the Keras model is then ran 19 | on it to give a final "confidence" score that the text the model was 20 | ran on was written by a furry. 21 | """ 22 | main_dir = Path(__file__).parent 23 | self._model_dir = main_dir / 'model_data' 24 | self.model = self._load_model() 25 | 26 | def _load_model(self) -> Model: 27 | """Helper function for loading json and h5 savefiles of the furry model 28 | into an uncompiled Keras model. 
29 | 30 | Returns: 31 | model: Keras model of 'Furry Detector' model 32 | """ 33 | with open(self._model_dir / 'model.json') as json_file: 34 | data = json_file.read() 35 | 36 | loaded_model = model_from_json(data) 37 | 38 | loaded_model: Model # type hinting for IDE 39 | loaded_model.load_weights(self._model_dir / 'model.h5') 40 | 41 | return loaded_model 42 | 43 | def run(self, input_arr: Any) -> float: 44 | """Executes the Furry Detector model on the given input. 45 | 46 | If needed, converts `input_arr` into a `Tensor` before running. For 47 | simplities sake, the output value is converted back into a python 48 | float. 49 | 50 | Args: 51 | input_arr (Any): An array of floating point values consisting of 52 | the frequency of "furry words" used. 53 | 54 | Returns: 55 | float: The confidence score of the model based on the input word 56 | frequency vector. 57 | """ 58 | if not isinstance(input_arr, tf.Tensor): 59 | input_arr = tf.convert_to_tensor(input_arr) 60 | 61 | # model expects batch input, reshape to fit 62 | input_arr = input_arr[tf.newaxis, ...] 
63 | 64 | # prediction = value of batch 0, output 0 65 | prediction = self.model.predict(input_arr)[0][0] 66 | 67 | return float(prediction) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/python,vscode 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=python,vscode 3 | 4 | ### Python ### 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | pip-wheel-metadata/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | pytestdebug.log 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | doc/_build/ 77 | 78 | # PyBuilder 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # poetry 99 | #poetry.lock 100 | 101 | # PEP 582; used by e.g. 
import requests
from dataclasses import dataclass

@dataclass
class TwitterUser:
    id: int
    name: str
    screen_name: str
    profile_image_url: str

@dataclass
class Tweet:
    id: int
    text: str

    def __str__(self) -> str:
        return self.text


class TwitterWrapperError(Exception):
    """An invalid parameter was passed somewhere in `BasicTwitterWrapper`"""
    pass

class BasicTwitterWrapper:
    """This class is a specialized wrapper for the Twitter API.

    Note: This is NOT designed to be expanded on, it's really only for
    compatibility sake and performance. It is not very safe, and very much
    assumes you use it correctly. Burden of failure is on user, not this
    class.
    """
    def __init__(self, bearer_token : str, v1_1: bool = False) -> None:
        """`BasicTwitterWrapper` can be used as an instance to interact with Twitter API.

        Get a Twitter user's basic info and tweets using their 'screen_name'

        Args:
            bearer_token (str): The Bearer Token you receive when creating a new Twitter App
            v1_1 (bool, optional): Which version of the Twitter API to use. Default is v2.0
        """
        self._header = {"authorization": f"Bearer {str(bearer_token)}"}
        # A Session reuses the TCP connection across the paginated calls.
        self._sess = requests.Session()
        self._v1_1 = bool(v1_1)  # No sneaky `None`'s >:(

    def get_user(self, screen_name : str) -> TwitterUser:
        """Collect a Twitter user's basic info.

        Args:
            screen_name (str): The Twitter handle of the user.

        Raises:
            TwitterWrapperError: The API Wrapper receives an error code.

        Returns:
            TwitterUser: Dataclass containing user info, or None when the
                request succeeded but no user data came back.
        """
        if self._v1_1:
            url = 'https://api.twitter.com/1.1/users/lookup.json'
            params = {'screen_name' : screen_name}
        else:
            url = f'https://api.twitter.com/2/users/by/username/{screen_name}'
            params = {'user.fields' : 'profile_image_url'}

        result = self._sess.get(url, params=params, headers=self._header)

        if result.status_code != 200:
            raise TwitterWrapperError(f'{result.status_code} was received from {url}')

        if self._v1_1:
            users = result.json()
            # Guard against an empty lookup result instead of IndexError.
            data = users[0] if users else None
        else:
            data = result.json().get('data')

        if data:
            # v2 calls the handle 'username', v1.1 calls it 'screen_name'.
            handle = data.get('username') if data.get('username') else data.get('screen_name')
            return TwitterUser(data.get('id'), data.get('name'), handle, data.get('profile_image_url'))


    def get_tweets(self, screen_name : str, max_tweets : int = 3200) -> list[Tweet]:
        """Collect the tweets of a user based on their `screen_name`

        Collects as many tweets as possible, up to the 3200 tweet
        limit that Twitter enforces.

        Args:
            screen_name (str): The Twitter handle of the user.
            max_tweets (int, optional): Number of tweets to collect. Defaults to 3200.

        Raises:
            ValueError: The user does not exist, therefore, tweets cannot be found.
            TwitterWrapperError: The API Wrapper receives an error code.


        Returns:
            list[Tweet]: A list of up to `max_tweets` (<= 3200) Tweets of the user.
        """
        max_tweets = min(3200, max_tweets)
        user = self.get_user(screen_name)
        if not user:
            raise ValueError(f'"{screen_name}" does not exist!')

        if self._v1_1:
            url = 'https://api.twitter.com/1.1/statuses/user_timeline.json'
            params = {
                "screen_name" : screen_name,
                "count" : min(200, max_tweets),  # v1.1 page size caps at 200
                "tweet_mode" : 'extended'
            }
        else:
            url = f'https://api.twitter.com/2/users/{user.id}/tweets'
            # v2 requires 5 <= max_results <= 100.
            params = {
                "max_results" : min(100, max(5, max_tweets)),
                'tweet.fields' : 'text'
            }

        tweets = []
        left = max_tweets
        while left > 0:
            result = self._sess.get(url, params=params, headers=self._header)
            if result.status_code != 200:
                raise TwitterWrapperError(f'{result.status_code} was received from {url}')

            if self._v1_1:
                data = result.json()  # list[dict] when working as intended
                if not data:
                    break
                tweets.extend(Tweet(tweet['id_str'], tweet['full_text']) for tweet in data)
                # Paginate: ask for everything strictly older than the last tweet.
                params['max_id'] = str(data[-1].get('id', 1) - 1)
                left -= len(data)
                params['count'] = min(200, left)
            else:
                payload = result.json()  # dict when working as intended
                # An empty timeline page may omit 'data' entirely.
                data = payload.get('data', [])
                tweets.extend(Tweet(tweet['id'], tweet['text']) for tweet in data)
                # Bug fix: `left` was never decremented on the v2 path, so
                # `max_tweets` was silently ignored.
                left -= len(data)

                next_token = payload.get('meta', {}).get('next_token')
                if not next_token:
                    break
                params['pagination_token'] = next_token
                params['max_results'] = min(100, max(5, left))

        # The last page may overshoot the requested amount; trim to contract.
        return tweets[:max_tweets]
import requests
from io import BytesIO
import json

from twitter.wrapper import BasicTwitterWrapper
from model.furry_detector import FurryDetector
from model.parser import parse_tweets

from tkinter import Tk, Canvas, Label, Entry, Button, Scale
from tkinter import N, NE, S, CENTER, HORIZONTAL, NORMAL, GROOVE, FLAT, DISABLED
from PIL import Image, ImageTk

def load_twitter_api() -> BasicTwitterWrapper:
    """Build a `BasicTwitterWrapper` from the credentials in twitter_key.json."""
    with open('twitter_key.json') as json_file:
        data = json.load(json_file)
        bearer = data['bearer_token']
        v1_1 = data['v1.1']

    return BasicTwitterWrapper(bearer_token=bearer, v1_1=v1_1)

def create_root() -> Tk:
    """Create and configure the top-level Tk window."""
    root = Tk()
    root.minsize(400, 500)
    root.maxsize(600, 800)
    root.title(TITLE)
    root.grid_columnconfigure(1, weight=1)
    root.wm_iconbitmap("assets/paw_icon.ico")
    root.configure(bg=BG)

    return root

if __name__ == '__main__':
    """
    Variables
    """
    # Tkinter view constants
    TITLE = "Furry Detector"
    TITLEFONT = ("Tahoma", 18)
    HEADERFONT = ("Tahoma", 10)
    DEFAULTFONT = ("Tahoma", 8)
    DISCLAIMERFONT = ("Tahoma", 7)
    BIGFONT = ("Tahoma", 60)
    BG = "gray85"
    TITLE_BG = "gray80"
    BUTTON_BG = "dodger blue"
    BUTTON_DISABLED_BG = "gray90"
    FIELD_BG = "snow"
    # Red -> green ramp; index i colors predictions above i/len of the range.
    COLOR_GRADIENT = [
        "#f00",
        "#fc0a00",
        "#fa1400",
        "#f71e00",
        "#f52800",
        "#f23100",
        "#ef3b00",
        "#ed4400",
        "#ea4c00",
        "#e85500",
        "#e55e00",
        "#e26600",
        "#e06e00",
        "#dd7500",
        "#db7d00",
        "#d88400",
        "#d68b00",
        "#d39200",
        "#d09900",
        "#cea000",
        "#cba600",
        "#c9ac00",
        "#c6b200",
        "#c3b700",
        "#c1bd00",
        "#babe00",
        "#b0bc00",
        "#a6b900",
        "#9cb600",
        "#93b400",
        "#89b100",
        "#80af00",
        "#77ac00",
        "#6fa900",
        "#66a700",
        "#5ea400",
        "#56a200",
        "#4e9f00",
        "#469d00",
        "#3f9a00",
        "#389700",
        "#319500",
        "#2a9200",
        "#239000",
        "#1d8d00",
        "#178a00",
        "#180",
        "#0b8500",
        "#058300",
        "#008000",
    ]

    # Data variables
    user = None        # last successfully-fetched TwitterUser
    user_image = None  # PhotoImage kept referenced so Tk doesn't GC it

    tweet_limit = 3200
    tick_interval = 400
    tweet_amt = 3200

    api = load_twitter_api()
    detector = FurryDetector()


    """
    Tkinter functions
    """
    def test_user():
        """'Get User' callback: look up the entered handle and show its avatar."""
        input_user = twitter_entry.get()

        global user
        global user_image

        try:
            input_user = input_user.replace('@', '')
            user = api.get_user(input_user)

            if user:
                # Get Image ("_normal" is Twitter's 48x48 thumbnail suffix)
                img_url = user.profile_image_url.replace("_normal", "")

                response = requests.get(img_url)
                img = Image.open(BytesIO(response.content))
                img = img.resize((128, 128))
                user_image = ImageTk.PhotoImage(img)
                twitter_user_canvas.itemconfigure(user_img, image=user_image)

                twitter_status_label.configure(
                    text="Success! Found Twitter user", fg="green"
                )

                run_detector_button.configure(state=NORMAL, bg=BUTTON_BG)
            else:
                raise ValueError('User does not exist')

        except Exception as e:
            # Any failure (bad handle, network, image decode) resets the view.
            print(e)
            user_image = ImageTk.PhotoImage(Image.open("assets/no_user.png"))
            twitter_user_canvas.itemconfigure(user_img, image=user_image)

            twitter_status_label.configure(text=f'Error, User "{input_user}" not found.', fg="red")


    def run_detector():
        """'Run detector' callback: scrape tweets and display the model score."""
        tweet_amt = int(tweet_scale.get())
        tweets = api.get_tweets(user.screen_name, tweet_amt)
        in_arr = parse_tweets(tweets)

        pred = detector.run(input_arr=in_arr)

        update_pred(pred)


    def update_pred(prediction):
        """Render `prediction` (0..1) as a colored percentage plus a blurb."""
        # Bug fix: start from the first gradient entry so prediction == 0.0
        # never leaves `color` as the invalid empty string.
        color = COLOR_GRADIENT[0]
        division_size = 1 / len(COLOR_GRADIENT)

        # Pick the highest gradient step the prediction clears.
        for i in range(len(COLOR_GRADIENT)):
            if prediction > i * division_size:
                color = COLOR_GRADIENT[i]

        if prediction >= 0.95:
            description_label.configure(text="You found a furry OWO")
        elif prediction >= 0.75:
            description_label.configure(text="You most likely found a furry")
        elif prediction >= 0.50:
            description_label.configure(text="You probably found a furry")
        elif prediction >= 0.25:
            description_label.configure(text="You probably didn't find a furry")
        elif prediction >= 0.05:
            description_label.configure(text="You most likely didn't find a furry")
        else:
            description_label.configure(text="You didn't find a furry")

        percentage_label.configure(text=f"{round(prediction*100,1)}%", fg=color)
        description_label.configure(fg=color)

    """
    MAIN TKINTER
    """

    # INITIAL SETUP
    # Bug fix: this previously duplicated create_root()'s body line-for-line
    # instead of calling it.
    root = create_root()

    # APP TITLE LABEL
    title_label = Label(root, text=TITLE, bg=TITLE_BG, anchor=CENTER, font=TITLEFONT)

    title_label.grid(
        row=0,
        column=1,
        sticky="nsew",
    )

    # CREDIT HEADER LABEL
    header_label = Label(
        root, text="Developed by @ZenithO_o on Twitter", bg=BG, anchor=N, font=HEADERFONT
    )

    header_label.grid(row=1, column=1, sticky="nsew")

    # IMAGE CANVAS
    twitter_user_canvas = Canvas(root, width=128, height=128, bg=BG)
    twitter_user_canvas.grid(row=3, column=1)

    # USER IMAGE INITIALIZATION
    user_image = ImageTk.PhotoImage(Image.open("assets/no_user.png"))
    user_img = twitter_user_canvas.create_image(129, 0, anchor=NE, image=user_image)

    # TWITTER ACCOUNT ENTRY FIELD
    twitter_entry = Entry(
        root, width=40, bg=FIELD_BG, fg="gray30", bd=0, justify=CENTER, font=DEFAULTFONT
    )

    twitter_entry.grid(row=4, column=1, pady=10)

    twitter_entry.insert(0, "Input Twitter user here... ex: @zenithO_o")

    # TWITTER ACCOUNT CHECK BUTTON
    twitter_button = Button(
        root,
        text="Get User",
        padx=10,
        pady=5,
        bd=0,
        bg=BUTTON_BG,
        fg="snow",
        font=DEFAULTFONT,
        justify=CENTER,
        command=test_user,
    )

    twitter_button.grid(row=5, column=1)

    # TWITTER BUTTON STATUS LABEL
    twitter_status_label = Label(
        root, text="", bg=BG, fg="red", anchor=N, justify=CENTER, font=DISCLAIMERFONT
    )

    twitter_status_label.grid(row=6, column=1)

    # TWEET NUM SCALE
    tweet_scale = Scale(
        root,
        font=DEFAULTFONT,
        label="Number of tweets to scrape:",
        from_=0,
        to=tweet_limit,
        relief=FLAT,
        bg=BG,
        bd=0,
        orient=HORIZONTAL,
        sliderlength=20,
        sliderrelief=GROOVE,
        highlightthickness=0,
        length=300,
        fg="gray20",
        troughcolor=FIELD_BG,
        tickinterval=tick_interval,
    )

    tweet_scale.grid(row=7, column=1)

    # RUN DETECTOR BUTTON (disabled until a user is successfully fetched)
    run_detector_button = Button(
        root,
        text="Run detector",
        bg=BUTTON_DISABLED_BG,
        fg="snow",
        font=DEFAULTFONT,
        justify=CENTER,
        padx=10,
        pady=5,
        bd=0,
        state=DISABLED,
        command=run_detector,
    )

    run_detector_button.grid(row=8, column=1)

    # RUN DETECTOR BUTTON STATUS LABEL
    run_detector_status_label = Label(
        root, text="", bg=BG, anchor=N, justify=CENTER, font=DISCLAIMERFONT
    )

    run_detector_status_label.grid(row=9, column=1)

    # RESULT PERCENTAGE
    percentage_label = Label(
        root, text="0.0%", bg=BG, anchor=CENTER, fg="gray60", font=BIGFONT
    )
    percentage_label.grid(row=10, column=1)

    # RESULT DESCRIPTION
    description_label = Label(
        root,
        text="Input a user to get started",
        bg=BG,
        anchor=CENTER,
        fg="gray60",
        font=HEADERFONT,
    )
    description_label.grid(row=11, column=1)

    # ERROR MESSAGES
    error_label = Label(
        root,
        text='',
        bg=BG,
        fg="red",
        anchor=S,
        pady=30,
        justify=CENTER,
        font=DISCLAIMERFONT,
    )

    error_label.grid(row=100, column=1)


    root.mainloop()
"isnt", 107 | "literally", 108 | "favorite", 109 | "person", 110 | "tho", 111 | "idea", 112 | "live", 113 | "kinda", 114 | "honestly", 115 | "fox", 116 | "support", 117 | "hugs", 118 | "month", 119 | "ready", 120 | "dm", 121 | "hear", 122 | "head", 123 | "hell", 124 | "coming", 125 | "uwu", 126 | "hours", 127 | "tomorrow", 128 | "watch", 129 | "piece", 130 | "nsfw", 131 | "dragon", 132 | "adorable", 133 | "paws", 134 | "hate", 135 | "true", 136 | "thinking", 137 | "hot", 138 | "tweet", 139 | "small", 140 | "beautiful", 141 | "times", 142 | "artist", 143 | "gay", 144 | "perfect", 145 | "enjoy", 146 | "left", 147 | "tonight", 148 | "place", 149 | "late", 150 | "feeling", 151 | "playing", 152 | "yo", 153 | "mind", 154 | "job", 155 | "pic", 156 | "point", 157 | "wont", 158 | "black", 159 | "guy", 160 | "style", 161 | "excited", 162 | "face", 163 | "weekend", 164 | "characters", 165 | "sweet", 166 | "lovely", 167 | "wrong", 168 | "thankyou", 169 | "aww", 170 | "talk", 171 | ">.>", 172 | "weird", 173 | "lil", 174 | "account", 175 | "share", 176 | "quick", 177 | "eat", 178 | "sounds", 179 | "holy", 180 | "sleep", 181 | "started", 182 | "fine", 183 | "totally", 184 | "baby", 185 | "artists", 186 | "money", 187 | "gift", 188 | "stay", 189 | "wonderful", 190 | ":(", 191 | "care", 192 | "ur", 193 | "buy", 194 | "couple", 195 | "set", 196 | "bed", 197 | "ass", 198 | "tbh", 199 | "christmas", 200 | "forgot", 201 | "room", 202 | "games", 203 | "pics", 204 | "heck", 205 | "food", 206 | "pride", 207 | "wolf", 208 | "send", 209 | "photo", 210 | "car", 211 | "blue", 212 | "youve", 213 | "read", 214 | "fluffy", 215 | "wasnt", 216 | "join", 217 | "color", 218 | "close", 219 | "house", 220 | "hehe", 221 | "idk", 222 | "drew", 223 | "congrats", 224 | "slots", 225 | "body", 226 | "reason", 227 | "doodle", 228 | "school", 229 | "music", 230 | "watching", 231 | "version", 232 | "gosh", 233 | "picture", 234 | "em", 235 | "furries", 236 | "bro", 237 | "high", 238 | "chance", 239 | 
"interested", 240 | "months", 241 | "funny", 242 | "called", 243 | "girl", 244 | "works", 245 | "white", 246 | "photos", 247 | "eyes", 248 | "safe", 249 | "change", 250 | "feels", 251 | "fa", 252 | "meet", 253 | "hit", 254 | "comic", 255 | "talking", 256 | "trans", 257 | "family", 258 | "youll", 259 | "early", 260 | "weeks", 261 | "heard", 262 | "fur", 263 | "red", 264 | "content", 265 | "soft", 266 | "cutie", 267 | "pm", 268 | "opening", 269 | "oc", 270 | "friday", 271 | "add", 272 | "form", 273 | "movie", 274 | "mood", 275 | "huge", 276 | "icon", 277 | "comment", 278 | "ych", 279 | "animation", 280 | "starting", 281 | "party", 282 | "moment", 283 | "telegram", 284 | "song", 285 | "yesterday", 286 | "problem", 287 | "fact", 288 | "decided", 289 | "yay", 290 | "waiting", 291 | "hair", 292 | "understand", 293 | "break", 294 | "tail", 295 | "link", 296 | "update", 297 | "yea", 298 | "boys", 299 | "agree", 300 | "sad", 301 | "phone", 302 | "lost", 303 | "colors", 304 | "cats", 305 | "order", 306 | "water", 307 | "happened", 308 | "posted", 309 | "bring", 310 | "hug", 311 | "luck", 312 | "dogs", 313 | "hand", 314 | "pay", 315 | "animal", 316 | "hour", 317 | "turn", 318 | "rest", 319 | "turned", 320 | "leave", 321 | "sona", 322 | "tired", 323 | "future", 324 | "space", 325 | "awww", 326 | "fan", 327 | "extra", 328 | "offer", 329 | "short", 330 | "imagine", 331 | "aw", 332 | "deer", 333 | "heart", 334 | "happen", 335 | "lots", 336 | "warm", 337 | "stickers", 338 | "easy", 339 | "snow", 340 | "sketches", 341 | "alright", 342 | "coffee", 343 | "bunch", 344 | "boi", 345 | "told", 346 | "finish", 347 | "worth", 348 | "mom", 349 | "wear", 350 | "thread", 351 | "forget", 352 | "pokemon", 353 | "loved", 354 | "felt", 355 | "final", 356 | "yep", 357 | "reminder", 358 | "brain", 359 | "win", 360 | "single", 361 | "tiny", 362 | "streaming", 363 | "nah", 364 | "ahh", 365 | "sale", 366 | "ahhh", 367 | "ty", 368 | "hands", 369 | "fandom", 370 | "question", 371 | "special", 372 | 
"main", 373 | "news", 374 | "power", 375 | "season", 376 | "story", 377 | "online", 378 | "gorgeous", 379 | "custom", 380 | "energy", 381 | "minutes", 382 | "based", 383 | "dumb", 384 | "pizza", 385 | "store", 386 | "case", 387 | "kitty", 388 | "entire", 389 | "wtf", 390 | "original", 391 | "owo", 392 | "fursona", 393 | "fanart", 394 | "folks", 395 | "image", 396 | "random", 397 | "fast", 398 | "ha", 399 | "fight", 400 | "posting", 401 | "crazy", 402 | "reply", 403 | "wild", 404 | "personal", 405 | "butt", 406 | "summer", 407 | "list", 408 | "youtube", 409 | "drink", 410 | "pick", 411 | "matter", 412 | "pls", 413 | "trade", 414 | "missed", 415 | "wip", 416 | "hold", 417 | "needed", 418 | "die", 419 | "cuz", 420 | "halloween", 421 | "paw", 422 | "appreciated", 423 | "winner", 424 | "dad", 425 | "rubber", 426 | "cold", 427 | "yup", 428 | "sticker", 429 | "normal", 430 | "bird", 431 | "fantastic", 432 | "hang", 433 | "asked", 434 | "sick", 435 | "model", 436 | "hahaha", 437 | "st", 438 | "bought", 439 | "closed", 440 | "listen", 441 | "sheet", 442 | "shot", 443 | "oo", 444 | "interesting", 445 | "basically", 446 | ">:3", 447 | "reference", 448 | "experience", 449 | "enter", 450 | "issue", 451 | ":3c", 452 | "mff", 453 | "bc", 454 | "pet", 455 | "designs", 456 | "sound", 457 | "takes", 458 | ":>", 459 | "pictures", 460 | "horny", 461 | "slot", 462 | "weve", 463 | "light", 464 | "uh", 465 | "group", 466 | ":o", 467 | "learn", 468 | "played", 469 | "beans", 470 | "base", 471 | "human", 472 | "featuring", 473 | "completely", 474 | "running", 475 | "comm", 476 | "likes", 477 | "knew", 478 | "kids", 479 | "incredible", 480 | "straight", 481 | "meme", 482 | "project", 483 | "stupid", 484 | "btw", 485 | "la", 486 | "type", 487 | "puppy", 488 | "dream", 489 | "team", 490 | "current", 491 | "actual", 492 | "joke", 493 | "living", 494 | "drawn", 495 | "social", 496 | "ppl", 497 | "smol", 498 | "auction", 499 | "worry", 500 | "message", 501 | "nope", 502 | "police", 503 | 
"videos", 504 | "drop", 505 | "eye", 506 | "fall", 507 | "sexy", 508 | "deal", 509 | "anthrocon", 510 | "sucks", 511 | "site", 512 | "simple", 513 | "itll", 514 | "dead", 515 | "shop", 516 | "test", 517 | "info", 518 | "practice", 519 | "chat", 520 | "price", 521 | "word", 522 | "wearing", 523 | "discord", 524 | "dark", 525 | "handsome", 526 | "number", 527 | "pain", 528 | "commissioning", 529 | "da", 530 | "huh", 531 | "panda", 532 | "fit", 533 | "covid", 534 | "strong", 535 | "chill", 536 | "voice", 537 | "paint", 538 | "drive", 539 | "size", 540 | "ends", 541 | "community", 542 | "kid", 543 | "painting", 544 | "mad", 545 | "worst", 546 | "silly", 547 | "plush", 548 | "series", 549 | "clean", 550 | "public", 551 | "boop", 552 | "rules", 553 | "plan", 554 | "city", 555 | "box", 556 | "book", 557 | "issues", 558 | "sold", 559 | "shirt", 560 | "sense", 561 | "green", 562 | "save", 563 | "buddy", 564 | "extremely", 565 | "twitch", 566 | "longer", 567 | "pack", 568 | "visit", 569 | "putting", 570 | "bigger", 571 | "mask", 572 | "dance", 573 | "monday", 574 | "level", 575 | "catch", 576 | "complete", 577 | "suits", 578 | "happening", 579 | "attention", 580 | "tiger", 581 | "species", 582 | "aint", 583 | "precious", 584 | "fair", 585 | "daily", 586 | "card", 587 | "watched", 588 | "boyfriend", 589 | "meant", 590 | "absolute", 591 | "quality", 592 | "showing", 593 | "bitch", 594 | "furryart", 595 | "sit", 596 | "tweets", 597 | "walk", 598 | "vote", 599 | "media", 600 | "busy", 601 | "doggo", 602 | "king", 603 | "artwork", 604 | "cons", 605 | "fully", 606 | "girls", 607 | "ocs", 608 | "worked", 609 | "beat", 610 | "tag", 611 | "deserve", 612 | "dang", 613 | "animals", 614 | "celebrate", 615 | "stuck", 616 | "pc", 617 | "block", 618 | "met", 619 | "bet", 620 | "sell", 621 | "answer", 622 | "supposed", 623 | "ride", 624 | "inspired", 625 | "throw", 626 | "lucky", 627 | "spent", 628 | "eating", 629 | "offers", 630 | "internet", 631 | "moving", 632 | "spend", 633 | "large", 
634 | "view", 635 | "monster", 636 | "doodles", 637 | "local", 638 | "hoping", 639 | "hmm", 640 | "anime", 641 | "process", 642 | "step", 643 | "sharing", 644 | "bark", 645 | "lives", 646 | "comments", 647 | "bear", 648 | "dms", 649 | "class", 650 | "star", 651 | "details", 652 | "curious", 653 | "fav", 654 | "shiny", 655 | "dunno", 656 | "aaaa", 657 | "cut", 658 | "taste", 659 | "email", 660 | "choose", 661 | "ice", 662 | "figure", 663 | "ideas", 664 | "surprise", 665 | "company", 666 | "touch", 667 | "realize", 668 | "business", 669 | "vaccine", 670 | "vibes", 671 | "cake", 672 | "poor", 673 | "bunny", 674 | "background", 675 | "caught", 676 | "swear", 677 | "sunday", 678 | "|", 679 | "merry", 680 | "enjoying", 681 | "fluff", 682 | "favourite", 683 | "comms", 684 | "choice", 685 | "werewolf", 686 | "older", 687 | "sign", 688 | "sex", 689 | "tf", 690 | "vr", 691 | "pass", 692 | "en", 693 | "wake", 694 | ":c", 695 | "excuse", 696 | "country", 697 | "smile", 698 | "sending", 699 | "usd", 700 | "bud", 701 | "shouldnt", 702 | "secret", 703 | "feet", 704 | "trump", 705 | "looked", 706 | "changed", 707 | "bday", 708 | "def", 709 | "bat", 710 | "reading", 711 | "dear", 712 | "clear", 713 | "double", 714 | "prices", 715 | "expect", 716 | "ears", 717 | "realized", 718 | "loves", 719 | "imma", 720 | "spot", 721 | "wanting", 722 | "pieces", 723 | "switch", 724 | "meeting", 725 | "biggest", 726 | "cheese", 727 | "el", 728 | "woah", 729 | "scared", 730 | "questions", 731 | "winter", 732 | "tea", 733 | "weather", 734 | "sort", 735 | "outfit", 736 | "correct", 737 | "chicken", 738 | "lo", 739 | "claim", 740 | "bean", 741 | "ton", 742 | "worse", 743 | "april", 744 | "flat", 745 | "paypal", 746 | "telling", 747 | "recommend", 748 | "latex", 749 | "channel", 750 | "request", 751 | "attack", 752 | "stand", 753 | "allowed", 754 | "vrchat", 755 | "loving", 756 | "ship", 757 | "challenge", 758 | "treat", 759 | "badge", 760 | "calling", 761 | "hurt", 762 | "opportunity", 763 | 
"giveaway", 764 | "parents", 765 | "ugh", 766 | "ended", 767 | "air", 768 | "pleasure", 769 | "middle", 770 | "charity", 771 | "pink", 772 | "teeth", 773 | "general", 774 | "sitting", 775 | "deep", 776 | "valentines", 777 | "trip", 778 | "helps", 779 | "pro", 780 | "rough", 781 | "thoughts", 782 | "note", 783 | "missing", 784 | "todays", 785 | "adopt", 786 | "headshot", 787 | "earlier", 788 | "road", 789 | "rubs", 790 | "dragons", 791 | "reach", 792 | "brand", 793 | "episode", 794 | "arrived", 795 | "theme", 796 | "kiss", 797 | "se", 798 | "irl", 799 | "furs", 800 | "scene", 801 | "boops", 802 | "paper", 803 | "tysm", 804 | "table", 805 | "health", 806 | "reward", 807 | "animated", 808 | "cuties", 809 | "trust", 810 | "sooo", 811 | "bite", 812 | "shipping", 813 | "fish", 814 | "merch", 815 | "surprised", 816 | "death", 817 | "neat", 818 | "woke", 819 | "kill", 820 | "screen", 821 | "pup", 822 | "personally", 823 | "evil", 824 | "dick", 825 | "colored", 826 | "timeline", 827 | "lazy", 828 | "area", 829 | "scary", 830 | "sonic", 831 | "service", 832 | "husky", 833 | "incredibly", 834 | "shoot", 835 | "helping", 836 | "rts", 837 | "slow", 838 | "cards", 839 | "saturday", 840 | "keeping", 841 | "completed", 842 | "situation", 843 | "blast", 844 | "pins", 845 | "congratulations", 846 | "grab", 847 | "holiday", 848 | "milk", 849 | "pose", 850 | "angry", 851 | "belly", 852 | "prefer", 853 | "lion", 854 | "cover", 855 | "beach", 856 | "march", 857 | "app", 858 | "camera", 859 | "concept", 860 | "mess", 861 | "xx", 862 | "train", 863 | "moon", 864 | "minute", 865 | "toy", 866 | "count", 867 | "progress", 868 | "friendly", 869 | "fresh", 870 | "posts", 871 | "learned", 872 | "shark", 873 | "rip", 874 | "excellent", 875 | "mfymonday", 876 | "lookin", 877 | "tall", 878 | "google", 879 | "ac", 880 | "american", 881 | "official", 882 | "age", 883 | "history", 884 | "aaa", 885 | "sadly", 886 | "nintendoswitch", 887 | "theyll", 888 | "giant", 889 | "buying", 890 | "selling", 891 | 
"awwww", 892 | "goal", 893 | "learning", 894 | "fucked", 895 | "website", 896 | "accounts", 897 | "gender", 898 | "supporting", 899 | "brought", 900 | "foxes", 901 | "fullbody", 902 | "updated", 903 | "porn", 904 | "digital", 905 | "crop", 906 | "ad", 907 | "ball", 908 | "places", 909 | "ft", 910 | "sun", 911 | "commissioned", 912 | "fuckin", 913 | "dinner", 914 | "paid", 915 | "listening", 916 | "smell", 917 | "cuddle", 918 | "stick", 919 | "lose", 920 | "aaaaa", 921 | "retweets", 922 | "mode", 923 | "evening", 924 | "starts", 925 | "yesss", 926 | "holidays", 927 | "officially", 928 | "brother", 929 | "pin", 930 | "ooh", 931 | "event", 932 | "lines", 933 | "ipad", 934 | "cream", 935 | "sfw", 936 | "magic", 937 | "child", 938 | "handle", 939 | "shut", 940 | "crying", 941 | "af", 942 | "hanging", 943 | "terrible", 944 | "lie", 945 | "lady", 946 | "fursuits", 947 | "fellow", 948 | "mention", 949 | "boss", 950 | "mouth", 951 | "explain", 952 | "trash", 953 | "orange", 954 | "turns", 955 | "rock", 956 | "heads", 957 | "server", 958 | "woof", 959 | "continue", 960 | "ways", 961 | "town", 962 | "anytime", 963 | "build", 964 | "tree", 965 | "feed", 966 | "fat", 967 | "icons", 968 | "figured", 969 | "exist", 970 | "rights", 971 | "oof", 972 | "noticed", 973 | "door", 974 | "nose", 975 | "promise", 976 | "batch", 977 | "bruh", 978 | "legit", 979 | "oops", 980 | "pants", 981 | "hotel", 982 | "finding", 983 | "fixed", 984 | "bid", 985 | "holding", 986 | "men", 987 | "pop", 988 | "option", 989 | "awful", 990 | "smh", 991 | "minecraft", 992 | "clothes", 993 | "parts", 994 | "planning", 995 | "park", 996 | "son", 997 | "pandemic", 998 | "letting", 999 | "wall", 1000 | "code", 1001 | "nerd", 1002 | "war" 1003 | ] 1004 | } --------------------------------------------------------------------------------