├── pipeline.png ├── rawdata ├── id2feature.p ├── Baseline_label.csv ├── syntactic_complexity_measures.csv └── feature_name.csv ├── Transcriptions ├── C0000_example.csv └── C0001_example.csv ├── Data ├── DataInit.py └── DataPreProcessing.py ├── configs ├── cfg.py └── default_configs.py ├── Solvers ├── Solver_loader.py ├── Standard_solver.py ├── Baseline_confounder_solver.py ├── subject_harmonization_solver.py ├── confounder_harmonization_solver.py └── Solver_Base.py ├── LICENSE ├── tools └── utils.py ├── main.py ├── README.md ├── feature_extractor.py ├── .gitignore ├── Models └── model.py └── LIWC2007_English.dic /pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/illidanlab/subject-harmonization/HEAD/pipeline.png -------------------------------------------------------------------------------- /rawdata/id2feature.p: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/illidanlab/subject-harmonization/HEAD/rawdata/id2feature.p -------------------------------------------------------------------------------- /rawdata/Baseline_label.csv: -------------------------------------------------------------------------------- 1 | ts_sub_id,nac_normcog,nac_a1_age,nac_sex,nac_educ 2 | C0000,0,60.7,1,15 3 | C0001,1,80.2,2,16 4 | -------------------------------------------------------------------------------- /Transcriptions/C0000_example.csv: -------------------------------------------------------------------------------- 1 | spkid,uttid,st,et,role,duration,asr 2 | C0000_example,,0.45,4.6400000000000015,Moderator,4.19,hello what is your name 3 | C0000_example,,4.94,8.07,Participant,3.13,my name is bao hoang -------------------------------------------------------------------------------- /Transcriptions/C0001_example.csv: -------------------------------------------------------------------------------- 1 | spkid,uttid,st,et,role,duration,asr 2 | 
def data_init(cfg_proj, mci_subject, nl_subject, dic_id2feature, df_labels, seed):
    """Split subjects 80/20 into train/test (per diagnosis group) and build arrays.

    The split is done at the subject level, so every recording of a given
    subject lands on the same side of the split.

    Returns (x_train, y_train, g_train, x_test, y_test, g_test) as produced
    by tools.utils.get_feature_from_id.
    """
    # Stratify implicitly by splitting each diagnosis group separately.
    mci_train, mci_test = train_test_split(mci_subject, test_size=0.2, random_state=seed)
    nl_train, nl_test = train_test_split(nl_subject, test_size=0.2, random_state=seed)

    train_ids = mci_train + nl_train
    test_ids = mci_test + nl_test

    return get_feature_from_id(train_ids, test_ids, dic_id2feature, df_labels)
def data_pre_processing(cfg_proj, cfg_m, x_train_raw, y_train, g_train, x_test_raw, y_test, g_test):
    """Solver-dependent feature trimming plus z-score standardization.

    The last 3 feature columns are the appended confounder variables
    (age, sex, education); only Baseline_confounder_solver keeps them.
    The group metadata g is [id, age, gender, educ]; every solver except
    confounder_harmonization_solver only needs the subject id.
    """
    if cfg_proj.solver not in ["Baseline_confounder_solver"]:
        # drop the confounder columns for solvers that do not use them
        x_train_raw, x_test_raw = x_train_raw[:, :-3], x_test_raw[:, :-3]

    if cfg_proj.solver not in ["confounder_harmonization_solver"]:
        # reduce group info to the subject id only
        g_train = [g[0] for g in g_train]
        g_test = [g[0] for g in g_test]

    # Fit the scaler on train only; apply the same transform to test.
    scaler = StandardScaler()
    x_train = scaler.fit_transform(x_train_raw)
    x_test = scaler.transform(x_test_raw)

    return x_train, y_train, g_train, x_test, y_test, g_test
cfg_proj.solver == "Standard_solver": 13 | s = Standard_solver(cfg_proj, cfg_m) 14 | elif cfg_proj.solver == "subject_harmonization_solver": 15 | s = subject_harmonization_solver(cfg_proj, cfg_m) 16 | elif cfg_proj.solver == "Baseline_confounder_solver": 17 | s = Baseline_confounder_solver(cfg_proj, cfg_m) 18 | elif cfg_proj.solver == "confounder_harmonization_solver": 19 | s = confounder_harmonization_solver(cfg_proj, cfg_m) 20 | return s -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 ILLIDAN Lab 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
class Standard_solver(Solver_Base):
    """No-harmonization baseline: train the classifier directly on raw features."""

    def __init__(self, cfg_proj, cfg_m, name="Std"):
        Solver_Base.__init__(self, cfg_proj, cfg_m, name)

    def run(self, x_train, y_train, g_train, x_test, y_test, g_test, seed):
        """Train one classifier for this seed and return the 8 evaluation metrics."""
        self.set_random_seed(seed)

        # Data / model / optimization setup.
        train_loader = DataLoader(
            CustomDataset(x_train, y_train, g_train),
            batch_size=self.cfg_m.training.batch_size,
            drop_last=True,
            shuffle=True,
        )
        net = MLP_pytorch(
            input_dim=len(x_train[0]),
            output_dim=self.cfg_m.data.dim_out,
            classifier=self.cfg_proj.classifier,
        )
        opt = torch.optim.AdamW(net.parameters(), lr=self.cfg_m.training.lr_init)
        # Anneal over the total number of optimizer steps (epochs * batches).
        sched = torch.optim.lr_scheduler.CosineAnnealingLR(
            opt, int(self.cfg_m.training.epochs * len(train_loader)))
        net, _ = self.to_parallel_model(net)

        # Train, then evaluate on held-out subjects.
        net, _ = self.basic_train(net, train_loader, self.cross_entropy_regs, opt, sched)

        # (auc, f1, sens, spec) at sample level and subject ("sbj") level.
        return self.eval_func(net, x_test, y_test, g_test)
ml_collections.ConfigDict() 10 | training.epochs = 100 11 | training.batch_size = 512 12 | training.log_freq = 1000 13 | training.valid_rate = 0.0 14 | training.num_workers = 16 15 | training.m_pretrained = True 16 | training.lr_init = 1.0e-3 17 | 18 | # finetuning 19 | config.finetuning = finetuning = ml_collections.ConfigDict() 20 | finetuning.epochs = 25 21 | finetuning.batch_size = 512 22 | finetuning.log_freq = 25 23 | finetuning.lr_init = 1.0e-3 24 | 25 | # test 26 | config.test = test = ml_collections.ConfigDict() 27 | test.batch_size = 512 28 | test.num_workers = 16 29 | 30 | # data 31 | config.data = data = ml_collections.ConfigDict() 32 | data.img_size = 32 33 | data.num_channels = 3 34 | data.num_data_points = -1 35 | data.num_data_points_test = -1 36 | 37 | # model 38 | config.model = model = ml_collections.ConfigDict() 39 | model.sigma_min = 0.01 40 | model.sigma_max = 50 41 | model.beta_min = 0.01 42 | model.beta_max = 2 43 | 44 | # model 45 | config.attack = attack = ml_collections.ConfigDict() 46 | attack.PGD = {"eps":8.0/255.0, "alpha":1.0/255.0, "steps":10} 47 | attack.PGDL2 = {"eps":1.0, "alpha":0.1, "steps":10} 48 | 49 | # optimization 50 | config.optim = optim = ml_collections.ConfigDict() 51 | optim.weight_decay = 0 52 | optim.optimizer = 'Adam' 53 | optim.lr = 2e-4 54 | optim.beta1 = 0.9 55 | optim.eps = 1e-8 56 | optim.warmup = 5000 57 | optim.grad_clip = 1. 
58 | 59 | return config -------------------------------------------------------------------------------- /rawdata/feature_name.csv: -------------------------------------------------------------------------------- 1 | Feature Name 2 | Total function words 3 | Total pronouns 4 | Personal pronouns 5 | 1st pers singular 6 | 1st pers plural 7 | 2nd person 8 | 3rd pers singular 9 | 3rd pers plural 10 | Impersonal pronouns 11 | Articles 12 | Common verbs 13 | Auxiliary verbs 14 | Past tense 15 | Present tense 16 | Future tense 17 | Adverbs 18 | Prepositions 19 | Conjunctions 20 | Negations 21 | Quantifiers 22 | Numbers 23 | Swear words 24 | Social processes 25 | Family 26 | Friends 27 | Humans 28 | Affective processes 29 | Positive emotion 30 | Negative emotion 31 | Anxiety 32 | Anger 33 | Sadness 34 | Cognitive processes 35 | Insight 36 | Causation 37 | Discrepancy 38 | Tentative 39 | Certainty 40 | Inhibition 41 | Inclusive 42 | Exclusive 43 | Perceptual processes 44 | See 45 | Hear 46 | Feel 47 | Biological processes 48 | Body 49 | Health 50 | Sexual 51 | Ingestion 52 | Relativity 53 | Motion 54 | Space 55 | Time 56 | Work 57 | Achievement 58 | Leisure 59 | Home 60 | Money 61 | Religion 62 | Death 63 | Assent 64 | Nonfluencies 65 | Fillers 66 | Word count 67 | Sentence 68 | Verb phrase 69 | Clause 70 | T-unit 71 | Dependent clause 72 | Complex T-unit 73 | Coordinate phrase 74 | Complex nominal 75 | Mean length of sentence 76 | Mean length of T-unit 77 | Mean length of clause 78 | Clause per sentence 79 | Verb phrase per T-unit 80 | Clause per T-unit 81 | Dependent clause per clause 82 | Dependent clause per T-unit 83 | T-unit per sentence 84 | Complex T-unit ratio 85 | Coordinate phrase per T-unit 86 | Coordinate phrase per clause 87 | Complex nominal per T-unit 88 | Complex nominal per clause 89 | Simple TTR 90 | Root TTR 91 | Log TTR 92 | Mass TTR 93 | Mean segmental TTR 94 | Moving average TT 95 | Hypergeometric distribution D 96 | Measure of textual lexical 
def load_raw_data():
    """Load per-subject features and labels from rawdata/.

    Returns:
        dic_id2feature: {subject_id: np.ndarray of per-session feature rows}
        df_labels: DataFrame of Baseline_label.csv
        nl_subject: ids with nac_normcog == 1 (normal cognition) that have features
        mci_subject: ids with nac_normcog == 0 (MCI) that have features
    """
    with open("rawdata/id2feature.p", "rb") as f:
        dic_id2feature = pickle.load(f)

    # One array per subject: rows are sessions, columns are language markers.
    for sbj_id in dic_id2feature:
        dic_id2feature[sbj_id] = np.array(dic_id2feature[sbj_id])

    df_labels = pd.read_csv("rawdata/Baseline_label.csv")

    nl_subject = df_labels["ts_sub_id"][df_labels["nac_normcog"] == 1].to_list()
    mci_subject = df_labels["ts_sub_id"][df_labels["nac_normcog"] == 0].to_list()
    # Keep only subjects that actually have extracted features.
    nl_subject = [s for s in nl_subject if s in dic_id2feature]
    mci_subject = [s for s in mci_subject if s in dic_id2feature]

    return dic_id2feature, df_labels, nl_subject, mci_subject


def _subject_info(df_labels, sbj_id):
    """Return (label, [age, gender, educ]) for one subject id.

    Label is 1 for MCI (nac_normcog == 0) and 0 for normal cognition.
    Ages are truncated to int, matching the original pipeline.
    """
    row = df_labels[df_labels["ts_sub_id"] == sbj_id]
    label = 1 - int(row["nac_normcog"].values[0])
    confounders = [int(row["nac_a1_age"].values[0]),
                   int(row["nac_sex"].values[0]),
                   int(row["nac_educ"].values[0])]
    return label, confounders


def _collect_split(ids, dic_id2feature, df_labels):
    """Expand subject ids into per-session (features, label, group) lists.

    Each session's feature vector gets the 3 confounders appended; the group
    entry is [id, age, gender, educ].
    """
    xs, ys, gs = [], [], []
    for sbj_id in ids:
        label, confounders = _subject_info(df_labels, sbj_id)
        age, gender, educ = confounders
        for feature in dic_id2feature[sbj_id]:
            xs.append(np.concatenate([feature, confounders]))
            # FIX: always store labels as floats. The original appended
            # label*1.0 for train but the raw int for test, so the returned
            # y arrays had inconsistent dtypes between splits.
            ys.append(float(label))
            gs.append([sbj_id, age, gender, educ])
    return np.array(xs), np.array(ys), gs


def get_feature_from_id(train, test, dic_id2feature, df_labels):
    """Build (x, y, g) arrays for the train and test subject id lists.

    Returns (x_train, y_train, g_train, x_test, y_test, g_test); x are
    2-D float arrays with the confounders in the last 3 columns, y are
    float label arrays, g are lists of [id, age, gender, educ].
    """
    x_train, y_train, g_train = _collect_split(train, dic_id2feature, df_labels)
    x_test, y_test, g_test = _collect_split(test, dic_id2feature, df_labels)
    return x_train, y_train, g_train, x_test, y_test, g_test
print("step-%d, auc=%.3f,f1=%.3f,sens=%.3f,spec=%.3f, sbj:auc=%.3f,f1=%.3f,sens=%.3f,spec=%.3f"%(step, auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj)) 33 | 34 | # print results 35 | solver.save_results() 36 | 37 | if __name__ == "__main__": 38 | 39 | parser = argparse.ArgumentParser(description="template") 40 | parser.add_argument("--gpu", type=str, default="3", required=False) 41 | parser.add_argument("--seed", type=int, default = None, required=False) 42 | parser.add_argument("--num_total_runs", type=int, default = 100, required=False) 43 | parser.add_argument("--flag_generatePredictions", default = ["Sex", "Edu", "Age"]) 44 | parser.add_argument("--number_of_feature", type=int, default = 99, required=False) 45 | parser.add_argument("--vote_threshold", type=int, default = 0.5, required=False) 46 | 47 | #Standard_solver, Baseline_confounder_solver, subject_harmonization_solver, confounder_harmonization_solver 48 | parser.add_argument("--solver", type=str, default="subject_harmonization_solver", required=False) 49 | parser.add_argument("--classifier", type=str, default="MLP", required=False) #LR, MLP 50 | parser.add_argument("--flag_log", type=str, default = True, required=False) 51 | parser.add_argument("--save_whitening", type=bool, default = False, required=False) 52 | parser.add_argument("--flag_time", type=str, default = strftime("%Y-%m-%d_%H-%M-%S", localtime()), required=False) 53 | parser.add_argument("--flag_load", type=str, default = None, required=False) #if is not None, then the file of loaded para need to contain the str 54 | cfg_proj = parser.parse_args() 55 | 56 | cfg_m = init_cfg(cfg_proj) 57 | os.environ["CUDA_VISIBLE_DEVICES"] = "%s"%(cfg_proj.gpu) 58 | if cfg_proj.save_whitening: 59 | cfg_proj.num_total_runs = 1 60 | main(cfg_proj, cfg_m) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Subject 
Harmonization of Digital Biomarkers: Improved Detection of Mild Cognitive Impairment from Language Markers 2 | Official code for paper: "Subject Harmonization of Digital Biomarkers: Improved Detection of Mild Cognitive Impairment from Language Markers", Bao Hoang, Yijiang Pang, Hiroko H. Dodge, and Jiayu Zhou, PSB 2024. 3 | 4 | ## Overview 5 | 6 | ![](pipeline.png) 7 | 8 | Mild cognitive impairment (MCI) represents the early stage of dementia including Alzheimer’s disease (AD) and plays a crucial role in developing therapeutic interventions and treatment. Early detection of MCI offers opportunities for early intervention and significantly benefits cohort enrichment for clinical trials. Imaging markers and in vivo markers 9 | in plasma and cerebrospinal fluid biomarkers have high detection performance, and yet their prohibitive costs and intrusiveness demand more affordable and accessible alternatives. The recent advances in digital biomarkers, especially language markers, have shown great potential, where variables informative to MCI are derived from linguistic and/or speech and later 10 | used for predictive modeling. A major challenge in modeling language markers comes from the variability of how each person speaks. As the cohort size for language studies is usually 11 | small due to extensive data collection efforts, the variability among persons makes language markers hard to generalize to unseen subjects. In this paper, we propose a novel subject harmonization tool to address the issue of distributional differences in language markers across subjects, thus enhancing the generalization performance of machine learning models. Our empirical results show that machine learning models built on our harmonized features have improved prediction performance on unseen data. 
12 | 13 | ## Language Marker Extractor 14 | To extract language markers from the transcripts, you first need to extract syntactic complexity features using the [L2 Syntactic Complexity Analyzer](https://sites.psu.edu/xxl13/l2sca/). After that, put your syntactic complexity features in the file `rawdata/syntactic_complexity_measures.csv` and your transcript data in the folder `Transcriptions`, then run the command ```python feature_extractor.py``` 15 | 16 | This will produce a 99-dimensional language-marker feature in `rawdata/id2feature.p` 17 | 18 | ## Harmonization Methods 19 | Here we provide several demo harmonization commands. Remember to use your own transcript data; the data included in this repo is only for demonstration. 20 | 21 | - **No Harmonization:** 22 | 23 | - Run ```python main.py --solver Standard_solver``` 24 | 25 | - **Generalized least squares:** 26 | 27 | - Run ```python main.py --solver Baseline_confounder_solver``` 28 | 29 | - **Deep harmonization - subject (Proposed method):** 30 | 31 | - Run ```python main.py --solver subject_harmonization_solver``` 32 | 33 | - **Deep harmonization - confounder:** 34 | 35 | - Run ```python main.py --solver confounder_harmonization_solver``` 36 | 37 | - You can change the confounder variable via ``config.training.confounder_var`` in ``configs/cfg.py`` 38 | 39 | ## Data Request 40 | The data is available upon request at [https://www.i-conect.org/](https://www.i-conect.org/) 41 | 42 | ## Acknowledgement 43 | This material is based in part upon work supported by the National Science Foundation under 44 | Grant IIS-2212174, IIS-1749940, Office of Naval Research N00014-20-1-2382, and National 45 | Institute on Aging (NIA) RF1AG072449, R01AG051628, R01AG056102.
def respone_feature(df):
    """Mean and variance of the participant's turn lengths, in tokens.

    Consecutive utterances by the same speaker are merged into one turn;
    only turns whose speaker role is "Participant" contribute a length.
    """
    df.dropna(subset=["asr"], inplace=True)

    turn_lengths = []
    active_role = str(df["role"].iloc[0])
    buffered = ""

    def _flush():
        # Record the buffered turn if it belongs to the participant.
        n_tokens = len(buffered.split())
        if n_tokens != 0 and active_role == "Participant":
            turn_lengths.append(n_tokens)

    for row in range(df.shape[0]):
        # Skip empty / whitespace-only transcriptions.
        if not str(df["asr"].iloc[row]).split():
            continue

        if str(df["role"].iloc[row]) != active_role:
            _flush()
            buffered = ""
            active_role = df["role"].iloc[row]

        buffered += str(df["asr"].iloc[row]).strip() + " "

    _flush()  # trailing turn

    return [np.mean(turn_lengths), np.var(turn_lengths)]

def lexical(text):
    """Ten lexical-diversity measures over the flemmatized text (order fixed)."""
    flt = ld.flemmatize(text)
    measures = (ld.ttr, ld.root_ttr, ld.log_ttr, ld.maas_ttr, ld.msttr,
                ld.mattr, ld.hdd, ld.mtld, ld.mtld_ma_wrap, ld.mtld_ma_bid)
    return [m(flt) for m in measures]
# Walk every transcript, build one 99-dim language-marker vector per session,
# and collect the vectors per subject.
for path in paths:

    sbj = os.path.splitext(os.path.basename(path))[0].split("_")[0]
    txt_name = os.path.basename(path)[:-3] + 'txt'

    # Require a diagnosis label for this subject.
    if sbj not in df_labels["ts_sub_id"].values:
        continue

    # Require precomputed syntactic-complexity measures for this session.
    if txt_name not in df_syn["Filename"].values:
        continue

    session = pd.read_csv(path)
    session_full = copy.deepcopy(session)  # untouched copy for turn statistics

    # Participant speech only, non-empty transcriptions.
    session = session[session["role"] == "Participant"]
    session.dropna(subset=['asr'], inplace=True)  # can replace

    utterances = [str(u).strip().lower() for u in session["asr"]]
    tokens = " ".join(utterances).split()

    if not tokens:
        continue

    if sbj not in id2feature:
        id2feature[sbj] = []

    # LIWC category counts (64 dims).
    counts = [0 for _ in range(64)]
    for token in tokens:
        for category in parse(token):
            counts[category_position[category]] += 1

    # Syntactic complexity measures (from the L2SCA output row).
    counts.extend(df_syn[df_syn["Filename"] == txt_name].iloc[0, 1:].tolist())

    # Lexical diversity measures.
    counts.extend(lexical(" ".join(tokens)))

    # Response-length statistics over the full (both-role) transcript.
    counts.extend(respone_feature(session_full))

    id2feature[sbj].append(counts)

with open("rawdata/id2feature.p", "wb") as f:
    pickle.dump(id2feature, f)
23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
class Baseline_confounder_solver(Solver_Base):
    """Two-stage baseline (generalized-least-squares style).

    Stage 1: for every language-marker column, fit a linear regressor that
    predicts the column from the 3 confounders (the last 3 columns of x:
    age, sex, education) and keep only the residual.
    Stage 2: train a linear classifier on the residualized markers.
    """

    def __init__(self, cfg_proj, cfg_m, name="baseline"):
        Solver_Base.__init__(self, cfg_proj, cfg_m, name)

    def run(self, x_train, y_train, g_train, x_test, y_test, g_test, seed):
        self.set_random_seed(seed)

        # --- Stage 1: residualize each feature column on the confounders ---
        confounder_epochs = 50
        conf_train, conf_test = x_train[:, -3:], x_test[:, -3:]

        for col in range(x_train.shape[-1] - 3):
            target = x_train[:, col]
            loader_c = DataLoader(
                CustomDataset(conf_train, target, g_train),
                batch_size=self.cfg_m.training.batch_size,
                drop_last=True,
                shuffle=True,
            )
            regressor = MSE_pytorch(input_dim=len(conf_train[0]), output_dim=1)
            optimizer_c = torch.optim.AdamW(regressor.parameters(), lr=self.cfg_m.training.lr_init)
            # Anneal over total optimizer steps (epochs * batches).
            scheduler_c = torch.optim.lr_scheduler.CosineAnnealingLR(
                optimizer_c, int(confounder_epochs * len(loader_c)))
            regressor, _ = self.to_parallel_model(regressor)
            regressor, _ = self.basic_train_confounder(
                regressor, loader_c, torch.nn.MSELoss(), optimizer_c, scheduler_c, confounder_epochs)

            # Subtract the confounder-explained component from train and test.
            regressor.eval()
            with torch.no_grad():
                fitted = regressor(torch.from_numpy(conf_train).float().to(self.device))
            x_train[:, col] = x_train[:, col] - fitted.data.detach().cpu().numpy().flatten()
            with torch.no_grad():
                fitted = regressor(torch.from_numpy(conf_test).float().to(self.device))
            x_test[:, col] = x_test[:, col] - fitted.data.detach().cpu().numpy().flatten()

        # --- Stage 2: linear classifier on the residualized features only ---
        x_train, x_test = x_train[:, :-3], x_test[:, :-3]
        loader = DataLoader(
            CustomDataset(x_train, y_train, g_train),
            batch_size=self.cfg_m.training.batch_size,
            drop_last=True,
            shuffle=True,
        )
        model_task = LR_pytorch(input_dim=len(x_train[0]), output_dim=self.cfg_m.data.dim_out)
        optimizer = torch.optim.AdamW(model_task.parameters(), lr=self.cfg_m.training.lr_init)
        lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            optimizer, int(self.cfg_m.training.epochs * len(loader)))
        model_task, _ = self.to_parallel_model(model_task)
        model_task, _ = self.basic_train(model_task, loader, self.cross_entropy_regs, optimizer, lr_scheduler)

        # (auc, f1, sens, spec) at sample level and subject level.
        return self.eval_func(model_task, x_test, y_test, g_test)

    def basic_train_confounder(self, model, dataloader_train, criterion, optimizer, lr_scheduler, epochs):
        """Train the regressor for `epochs` passes; return (model, per-epoch mean loss)."""
        loss_train_trace = []
        for _ in range(epochs):
            model.train()
            batch_losses = []
            for batch_x, batch_y, _, idx in dataloader_train:
                batch_x = batch_x.float().to(self.device)
                batch_y = batch_y.float().to(self.device)
                loss = criterion(batch_y, model(batch_x).flatten())
                batch_losses.append(loss.item())

                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
                lr_scheduler.step()  # per-batch step, matching the annealing horizon
            loss_train_trace.append(np.mean(batch_losses))
        return model, loss_train_trace
import torch
from torch.utils.data import Dataset
import numpy as np

# Define model
class MLP_pytorch(torch.nn.Module):
    """Task classifier: a 2-layer MLP, or a single linear layer when classifier == "LR"."""

    def __init__(self, input_dim, output_dim, classifier = "MLP"):
        super(MLP_pytorch, self).__init__()

        self.classifier = classifier

        if self.classifier == "LR":
            # LR: logits come straight from one linear layer
            self.linear1 = torch.nn.Linear(input_dim, output_dim)
        else:
            # MLP: one 32-unit hidden layer with ReLU
            self.linear1 = torch.nn.Linear(input_dim, 32)
            self.relu1 = torch.nn.ReLU()
            self.linear2 = torch.nn.Linear(32, output_dim)

    def forward(self, x):
        if self.classifier == "LR":
            return self.linear1(x)
        return self.linear2(self.relu1(self.linear1(x)))


class LR_pytorch(torch.nn.Module):
    """Logistic-regression head: a single linear layer producing class logits."""

    def __init__(self, input_dim, output_dim):
        super(LR_pytorch, self).__init__()
        self.linear1 = torch.nn.Linear(input_dim, output_dim)

    def forward(self, x):
        return self.linear1(x)


class MSE_pytorch(torch.nn.Module):
    """Linear regressor intended for use with an MSE loss (confounder -> feature regression)."""

    def __init__(self, input_dim, output_dim):
        super(MSE_pytorch, self).__init__()
        self.linear1 = torch.nn.Linear(input_dim, output_dim)

    def forward(self, x):
        return self.linear1(x)


class MLP_whiting(torch.nn.Module):
    """Harmonization network: a feature-mapping MLP plus a subject head (out_sbj)
    used for adversarial whitening and a task head (out_task).

    forward() dispatches on `id`:
      "0"   -> whitened feature
      "1"   -> subject logits
      "0,1" -> [feature, subject logits]
      "2"   -> task logits (optionally restricted to `feature_idx` columns)
    """
    # Optional column subset the task head reads; None means all columns.
    feature_idx = None

    def __init__(self, input_dim, sbj_dim, task_in_dim, task_out_dim, classifier = "MLP"):
        super(MLP_whiting, self).__init__()
        self.feature_mapping = torch.nn.Sequential(
            torch.nn.Linear(input_dim, 64),
            torch.nn.ReLU(),
            torch.nn.Linear(64, 64),
            torch.nn.ReLU(),
            torch.nn.Linear(64, input_dim),
        )
        self.out_sbj = torch.nn.Linear(input_dim, sbj_dim)
        self.classifier = classifier
        if self.classifier == "LR":
            self.out_task = torch.nn.Sequential(
                # LR
                torch.nn.Linear(task_in_dim, task_out_dim),
            )
        else:
            self.out_task = torch.nn.Sequential(
                # MLP
                torch.nn.Linear(task_in_dim, 32),
                torch.nn.ReLU(),
                torch.nn.Linear(32, task_out_dim)
            )

    def forward(self, x, id):
        feature = self.feature_mapping(x)
        if id == "0":
            return feature
        elif id == "1":
            return self.out_sbj(feature)
        elif id == "0,1":
            return [feature, self.out_sbj(feature)]
        elif id == "2":
            if self.feature_idx is not None:
                return self.out_task(feature[:, self.feature_idx])
            return self.out_task(feature)
        # Previously an unknown id silently returned None; fail loudly instead.
        raise ValueError("unknown forward mode id: %r" % (id,))


class MLP_whiting_confounders(torch.nn.Module):
    """Variant of MLP_whiting with three confounder heads (age / gender / education)
    instead of a single subject head.  `confounders_dim` is (age, gender, educ)."""
    # Optional column subset the task head reads; None means all columns.
    feature_idx = None

    def __init__(self, input_dim, confounders_dim, task_in_dim, task_out_dim, classifier = "MLP"):
        super(MLP_whiting_confounders, self).__init__()
        self.feature_mapping = torch.nn.Sequential(
            torch.nn.Linear(input_dim, 64),
            torch.nn.ReLU(),
            torch.nn.Linear(64, 64),
            torch.nn.ReLU(),
            torch.nn.Linear(64, input_dim),
        )
        self.out_age = torch.nn.Linear(input_dim, confounders_dim[0])
        self.out_gender = torch.nn.Linear(input_dim, confounders_dim[1])
        self.out_educ = torch.nn.Linear(input_dim, confounders_dim[2])
        self.classifier = classifier
        if self.classifier == "LR":
            self.out_task = torch.nn.Sequential(
                # LR
                torch.nn.Linear(task_in_dim, task_out_dim),
            )
        else:
            self.out_task = torch.nn.Sequential(
                # MLP
                torch.nn.Linear(task_in_dim, 32),
                torch.nn.ReLU(),
                torch.nn.Linear(32, task_out_dim)
            )

    def forward(self, x, id):
        feature = self.feature_mapping(x)
        if id == "0":
            return feature
        elif id == "1":
            # BUG FIX: this branch used to call self.out_sbj(feature), but out_sbj
            # is not defined on this class (it was superseded by the three
            # confounder heads) and raised AttributeError.  Return the confounder
            # logits instead, consistent with the "0,1" branch below.
            return [self.out_age(feature), self.out_gender(feature), self.out_educ(feature)]
        elif id == "0,1":
            return [feature, self.out_age(feature), self.out_gender(feature), self.out_educ(feature)]
        elif id == "2":
            if self.feature_idx is not None:
                return self.out_task(feature[:, self.feature_idx])
            return self.out_task(feature)
        raise ValueError("unknown forward mode id: %r" % (id,))


class CustomDataset(Dataset):
    """Dataset of (features, label, integer-encoded subject id, row index).

    Subject ids in G (arbitrary sortable values) are densely re-encoded to
    0..K-1 in sorted order; `subject_id` maps the integer code back to the
    original id.
    """

    def __init__(self, X, Y, G):
        self.X = X
        self.Y = np.array(Y)
        self.G = np.zeros(len(G), dtype=np.int64)
        self.subject_id = {}
        for code, g in enumerate(sorted(set(G))):
            rows = [i for i in range(len(G)) if G[i] == g]
            self.G[rows] = code
            self.subject_id[code] = g

    def __len__(self):
        return len(self.X)

    def __getitem__(self, idx):
        return self.X[idx], self.Y[idx], self.G[idx], idx

    def kept(self, idx_kept):
        # Keep only the rows in idx_kept (existing G codes are preserved).
        self.X = self.X[idx_kept]
        self.Y = self.Y[idx_kept]
        self.G = self.G[idx_kept]
        return self


class CustomDataset_coufounder(Dataset):
    """Like CustomDataset, but G is a 4-tuple of parallel sequences:
    (subject id, age group, gender, education); each is densely re-encoded."""

    def __init__(self, X, Y, G):
        self.X = X
        self.Y = np.array(Y)
        self.subject_id = {}
        self.G = self._encode(G[0], self.subject_id)
        self.G_age = self._encode(G[1])
        self.G_gender = self._encode(G[2])
        self.G_educ = self._encode(G[3])

    @staticmethod
    def _encode(values, code2value = None):
        # Densely re-encode `values` to 0..K-1 in sorted order; optionally record
        # the code -> original-value mapping in code2value.
        codes = np.zeros(len(values), dtype=np.int64)
        for code, v in enumerate(sorted(set(values))):
            rows = [i for i in range(len(values)) if values[i] == v]
            codes[rows] = code
            if code2value is not None:
                code2value[code] = v
        return codes

    def __len__(self):
        return len(self.X)

    def __getitem__(self, idx):
        return self.X[idx], self.Y[idx], self.G[idx], self.G_age[idx], self.G_gender[idx], self.G_educ[idx], idx

    def kept(self, idx_kept):
        self.X = self.X[idx_kept]
        self.Y = self.Y[idx_kept]
        self.G = self.G[idx_kept]
        self.G_age = self.G_age[idx_kept]
        self.G_gender = self.G_gender[idx_kept]
        self.G_educ = self.G_educ[idx_kept]
        return self


class CustomDatasetGroup(Dataset):
    """Dataset whose group labels are already numeric/encoded."""

    def __init__(self, X, Y, G):
        self.X = X
        self.Y = np.array(Y)
        self.G = np.array(G)

    def __len__(self):
        return len(self.X)

    def __getitem__(self, idx):
        return self.X[idx], self.Y[idx], self.G[idx], idx

    def kept(self, idx_kept):
        self.X = self.X[idx_kept]
        self.Y = self.Y[idx_kept]
        # BUG FIX: G was not subset here, leaving group labels misaligned with X/Y
        # after filtering.
        self.G = self.G[idx_kept]
        return self
| self.freeze_grad(model, except_str = ["feature_mapping", "out_sbj"]) 23 | optimizer_sbj = torch.optim.AdamW([p for p in model.parameters() if p.requires_grad == True], lr = self.cfg_m.training.lr_init) 24 | lr_scheduler_sbj = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer_sbj, int(self.cfg_m.training.epochs_whiting*len(dataloader_train))) #very useful 25 | model, loss_train_trace = self.sbj_train(model, dataloader_train, None, optimizer_sbj, lr_scheduler_sbj) 26 | 27 | # save harmonized feature 28 | if self.cfg_proj.save_whitening: 29 | dataloader_train_ = DataLoader(CustomDataset(x_train, y_train, g_train), 30 | batch_size = self.cfg_m.training.batch_size, drop_last=False, shuffle = False, pin_memory=True, worker_init_fn = np.random.seed(seed)) 31 | dataloader_test_ = DataLoader(CustomDataset(x_test, y_test, g_test), 32 | batch_size = self.cfg_m.training.batch_size, drop_last=False, shuffle = False, pin_memory=True, worker_init_fn = np.random.seed(seed)) 33 | self.feature_preprocessing(model, dataloader_train_, dataloader_test_) 34 | 35 | #Task training 36 | self.freeze_grad(model, except_str = ["out_task"]) 37 | criterion = self.cross_entropy_regs 38 | optimizer_task = torch.optim.AdamW([p for p in model.parameters() if p.requires_grad == True], lr = self.cfg_m.training.lr_init) 39 | lr_scheduler_task = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer_task, int(self.cfg_m.training.epochs*len(dataloader_train))) #very useful 40 | 41 | model, loss_train_trace = self.basic_train(model, dataloader_train, criterion, optimizer_task, lr_scheduler_task) 42 | 43 | # Evaluation 44 | auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj = self.eval_func(model, x_test, y_test, g_test) 45 | 46 | return auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj 47 | 48 | 49 | def sbj_train(self, model, dataloader_train, criterion, optimizer, lr_scheduler): 50 | loss_train_trace = [] 51 | loss_mse = torch.nn.MSELoss() 52 | loss_cross_ent= 
torch.nn.CrossEntropyLoss() 53 | for epoch in range(self.cfg_m.training.epochs_whiting): 54 | model.train() 55 | loss_epoch = [] 56 | for train_X, train_Y, train_G, idx in dataloader_train: 57 | train_X, train_Y, train_G = train_X.float().to(self.device), train_Y.to(self.device), train_G.to(self.device) 58 | [features, logits_sbj] = model(train_X, id = "0,1") # 59 | loss_sbj_mse = loss_mse(train_X, features) 60 | loss_sbj = 5 - loss_cross_ent(logits_sbj, train_G) 61 | loss = loss_sbj*0.5 + loss_sbj_mse*0.5 62 | loss_epoch.append(loss.item()) 63 | optimizer.zero_grad() 64 | loss.backward() 65 | optimizer.step() 66 | lr_scheduler.step() 67 | loss_train_trace.append(np.mean(loss_epoch)) 68 | return model, loss_train_trace 69 | 70 | 71 | def cross_entropy_regs(self, model, Yhat, Y, l2_lambda, l1_lambda): #pred, train_Y 72 | Y_t = torch.zeros((Y.shape[0], 2)).to(Y) 73 | Y_t[:, 1] = Y.data 74 | Y_t[:, 0] = 1 - Y.data 75 | loss_mean = F.cross_entropy(Yhat, Y_t) 76 | 77 | l2_lambda = 0.5 78 | if l2_lambda is not None: 79 | l2_reg = torch.tensor(0., requires_grad=True) 80 | for name, param in model.named_parameters(): 81 | if 'weight' in name and "out_task" in name: 82 | l2_reg = l2_reg + torch.norm(param, 2) 83 | l2_reg = l2_lambda * l2_reg 84 | loss_mean += l2_reg 85 | l1_lambda = 0.0005 86 | if l1_lambda is not None: 87 | l1_reg = torch.tensor(0., requires_grad=True) 88 | for name, param in model.named_parameters(): 89 | if 'weight' in name and "out_task" in name: 90 | l1_reg = l1_reg + torch.norm(param, 1) 91 | l1_reg = l1_lambda * l1_reg 92 | loss_mean += l1_reg 93 | return loss_mean 94 | 95 | 96 | def feature_preprocessing(self, model, dataloader_train, dataloader_test): 97 | model.eval() 98 | id2feature = {} 99 | 100 | with torch.no_grad(): 101 | for train_X, train_Y, train_G, idx in dataloader_train: 102 | train_X, train_Y, train_G = train_X.float().to(self.device), train_Y.to(self.device), train_G.to(self.device) 103 | features = model(train_X, id = "0") # 104 | 
features = features.data.detach().cpu().numpy().tolist() 105 | train_G = train_G.data.detach().cpu().numpy().tolist() 106 | for i in range(len(features)): 107 | if dataloader_train.dataset.subject_id[train_G[i]] not in id2feature: 108 | id2feature[dataloader_train.dataset.subject_id[train_G[i]]] = [] 109 | id2feature[dataloader_train.dataset.subject_id[train_G[i]]].append(features[i]) 110 | 111 | for test_X, test_Y, test_G, idx in dataloader_test: 112 | test_X, test_Y, test_G = test_X.float().to(self.device), test_Y.to(self.device), test_G.to(self.device) 113 | features = model(test_X, id = "0") # 114 | features = features.data.detach().cpu().tolist() 115 | test_G = test_G.data.detach().cpu().tolist() 116 | for i in range(len(features)): 117 | if dataloader_test.dataset.subject_id[test_G[i]] not in id2feature: 118 | id2feature[dataloader_test.dataset.subject_id[test_G[i]]] = [] 119 | id2feature[dataloader_test.dataset.subject_id[test_G[i]]].append(features[i]) 120 | 121 | with open("rawdata/id2feature_whitening.p", "wb") as output_file: 122 | pickle.dump(id2feature, output_file) 123 | 124 | 125 | #for the id 126 | def basic_train(self, model, dataloader_train, criterion, optimizer, lr_scheduler): 127 | loss_train_trace = [] 128 | for epoch in range(self.cfg_m.training.epochs): 129 | model.train() 130 | loss_epoch = [] 131 | for train_X, train_Y, _ , idx in dataloader_train: 132 | 133 | train_X, train_Y = train_X.float().to(self.device), train_Y.to(self.device) 134 | Y_hat = model(train_X, id = "2") 135 | Y_hat = Y_hat if torch.is_tensor(Y_hat) else Y_hat[1] 136 | loss = criterion(model, Y_hat, train_Y, l2_lambda = self.cfg_m.l2_lambda, l1_lambda = self.cfg_m.l1_lambda) 137 | loss_epoch.append(loss.item()) 138 | 139 | optimizer.zero_grad() 140 | loss.backward() 141 | optimizer.step() 142 | lr_scheduler.step() 143 | loss_train_trace.append(np.mean(loss_epoch)) 144 | return model, loss_train_trace 145 | 146 | 147 | def predict(self, model, X, flag_prob = False): 148 | 
def generalization(values, values_f, num_class):
    """Discretize continuous confounder values into num_class ordinal bins.

    Bin edges are computed from `values` only (the train split); values more
    than 3 standard deviations from the mean of `values` — in either sequence —
    are mapped to the middle bin.  Returns the binned versions of `values`
    and `values_f`.
    """
    mean = np.mean(values)
    std = np.std(values)
    lo, hi = mean - 3 * std, mean + 3 * std

    # bin edges come from the in-range training values only
    inliers = [v for v in values if lo <= v <= hi]
    v_min, v_max = min(inliers), max(inliers)
    step = (v_max - v_min) / num_class
    middle_bin = int(num_class / 2)  # outliers land here

    def _bin(seq):
        binned = []
        for v in seq:
            if v < lo or v > hi:
                binned.append(middle_bin)
            else:
                binned.append(max(min(int((v - v_min) / step), num_class - 1), 0))
        return binned

    return _bin(values), _bin(values_f)
torch.optim.lr_scheduler.CosineAnnealingLR(optimizer_sbj, int(self.cfg_m.training.epochs_whiting*len(dataloader_train))) #very useful 63 | model, loss_train_trace = self.sbj_train(model, dataloader_train, None, optimizer_sbj, lr_scheduler_sbj) 64 | 65 | #feature_preprocessing - outlier detection and feature selection 66 | dataloader_train_ = DataLoader(CustomDataset(x_train, y_train, g_train), 67 | batch_size = self.cfg_m.training.batch_size, drop_last=False, shuffle = False, pin_memory=True, worker_init_fn = np.random.seed(seed)) 68 | 69 | #feature_preprocessing - outlier detection and feature selection 70 | dataloader_test_ = DataLoader(CustomDataset(x_test, y_test, g_test), 71 | batch_size = self.cfg_m.training.batch_size, drop_last=False, shuffle = False, pin_memory=True, worker_init_fn = np.random.seed(seed)) 72 | 73 | self.feature_preprocessing(model, dataloader_train_, dataloader_test_) 74 | 75 | #Task training 76 | self.freeze_grad(model, except_str = ["out_task"]) 77 | criterion = self.cross_entropy_regs 78 | optimizer_task = torch.optim.AdamW([p for p in model.parameters() if p.requires_grad == True], lr = self.cfg_m.training.lr_init) 79 | lr_scheduler_task = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer_task, int(self.cfg_m.training.epochs*len(dataloader_train))) #very useful 80 | 81 | model, loss_train_trace = self.basic_train(model, dataloader_train, criterion, optimizer_task, lr_scheduler_task) 82 | 83 | # Evaluation 84 | auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj = self.eval_func(model, x_test, y_test, g_test, g_test_sbj) 85 | 86 | return auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj 87 | 88 | 89 | def sbj_train(self, model, dataloader_train, criterion, optimizer, lr_scheduler): 90 | loss_train_trace = [] 91 | loss_mse = torch.nn.MSELoss() 92 | loss_cross_ent= torch.nn.CrossEntropyLoss() 93 | for epoch in range(self.cfg_m.training.epochs_whiting): 94 | model.train() 95 | loss_epoch = [] 96 | for train_X, train_Y, 
train_G, idx in dataloader_train: 97 | train_X, train_Y, train_G = train_X.float().to(self.device), train_Y.to(self.device), train_G.to(self.device) 98 | [features, logits_sbj] = model(train_X, id = "0,1") # 99 | loss_sbj_mse = loss_mse(train_X, features) 100 | loss_sbj = 5 - loss_cross_ent(logits_sbj, train_G) 101 | loss = loss_sbj*0.5 + loss_sbj_mse*0.5 102 | loss_epoch.append(loss.item()) 103 | optimizer.zero_grad() 104 | loss.backward() 105 | optimizer.step() 106 | lr_scheduler.step() 107 | loss_train_trace.append(np.mean(loss_epoch)) 108 | return model, loss_train_trace 109 | 110 | 111 | def cross_entropy_regs(self, model, Yhat, Y, l2_lambda, l1_lambda): #pred, train_Y 112 | Y_t = torch.zeros((Y.shape[0], 2)).to(Y) 113 | Y_t[:, 1] = Y.data 114 | Y_t[:, 0] = 1 - Y.data 115 | loss_mean = F.cross_entropy(Yhat, Y_t) 116 | l2_lambda = 0.5 117 | if l2_lambda is not None: 118 | l2_reg = torch.tensor(0., requires_grad=True) 119 | for name, param in model.named_parameters(): 120 | if 'weight' in name and "out_task" in name: 121 | l2_reg = l2_reg + torch.norm(param, 2) 122 | l2_reg = l2_lambda * l2_reg 123 | loss_mean += l2_reg 124 | l1_lambda = 0.0005 125 | if l1_lambda is not None: 126 | l1_reg = torch.tensor(0., requires_grad=True) 127 | for name, param in model.named_parameters(): 128 | if 'weight' in name and "out_task" in name: 129 | l1_reg = l1_reg + torch.norm(param, 1) 130 | l1_reg = l1_lambda * l1_reg 131 | loss_mean += l1_reg 132 | return loss_mean 133 | 134 | def feature_preprocessing(self, model, dataloader_train, dataloader_test): 135 | model.eval() 136 | features_np = np.zeros(dataloader_train.dataset.X.shape) 137 | id2feature = {} 138 | 139 | with torch.no_grad(): 140 | for train_X, train_Y, train_G, idx in dataloader_train: 141 | train_X, train_Y, train_G = train_X.float().to(self.device), train_Y.to(self.device), train_G.to(self.device) 142 | features = model(train_X, id = "0") # 143 | features_np[idx] = features.data.detach().cpu().numpy() 144 | 
features = features.data.detach().cpu().numpy().tolist() 145 | train_G = train_G.data.detach().cpu().numpy().tolist() 146 | for i in range(len(features)): 147 | if dataloader_train.dataset.subject_id[train_G[i]] not in id2feature: 148 | id2feature[dataloader_train.dataset.subject_id[train_G[i]]] = [] 149 | id2feature[dataloader_train.dataset.subject_id[train_G[i]]].append(features[i]) 150 | 151 | if self.cfg_proj.save_whitening: 152 | with torch.no_grad(): 153 | for test_X, test_Y, test_G, idx in dataloader_test: 154 | test_X, test_Y, test_G = test_X.float().to(self.device), test_Y.to(self.device), test_G.to(self.device) 155 | features = model(test_X, id = "0") # 156 | features = features.data.detach().cpu().tolist() 157 | test_G = test_G.data.detach().cpu().tolist() 158 | for i in range(len(features)): 159 | if dataloader_test.dataset.subject_id[test_G[i]] not in id2feature: 160 | id2feature[dataloader_test.dataset.subject_id[test_G[i]]] = [] 161 | id2feature[dataloader_test.dataset.subject_id[test_G[i]]].append(features[i]) 162 | 163 | with open("rawdata/id2feature_whitening.p", "wb") as output_file: 164 | pickle.dump(id2feature, output_file) 165 | 166 | dataset_train_updated = dataloader_train.dataset 167 | 168 | #for the id 169 | def basic_train(self, model, dataloader_train, criterion, optimizer, lr_scheduler): 170 | loss_train_trace = [] 171 | for epoch in range(self.cfg_m.training.epochs): 172 | model.train() 173 | loss_epoch = [] 174 | for train_X, train_Y, _ , idx in dataloader_train: 175 | 176 | train_X, train_Y = train_X.float().to(self.device), train_Y.to(self.device) 177 | Y_hat = model(train_X, id = "2") 178 | Y_hat = Y_hat if torch.is_tensor(Y_hat) else Y_hat[1] 179 | loss = criterion(model, Y_hat, train_Y, l2_lambda = self.cfg_m.l2_lambda, l1_lambda = self.cfg_m.l1_lambda) 180 | loss_epoch.append(loss.item()) 181 | 182 | optimizer.zero_grad() 183 | loss.backward() 184 | optimizer.step() 185 | lr_scheduler.step() 186 | 
loss_train_trace.append(np.mean(loss_epoch)) 187 | return model, loss_train_trace 188 | 189 | 190 | def predict(self, model, X, flag_prob = False): 191 | X = torch.from_numpy(X) 192 | 193 | model.eval() 194 | with torch.no_grad(): 195 | X = X.float().to(self.device) 196 | pred = model(X, id = "2") 197 | pred = pred if torch.is_tensor(pred) else pred[1] 198 | 199 | pred = torch.argmax(pred, 1) 200 | return pred.detach().cpu().numpy() 201 | 202 | def predict_proba(self, model, X, flag_prob = True): 203 | X = torch.from_numpy(X) 204 | 205 | model.eval() 206 | with torch.no_grad(): 207 | X = X.float().to(self.device) 208 | pred = model(X, id = "2") 209 | pred = pred if torch.is_tensor(pred) else pred[1] 210 | pred = torch.nn.functional.softmax(pred, dim = 1) 211 | 212 | return pred.detach().cpu().numpy() 213 | 214 | def eval_func(self, model, x_test, y_test, g_test, g_test_sbj): 215 | model.eval() 216 | 217 | #conversation-wise 218 | pred = self.predict(model, x_test) 219 | pred_proba = self.predict_proba(model, x_test)[:, 1] 220 | 221 | # Evaluation metric 222 | tn, fp, fn, tp = metrics.confusion_matrix(y_test, pred).ravel() 223 | auc = metrics.roc_auc_score(y_test, pred_proba) 224 | f1 = metrics.f1_score(y_test, pred) 225 | sens = tp/(tp+fn) 226 | spec = tn/(fp+tn) 227 | 228 | #subject-wise 229 | pred_sbj = [] 230 | pred_proba_sbj = [] 231 | y_test_subject = [] 232 | id2pred = {} 233 | id2pred_proba = {} 234 | 235 | g_test_sbj_unique = list(set(g_test_sbj)) 236 | for g in g_test_sbj_unique: 237 | index = [i for i in range(len(g_test_sbj)) if g_test_sbj[i] == g] 238 | pred_single_sbj = pred_proba[index] 239 | 240 | # get most confidence conversations 241 | pred_single_sbj = list(pred_single_sbj) 242 | pred_single_sbj.sort(key = lambda x: -abs(x-0.5)) 243 | 244 | id2pred[g] = pred[index] 245 | id2pred_proba[g] = pred_proba[index] 246 | pred_proba_single_sbj =np.sum(pred_single_sbj)/len(pred_single_sbj) 247 | pred_single_sbj = 1 if pred_proba_single_sbj >= 
import random
import torch
import numpy as np
import logging
import torch.nn.functional as F
import os
import torch.nn as nn
import time
import ml_collections
from sklearn import metrics

class Solver_Base:
    # Shared base class for all solvers: device/logging setup, checkpoint
    # save/load, prediction helpers, and conversation- plus subject-level
    # evaluation.  Subclasses implement run() and may override predict*/eval.

    def __init__(self, cfg_proj, cfg_m, name):
        # cfg_proj: project-level arguments; cfg_m: model/training config.
        self.name = name
        self.cfg_proj = cfg_proj
        self.cfg_m = cfg_m
        self.loss_func = torch.nn.functional.cross_entropy
        self.init_env(name)
        # accumulated metrics across runs (filled elsewhere)
        self.performance_main = {"AUC":[], "F1":[], "Sens":[], "Spec":[]}
        self.performance = {}

    def setLabels(self, df_labels):
        # Store the label dataframe for later use (e.g. prediction export).
        self.df_labels = df_labels

    def init_env(self, name, log_folder = "checkpoints"):
        # Pick the compute device and, when flag_log is set, configure a file
        # logger and dump the full cfg_m / cfg_proj settings into it.
        #init device
        use_cuda = torch.cuda.is_available()
        self.device = torch.device("cuda" if use_cuda else "cpu")
        self.cfg_m.device = self.device

        #init log sys
        self.log_sub_folder = "%s/log_%s_%s"%(log_folder, name, self.cfg_proj.flag_time)
        self.log_id = "log_%s_%s"%(name, self.cfg_proj.flag_time)
        self.logger = logging.getLogger(name)
        if self.cfg_proj.flag_log:
            os.makedirs(self.log_sub_folder, exist_ok=True)
            logging.basicConfig(
                format =' [%(asctime)s] - %(message)s',
                datefmt = '%Y/%m/%d %H:%M:%S',
                level = logging.INFO,
                filename = '%s/%s.log'%(self.log_sub_folder, self.log_id))
            # flatten one level of nested ConfigDicts into the settings dump
            setting_log = "----Setting----"
            for n in self.cfg_m:
                if isinstance(self.cfg_m[n], ml_collections.ConfigDict):
                    for n_sub in self.cfg_m[n]:
                        setting_log = setting_log + "\n" + "%s - %s - %s"%(n, n_sub, self.cfg_m[n][n_sub])
                else:
                    setting_log = setting_log + "\n" + "%s - %s"%(n, self.cfg_m[n])
            for p in vars(self.cfg_proj):
                setting_log = setting_log + "\n" + "%s - %s"%(p, getattr(self.cfg_proj, p))
            setting_log = setting_log + "\n" + "----log----"
            self.logger.info(setting_log)

    def eval_func(self, model, x_test, y_test, g_test):
        # Evaluate at two granularities: per conversation, then per subject by
        # averaging that subject's conversation probabilities and thresholding
        # at cfg_proj.vote_threshold.  Returns 8 metrics (AUC/F1/Sens/Spec x2).
        model.eval()

        #conversation-wise
        pred = self.predict(model, x_test)
        pred_proba = self.predict_proba(model, x_test)[:, 1]

        # Evaluation metric
        tn, fp, fn, tp = metrics.confusion_matrix(y_test, pred).ravel()
        auc = metrics.roc_auc_score(y_test, pred_proba)
        f1 = metrics.f1_score(y_test, pred)
        sens = tp/(tp+fn)
        spec = tn/(fp+tn)

        #subject-wise
        pred_sbj = []
        pred_proba_sbj = []
        y_test_subject = []
        id2pred = {}
        id2pred_proba = {}

        g_test_unique = list(set(g_test))
        for g in g_test_unique:
            index = [i for i in range(len(g_test)) if g_test[i] == g]
            pred_single_sbj = pred_proba[index]

            # get most confidence conversations
            # NOTE(review): sort by distance from 0.5 appears intended to rank
            # confident conversations first, but the mean below uses all of
            # them, so the sort has no effect on the result — confirm intent.
            pred_single_sbj = list(pred_single_sbj)
            pred_single_sbj.sort(key = lambda x: -abs(x-0.5))

            id2pred[g] = pred[index]
            id2pred_proba[g] = pred_proba[index]
            pred_proba_single_sbj =np.sum(pred_single_sbj)/len(pred_single_sbj)
            pred_single_sbj = 1 if pred_proba_single_sbj >= self.cfg_proj.vote_threshold else 0
            pred_sbj.append(pred_single_sbj)
            pred_proba_sbj.append(pred_proba_single_sbj)
            # all conversations of one subject must share the same label
            assert np.all(y_test[index] == y_test[index[0]])
            y_test_subject.append(y_test[index[0]])

        self.generatePredictions(id2pred, id2pred_proba)

        # Evaluation metric
        tn_sbj, fp_sbj, fn_sbj, tp_sbj = metrics.confusion_matrix(y_test_subject, pred_sbj).ravel()
        auc_sbj = metrics.roc_auc_score(y_test_subject, pred_proba_sbj)
        f1_sbj = metrics.f1_score(y_test_subject, pred_sbj)
        sens_sbj = tp_sbj/(tp_sbj+fn_sbj)
        spec_sbj = tn_sbj/(fp_sbj+tn_sbj)

        self.save_results_each_run(auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj)
        return auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj

    def load_ckp(self, model, optimizer, lr_scheduler, seed, contain_t):
        # Restore model/optimizer/scheduler state from a checkpoint matched by
        # flag_load (or flag_time), seed, and the stage tag `contain_t`.
        # Returns (model, optimizer, lr_scheduler, epoch_start).

        def getFilesInPath(flag_load, folder, suffix, contain_t):
            # List checkpoint files under the (unique) run folder matching
            # flag_load, filtered by extension, seed, and stage tag.
            name_list = []
            f_list = sorted(os.listdir(folder))
            try:
                folder_sub = [f_n for f_n in f_list if flag_load in f_n][0] #time stamp is unique, for sure
            except:
                return name_list
            folder_sub = os.path.join(folder, folder_sub)
            f_sub_list = sorted(os.listdir(folder_sub))

            for f_n in f_sub_list:
                if contain_t is not None:
                    if suffix in os.path.splitext(f_n)[1] and "seed_%d"%(seed) in os.path.splitext(f_n)[0] and contain_t in os.path.splitext(f_n)[0]:
                        pathName = os.path.join(folder_sub, f_n)
                        name_list.append(pathName)
                else:
                    if suffix in os.path.splitext(f_n)[1]:
                        pathName = os.path.join(folder_sub, f_n)
                        name_list.append(pathName)

            return name_list
        flag_load = self.cfg_proj.flag_load if self.cfg_proj.flag_load is not None else self.cfg_proj.flag_time
        file_ns = getFilesInPath(flag_load, folder = "checkpoints", suffix = "pt", contain_t = contain_t)

        if len(file_ns) == 1:
            checkpoint = torch.load(file_ns[0])
            # Fallback chain: strict load -> non-strict load -> strip the
            # DataParallel 'module.' prefix from keys.
            try:
                model.load_state_dict(checkpoint['net']) #, strict=False) #ignore the unmatched key
            except RuntimeError:
                try:
                    model.load_state_dict(checkpoint['net'], strict=False) #ignore the unmatched key
                    print("unmatched keys in paras are loaded to model at stage: %s" % (contain_t))
                except:
                    model.load_state_dict({k.replace('module.',''):v for k,v in checkpoint['net'].items()})
            if optimizer is not None: optimizer.load_state_dict(checkpoint['optimizer'])
            if lr_scheduler is not None: lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
            epoch_start = checkpoint['epoch']
            str_record = "load ckp - %s, epoch_start = %d at stage: %s" % (file_ns[-1], epoch_start, contain_t)
            print(str_record)
            self.logger.info(str_record)
        else:
            # zero or multiple matches: start from scratch
            epoch_start = 0
            print("Warning - no paras are loaded to model at stage: %s" % (contain_t))

        return model, optimizer, lr_scheduler, epoch_start

    def save_ckp(self, model, optimizer, lr_scheduler, seed, epoch, stage):
        # Persist model/optimizer/scheduler state; unwraps nn.DataParallel so
        # the saved keys have no 'module.' prefix.
        if os.path.exists(self.log_sub_folder):
            state_ckp = {'net':model.module.state_dict() if isinstance(model, nn.DataParallel) else model.state_dict(), 'optimizer':optimizer.state_dict() if optimizer is not None else None, \
                'lr_scheduler':lr_scheduler.state_dict() if lr_scheduler is not None else None, 'epoch':epoch}
            torch.save(state_ckp, '%s/%s_%s_%s_seed_%d_epoch_%04d.pt'%(self.log_sub_folder, self.cfg_proj.backbone, stage, self.cfg_proj.flag_time, seed, epoch))
            time.sleep(1)
        else:
            print("no-saving-ckp, %s doesn't exist!" % (self.log_sub_folder))

    def to_parallel_model(self, model):
        # Wrap in DataParallel when multiple GPUs are available.  Returns
        # (possibly-wrapped model, underlying module) so callers can keep a
        # handle to the raw module.
        if torch.cuda.device_count() > 1:
            # print("Let's use", torch.cuda.device_count(), "GPUs!")
            model = nn.DataParallel(model)
            model = model.to(self.device)
            return model, model.module
        else:
            model = model.to(self.device)
            return model, model

    def set_random_seed(self, seed):
        # Seed torch (CPU + all GPUs), numpy and random for reproducibility.
        torch.manual_seed(seed)
        if torch.cuda.is_available():
            torch.cuda.manual_seed(seed)
            if torch.cuda.device_count() > 1: torch.cuda.manual_seed_all(seed)
        np.random.seed(seed)
        random.seed(seed)

        self.seed_current = seed


    def predict(self, model, X, flag_prob = False):
        # Hard class predictions (argmax over logits) for a numpy matrix X.
        X = torch.from_numpy(X)

        model.eval()
        with torch.no_grad():
            X = X.float().to(self.device)
            pred = model(X)
            # some models return [aux, logits]; take the logits in that case
            pred = pred if torch.is_tensor(pred) else pred[1]

        pred = torch.argmax(pred, 1)
        return pred.detach().cpu().numpy()

    def predict_proba(self, model, X, flag_prob = True):
        # Softmax class probabilities for a numpy matrix X.
        X = torch.from_numpy(X)

        model.eval()
        with torch.no_grad():
            X = X.float().to(self.device)
            pred = model(X)
            pred = pred if torch.is_tensor(pred) else pred[1]
            pred = torch.nn.functional.softmax(pred, dim = 1)

        return pred.detach().cpu().numpy()

    def cross_entropy_regs(self, model, Yhat, Y, l2_lambda, l1_lambda): #pred, train_Y
        # Binary CE on one-hot targets plus optional L2/L1 penalties over ALL
        # weight parameters (solver subclasses override this with a head-only
        # variant).  Method continues beyond this excerpt.
        Y_t = torch.zeros((Y.shape[0], 2)).to(Y)
        Y_t[:, 1] = Y.data
        Y_t[:, 0] = 1 - Y.data
        loss_mean = F.cross_entropy(Yhat, Y_t)

        if l2_lambda is not None:
            l2_reg = torch.tensor(0., requires_grad=True)
            for name, param in model.named_parameters():
                if 'weight' in name:
                    l2_reg = l2_reg + torch.norm(param, 2)
            l2_reg = l2_lambda * l2_reg
            loss_mean += l2_reg
        if l1_lambda is not None:
            l1_reg = torch.tensor(0., requires_grad=True)
            for name, param in
model.named_parameters(): 224 | if 'weight' in name: 225 | l1_reg = l1_reg + torch.norm(param, 1) 226 | l1_reg = l1_lambda * l1_reg 227 | loss_mean += l1_reg 228 | return loss_mean 229 | 230 | def basic_train(self, model, dataloader_train, criterion, optimizer, lr_scheduler): 231 | loss_train_trace = [] 232 | for epoch in range(self.cfg_m.training.epochs): 233 | model.train() 234 | loss_epoch = [] 235 | for train_X, train_Y, _ , idx in dataloader_train: 236 | 237 | train_X, train_Y = train_X.float().to(self.device), train_Y.to(self.device) 238 | Y_hat = model(train_X) 239 | Y_hat = Y_hat if torch.is_tensor(Y_hat) else Y_hat[1] 240 | loss = criterion(model, Y_hat, train_Y, l2_lambda = self.cfg_m.l2_lambda, l1_lambda = self.cfg_m.l1_lambda) 241 | loss_epoch.append(loss.item()) 242 | 243 | optimizer.zero_grad() 244 | loss.backward() 245 | optimizer.step() 246 | lr_scheduler.step() 247 | loss_train_trace.append(np.mean(loss_epoch)) 248 | return model, loss_train_trace 249 | 250 | def freeze_grad(self, model, except_full_names = [None], except_str = [None]): 251 | for n, para in model.named_parameters(): 252 | para.requires_grad = False 253 | for n, para in model.named_parameters(): 254 | for f_n in except_full_names: 255 | if f_n == n: para.requires_grad = True 256 | for s in except_str: 257 | if s is not None: 258 | if s in n: para.requires_grad = True 259 | return model 260 | 261 | def save_results_each_run(self, auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj): 262 | self.performance_main["AUC"].append([auc, auc_sbj]) 263 | self.performance_main["F1"].append([f1, f1_sbj]) 264 | self.performance_main["Sens"].append([sens, sens_sbj]) 265 | self.performance_main["Spec"].append([spec, spec_sbj]) 266 | 267 | def save_results(self): 268 | AUC, F1, Sens, Spec = np.array(self.performance_main["AUC"]), np.array(self.performance_main["F1"]), np.array(self.performance_main["Sens"]), np.array(self.performance_main["Spec"]) 269 | info_ = "step-%d, 
auc=%.3f\u00B1%.3f,f1=%.3f\u00B1%.3f,sens=%.3f\u00B1%.3f,spec=%.3f\u00B1%.3f, sbj:auc=%.3f\u00B1%.3f,f1=%.3f\u00B1%.3f,sens=%.3f\u00B1%.3f,spec=%.3f\u00B1%.3f"%(-1, 270 | np.mean(AUC[:, 0]), np.std(AUC[:, 0]), np.mean(F1[:, 0]), np.std(F1[:, 0]), np.mean(Sens[:, 0]), np.std(Sens[:, 0]), 271 | np.mean(Spec[:, 0]), np.std(Spec[:, 0]), np.mean(AUC[:, 1]), np.std(AUC[:, 1]), np.mean(F1[:, 1]), np.std(F1[:, 1]), 272 | np.mean(Sens[:, 1]), np.std(Sens[:, 1]), np.mean(Spec[:, 1]), np.std(Spec[:, 1])) 273 | print(info_) 274 | self.logger.info(info_) 275 | self.logger.info("AUC = %.3f\u00B1%.3f, AUC_sbj = %.3f\u00B1%.3f"%(np.mean(AUC[:, 0]), np.std(AUC[:, 0]), np.mean(AUC[:, 1]), np.std(AUC[:, 1]))) 276 | self.logger.info("F1 = %.3f\u00B1%.3f, f1_sbj = %.3f\u00B1%.3f"%(np.mean(F1[:, 0]), np.std(F1[:, 0]), np.mean(F1[:, 1]), np.std(F1[:, 1]))) 277 | self.logger.info("Sens = %.3f\u00B1%.3f, Sens_sbj = %.3f\u00B1%.3f"%(np.mean(Sens[:, 0]), np.std(Sens[:, 0]), np.mean(Sens[:, 1]), np.std(Sens[:, 1]))) 278 | self.logger.info("Spec = %.3f\u00B1%.3f, Spec_sbj = %.3f\u00B1%.3f"%(np.mean(Spec[:, 0]), np.std(Spec[:, 0]), np.mean(Spec[:, 1]), np.std(Spec[:, 1]))) 279 | 280 | if len(self.cfg_proj.flag_generatePredictions) != 0: 281 | for category in self.cfg_proj.flag_generatePredictions: 282 | for group in self.performance[category]: 283 | AUC, F1, Sens, Spec = np.array(self.performance[category][group]["AUC"]), np.array(self.performance[category][group]["F1"]), np.array(self.performance[category][group]["Sens"]), np.array(self.performance[category][group]["Spec"]) 284 | if len(AUC) == 0: 285 | AUC, F1, Sens, Spec = np.ones([1, 2])*-1, np.ones([1, 2])*-1, np.ones([1, 2])*-1, np.ones([1, 2])*-1 286 | self.logger.info(category + " " + str(group)) 287 | self.logger.info("AUC = %.3f\u00B1%.3f, AUC_sbj = %.3f\u00B1%.3f"%(np.mean(AUC[:, 0]), np.std(AUC[:, 0]), np.mean(AUC[:, 1]), np.std(AUC[:, 1]))) 288 | self.logger.info("F1 = %.3f\u00B1%.3f, f1_sbj = %.3f\u00B1%.3f"%(np.mean(F1[:, 0]), 
np.std(F1[:, 0]), np.mean(F1[:, 1]), np.std(F1[:, 1]))) 289 | self.logger.info("Sens = %.3f\u00B1%.3f, Sens_sbj = %.3f\u00B1%.3f"%(np.mean(Sens[:, 0]), np.std(Sens[:, 0]), np.mean(Sens[:, 1]), np.std(Sens[:, 1]))) 290 | self.logger.info("Spec = %.3f\u00B1%.3f, Spec_sbj = %.3f\u00B1%.3f"%(np.mean(Spec[:, 0]), np.std(Spec[:, 0]), np.mean(Spec[:, 1]), np.std(Spec[:, 1]))) 291 | 292 | self.logger.info("Conversation-wise " + str(np.mean(np.array(self.performance_main["AUC"])[:, 0]))) 293 | self.logger.info("Subject-wise " + str(np.mean(np.array(self.performance_main["AUC"])[:, 1]))) 294 | 295 | def generatePredictions(self, id2pred, id2pred_proba): 296 | dic_pd_index = {"Age":'nac_a1_age', "Edu":'nac_educ', "Sex":'nac_sex'} 297 | dic_attr_index = {"Age":["75-80", "81-87", "88-94"], "Edu":["12-15", "16-18", "19-21"], "Sex":[1, 2]} 298 | for cat_sub in self.cfg_proj.flag_generatePredictions: 299 | if cat_sub not in self.performance: 300 | self.performance[cat_sub] = {} 301 | for attr in dic_attr_index[cat_sub]: 302 | if attr not in self.performance[cat_sub]: 303 | self.performance[cat_sub][attr] = {"AUC":[], "F1":[], "Spec":[] , "Sens":[]} 304 | [lower, upper] = [int(attr[:2]), int(attr[3:])] if cat_sub != "Sex" else [attr, attr] 305 | auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj = self.getClinicalPerformance(id2pred, id2pred_proba, lower, upper, dic_pd_index[cat_sub]) 306 | if auc != -1: 307 | self.performance[cat_sub][attr]["AUC"].append([auc, auc_sbj]) 308 | self.performance[cat_sub][attr]["Sens"].append([sens, sens_sbj]) 309 | self.performance[cat_sub][attr]["F1"].append([f1, f1_sbj]) 310 | self.performance[cat_sub][attr]["Spec"].append([spec, spec_sbj]) 311 | 312 | def getClinicalPerformance(self, id2pred, id2pred_proba, lower, upper, clinical): 313 | pred_proba = [] 314 | pred = [] 315 | y_test = [] 316 | for id in id2pred: 317 | if int(self.df_labels[self.df_labels["ts_sub_id"] == id][clinical].values[0]) <= upper and 
int(self.df_labels[self.df_labels["ts_sub_id"] == id][clinical].values[0]) >= lower: 318 | pred_proba.extend(list(id2pred_proba[id])) 319 | pred.extend(list(id2pred[id])) 320 | y_test.extend([1-int(self.df_labels[self.df_labels["ts_sub_id"] == id]['nac_normcog'].values[0]) for i in range(len(id2pred_proba[id]))]) 321 | 322 | if len(set(y_test)) < 2: 323 | return -1, -1, -1, -1, -1, -1, -1, -1 324 | 325 | # Evaluation metric 326 | tn, fp, fn, tp = metrics.confusion_matrix(y_test, pred).ravel() 327 | auc = metrics.roc_auc_score(y_test, pred_proba) 328 | f1 = metrics.f1_score(y_test, pred) 329 | sens = tp/(tp+fn) 330 | spec = tn/(fp+tn) 331 | 332 | #subject-wise 333 | pred_sbj = [] 334 | pred_proba_sbj = [] 335 | y_test_subject = [] 336 | 337 | for id in id2pred_proba: 338 | if int(self.df_labels[self.df_labels["ts_sub_id"] == id][clinical].values[0]) <= upper and int(self.df_labels[self.df_labels["ts_sub_id"] == id][clinical].values[0]) >= lower: 339 | pred_single_sbj = id2pred_proba[id] 340 | 341 | # get most confidence conversations 342 | pred_single_sbj = list(pred_single_sbj) 343 | pred_single_sbj.sort(key = lambda x: -abs(x-0.5)) 344 | 345 | pred_proba_single_sbj = np.sum(pred_single_sbj)/len(pred_single_sbj) 346 | pred_single_sbj = 1 if pred_proba_single_sbj >= self.cfg_proj.vote_threshold else 0 347 | pred_sbj.append(pred_single_sbj) 348 | pred_proba_sbj.append(pred_proba_single_sbj) 349 | y_test_subject.append(1-int(self.df_labels[self.df_labels["ts_sub_id"] == id]['nac_normcog'].values[0])) 350 | 351 | # Evaluation metric 352 | tn_sbj, fp_sbj, fn_sbj, tp_sbj = metrics.confusion_matrix(y_test_subject, pred_sbj).ravel() 353 | auc_sbj = metrics.roc_auc_score(y_test_subject, pred_proba_sbj) 354 | f1_sbj = metrics.f1_score(y_test_subject, pred_sbj) 355 | sens_sbj = tp_sbj/(tp_sbj+fn_sbj) 356 | spec_sbj = tn_sbj/(fp_sbj+tn_sbj) 357 | 358 | return auc, f1, sens, spec, auc_sbj, f1_sbj, sens_sbj, spec_sbj 
-------------------------------------------------------------------------------- /LIWC2007_English.dic: -------------------------------------------------------------------------------- 1 | % 2 | 1 funct 3 | 2 pronoun 4 | 3 ppron 5 | 4 i 6 | 5 we 7 | 6 you 8 | 7 shehe 9 | 8 they 10 | 9 ipron 11 | 10 article 12 | 11 verb 13 | 12 auxverb 14 | 13 past 15 | 14 present 16 | 15 future 17 | 16 adverb 18 | 17 preps 19 | 18 conj 20 | 19 negate 21 | 20 quant 22 | 21 number 23 | 22 swear 24 | 121 social 25 | 122 family 26 | 123 friend 27 | 124 humans 28 | 125 affect 29 | 126 posemo 30 | 127 negemo 31 | 128 anx 32 | 129 anger 33 | 130 sad 34 | 131 cogmech 35 | 132 insight 36 | 133 cause 37 | 134 discrep 38 | 135 tentat 39 | 136 certain 40 | 137 inhib 41 | 138 incl 42 | 139 excl 43 | 140 percept 44 | 141 see 45 | 142 hear 46 | 143 feel 47 | 146 bio 48 | 147 body 49 | 148 health 50 | 149 sexual 51 | 150 ingest 52 | 250 relativ 53 | 251 motion 54 | 252 space 55 | 253 time 56 | 354 work 57 | 355 achieve 58 | 356 leisure 59 | 357 home 60 | 358 money 61 | 359 relig 62 | 360 death 63 | 462 assent 64 | 463 nonfl 65 | 464 filler 66 | % 67 | a 1 10 68 | abandon* 125 127 130 131 137 69 | abdomen* 146 147 70 | abilit* 355 71 | able* 355 72 | abortion* 146 148 149 73 | about 1 16 17 74 | above 1 17 252 250 75 | abrupt* 253 250 76 | abs 146 147 77 | absent* 354 78 | absolute 131 136 79 | absolutely 1 16 131 136 462 80 | abstain* 131 137 81 | abuse* 125 127 129 82 | abusi* 125 127 129 83 | academ* 354 84 | accept 125 126 131 132 85 | accepta* 125 126 131 132 86 | accepted 11 13 125 126 131 132 87 | accepting 125 126 131 132 88 | accepts 125 126 131 132 89 | accomplish* 354 355 90 | account* 358 91 | accura* 131 136 92 | ace 355 93 | ache* 125 127 130 146 148 94 | achiev* 354 355 95 | aching 125 127 130 146 148 96 | acid* 140 97 | acknowledg* 131 132 98 | acne 146 148 99 | acquainta* 121 123 100 | acquir* 355 101 | acquisition* 355 102 | acrid* 140 103 | across 1 17 252 250 104 | act 250 251 
105 | action* 251 250 106 | activat* 131 133 107 | active* 125 126 108 | actor* 356 109 | actress* 356 110 | actually 1 16 111 | add 131 138 112 | addict* 146 148 113 | addit* 131 138 114 | address 357 115 | adequa* 355 116 | adjust* 131 132 117 | administrat* 354 118 | admir* 125 126 119 | admit 11 14 121 131 132 120 | admits 11 14 121 131 132 121 | admitted 11 13 121 131 132 122 | admitting 121 131 132 123 | ador* 125 126 124 | adult 121 124 125 | adults 121 124 126 | advanc* 251 250 355 127 | advantag* 125 126 355 128 | adventur* 125 126 129 | advers* 125 127 130 | advertising 354 131 | advice 121 132 | advil 146 148 133 | advis* 121 354 134 | aerobic* 356 135 | affair* 121 136 | affect 131 133 137 | affected 11 13 131 133 138 | affecting 131 133 139 | affection* 125 126 140 | affects 131 133 141 | afraid 125 127 128 142 | after 1 17 253 250 143 | afterlife* 253 250 359 144 | aftermath* 253 250 145 | afternoon* 253 250 146 | afterthought* 131 132 253 250 147 | afterward* 253 250 148 | again 1 16 253 250 149 | against 1 17 150 | age 253 250 151 | aged 253 250 152 | agent 354 153 | agents 354 154 | ages 253 250 155 | aggravat* 125 127 129 131 133 156 | aggress* 125 127 129 157 | aging 253 250 158 | agitat* 125 127 129 159 | agnost* 359 160 | ago 253 250 161 | agoniz* 125 127 130 162 | agony 125 127 130 163 | agree 125 126 462 164 | agreeab* 125 126 165 | agreed 125 126 166 | agreeing 125 126 167 | agreement* 125 126 168 | agrees 125 126 169 | ah 462 170 | ahead 1 17 253 250 355 171 | aids 146 148 149 172 | ain't 11 1 12 14 19 173 | aint 11 1 12 14 19 174 | air 250 252 175 | alarm* 125 127 128 176 | alcohol* 146 148 150 177 | alive 146 148 360 178 | all 1 20 131 136 179 | alla 359 180 | allah* 359 181 | allerg* 146 148 182 | allot 1 20 131 135 183 | allow* 131 133 184 | almost 131 135 185 | alone 125 127 130 186 | along 1 17 131 138 187 | alot 1 10 20 131 135 188 | already 253 250 189 | alright* 125 126 462 190 | also 1 16 18 191 | altar* 359 192 | although 1 18 
193 | altogether 131 136 194 | always 131 136 253 250 195 | am 11 1 12 14 196 | amaz* 125 126 197 | ambigu* 131 135 198 | ambiti* 354 355 199 | amen 359 200 | amigo* 121 123 201 | amish 359 202 | among* 1 17 252 250 203 | amor* 125 126 204 | amount* 20 205 | amput* 146 148 206 | amus* 125 126 356 207 | an 1 10 208 | anal 131 137 146 147 209 | analy* 131 132 210 | ancient* 253 250 211 | and 1 18 131 138 212 | angel 359 213 | angelic* 359 214 | angels 359 215 | anger* 125 127 129 216 | angr* 125 127 129 217 | anguish* 125 127 128 218 | ankle* 146 147 219 | annoy* 125 127 129 220 | annual* 253 250 221 | anorexi* 146 148 150 222 | another 1 20 223 | answer* 131 132 224 | antacid* 146 148 225 | antagoni* 125 127 129 226 | antidepressant* 146 148 227 | anus* 146 147 228 | anxi* 125 127 128 229 | any 1 20 131 135 230 | anybod* 1 2 9 121 131 135 231 | anyhow 131 135 232 | anymore 1 20 250 253 233 | anyone* 1 2 9 121 131 135 234 | anything 1 2 9 131 135 235 | anytime 131 135 253 250 236 | anyway* 1 16 237 | anywhere 1 16 131 135 252 250 238 | aok 125 126 462 239 | apart 252 250 240 | apartment* 356 357 241 | apath* 125 127 242 | apolog* 121 243 | appall* 125 127 244 | apparent 131 136 245 | apparently 1 16 131 135 246 | appear 11 14 131 135 251 250 247 | appeared 11 13 131 135 251 250 248 | appearing 131 135 251 250 249 | appears 11 14 131 135 251 250 250 | appendic* 146 148 251 | appendix 146 147 252 | appeti* 146 150 253 | applicant* 354 254 | applicat* 354 255 | appreciat* 125 126 131 132 256 | apprehens* 125 127 128 257 | apprentic* 354 258 | approach* 251 250 259 | approv* 355 260 | approximat* 131 135 261 | april 253 250 262 | arbitrar* 131 135 263 | arch 146 147 264 | are 11 1 12 14 265 | area* 252 250 266 | aren't 11 1 12 14 19 267 | arent 11 1 12 14 19 268 | argh* 125 127 129 269 | argu* 121 125 127 129 270 | arm 146 147 271 | armies 121 272 | armpit* 146 147 273 | arms* 146 147 274 | army 121 275 | aroma* 140 276 | around 1 16 17 131 138 252 250 277 | arous* 146 
147 149 278 | arrival* 251 250 279 | arrive 11 14 251 250 280 | arrived 11 13 251 250 281 | arrives 11 14 251 250 282 | arriving 251 250 283 | arrogan* 125 127 129 284 | arse 146 147 22 285 | arsehole* 22 286 | arses 146 147 22 287 | art 356 288 | arter* 146 147 289 | arthr* 146 148 290 | artist* 356 291 | arts 356 292 | as 1 17 18 293 | asham* 125 127 128 294 | ask 11 14 121 295 | asked 11 13 121 296 | asking 121 297 | asks 11 14 121 298 | asleep 146 147 299 | aspirin* 146 148 300 | ass 146 147 149 22 301 | assault* 125 127 129 302 | assembl* 121 303 | asses 146 147 149 22 304 | asshole* 125 127 129 22 305 | assign* 354 306 | assistan* 354 307 | associat* 354 308 | assum* 131 132 135 309 | assur* 125 126 131 136 310 | asthma* 146 148 311 | at 1 17 252 250 312 | ate 11 13 146 150 313 | athletic* 356 314 | atho 1 18 315 | atm 358 316 | atms 358 317 | atop 1 17 252 250 318 | attachment* 125 126 319 | attack* 125 127 129 320 | attain* 355 321 | attempt* 355 322 | attend 251 250 323 | attended 251 250 324 | attending 251 250 325 | attends 251 250 326 | attent* 131 132 327 | attract* 125 126 328 | attribut* 131 133 329 | auction* 358 330 | audibl* 140 142 331 | audio* 140 142 332 | audit 358 333 | audited 358 334 | auditing 358 335 | auditor 358 336 | auditorium* 354 337 | auditors 358 338 | audits 358 339 | august 253 250 340 | aunt* 121 122 341 | authorit* 355 342 | autops* 360 343 | autumn 253 250 344 | aversi* 125 127 128 345 | avert* 131 137 346 | avoid* 125 127 128 131 137 347 | aw 462 348 | award* 125 126 354 355 349 | aware* 131 132 350 | away 1 17 252 250 351 | awesome 125 126 462 352 | awful 125 127 353 | awhile 253 250 354 | awkward* 125 127 128 355 | babe* 121 124 356 | babies 121 124 357 | baby* 121 124 358 | back 1 16 253 250 359 | backward* 252 250 360 | backyard 357 361 | bad 125 127 362 | bake* 146 150 357 363 | baking 146 150 357 364 | balcon* 357 365 | bald 146 147 366 | ball 356 367 | ballet* 356 368 | bambino* 121 124 369 | ban 131 137 370 | band 
121 356 371 | bandage* 146 148 372 | bandaid 146 148 373 | bands 121 356 374 | bank* 358 375 | banned 131 137 376 | banning 131 137 377 | bans 131 137 378 | baptis* 359 379 | baptiz* 359 380 | bar 146 150 356 381 | barely 131 135 382 | bargain* 358 383 | barrier* 131 137 384 | bars 146 150 356 385 | baseball* 356 386 | based 131 133 387 | bases 131 133 388 | bashful* 125 127 389 | basically 1 16 390 | basis 131 133 391 | basketball* 356 392 | bastard* 125 127 129 22 393 | bath* 356 357 394 | battl* 125 127 129 395 | be 11 1 12 396 | beach* 356 397 | beat 355 398 | beaten 125 127 129 354 355 399 | beaut* 125 126 140 141 400 | became 11 1 12 13 131 132 401 | because 1 18 131 133 402 | become 11 1 12 14 131 132 403 | becomes 11 1 12 14 131 132 404 | becoming 11 1 12 131 132 405 | bed 357 406 | bedding 357 407 | bedroom* 357 408 | beds 357 409 | been 11 1 12 13 410 | beer* 146 150 356 411 | before 1 17 253 250 412 | began 11 13 253 250 413 | beggar* 358 414 | begging 358 415 | begin 11 14 253 250 416 | beginn* 253 250 417 | begins 11 14 253 250 418 | begun 253 250 419 | behavio* 250 251 420 | behind 1 17 421 | being 11 1 12 422 | belief* 131 132 359 423 | believe 11 14 131 132 424 | believed 11 13 131 132 425 | believes 11 14 131 132 426 | believing 131 132 427 | bellies 146 147 428 | belly 146 147 429 | beloved 125 126 430 | below 1 17 252 250 431 | bend 252 250 432 | bending 252 250 433 | bends 252 250 434 | beneath 1 17 252 250 435 | benefic* 125 126 436 | benefit 125 126 437 | benefits 125 126 354 438 | benefitt* 125 126 439 | benevolen* 125 126 440 | benign* 125 126 441 | bent 252 250 442 | bereave* 360 443 | beside 1 17 252 250 444 | besides 1 17 20 131 134 445 | best 125 126 355 1 20 446 | bet 131 135 358 447 | bets 131 135 358 448 | better 125 126 355 449 | betting 131 135 358 450 | between 1 17 451 | beyond 1 16 17 252 250 452 | bf* 121 123 453 | bi 146 149 454 | biannu* 253 250 455 | bible* 359 456 | biblic* 359 457 | bicep* 146 147 458 | bicyc* 356 459 | big 
252 250 460 | bigger 252 250 461 | biggest 252 250 462 | bike* 356 463 | bill 358 464 | billed 358 465 | billing* 358 466 | billion* 1 21 467 | bills 358 468 | bimonth* 253 250 469 | binding 131 137 470 | binge* 146 148 150 471 | binging 146 148 150 472 | biolog* 354 473 | bipolar 146 148 474 | birdie* 356 475 | birth* 253 250 476 | bishop* 359 477 | bit 20 478 | bitch* 125 127 129 22 479 | bits 20 480 | bitter* 125 127 129 140 481 | biweek* 253 250 482 | biz 354 483 | black 140 141 484 | blackboard* 354 485 | blacke* 140 141 486 | blackish* 140 141 487 | blackjack 356 488 | blacks 140 141 489 | bladder* 146 147 490 | blah 464 491 | blam* 121 125 127 129 492 | blatant* 131 136 493 | bldg* 354 494 | bleed* 146 148 495 | blender* 357 496 | bless* 125 126 359 497 | blind* 146 148 140 141 498 | block 131 137 499 | blockbuster* 356 500 | blocked 131 137 501 | blocker* 131 137 502 | blocking 131 137 503 | blocks 131 137 504 | blog* 356 505 | blond* 140 141 506 | blood 146 147 507 | bloody 146 147 22 508 | blue* 140 141 509 | blur* 131 135 510 | bodi* 146 147 511 | body* 146 147 512 | boil* 146 150 513 | bold* 125 126 514 | bone 146 147 515 | boner* 146 149 516 | bones 146 147 517 | bonus* 125 126 355 358 518 | bony 146 147 519 | boob* 146 147 149 22 520 | book* 354 356 521 | boom* 140 142 522 | booz* 146 150 523 | borderline* 131 135 524 | bore* 125 127 525 | boring 125 127 526 | born 253 250 527 | borrow* 358 528 | boss* 131 133 354 529 | both 1 20 131 138 252 250 530 | bother* 125 127 129 531 | bottom* 252 250 532 | bought 11 13 358 533 | bound* 131 137 534 | bowel* 146 147 535 | boy 121 124 536 | boy's 121 124 537 | boyf* 121 123 538 | boys* 121 124 539 | brain* 146 147 540 | brake* 131 137 541 | brave* 125 126 542 | bread 146 150 543 | breadth 252 250 544 | break 250 545 | breakfast* 146 150 546 | breast* 146 147 149 547 | breath* 146 147 548 | bridle* 131 137 549 | brief* 251 250 550 | bright* 125 126 140 141 551 | brillian* 125 126 552 | bring 11 14 251 250 553 | 
bringing 251 250 554 | brings 11 14 251 250 555 | brink 252 250 556 | bro 121 122 557 | broad* 252 250 558 | broke 125 127 130 559 | broker* 354 358 560 | bronchi* 146 148 561 | broom* 357 562 | bros 121 122 563 | brother* 121 122 564 | brought 11 13 251 250 565 | brown* 140 141 566 | brunch* 146 150 567 | brush* 140 143 568 | brutal* 125 127 129 569 | buck 358 570 | bucks 358 571 | bud 121 123 572 | buddh* 359 573 | buddies* 121 123 574 | buddy* 121 123 575 | budget* 358 576 | building 250 577 | bulimi* 146 148 150 578 | bunch 1 20 579 | burden* 125 127 580 | bureau* 354 581 | burial* 360 582 | buried 360 583 | burnout* 354 355 584 | burp* 146 148 585 | bury 360 586 | business* 354 358 587 | busy 250 253 354 588 | but 1 18 131 139 589 | butt 146 147 149 22 590 | butt's 146 147 149 22 591 | butter* 140 592 | butts 146 147 149 22 593 | buy* 358 594 | by 1 17 595 | bye 121 250 253 596 | caf* 146 150 597 | calculus 354 598 | call 121 599 | called 11 13 121 600 | caller* 121 601 | calling 121 602 | calls 121 603 | calm* 125 126 604 | came 11 13 131 138 251 250 605 | camping 356 606 | campus* 354 607 | can 11 1 12 14 608 | can't 11 1 12 14 19 609 | cancer* 146 148 610 | candie* 146 150 611 | candle* 140 141 612 | candy 146 150 613 | cannot 11 1 12 14 19 614 | cant 11 1 12 14 19 615 | capab* 355 616 | capacit* 252 250 617 | captain 121 618 | car 250 251 619 | caramel* 140 620 | cardia* 146 148 621 | cardio* 146 148 622 | cards 356 623 | care 11 14 125 126 624 | cared 11 13 125 126 625 | career* 354 626 | carefree 125 126 627 | careful* 125 126 131 137 628 | careless* 125 127 629 | cares 11 14 125 126 630 | caress* 140 143 631 | caring 125 126 632 | carpet* 357 633 | carried 11 13 251 250 634 | carrier* 251 250 635 | carries 11 14 251 250 636 | carry 11 14 251 250 637 | carrying 251 250 638 | cash* 358 639 | casino* 356 358 640 | casket* 360 641 | casual 125 126 356 642 | casually 125 126 643 | casualt* 360 644 | catch 250 251 645 | categor* 131 132 646 | catholic* 359 
647 | caught 11 13 250 251 648 | caus* 131 133 649 | caut* 131 137 650 | cd* 356 651 | cease* 131 137 253 250 652 | ceasing 131 137 253 250 653 | ceiling* 252 250 654 | celebrat* 121 355 356 655 | celebrit* 356 656 | cell 121 657 | cellphon* 121 658 | cells 121 659 | cellular* 121 660 | cemet* 360 661 | cent 358 662 | center* 252 250 663 | centre* 252 250 664 | cents 358 665 | centur* 253 250 666 | ceo* 354 667 | certain* 125 126 131 136 668 | certif* 354 669 | cetera 1 20 670 | chairm* 354 671 | chalk 354 672 | challeng* 125 126 354 355 673 | champ* 125 126 354 355 674 | chance 131 135 675 | change 250 251 131 133 676 | changed 11 13 250 251 131 133 677 | changes 250 251 131 133 678 | changing 250 251 131 133 679 | channel* 356 680 | chapel* 359 681 | chaplain* 359 682 | charit* 125 126 358 683 | charm* 125 126 684 | chat* 121 356 685 | cheap* 358 686 | cheat* 125 127 129 687 | check 358 688 | checkers 356 689 | checking 358 690 | checks 358 691 | checkup* 146 148 692 | cheek* 146 147 693 | cheer* 125 126 694 | chequ* 358 695 | cherish* 125 126 696 | chess 356 697 | chest* 146 147 698 | chew* 146 150 699 | chick 121 124 700 | chick'* 121 124 701 | child 121 124 702 | child's 121 124 703 | childhood 250 253 704 | children* 121 124 705 | chillin* 356 706 | chills 146 148 707 | chiropract* 146 148 708 | chlamydia 146 148 149 709 | chocolate* 140 710 | choice* 131 132 711 | choir* 356 140 142 712 | chok* 146 148 713 | cholester* 146 148 714 | choos* 131 132 715 | chore* 357 716 | chorus 356 717 | chow* 146 150 718 | christ 359 719 | christian* 359 720 | christmas* 250 253 359 721 | chronic* 146 148 722 | chuckl* 125 126 723 | church* 359 724 | cigar* 146 150 725 | cinema* 356 726 | circle 140 141 727 | citizen 121 124 728 | citizen'* 121 124 729 | citizens 121 124 730 | citrus* 140 731 | city 250 252 732 | clarif* 131 132 733 | class 354 734 | classes 354 735 | classmate* 354 736 | classroom* 354 737 | clean* 357 738 | clear 131 136 739 | clearly 1 16 131 136 740 | 
clergy 359 741 | clever* 125 126 742 | click* 140 141 743 | climb* 251 250 355 744 | clinic* 146 148 745 | clock* 253 250 746 | close 131 138 252 250 747 | closed 252 250 748 | closely 251 250 749 | closer 252 250 750 | closes 251 250 751 | closest 252 250 752 | closet 357 753 | closets 357 754 | closing 251 250 755 | closure 131 132 355 756 | clothes 146 147 757 | club* 356 758 | coach* 356 759 | cock 146 147 149 22 760 | cocks* 146 147 149 22 761 | cocktail* 146 150 356 762 | codeine 146 148 763 | coffee* 146 150 356 764 | coffin* 360 765 | cohere* 131 132 766 | coin 358 767 | coins 358 768 | coke* 146 150 769 | cold* 140 143 770 | collab* 354 771 | colleague* 121 123 354 772 | colleg* 354 773 | cologne* 140 774 | colon 146 147 775 | colono* 146 148 776 | colons 146 147 777 | color* 140 141 778 | colour* 140 141 779 | column* 140 141 780 | com 354 781 | coma* 146 148 782 | come 11 14 131 138 251 250 783 | comed* 125 126 356 784 | comes 11 14 251 250 785 | comfort* 125 126 786 | comic* 356 787 | coming 251 250 788 | comment* 121 789 | commerc* 354 790 | commit 131 136 791 | commitment* 125 126 131 136 792 | commits 131 136 793 | committ* 131 136 794 | common 250 253 795 | commun* 121 796 | commute* 354 797 | commuting 354 798 | companies 354 799 | companion 121 123 800 | companions 121 123 801 | companionship* 121 802 | company 354 803 | compassion* 121 125 126 804 | compel* 131 133 805 | compensat* 358 806 | compet* 355 807 | complain* 121 125 127 808 | complete 131 136 809 | completed 131 136 810 | completely 1 16 131 136 811 | completes 131 136 812 | complex* 131 132 813 | compliance 131 133 814 | complica* 131 132 815 | complie* 131 133 816 | compliment* 125 126 817 | comply* 131 133 818 | compreh* 131 132 819 | compulsiv* 131 137 820 | comput* 354 821 | comrad* 121 123 822 | concentrat* 131 132 823 | concerned 125 824 | concert* 356 140 142 825 | conclud* 131 132 133 355 826 | conclus* 131 132 355 827 | condo 357 828 | condom 146 149 829 | condominium* 357 
830 | condoms 146 149 831 | condos 357 832 | conferenc* 354 833 | confess* 121 131 132 359 834 | confide 121 835 | confided 121 836 | confidence 125 126 131 136 355 837 | confident 125 126 131 136 355 838 | confidently 125 126 131 136 355 839 | confides 121 840 | confiding 121 841 | confin* 131 137 842 | conflict* 131 137 843 | confront* 125 127 129 844 | confus* 125 127 128 131 135 845 | congest* 146 148 846 | conglom* 354 847 | congregat* 121 848 | connection* 250 252 849 | conquer* 355 850 | conscientious* 355 851 | conscious* 131 132 852 | consequen* 131 133 853 | conserv* 131 137 854 | consider 131 132 855 | considerate 125 126 856 | considered 131 132 857 | considering 131 132 858 | considers 131 132 859 | constant 250 253 860 | constantly 1 16 250 253 861 | constipat* 146 148 862 | constrain* 131 137 863 | constrict* 131 137 864 | consult* 121 354 865 | consumer* 354 358 866 | contact* 121 867 | contag* 146 148 868 | contain* 131 137 869 | contemplat* 131 132 870 | contempt* 125 127 129 871 | contented* 125 126 872 | contentment 125 126 873 | contingen* 131 135 874 | continu* 253 250 875 | contracts 354 876 | contradic* 121 125 127 129 131 137 877 | control* 131 133 137 355 878 | convent 359 879 | convents 359 880 | convers* 121 881 | convinc* 125 126 882 | cook* 146 150 356 883 | cool 125 126 462 140 143 884 | cornea* 146 147 885 | corner 252 250 886 | corners 252 250 887 | coronar* 146 148 888 | coroner* 360 360 889 | corp 354 890 | corporat* 354 358 891 | corps 354 892 | corpse* 360 893 | correct* 131 136 894 | correlat* 131 132 895 | cos 131 133 896 | cost* 358 897 | couch* 357 898 | cough* 146 148 899 | could 11 1 12 131 134 900 | could've 11 1 12 13 15 131 134 901 | couldn't 11 1 12 19 131 134 902 | couldnt 11 1 12 19 131 134 903 | couldve 11 1 12 13 15 131 134 904 | counc* 121 354 905 | couns* 121 354 906 | countr* 250 252 907 | couple 1 20 908 | coupon* 358 909 | courag* 125 126 910 | course* 354 911 | cousin* 121 122 912 | coverage 250 252 913 | 
coworker* 121 354 914 | coz 131 133 915 | cramp* 146 148 916 | crap 125 127 129 146 147 22 917 | crappy 125 127 129 22 918 | craz* 125 127 128 919 | cream 140 141 920 | create* 125 126 131 133 355 921 | creati* 125 126 131 133 355 922 | credential* 354 923 | credit* 125 126 354 358 924 | cremat* 360 925 | cried 11 13 125 127 130 926 | cries 125 127 130 927 | critical 125 127 129 928 | critici* 125 127 129 929 | cross* 251 250 930 | crotch 146 147 931 | crowd* 121 932 | crown* 355 933 | crucifi* 359 934 | crude* 125 127 129 935 | cruel* 125 127 129 936 | cruis* 251 250 356 937 | crusade* 359 938 | crushed 125 127 130 939 | cry 125 127 130 940 | crying 125 127 130 941 | crypt* 360 942 | cubicle* 354 943 | cuddl* 146 149 944 | cultur* 121 945 | cunt* 125 127 129 22 946 | curb* 131 137 947 | curio* 131 132 948 | currenc* 358 949 | current* 253 250 950 | curricul* 354 951 | curtail* 131 137 952 | curtain* 357 953 | customer* 354 358 954 | cut 125 127 129 955 | cute* 125 126 956 | cutie* 125 126 957 | cuz 1 18 131 133 958 | cv* 354 959 | cycle* 253 250 960 | cynic 125 127 129 961 | cyst* 146 148 962 | dad* 121 122 963 | dail* 253 250 964 | damag* 125 127 130 965 | damn* 125 127 129 22 966 | danc* 251 250 356 967 | dang 22 968 | danger* 125 127 129 969 | daring 125 126 970 | darlin* 125 126 971 | darn 22 972 | date* 253 250 973 | dating 121 974 | daughter* 121 122 975 | day* 253 250 976 | daze* 125 127 977 | dead 360 978 | deadline* 354 979 | deaf* 146 148 140 142 980 | deal 121 981 | dean* 354 982 | dear* 125 126 983 | death* 360 984 | debit* 358 985 | debt* 358 986 | decade* 253 250 987 | decay* 125 127 253 250 988 | decease* 360 989 | december 253 250 990 | decid* 131 132 991 | decis* 131 132 992 | decongest* 146 148 993 | decorat* 356 994 | deduc* 131 132 133 995 | deep* 252 250 996 | defeat* 125 127 130 355 997 | defect* 125 127 998 | defenc* 125 127 129 131 137 999 | defens* 125 127 129 131 137 1000 | define 131 132 1001 | defined 131 136 1002 | defines 131 132 1003 
| defining 131 132 1004 | definite 125 126 131 136 1005 | definitely 1 16 125 126 131 136 1006 | definitive* 131 136 1007 | degrad* 125 127 1008 | delay* 131 137 253 250 1009 | delectabl* 140 125 126 1010 | delegat* 354 1011 | delicate* 125 126 1012 | delicious* 125 126 140 1013 | deligh* 125 126 1014 | deliver* 251 250 1015 | demise 360 1016 | demon* 359 1017 | demote* 354 1018 | den 357 1019 | denia* 131 137 1020 | denie* 131 137 1021 | dense 252 250 1022 | densit* 252 250 1023 | dentist* 146 148 1024 | deny* 131 137 1025 | deoder* 140 1026 | depart 251 250 1027 | departed 251 250 1028 | departing 251 250 1029 | department* 354 1030 | departs 251 250 1031 | departure* 251 250 1032 | depend 131 133 135 1033 | depended 11 13 131 133 135 1034 | depending 131 133 135 1035 | depends 11 14 131 133 135 1036 | deposit* 358 1037 | depress* 125 127 130 1038 | depriv* 125 127 130 1039 | dept 354 1040 | depth* 252 250 1041 | derma* 146 148 1042 | describe 11 14 121 1043 | described 11 13 121 1044 | describes 11 14 121 1045 | describing 121 1046 | desir* 131 134 1047 | desk* 354 1048 | despair* 125 127 130 1049 | desperat* 125 127 128 1050 | despis* 125 127 129 1051 | despite 1 17 1052 | dessert* 146 150 1053 | destroy* 125 127 129 1054 | destruct* 125 127 129 1055 | determina* 125 126 131 132 355 1056 | determine 131 132 1057 | determined 125 126 131 132 355 1058 | determines 131 132 1059 | determining 131 132 1060 | detox* 146 148 1061 | devastat* 125 127 130 1062 | devil* 125 127 359 1063 | devot* 125 126 1064 | diabet* 146 148 1065 | diagnos* 146 148 1066 | diagonal* 252 250 1067 | diaries 356 1068 | diarr* 146 148 1069 | diary 356 1070 | dick 146 147 149 22 1071 | dicks 146 147 149 22 1072 | did 11 1 12 13 1073 | didn't 11 1 12 13 19 1074 | didnt 11 1 12 13 19 1075 | die 360 1076 | died 11 13 360 1077 | dies 11 14 360 1078 | diet* 146 150 1079 | difference* 131 1 20 1080 | differentiat* 131 132 1081 | difficult* 125 127 1082 | digest* 146 148 150 1083 | digni* 125 126 
1084 | diligen* 355 1085 | dime* 358 1086 | dinar* 358 1087 | dine 150 146 1088 | dined 146 150 1089 | diner 146 150 1090 | dinero 358 1091 | diners 146 150 1092 | dines 146 150 1093 | dining 146 150 1094 | dinner* 146 150 1095 | diplom* 354 1096 | directly 131 136 1097 | director* 354 1098 | disadvantage* 125 127 130 1099 | disagree* 125 127 1100 | disappear* 251 250 1101 | disappoint* 125 127 130 1102 | disaster* 125 127 1103 | discern* 131 132 1104 | disciplin* 131 137 1105 | disclo* 121 131 132 1106 | discomfort* 125 127 128 1107 | discount* 358 1108 | discourag* 125 127 130 131 137 1109 | discover* 131 132 1110 | discuss* 121 1111 | disease* 146 148 1112 | disgust* 125 127 129 1113 | dish 146 150 1114 | dishearten* 125 127 130 1115 | dishes 146 150 1116 | dishwasher* 357 1117 | disillusion* 125 127 130 1118 | dislike 11 14 125 127 1119 | disliked 11 13 125 127 1120 | dislikes 11 14 125 127 1121 | disliking 125 127 1122 | dismay* 125 127 1123 | disorient* 131 135 1124 | disregard* 131 137 1125 | dissatisf* 125 127 130 1126 | dissertat* 354 1127 | distan* 252 250 1128 | distinct* 131 136 1129 | distinguish* 131 132 1130 | distract* 125 127 128 1131 | distraught 125 127 128 1132 | distress* 125 127 128 1133 | distrust* 125 127 129 1134 | disturb* 125 127 128 1135 | dividend* 354 358 1136 | divin* 125 126 359 1137 | divorc* 121 1138 | dizz* 146 148 1139 | dj* 356 1140 | do 11 1 12 14 1141 | doa 360 1142 | doctor* 146 148 354 1143 | does 11 1 12 14 1144 | doesn't 11 1 12 14 19 1145 | doesnt 11 1 12 14 19 1146 | doghouse* 357 1147 | doing 11 1 12 1148 | dollar* 358 1149 | domestic* 357 1150 | domina* 125 127 129 355 1151 | domote* 355 1152 | don't 11 1 12 14 19 1153 | donat* 358 1154 | done 11 1 12 13 1155 | dont 11 1 12 14 19 1156 | doom* 125 127 130 359 1157 | door* 357 1158 | dork* 125 127 1159 | dorm* 354 357 1160 | dosage 146 148 1161 | dose* 146 148 1162 | dosing 146 148 1163 | dotcom 354 1164 | doubl* 1 20 1165 | doubt* 125 127 128 131 135 1166 | down 1 17 
252 250 1167 | downsiz* 354 1168 | downward* 252 250 1169 | dozen* 1 21 1170 | dr 146 148 1171 | drama 356 1172 | drank 11 13 146 150 1173 | drape* 357 1174 | dread* 125 127 128 1175 | dream* 356 1176 | dresser* 357 1177 | drie* 140 143 1178 | drift* 251 250 1179 | drily 140 143 1180 | drink* 146 150 356 1181 | drive 251 250 1182 | driven 11 13 251 250 355 1183 | drives 251 250 1184 | driveway* 357 1185 | driving 251 250 1186 | drool* 140 146 147 1187 | drop 250 1188 | dropout* 354 355 1189 | drove 11 13 251 250 1190 | drown* 360 1191 | drows* 146 148 1192 | drs 146 148 1193 | drug* 146 148 1194 | drum 356 1195 | drumm* 356 1196 | drums 356 1197 | drunk* 146 150 356 1198 | dry* 140 143 1199 | dubious* 131 135 1200 | due 250 253 1201 | duh 462 1202 | dull* 125 127 130 1203 | dumb* 125 127 129 22 1204 | dump* 125 127 129 1205 | dunno 131 135 1206 | duplex* 357 1207 | during 1 17 253 250 1208 | duti* 131 137 354 1209 | duty 131 137 354 1210 | dvd* 356 1211 | dwell* 125 127 128 1212 | dx 146 148 1213 | dying 360 1214 | dyke* 146 149 22 1215 | dynam* 125 126 1216 | each 1 20 131 138 1217 | eager* 125 126 1218 | ear 140 142 146 147 1219 | earli* 253 250 1220 | early 253 250 1221 | earn* 354 355 1222 | ears 140 142 146 147 1223 | ease* 125 126 1224 | easie* 125 126 1225 | easily 125 126 1226 | easiness 125 126 1227 | easing 125 126 1228 | east* 252 250 1229 | easy* 125 126 356 1230 | eat 146 150 1231 | eaten 11 13 146 150 1232 | eating 146 150 1233 | eats 146 150 1234 | econ* 354 358 1235 | ecsta* 125 126 1236 | edge 252 250 140 143 1237 | edges 252 250 140 143 1238 | edging 252 250 140 143 1239 | edit* 354 1240 | educat* 354 1241 | effect* 131 132 133 355 1242 | efficien* 125 126 355 1243 | effort* 355 1244 | egg* 146 150 1245 | egotis* 125 127 1246 | eight* 1 21 1247 | either 1 20 131 139 1248 | elbow* 146 147 1249 | elegan* 125 126 1250 | elementary 354 1251 | eleven 1 21 1252 | elicit* 131 133 1253 | elit* 355 1254 | else 1 20 1255 | email 121 1256 | email'* 121 1257 
| emailed 11 13 121 1258 | emailer* 121 1259 | emailing 121 1260 | emails 121 1261 | embalm* 360 1262 | embarrass* 125 127 128 1263 | embezzl* 358 1264 | emotion 125 1265 | emotional 125 127 128 1266 | emotions 125 1267 | emphysem* 146 148 1268 | employ* 354 1269 | empt* 125 127 130 252 250 1270 | enabl* 355 1271 | enclos* 131 137 252 250 1272 | encompass* 252 250 1273 | encourag* 121 125 126 1274 | end 253 250 1275 | endeav* 355 1276 | ended 11 13 253 250 1277 | ending 253 250 1278 | ends 253 250 1279 | enema* 146 148 1280 | enemie* 121 125 127 129 1281 | enemy* 121 125 127 129 1282 | energ* 125 126 1283 | engag* 125 126 1284 | enjoy* 125 126 1285 | enlighten* 131 132 1286 | enorm* 252 250 1287 | enough 1 20 1288 | enrag* 125 127 129 1289 | enter 251 250 1290 | entered 11 13 251 250 1291 | entering 251 250 1292 | enters 251 250 1293 | entertain* 125 126 356 1294 | enthus* 125 126 1295 | entire* 1 20 131 136 1296 | entrance* 252 250 1297 | envie* 125 127 129 1298 | envious 125 127 129 1299 | environment* 252 250 1300 | envy* 125 127 129 1301 | epidemic* 360 1302 | episcopal* 359 1303 | equal* 1 20 1304 | er 463 1305 | era 253 250 1306 | erectile 146 147 149 1307 | erection* 146 147 149 1308 | erotic* 146 149 1309 | esl 354 1310 | especially 1 16 1311 | espresso* 146 150 1312 | essential 131 136 1313 | estrogen* 146 148 1314 | etc 1 20 1315 | etern* 253 250 1316 | euro 358 1317 | euros 358 1318 | evaluat* 131 132 1319 | evangel* 359 1320 | eve 253 250 1321 | even 1 16 1322 | evening* 253 250 1323 | event 250 253 1324 | eventually 1 16 250 253 1325 | ever 1 16 131 136 253 250 1326 | every 1 20 131 136 1327 | everybod* 1 2 9 121 131 136 1328 | everyday 250 253 1329 | everyone* 1 2 9 121 1330 | everything* 1 2 9 121 131 136 1331 | everywhere 252 250 1332 | evident* 131 136 1333 | evil* 125 127 129 1334 | ex 121 122 1335 | exact* 131 136 1336 | exam 354 1337 | examin* 131 132 1338 | example* 1 20 1339 | exams 354 1340 | exbf* 121 123 1341 | exboyfriend* 121 123 1342 | 
excel* 125 126 354 355 1343 | except 1 17 131 139 1344 | exchang* 358 1345 | excit* 125 126 1346 | exclu* 131 139 1347 | excruciat* 125 127 1348 | excus* 121 1349 | execution* 360 1350 | executive* 354 1351 | exercis* 146 148 356 1352 | exes 121 122 1353 | exgf* 121 123 1354 | exgirl* 121 123 1355 | exhaust* 125 127 146 148 1356 | exhubby* 121 122 1357 | exhusband* 121 122 1358 | exit* 252 250 1359 | expect* 131 134 1360 | expel* 354 1361 | expens* 358 1362 | experienc* 140 1363 | experiment 131 133 1364 | explain 11 14 121 131 132 1365 | explained 11 13 121 131 132 1366 | explaining 121 131 132 1367 | explains 11 14 121 131 132 1368 | explanat* 131 132 1369 | explicit* 131 136 1370 | explor* 131 132 251 250 1371 | express* 121 1372 | expulsion* 354 1373 | extent 1 20 1374 | exterior* 252 250 1375 | exterminat* 360 1376 | extra 1 20 1377 | extremely 1 20 131 136 1378 | exwife* 121 122 1379 | exwive* 121 122 1380 | eye* 146 147 140 141 1381 | eying 140 141 1382 | fab 125 126 1383 | fabulous* 125 126 1384 | face 146 147 1385 | faces 146 147 1386 | facial* 146 147 1387 | fact 131 136 1388 | factories 354 1389 | factory 354 1390 | facts 131 136 1391 | factual* 131 136 1392 | facult* 354 1393 | fade* 253 250 1394 | fading* 253 250 1395 | fail* 125 127 130 354 355 1396 | faint* 146 148 1397 | fairly 1 20 131 135 1398 | faith* 125 126 359 1399 | fake 125 127 1400 | fall 251 250 1401 | fallen 251 250 1402 | falling 251 250 1403 | falls 251 250 1404 | families* 121 122 356 357 1405 | family 121 122 356 357 1406 | fantasi* 356 1407 | fantastic* 125 126 1408 | fantasy 356 1409 | far 252 250 1410 | farsighted 146 148 1411 | farther 252 250 1412 | farthest 252 250 1413 | fast 253 250 1414 | faster 253 250 1415 | fastest 253 250 1416 | fat 146 147 148 150 1417 | fatal* 125 127 360 1418 | father* 121 122 1419 | fatigu* 125 127 130 146 148 1420 | fats 146 148 150 1421 | fatt* 146 147 148 150 1422 | fault* 125 127 1423 | favor* 125 126 1424 | favour* 125 126 1425 | fax* 354 1426 | 
fear 125 127 128 1427 | feared 125 127 128 1428 | fearful* 125 127 128 1429 | fearing 125 127 128 1430 | fearless* 125 126 1431 | fears 125 127 128 1432 | february 253 250 1433 | fed 11 13 146 150 1434 | fee 358 1435 | feed 146 150 1436 | feedback 354 1437 | feeder* 146 150 1438 | feeding* 146 150 1439 | feeds 146 150 1440 | feel 11 14 131 132 140 143 1441 | feeling* 131 132 140 143 1442 | feels 11 14 131 132 140 143 1443 | fees 358 1444 | feet 146 147 1445 | fell 251 250 1446 | fellow* 121 123 1447 | felt 11 13 131 132 140 143 1448 | female* 121 124 1449 | fenc* 131 137 1450 | feroc* 125 127 129 1451 | festiv* 125 126 1452 | fetid* 140 1453 | feud* 121 125 127 129 1454 | fever* 146 148 1455 | few* 1 20 1456 | fiance* 121 123 1457 | fiery 125 127 129 1458 | fiesta* 125 126 1459 | fift* 1 21 1460 | fight* 121 125 127 129 1461 | figur* 131 132 1462 | fill* 250 251 1463 | film* 356 1464 | final 250 253 1465 | finaliz* 354 355 1466 | finally 250 253 1467 | finals 354 1468 | financ* 354 358 1469 | find* 131 132 1470 | fine 125 126 1471 | finger* 146 147 140 143 1472 | finish* 253 250 1473 | fire 140 143 1474 | fired 125 127 354 1475 | fireplace* 357 1476 | firing 354 1477 | first 1 21 253 250 355 1478 | firstly 1 21 253 250 1479 | firsts 1 21 253 250 355 1480 | fishing 356 1481 | fit 250 252 1482 | fitness 356 1483 | five 1 21 1484 | fizz* 140 142 1485 | flatscreen* 356 1486 | flatter* 121 125 126 1487 | flavor* 140 1488 | flavour* 140 1489 | flawless* 125 126 1490 | fled 11 13 251 250 1491 | flee* 251 250 1492 | flesh* 146 147 1493 | flew 11 13 251 250 1494 | flexib* 125 126 140 143 1495 | flies 251 250 1496 | flirt* 125 126 1497 | floor* 252 250 1498 | flow* 251 250 1499 | flu 146 148 1500 | flunk* 125 127 130 1501 | fly 251 250 1502 | flying 251 250 1503 | foe* 125 127 129 1504 | folks 121 122 1505 | follow 11 14 251 250 1506 | followed 11 13 251 250 1507 | following 251 250 1508 | follows 11 14 251 250 1509 | followup* 253 250 1510 | fond 125 126 1511 | fondly 125 
126 1512 | fondness 125 126 1513 | food* 146 150 1514 | fool* 125 127 1515 | foot 146 147 1516 | football* 356 1517 | for 1 17 1518 | forbid* 125 127 131 137 1519 | force* 131 133 1520 | forearm* 146 147 1521 | forehead* 146 147 1522 | foreplay 146 149 1523 | forever 131 136 253 250 1524 | forgave 121 125 126 131 132 1525 | forget* 131 137 1526 | forgiv* 121 125 126 131 132 1527 | forgot* 131 137 11 13 1528 | form 1 20 1529 | former* 253 250 1530 | fortune* 358 1531 | forward* 251 250 252 253 1532 | fought 11 13 121 125 127 129 1533 | found 11 13 131 132 1534 | foundation* 131 133 1535 | founded 131 133 355 1536 | founder* 131 133 355 1537 | founding 355 1538 | four* 1 21 1539 | fragil* 140 143 1540 | fragran* 140 1541 | franc 358 1542 | franchis* 354 358 1543 | francs 358 1544 | frankly 131 136 1545 | frantic* 125 127 128 1546 | frat 354 1547 | fratern* 354 1548 | freak* 125 127 1549 | free 125 126 358 1550 | freeb* 125 126 358 1551 | freed* 125 126 1552 | freeing 125 126 1553 | freely 125 126 1554 | freeness 125 126 1555 | freer 125 126 1556 | frees* 125 126 1557 | freez* 140 143 1558 | frequent 253 250 1559 | frequented 253 250 1560 | frequenting 253 250 1561 | frequently 1 16 253 250 1562 | frequents 253 250 1563 | freshm* 354 1564 | friday* 253 250 1565 | fridge* 357 1566 | friend* 121 123 125 126 1567 | fries 146 150 1568 | fright* 125 127 128 1569 | frisbee* 356 1570 | from 1 17 1571 | front 250 251 1572 | froze* 140 143 1573 | fruit* 146 150 140 1574 | frustrat* 125 127 129 1575 | fry* 146 150 1576 | fuck 11 14 125 127 129 146 149 22 1577 | fucked* 11 13 125 127 129 146 149 22 1578 | fucker* 125 127 129 146 149 22 1579 | fuckin* 125 127 129 146 149 22 1580 | fucks 11 14 125 127 129 146 149 22 1581 | fulfill* 355 1582 | full* 1 20 252 250 1583 | fume* 125 127 129 1584 | fuming 125 127 129 1585 | fun 125 126 1586 | fund 358 1587 | fundamental 131 136 1588 | fundamentalis* 131 136 359 1589 | fundamentally 131 136 1590 | fundamentals 131 136 1591 | funded 358 
1592 | funding 358 1593 | funds 358 1594 | funer* 360 1595 | funn* 125 126 1596 | furious* 125 127 129 1597 | furniture 357 1598 | further* 252 250 1599 | fury 125 127 129 1600 | futon* 357 1601 | futur* 253 250 1602 | fuzz* 131 135 140 143 1603 | gain* 355 1604 | gambl* 358 1605 | game* 121 356 1606 | gaming 356 1607 | garage* 356 357 1608 | garden* 356 357 1609 | gate* 357 1610 | gather* 121 1611 | gave 11 13 121 1612 | gay 146 149 1613 | gays 146 149 1614 | gaz* 140 141 1615 | geek* 125 127 1616 | general 131 1617 | generally 1 16 131 135 1618 | generate* 131 133 1619 | generating 131 133 1620 | generation* 253 250 1621 | generator* 131 133 1622 | genero* 125 126 1623 | genital* 146 147 149 1624 | genocid* 360 1625 | gentile* 359 1626 | gentle 125 126 1627 | gentlem* 121 124 1628 | gentler 125 126 1629 | gentlest 125 126 1630 | gently 125 126 1631 | get 11 14 1632 | gets 11 14 1633 | gf* 121 123 1634 | ghost* 360 1635 | giant* 252 250 1636 | gigantic 252 250 1637 | giggl* 125 126 1638 | ginormous 252 250 1639 | girl 121 124 1640 | girl's 121 124 1641 | girlfriend* 121 123 1642 | girls* 121 124 1643 | give 11 14 121 1644 | given 11 13 1645 | giver* 121 125 126 1646 | gives 11 14 121 1647 | giving 121 125 126 1648 | glad 125 126 1649 | gladly 125 126 1650 | glamor* 125 126 1651 | glamour* 125 126 1652 | glanc* 140 141 1653 | gland* 146 148 1654 | glaucoma 146 148 1655 | global* 252 250 1656 | gloom* 125 127 130 1657 | glori* 125 126 1658 | glory 125 126 1659 | glow* 140 141 1660 | glutton* 146 150 1661 | gmat 354 1662 | go 11 14 251 250 1663 | goal* 354 355 1664 | gobble* 146 150 1665 | gobbling 146 150 1666 | god 359 1667 | god'* 359 1668 | goddam* 125 127 129 22 1669 | goes 11 14 251 250 1670 | going 251 250 1671 | golf* 356 1672 | gone 11 13 251 250 1673 | gonna 11 12 15 1674 | gonorrhea 146 149 1675 | gonorrhoea 146 149 1676 | good 125 126 1677 | goodness 125 126 1678 | goosebump* 146 147 1679 | gorgeous* 125 126 1680 | gospel* 359 1681 | gossip* 121 125 127 
1682 | got 11 13 1683 | gotta 11 12 15 1684 | gotten 11 13 1685 | gov 354 1686 | govern* 354 1687 | gpa 354 1688 | grab* 140 143 1689 | grace 125 126 1690 | graced 125 126 1691 | graceful* 125 126 1692 | graces 125 126 1693 | graci* 125 126 1694 | grad 354 1695 | grade* 354 1696 | grading 354 1697 | graduat* 354 1698 | grand 125 126 1699 | grandchil* 121 122 1700 | granddad* 121 122 1701 | granddau* 121 122 1702 | grande* 125 126 1703 | grandf* 121 122 1704 | grandkid* 121 122 1705 | grandm* 121 122 1706 | grandpa* 121 122 1707 | grandson* 121 122 1708 | granny 121 122 1709 | grasp* 131 132 1710 | gratef* 125 126 1711 | grati* 125 126 1712 | grave* 125 127 130 360 1713 | gray* 140 141 1714 | gre 354 1715 | greas* 140 1716 | great 125 126 1717 | greater 1 20 1718 | greatest 1 20 1719 | greed* 125 127 129 358 1720 | green* 140 141 1721 | grew 250 251 1722 | grey* 140 141 1723 | grief 125 127 130 360 1724 | griev* 125 127 130 360 1725 | grim* 125 127 130 1726 | grin 125 126 1727 | grinn* 125 126 1728 | grins 125 126 1729 | grip 140 143 1730 | gripp* 140 143 1731 | grips 140 143 1732 | grocer* 146 150 1733 | gross* 125 127 1734 | grouch* 125 127 129 1735 | ground* 252 250 1736 | group* 121 1737 | grow 250 251 1738 | growing 250 251 1739 | grown 250 251 1740 | grownup* 121 124 1741 | growth 250 251 1742 | grr* 125 127 129 1743 | grudge* 121 1744 | guarant* 131 136 1745 | guard* 131 137 1746 | guess 11 14 131 135 1747 | guessed 11 13 131 135 1748 | guesses 11 14 131 135 1749 | guessing 131 135 1750 | guest* 121 123 1751 | guilt* 125 127 128 1752 | guitar* 356 1753 | gulp* 146 150 1754 | gums 146 147 1755 | gut 146 147 1756 | guts 146 147 1757 | guy* 121 124 1758 | gym* 356 1759 | gynaecolog* 146 148 1760 | gynecolog* 146 148 1761 | gyno* 146 148 1762 | ha 125 126 462 1763 | had 11 1 12 13 1764 | hadn't 11 1 12 13 19 1765 | hadnt 11 1 12 13 19 1766 | hah 462 1767 | haha* 125 126 462 1768 | hair* 146 147 140 143 1769 | half 1 21 1770 | halfass* 131 135 1771 | hall 250 252 
1772 | hallucinat* 146 148 1773 | halt* 131 137 1774 | hamstring* 146 147 1775 | hand 146 147 140 143 1776 | handful* 140 143 1777 | hands 146 147 140 143 1778 | handsom* 125 126 1779 | hang 250 251 1780 | hangin* 356 1781 | hangout* 356 1782 | hangover* 146 148 1783 | hangup* 131 137 1784 | happen 11 14 1785 | happened 11 13 1786 | happening 250 253 1787 | happens 11 14 1788 | happi* 125 126 1789 | happy 125 126 1790 | harass* 125 127 129 1791 | hard 140 143 1792 | harde* 140 143 1793 | hardly 131 135 1794 | hardwork* 354 1795 | harm 125 127 1796 | harmed 125 127 1797 | harmful* 125 127 1798 | harming 125 127 1799 | harmless* 125 126 1800 | harmon* 125 126 140 142 1801 | harms 125 127 1802 | harness* 131 137 1803 | has 11 1 12 14 1804 | hasn't 11 1 12 14 19 1805 | hasnt 11 1 12 14 19 1806 | hate 11 14 125 127 129 1807 | hated 11 13 125 127 129 1808 | hateful* 125 127 129 1809 | hater* 125 127 129 1810 | hates 11 14 125 127 129 1811 | hating 125 127 129 1812 | hatred 125 127 129 1813 | have 11 1 12 14 1814 | haven't 11 1 12 14 19 1815 | havent 11 1 12 14 19 1816 | having 11 1 12 1817 | hazie* 131 135 1818 | hazy 125 131 135 1819 | he 1 2 3 7 121 1820 | he'd 1 2 3 7 11 12 121 1821 | he'll 11 12 15 121 1822 | he's 1 2 3 7 11 12 14 121 1823 | head 146 147 1824 | headache* 146 148 1825 | headed 251 250 1826 | headhunter* 354 1827 | heading 251 250 1828 | heads 146 147 1829 | heal 146 148 1830 | healed 146 148 1831 | healer* 146 148 1832 | healing 146 148 1833 | heals 146 148 1834 | health* 146 148 1835 | hear 11 14 121 140 142 1836 | heard 11 13 121 140 142 1837 | hearing 121 140 142 1838 | hears 11 14 121 140 142 1839 | hearse* 360 1840 | heart 146 147 1841 | heartbreak* 125 127 130 1842 | heartbroke* 125 127 130 1843 | heartburn* 146 148 1844 | heartfelt 125 126 1845 | heartless* 125 127 129 1846 | hearts 146 147 1847 | heartwarm* 125 126 1848 | heaven* 125 126 359 1849 | heavie* 140 143 1850 | heavy* 140 143 1851 | heck 22 1852 | hed 1 2 3 7 11 12 121 1853 | heel* 
146 147 1854 | heh* 125 126 462 1855 | height* 252 250 1856 | held 11 13 131 137 1857 | hell 125 127 129 359 22 1858 | hellish 125 127 129 359 1859 | hello* 121 1860 | hells 359 1861 | help 121 1862 | helped 11 13 1863 | helper* 121 125 126 1864 | helpful* 121 125 126 1865 | helping 121 125 126 1866 | helpings 146 150 1867 | helpless* 125 127 130 1868 | helps 11 14 121 125 126 1869 | hemor* 146 148 1870 | hence 131 133 1871 | her 1 2 3 7 121 1872 | here 1 16 1873 | here's 11 1 12 14 16 1874 | heres 11 1 12 14 16 1875 | hero* 125 126 355 1876 | herpes 146 148 1877 | hers 1 2 3 7 121 1878 | herself 1 2 3 7 121 1879 | hes 1 2 3 7 11 12 14 121 1880 | hesita* 125 127 128 131 135 137 1881 | hey 121 1882 | hi 121 1883 | hiccup* 146 148 1884 | high 252 250 1885 | highe* 252 250 1886 | highly 1 20 1887 | highschool* 354 1888 | hik* 251 250 356 1889 | hilarious 125 126 1890 | him 1 2 3 7 121 1891 | himself 1 2 3 7 121 1892 | hindu* 359 1893 | hip 146 147 1894 | hiphop 356 1895 | hips 146 147 1896 | hire* 354 1897 | hiring 354 1898 | his 1 2 3 7 121 1899 | histor* 253 250 1900 | hit 125 127 129 1901 | hiv 146 148 149 1902 | hm* 462 463 1903 | ho 146 149 1904 | hobb* 356 1905 | hockey* 356 1906 | hoho* 125 126 1907 | hold* 131 137 1908 | holiday* 356 1909 | holie* 359 1910 | holocaust* 360 1911 | holy 359 1912 | home 357 1913 | homes 357 1914 | homesick* 125 127 130 357 1915 | homework* 354 357 1916 | homo 146 149 22 1917 | homocid* 360 1918 | homos 146 149 1919 | homosexual* 146 149 1920 | honest* 125 126 1921 | honey 121 123 140 1922 | honor* 125 126 355 1923 | honour* 125 126 355 1924 | hope 11 14 125 126 131 134 135 1925 | hoped 11 13 125 126 131 134 135 1926 | hopeful 125 126 131 134 135 1927 | hopefully 1 16 125 126 131 134 135 1928 | hopefulness 125 126 131 134 135 1929 | hopeless* 125 127 130 1930 | hopes 11 14 125 126 131 134 135 1931 | hoping 125 126 131 134 135 1932 | horizontal* 252 250 1933 | hormone* 146 148 1934 | hornie* 146 147 149 1935 | horny 146 147 149 
1936 | horr* 125 127 128 1937 | horseback 356 1938 | hospital* 146 148 1939 | hostil* 125 127 129 1940 | hot 140 143 1941 | hotel* 356 1942 | hott* 140 143 1943 | hour* 253 250 1944 | house* 357 1945 | housing 357 1946 | how 1 16 18 131 133 1947 | how's 11 14 131 133 1948 | however 1 16 18 1949 | hows 11 14 131 133 1950 | hubby 121 122 1951 | hug 125 146 149 126 1952 | huge* 252 250 1953 | hugg* 125 126 146 149 1954 | hugs 125 126 146 149 1955 | huh 462 1956 | human* 121 124 1957 | humiliat* 125 127 128 129 1958 | humor* 125 126 1959 | humour* 125 126 1960 | hump* 146 149 1961 | hundred* 1 21 1962 | hunger* 146 150 1963 | hungover 146 148 1964 | hungr* 146 150 1965 | hunting 356 1966 | hurra* 125 126 1967 | hurrie* 253 250 1968 | hurry* 253 250 1969 | hurt* 125 127 130 1970 | husband* 121 122 1971 | hush* 140 142 1972 | hymn* 359 1973 | hyperten* 146 148 1974 | hypotherm* 146 148 1975 | hypothes* 131 135 1976 | hypothetic* 131 135 1977 | i 1 2 3 4 1978 | i'd 1 2 3 4 11 12 1979 | i'll 1 2 3 4 11 12 15 1980 | i'm 1 2 3 4 11 12 14 1981 | i'm 1 2 3 4 11 12 14 1982 | i've 1 2 3 4 11 12 14 1983 | i've 1 2 3 4 11 12 14 1984 | ibuprofen 146 148 1985 | icu 146 148 1986 | id 1 2 3 4 11 12 1987 | idea 131 132 1988 | ideal* 125 126 131 134 355 1989 | ideas 131 132 1990 | identif* 131 132 1991 | idiot* 125 127 129 1992 | idon'tknow 464 1993 | idontknow 464 1994 | if 1 18 131 134 135 139 1995 | ignit* 131 133 1996 | ignor* 125 127 131 137 1997 | ill 146 148 1998 | illness* 146 148 1999 | im 1 2 3 4 11 12 14 2000 | image* 140 141 2001 | imagin* 131 132 2002 | imean 464 2003 | immediate 253 250 2004 | immediately 1 16 253 250 2005 | immediateness 253 250 2006 | immoral* 125 127 359 2007 | immortal* 253 250 359 360 2008 | immun* 146 148 2009 | impatien* 125 127 128 2010 | impersonal 125 127 2011 | implica* 131 133 2012 | implicit* 131 136 2013 | implie* 131 133 2014 | imply* 131 133 2015 | impolite* 125 127 2016 | importan* 355 125 126 2017 | impossib* 131 134 2018 | impress* 125 
126 2019 | improve* 125 126 355 2020 | improving 125 126 355 2021 | in 1 17 252 250 2022 | inact* 131 133 2023 | inadequa* 125 127 128 130 131 134 355 2024 | inaudibl* 140 142 2025 | inc 354 2026 | incapab* 355 2027 | incentive* 355 125 126 2028 | incest* 146 149 2029 | inch* 252 250 2030 | inciden* 250 253 2031 | inclu* 131 138 2032 | income* 354 358 2033 | incompeten* 355 2034 | incomplet* 131 135 2035 | incorp* 354 2036 | increas* 250 251 20 2037 | indecis* 125 127 128 131 135 2038 | indeed 131 136 2039 | indefinit* 131 135 2040 | independ* 131 133 2041 | indetermin* 131 135 2042 | indigestion 146 148 2043 | indirect* 131 135 2044 | individual* 121 124 2045 | induc* 131 132 133 2046 | industr* 354 2047 | ineffect* 125 127 355 2048 | inevitab* 131 136 2049 | inexpens* 358 2050 | infallib* 131 136 2051 | infant 121 124 2052 | infant's 121 124 2053 | infants* 121 124 2054 | infect* 146 148 2055 | infer 131 132 133 2056 | inferior* 125 127 130 2057 | inferr* 131 132 133 2058 | infers 131 132 133 2059 | infinit* 1 21 253 250 2060 | inflam* 146 148 2061 | influenc* 131 133 2062 | info 131 132 2063 | inform 121 131 132 2064 | information 131 132 2065 | informative* 131 132 2066 | informed 131 132 2067 | informing 131 132 2068 | informs 121 131 132 2069 | ingest* 146 148 150 2070 | inhal* 140 2071 | inherit* 358 2072 | inhib* 125 127 128 131 137 2073 | initial* 253 250 2074 | initiat* 253 250 355 2075 | injur* 146 148 2076 | inn 356 2077 | inner* 252 250 2078 | innocen* 125 126 2079 | inns 356 2080 | inquir* 131 132 2081 | insecur* 125 127 128 2082 | inside 1 17 131 138 252 250 2083 | insides 1 17 252 250 2084 | insight* 131 132 2085 | insincer* 125 127 2086 | insomnia* 146 148 2087 | inspir* 125 126 131 132 2088 | instan* 253 250 2089 | instead 1 16 2090 | instruct* 354 2091 | insulin 146 148 2092 | insult* 121 125 127 129 2093 | insurance 358 2094 | intell* 125 126 2095 | intend* 131 133 2096 | intent* 131 133 2097 | interact* 121 2098 | interest* 125 126 2099 | 
interfer* 131 137 2100 | interior* 252 250 2101 | internal* 250 252 2102 | internation* 250 252 2103 | interpret* 131 132 2104 | interrup* 121 125 127 129 2105 | intersect* 252 250 2106 | interval* 253 250 2107 | interview* 121 354 2108 | intestin* 146 147 2109 | intimidat* 125 127 129 2110 | into 1 17 131 138 252 250 2111 | intox* 146 148 2112 | intramural* 356 2113 | invariab* 131 136 2114 | inventory 354 2115 | invest* 358 2116 | invigor* 125 126 2117 | involv* 121 2118 | ipod* 356 2119 | irrational* 125 127 128 131 2120 | irrefu* 131 136 2121 | irresponsible* 355 2122 | irrita* 125 127 128 2123 | irs 358 2124 | is 11 1 12 14 2125 | islam* 359 2126 | isn't 11 1 12 14 19 2127 | isnt 11 1 12 14 19 2128 | isolat* 125 127 130 2129 | issue 131 2130 | it 1 2 9 2131 | it'd 1 2 9 11 12 2132 | it'll 1 2 9 11 12 15 2133 | it's 1 2 9 11 12 14 2134 | itch* 146 147 148 2135 | itd 1 2 9 11 12 2136 | item* 1 20 2137 | itll 1 2 9 11 12 15 2138 | its 1 2 9 2139 | itself 1 2 9 2140 | iv 146 148 2141 | ive 1 2 3 4 11 12 14 2142 | jackpot* 358 2143 | jaded 125 127 2144 | january 253 250 2145 | jaw* 146 147 2146 | jazz* 356 2147 | jd 354 2148 | jealous* 125 127 129 2149 | jeez 22 2150 | jerk 125 127 129 2151 | jerked 125 127 129 2152 | jerks 125 127 129 2153 | jesuit* 359 2154 | jesus 359 2155 | jew 359 2156 | jewish* 359 2157 | jews 359 2158 | jihad* 359 2159 | jissom 146 149 2160 | jizz 146 149 2161 | job* 354 2162 | jog* 251 250 356 2163 | joints 146 147 2164 | joke* 125 126 356 2165 | joking 125 126 356 2166 | joll* 125 126 2167 | journey* 250 251 2168 | joy* 125 126 2169 | juda* 359 2170 | july 253 250 2171 | jump* 251 250 2172 | june 253 250 2173 | junior* 354 2174 | just 1 16 131 139 2175 | justif* 131 132 133 2176 | karaoke 356 2177 | karma 359 2178 | keen* 125 126 2179 | keep 11 14 131 137 2180 | keeping* 131 137 2181 | keeps 11 14 131 137 2182 | keg 356 2183 | kegger 356 2184 | keggers 356 2185 | kegs 356 2186 | kept 11 13 131 137 2187 | keyboard* 354 2188 | kid 121 124 
2189 | kid'* 121 124 2190 | kidding 121 125 126 2191 | kidney* 146 147 2192 | kids* 121 124 2193 | kill* 125 127 129 360 2194 | kilometer* 252 250 2195 | kin 121 122 2196 | kind 125 126 2197 | kinda 131 135 2198 | kinderg* 354 2199 | kindly 125 126 2200 | kindn* 125 126 2201 | kindof 131 135 2202 | king* 355 2203 | kippur 359 2204 | kiss* 125 126 146 149 2205 | kitchen* 146 150 357 2206 | km* 252 250 2207 | knee* 146 147 2208 | knew 11 13 131 132 2209 | know 11 14 131 132 2210 | knowab* 131 132 2211 | knower* 131 132 2212 | knowing 131 132 2213 | knowledg* 131 132 2214 | known 131 132 2215 | knows 11 14 131 132 2216 | knuckle* 146 147 2217 | kopek* 358 2218 | koran 359 2219 | kosher 359 2220 | krishna* 359 2221 | krisna* 359 2222 | kron* 358 2223 | labor* 354 2224 | labour* 354 2225 | lack* 1 20 131 134 2226 | ladies 121 124 2227 | lady 121 124 2228 | lady's 121 124 2229 | laidback 125 126 356 2230 | laidoff 354 2231 | lame* 125 127 130 2232 | land 252 250 2233 | landlord* 357 2234 | language* 121 2235 | laptop* 354 2236 | large* 252 250 2237 | last* 253 250 2238 | late 253 250 2239 | lately 1 16 253 250 2240 | later 253 250 2241 | latest 253 250 2242 | laugh* 125 126 2243 | launch* 131 133 2244 | law 131 136 2245 | lawn* 357 2246 | lawyer* 354 2247 | layoff* 354 2248 | lazie* 355 125 127 2249 | lazy 355 125 127 2250 | lead* 131 133 251 250 354 355 2251 | learn* 131 132 354 2252 | lease* 357 358 2253 | leasing* 357 358 2254 | least 1 20 2255 | leather* 140 143 2256 | leave 251 250 2257 | leaves 251 250 2258 | leaving 251 250 2259 | lectur* 354 2260 | led 131 133 251 250 2261 | ledge* 252 250 2262 | ledging 252 250 2263 | left 250 252 2264 | leg 146 147 2265 | legal* 354 2266 | legs* 146 147 2267 | lesbian* 146 149 2268 | less 1 20 2269 | lesson* 355 131 132 2270 | let 1 11 12 2271 | let's 1 2 3 5 11 14 121 2272 | lethal* 360 2273 | lets 1 2 3 5 11 14 121 2274 | letter 121 2275 | leuke* 146 148 2276 | level 252 250 2277 | levels 252 250 2278 | liabilit* 125 127 131 
134 2279 | liar* 125 127 129 2280 | libert* 125 126 2281 | libid* 146 149 2282 | librar* 354 2283 | lick* 140 2284 | lied 11 13 125 127 129 2285 | lies 125 127 129 2286 | life 146 148 2287 | light 140 2288 | like 125 126 134 253 464 2289 | likeab* 125 126 2290 | liked 11 13 125 126 2291 | likel* 131 135 2292 | likes 125 126 2293 | liking 125 126 2294 | limit* 131 137 355 2295 | limp* 140 143 2296 | link* 131 132 2297 | lip 146 147 2298 | lips* 146 147 2299 | liquor* 146 150 356 2300 | lira 358 2301 | listen 11 14 121 140 142 2302 | listened 11 13 121 140 142 2303 | listener* 121 140 142 2304 | listening 121 140 142 2305 | listens 11 14 121 140 142 2306 | lit 140 141 2307 | littl* 252 250 2308 | lived 11 13 2309 | livel* 125 126 2310 | liver* 146 147 2311 | living 146 148 2312 | lmao 125 126 2313 | loads 1 20 2314 | loan* 358 2315 | local* 252 250 2316 | loft 357 2317 | lofts 357 2318 | logic* 131 132 2319 | lol 125 126 462 2320 | lone* 125 127 130 2321 | long 253 250 2322 | longe* 253 250 2323 | longing* 125 127 130 2324 | longitud* 252 250 2325 | look 11 14 140 141 2326 | looked 11 13 140 141 2327 | looker* 140 141 2328 | looking 140 141 2329 | looks 11 14 140 141 2330 | loose* 140 143 2331 | lord* 359 2332 | lose 125 127 130 355 2333 | loser* 125 127 130 355 2334 | loses 125 127 130 355 2335 | losing 125 127 130 355 2336 | loss* 125 127 130 355 2337 | lost 11 13 125 127 130 355 2338 | lot 1 20 131 135 2339 | lotof 1 20 131 135 2340 | lots 1 20 131 135 2341 | lotsa 1 20 131 135 2342 | lotta 1 20 131 135 2343 | lotter* 358 2344 | loud* 140 142 2345 | lous* 125 127 129 2346 | love 11 14 125 126 146 149 121 2347 | loved 11 13 121 125 126 146 149 2348 | lovely 125 126 2349 | lover* 121 123 125 126 146 149 2350 | loves 11 14 121 125 126 146 149 2351 | loveseat* 357 2352 | loving* 121 125 126 2353 | low* 125 127 130 252 250 2354 | loyal* 125 126 2355 | lozenge* 146 148 2356 | lsat 354 2357 | ltd 354 2358 | luck 125 126 131 135 2359 | lucked 125 126 131 135 2360 | lucki* 
125 126 131 135 2361 | luckless* 125 127 131 135 2362 | lucks 125 126 131 135 2363 | lucky 125 126 131 135 2364 | ludicrous* 125 127 129 2365 | lump 146 148 2366 | lunch* 146 150 2367 | lung* 146 147 2368 | lust* 146 149 2369 | lutheran* 359 2370 | lying 125 127 129 2371 | lymph* 146 148 2372 | lynch* 360 2373 | ma 121 122 2374 | ma'am 121 124 2375 | ma's 121 122 2376 | mad 125 127 129 2377 | maddening 125 127 129 2378 | madder 125 127 129 2379 | maddest 125 127 129 2380 | made 11 13 131 133 2381 | madly 125 126 2382 | magazine* 356 2383 | magnific* 125 126 2384 | maid* 357 2385 | mail 121 2386 | mailbox 357 2387 | mailed 121 2388 | mailer* 121 2389 | mailing 121 2390 | mailroom* 354 2391 | mails 121 2392 | main 1 20 2393 | mainly 131 135 2394 | major 20 2395 | majoring 354 2396 | majority 1 20 2397 | majors 354 2398 | make 11 14 133 131 2399 | makeout* 146 149 2400 | maker* 131 133 2401 | makes 11 14 131 133 2402 | making 131 133 2403 | male 121 124 2404 | male's 121 124 2405 | males 121 124 2406 | mall 356 2407 | malls 356 2408 | mam 121 124 2409 | mammogram 146 148 2410 | man 121 124 2411 | man's 121 124 2412 | manag* 354 2413 | maniac* 125 127 129 2414 | manicdep* 146 148 2415 | manipul* 131 133 2416 | manslaughter* 360 2417 | manufact* 354 2418 | many 1 20 2419 | map 252 250 2420 | mapped 252 250 2421 | mapping 252 250 2422 | maps 252 250 2423 | marathon* 356 2424 | march* 253 250 2425 | margarita* 356 2426 | marginal* 131 135 2427 | market* 354 2428 | marriag* 121 2429 | marrie* 121 122 2430 | martini* 356 2431 | masochis* 125 127 2432 | mass 252 250 2433 | massacre* 360 2434 | master 355 2435 | mastercard* 358 2436 | mastered 355 2437 | masterful* 355 2438 | mastering 355 2439 | mastermind* 355 2440 | masters 355 354 2441 | mastery 355 2442 | mate 121 123 2443 | mate's 121 123 2444 | mates 121 123 2445 | math* 354 2446 | mating 121 2447 | matter* 131 2448 | mattress* 357 2449 | mausoleum* 360 2450 | may 11 1 12 15 131 135 2451 | maybe 1 16 131 135 2452 | 
mcat 354 2453 | mda 354 2454 | me 1 2 3 4 2455 | meal* 146 150 2456 | mean 11 14 131 132 2457 | meaning* 131 132 2458 | means 11 14 131 132 2459 | meant 11 13 131 132 2460 | meantime 253 250 2461 | meanwhile 253 250 2462 | mecca 359 2463 | medal* 355 2464 | medic* 146 148 2465 | mediocr* 355 2466 | meditat* 356 359 2467 | meet 121 2468 | meeting* 121 354 2469 | meets 121 2470 | melanchol* 125 127 130 2471 | members 121 124 2472 | memo 354 2473 | memor* 131 132 2474 | memos 354 2475 | men 121 124 2476 | men'* 121 124 2477 | menial 354 2478 | mention* 121 2479 | mentor* 354 2480 | merchant* 358 2481 | mercif* 359 2482 | mercy 359 2483 | merger* 354 2484 | merit* 125 126 2485 | merr* 125 126 2486 | mess 125 127 2487 | messag* 121 2488 | messy 125 127 2489 | met 11 13 121 2490 | meter* 252 250 2491 | methodis* 359 2492 | metre* 252 250 2493 | mfg 354 2494 | mfr 354 2495 | mgmt 354 2496 | mgr 354 2497 | microwave* 357 2498 | mid 252 250 2499 | middl* 252 250 2500 | midterm* 354 2501 | might 11 1 12 15 131 135 2502 | might've 11 1 12 131 135 2503 | mightve 11 1 12 131 135 2504 | migrain* 146 148 2505 | mile* 252 250 2506 | milk* 146 150 2507 | million* 1 21 2508 | min 253 250 2509 | mind 131 2510 | mine 1 2 3 4 2511 | minesweeper 356 2512 | minister* 359 2513 | ministr* 359 2514 | mint* 140 2515 | minute* 253 250 2516 | miscar* 146 148 2517 | miser* 125 127 128 130 2518 | misle* 131 133 2519 | miss 11 14 125 127 130 2520 | missed 11 13 125 127 130 2521 | misses 11 14 125 127 130 2522 | missing 125 127 130 2523 | missionar* 359 2524 | mistak* 125 127 131 134 2525 | misunder* 131 132 2526 | mitzvah* 359 2527 | mm* 462 2528 | mob 121 2529 | mobb* 121 2530 | mobs 121 2531 | mock 125 127 129 2532 | mocked 125 127 129 2533 | mocker* 125 127 129 2534 | mocking 125 127 129 2535 | mocks 125 127 129 2536 | modern* 253 250 2537 | mofo 22 2538 | mohamm* 359 2539 | molest* 125 127 129 2540 | mom 121 122 2541 | mom's 121 122 2542 | moment* 253 250 2543 | momma* 121 122 2544 | mommy* 
121 122 2545 | moms 121 122 2546 | monast* 359 2547 | monday* 253 250 2548 | money* 358 2549 | monk* 359 2550 | mono 146 148 2551 | monopol* 358 2552 | month* 253 250 2553 | mooch* 125 127 2554 | mood 125 2555 | moodi* 125 127 2556 | moods 125 2557 | moody 125 127 2558 | mop 357 2559 | moral 359 2560 | morality 359 2561 | morals 359 2562 | more 1 20 2563 | morgue* 360 2564 | mormon* 359 2565 | morning* 253 250 2566 | moron* 125 127 129 2567 | mortal* 360 2568 | mortg* 357 358 2569 | mortician* 360 2570 | mosque* 359 2571 | most 1 20 131 135 2572 | mostly 1 16 131 135 2573 | motel* 356 2574 | mother 121 122 2575 | motherf* 22 2576 | motherly 121 2577 | mothers 121 122 2578 | motion* 251 250 2579 | motiv* 131 132 133 354 355 2580 | mourn* 125 127 130 360 2581 | mouth* 146 147 2582 | move 251 250 2583 | moved 11 13 251 250 2584 | movement* 251 250 2585 | mover* 251 250 2586 | moves 251 250 2587 | movie* 356 2588 | moving 251 250 2589 | mr 121 124 2590 | mri 146 148 2591 | mrs 121 124 2592 | mtv* 356 2593 | much 1 20 2594 | mucho 1 20 2595 | mucous* 146 147 2596 | muhamm* 359 2597 | mujahid* 359 2598 | mum 121 122 2599 | mum's 121 122 2600 | mummy* 121 122 2601 | mums 121 122 2602 | murder* 125 127 129 360 2603 | muscle* 146 147 2604 | muscular 146 147 2605 | museum* 356 2606 | musi* 356 359 140 142 2607 | must 11 1 12 15 131 134 136 2608 | must'nt 11 1 12 15 19 131 134 136 2609 | must've 11 1 12 13 15 131 134 136 2610 | mustn't 11 1 12 15 19 131 134 136 2611 | mustnt 11 1 12 15 19 131 134 136 2612 | mustve 11 1 12 13 15 131 134 136 2613 | my 1 2 3 4 2614 | myopi* 146 148 2615 | myself 1 2 3 4 2616 | myster* 131 135 2617 | nag* 125 127 129 2618 | naked 146 147 149 2619 | name 121 131 2620 | nap 356 2621 | naps 356 2622 | narrow* 252 250 2623 | nasal 146 147 140 2624 | nast* 125 127 129 2625 | nation* 250 252 2626 | nause* 146 148 2627 | near 1 17 252 250 2628 | neared 252 250 2629 | nearer 252 250 2630 | nearest 252 250 2631 | nearing 252 250 2632 | nearly 1 16 131 135 
2633 | nears 252 250 2634 | nearsighted 146 148 2635 | neat* 125 126 2636 | necessar* 131 136 2637 | neck 146 147 2638 | need 11 14 131 134 2639 | need'nt 1 19 131 134 2640 | needed 11 13 131 134 2641 | needing 131 134 2642 | needn't 1 19 131 134 2643 | neednt 1 19 131 134 2644 | needs 11 14 131 134 2645 | needy 125 127 2646 | negat* 1 19 2647 | neglect* 125 127 130 131 137 2648 | negotiat* 121 354 2649 | neighbor* 121 123 357 2650 | neighbour* 121 123 2651 | neither 1 19 20 2652 | nephew* 121 122 2653 | nerd* 125 127 2654 | nerve* 146 147 2655 | nervous* 125 127 128 2656 | netflix 356 2657 | neural* 146 147 2658 | neurolog* 146 148 2659 | neuron* 146 147 2660 | neurotic* 125 127 128 2661 | never 1 19 131 136 253 250 2662 | new 253 250 2663 | newborn* 121 124 2664 | newer 253 250 2665 | newest 253 250 2666 | newly 253 250 2667 | news 131 2668 | next 253 250 2669 | ngo 354 2670 | nice* 125 126 2671 | nickel* 358 2672 | niece* 121 122 2673 | nigger* 22 2674 | night 253 250 2675 | nightly 253 250 2676 | nights 253 250 2677 | nine* 1 21 2678 | nintendo* 356 2679 | nipple* 146 147 149 2680 | no 1 19 2681 | nobod* 1 2 9 19 2682 | noise 140 142 2683 | noises 140 142 2684 | noisy 140 142 2685 | none 1 19 20 2686 | nonprofit* 354 2687 | noon* 253 250 2688 | nope 1 19 2689 | nor 1 18 19 2690 | normal 131 131 134 2691 | north* 252 250 2692 | nose* 146 147 140 2693 | nostril* 140 146 147 2694 | not 1 19 131 139 2695 | nothing 1 19 2696 | notice* 131 132 2697 | noticing 131 132 2698 | novel 356 2699 | novels 356 2700 | november 253 250 2701 | now 1 16 253 250 2702 | nowhere 1 19 252 250 2703 | nude* 146 147 149 2704 | nudi* 146 147 2705 | numb* 125 127 146 148 2706 | nun 359 2707 | nuns 359 2708 | nurse* 146 148 2709 | nurtur* 125 126 2710 | nutrition* 146 148 2711 | o'clock* 253 250 2712 | obedien* 131 133 2713 | obes* 146 148 150 2714 | obey* 131 133 2715 | obit* 360 2716 | obnoxious* 125 127 129 2717 | obscur* 131 135 2718 | obsess* 125 127 128 2719 | obstac* 131 137 2720 | 
obtain* 355 2721 | obvious* 131 136 2722 | occasional* 131 135 253 250 2723 | occupa* 354 2724 | ocd 146 148 2725 | oclock* 253 250 2726 | october 253 250 2727 | od 360 2728 | oded 360 2729 | odor* 140 2730 | odour* 140 2731 | of 1 17 2732 | off 1 17 252 250 2733 | offence* 125 127 129 2734 | offend* 125 127 129 2735 | offens* 125 127 129 2736 | offer* 121 2737 | office* 354 2738 | often 1 16 131 135 2739 | oh 462 2740 | ohwell 464 2741 | oil* 140 2742 | ok 125 126 462 2743 | okay 125 126 462 2744 | okays 125 126 2745 | okey* 462 2746 | oks 125 126 2747 | old 253 250 2748 | olden 253 250 2749 | older 253 250 2750 | oldest 253 250 2751 | on 1 17 252 250 2752 | once 1 21 253 250 2753 | one 1 21 2754 | ones 1 20 2755 | oneself 1 2 3 2756 | only 1 16 2757 | onto 1 17 252 250 2758 | open 131 138 252 250 2759 | opened 250 252 2760 | opening* 252 250 2761 | openminded* 125 126 2762 | openness 125 126 2763 | opera* 356 2764 | opinion 131 135 2765 | opport* 125 126 355 2766 | oppos* 131 137 2767 | optimal* 125 126 2768 | optimi* 125 126 2769 | option 131 135 2770 | optometr* 146 148 2771 | or 1 18 131 135 139 2772 | orange* 140 141 2773 | oranything* 464 2774 | orchestra 356 2775 | order 250 2776 | org 354 2777 | organiz* 121 131 354 355 2778 | orgasm* 146 147 149 2779 | orgies 146 149 2780 | orgy 146 149 2781 | origin 131 133 253 250 2782 | original 125 126 2783 | originat* 131 133 355 2784 | origins 131 133 2785 | orsomething* 464 2786 | orthodon* 146 148 2787 | orthodox* 359 2788 | orthoped* 146 148 2789 | orwhatever* 464 2790 | other 1 2 9 2791 | others 1 2 9 2792 | otherwise 1 18 2793 | ought 11 1 12 15 131 134 2794 | ought'nt 11 1 12 15 19 131 134 2795 | ought've 11 1 12 15 131 134 2796 | oughta 11 1 12 15 131 134 2797 | oughtn't 11 1 12 15 19 131 134 2798 | oughtnt 11 1 12 15 19 131 134 2799 | oughtve 11 1 12 15 131 134 2800 | our 1 2 3 5 121 2801 | ours 1 2 3 5 121 2802 | ourselves 1 2 3 5 121 2803 | out 1 17 131 138 252 250 2804 | outcome* 131 133 355 2805 | outer* 
252 250 2806 | outgoing 125 126 2807 | outlin* 354 2808 | outrag* 125 127 129 2809 | outside 1 17 252 250 2810 | outsider* 121 2811 | outsides 252 250 2812 | outsourc* 354 2813 | outstanding 131 134 2814 | outward* 252 250 2815 | ovar* 146 147 149 2816 | oven* 357 2817 | over 1 17 252 250 2818 | overall 131 135 2819 | overate 146 150 2820 | overcome 355 2821 | overconfiden* 355 2822 | overdosed 360 2823 | overeat* 146 150 2824 | overflow* 252 250 2825 | overhear* 121 2826 | overlap* 252 250 2827 | overpaid 354 358 2828 | overtak* 355 2829 | overtime 354 358 2830 | overweight 146 148 150 2831 | overwhelm* 125 127 128 130 2832 | overworked 354 2833 | owe 11 14 358 2834 | owed 11 13 358 2835 | owes 11 14 358 2836 | owing 358 2837 | own 1 20 2838 | owner* 121 2839 | pa 121 122 2840 | pa's 121 122 2841 | pagan* 359 2842 | page 20 2843 | paid 11 13 358 2844 | pain 125 127 146 148 2845 | pained 125 127 146 148 2846 | painf* 125 127 146 148 2847 | paining 125 127 146 148 2848 | painl* 125 126 146 148 2849 | pains 125 127 146 148 2850 | pal 121 123 2851 | palatabl* 140 125 126 2852 | pallbearer* 360 2853 | palm 146 147 2854 | palms 146 147 2855 | pals 121 123 2856 | panic* 125 127 128 2857 | pap 146 148 2858 | paper* 354 2859 | pappy 121 122 2860 | paradise 125 126 359 2861 | paraly* 146 148 2862 | paranoi* 125 127 129 2863 | parent* 121 122 2864 | parks 356 2865 | part 1 20 2866 | participant* 121 124 2867 | participat* 121 2868 | partie* 121 125 126 356 2869 | partly 1 20 131 135 2870 | partner* 121 123 124 2871 | party* 121 125 126 356 2872 | pass 251 250 2873 | passed 251 250 2874 | passes 251 250 2875 | passing 251 250 2876 | passion* 125 126 146 149 2877 | passover 359 2878 | past 253 250 2879 | pasta* 146 150 2880 | pastor* 359 2881 | pathetic* 125 127 130 2882 | patholog* 146 148 2883 | patio* 357 2884 | pay* 354 358 2885 | pc* 354 2886 | peace* 125 126 2887 | peculiar* 125 127 2888 | pediatr* 146 148 2889 | pee 146 147 2890 | pelvi* 146 147 2891 | pen 354 2892 | 
penance 359 2893 | pence 358 2894 | pencil* 354 2895 | penis* 146 147 149 2896 | pennies 358 2897 | penny 358 2898 | pens 354 2899 | pension* 354 2900 | pentecost* 359 2901 | people* 121 124 2902 | perceiv* 131 132 2903 | percent 20 2904 | percept* 131 132 2905 | perfect* 125 126 131 136 355 2906 | perform* 355 2907 | perfum* 140 2908 | perhaps 1 16 131 135 2909 | period* 253 250 2910 | permit* 131 133 2911 | perpetual* 253 250 2912 | persever* 355 2913 | persist* 355 2914 | person 121 124 2915 | person's 121 124 2916 | personal 125 121 2917 | persons 121 124 2918 | perspir* 146 147 2919 | persua* 121 2920 | perver* 125 127 146 149 2921 | peso 358 2922 | pesos 358 2923 | pessimis* 125 127 130 2924 | pet 357 2925 | petrif* 125 127 128 2926 | pets 357 2927 | pettie* 125 127 129 2928 | petty* 125 127 129 2929 | pew 359 2930 | pews 359 2931 | pharmac* 146 148 2932 | phd* 354 2933 | phobi* 125 127 128 146 148 2934 | phone* 121 2935 | phoning 121 2936 | photocop* 354 2937 | physical* 146 148 2938 | physician* 146 148 2939 | pick 131 133 2940 | picture 140 141 2941 | piec* 1 20 2942 | piety 359 2943 | pilgrim* 359 2944 | pill 146 148 2945 | pillow* 357 2946 | pills 146 148 2947 | pimple* 146 148 2948 | pink* 140 141 2949 | pious 359 2950 | piss* 125 127 129 146 147 22 2951 | pitcher* 356 2952 | piti* 125 127 130 2953 | pity* 125 127 130 2954 | pizza* 146 150 2955 | place* 252 250 2956 | placing* 252 250 2957 | plague* 360 2958 | plan 355 2959 | planned 355 2960 | planner* 355 2961 | planning 355 2962 | plans 355 2963 | platform* 252 250 2964 | play 125 126 356 2965 | played 11 13 125 126 356 2966 | playful* 125 126 356 2967 | playing 125 126 356 2968 | plays 125 126 356 2969 | playstation* 356 2970 | pleasant* 125 126 2971 | please* 125 126 2972 | pleasing 125 126 2973 | pleasur* 125 126 2974 | pledg* 354 2975 | plenty 1 20 2976 | plus 1 17 18 131 138 2977 | pms 146 148 2978 | podiatr* 146 148 2979 | poetry 356 2980 | point 250 252 2981 | poison* 125 127 129 146 148 2982 
| poker 356 2983 | police 354 2984 | policy 354 2985 | political 354 2986 | politics 354 2987 | ponder* 131 132 2988 | pool* 356 2989 | poop* 146 147 2990 | poor* 358 2991 | pope* 359 2992 | popular* 125 126 2993 | porch 357 2994 | porn* 146 149 2995 | portfolio* 358 2996 | portion 20 2997 | position* 252 250 2998 | positiv* 125 126 131 136 2999 | possib* 131 135 3000 | post 250 3001 | potential* 355 3002 | poverty* 358 3003 | power* 355 3004 | practically 131 135 3005 | practice 354 355 3006 | prais* 121 125 126 355 359 3007 | pray* 359 3008 | preach* 359 3009 | preced* 253 250 3010 | precious* 125 126 3011 | precis* 131 136 3012 | prefer* 131 132 134 3013 | pregnan* 146 148 149 3014 | prejudic* 125 127 129 3015 | prereq* 354 3016 | presbyterian* 359 3017 | prescri* 146 148 3018 | present 253 250 3019 | presentation* 354 3020 | presently 253 250 3021 | presiden* 354 355 3022 | press 140 143 3023 | pressed 140 143 3024 | presser* 140 143 3025 | presses 140 143 3026 | pressur* 125 127 128 3027 | presum* 131 132 3028 | pretend* 356 3029 | prettie* 125 126 3030 | pretty 125 126 131 135 3031 | prevent* 131 137 3032 | price* 358 3033 | prici* 358 3034 | prick* 125 127 129 146 147 22 3035 | pride 125 126 355 3036 | priest* 359 3037 | primarily 1 16 3038 | prior 250 253 3039 | private 121 3040 | privileg* 125 126 3041 | prize* 125 126 355 3042 | probable 131 135 3043 | probablistic* 131 135 3044 | probably 1 16 131 135 3045 | problem* 125 127 131 134 3046 | proceed* 253 250 3047 | procrastin* 354 3048 | produc* 131 133 354 355 3049 | prof 354 3050 | profession* 354 3051 | professor* 354 3052 | proficien* 355 3053 | profit* 125 126 354 358 3054 | profs 354 3055 | prognos* 146 148 3056 | program* 354 3057 | progress 355 3058 | prohib* 131 137 3059 | project 354 3060 | projector* 354 3061 | projects 354 3062 | prom 354 3063 | promis* 125 126 3064 | promot* 354 355 3065 | proof 131 136 3066 | prophe* 359 3067 | prostat* 146 147 149 3068 | prostitu* 146 149 3069 | protect* 131 
137 3070 | protest 125 127 129 3071 | protestant* 359 3072 | protested 125 127 129 3073 | protesting 125 127 129 3074 | proud* 125 126 355 3075 | prove* 131 132 136 3076 | provide 121 3077 | proving 131 132 3078 | provoc* 131 133 3079 | provok* 131 133 3080 | prozac 146 148 3081 | prude 131 137 146 149 3082 | prudes 131 137 146 149 3083 | prudish* 131 137 146 149 3084 | psalm* 359 3085 | psych 354 3086 | psychol* 354 3087 | pub 356 3088 | pubic 146 149 3089 | public 121 3090 | publish 354 3091 | pubs 356 3092 | puk* 125 127 146 148 3093 | pull* 251 250 3094 | pulse 146 147 3095 | pungen* 140 3096 | punish* 125 127 129 3097 | purchas* 358 3098 | pure* 131 136 3099 | purgator* 359 3100 | puritan* 359 3101 | purpl* 140 141 3102 | purpose* 131 133 355 3103 | push* 251 250 3104 | puss 146 148 3105 | pussies 146 147 3106 | pussy* 146 147 149 22 3107 | put 250 251 3108 | puts 250 251 3109 | putting 250 251 3110 | puzzl* 131 135 3111 | qualifi* 354 3112 | quarter* 1 21 3113 | queas* 146 148 3114 | queen 355 3115 | queenly 355 3116 | queer* 146 149 22 3117 | quer* 131 132 3118 | question* 121 131 132 135 3119 | quick* 1 16 253 250 3120 | quiet* 140 142 3121 | quit 355 3122 | quite 131 135 3123 | quitt* 355 3124 | quiz* 354 3125 | qur'an* 359 3126 | quran* 359 3127 | rabbi 359 3128 | rabbinical 359 3129 | rabbis 359 3130 | radian* 125 126 3131 | radio* 356 3132 | rage* 125 127 129 3133 | raging 125 127 129 3134 | ramadan 359 3135 | ran 11 13 251 250 3136 | rancid* 140 125 127 3137 | random* 131 135 3138 | rang 140 142 3139 | rank 355 3140 | ranked 355 3141 | ranking 355 3142 | ranks 355 3143 | rap 356 3144 | rape* 125 127 129 146 149 3145 | raping 125 127 129 146 149 3146 | rapist* 125 127 129 146 149 3147 | rarely 1 16 3148 | rash* 146 147 3149 | rather 1 16 131 134 139 3150 | rational* 131 132 133 3151 | react* 131 133 3152 | read 354 356 3153 | readiness 125 126 3154 | reading 356 3155 | ready 125 126 3156 | real 131 136 3157 | realis* 131 132 3158 | reality 131 136 3159 
| realiz* 131 132 3160 | really 1 16 131 139 3161 | reaper* 360 3162 | rearrang* 131 132 3163 | reason* 131 132 133 3164 | reassur* 121 125 126 3165 | rebate* 358 3166 | rebel* 125 127 129 3167 | recall* 131 132 3168 | receiv* 121 250 251 3169 | recency 253 250 3170 | recent* 253 250 3171 | recession* 358 3172 | reckon* 131 132 3173 | recogni* 131 132 3174 | recollect* 131 132 3175 | reconcil* 131 132 3176 | reconsider* 131 132 3177 | reconstruct* 131 132 3178 | recording* 356 3179 | recover* 355 3180 | recreation* 356 3181 | recruit* 354 3182 | rectang* 140 141 3183 | recur* 253 250 3184 | red 140 141 3185 | redde* 140 141 3186 | reddish* 140 141 3187 | redness 140 141 3188 | reds 140 141 3189 | reek* 140 125 127 3190 | reevaluat* 131 132 3191 | refer* 131 132 3192 | reflect* 131 132 3193 | refrain* 131 137 3194 | refund* 358 3195 | refus* 121 131 137 3196 | regardless 131 134 3197 | reggae 356 3198 | register* 354 3199 | registra* 354 3200 | regret* 125 127 130 131 134 3201 | rehab* 146 148 3202 | reimburs* 358 3203 | rein* 131 137 3204 | reject* 125 127 130 3205 | relate* 131 132 3206 | relating 131 132 3207 | relation 131 132 3208 | relationship* 121 3209 | relatives 121 122 3210 | relax* 125 126 356 3211 | relief 125 126 3212 | reliev* 125 126 3213 | religio* 359 3214 | reluctan* 125 127 128 131 137 3215 | remaining 1 20 3216 | rememb* 131 132 3217 | remodel* 357 3218 | remorse* 125 127 130 3219 | remote* 252 250 3220 | remov* 251 250 3221 | renovat* 357 3222 | rent* 357 358 3223 | reorgani* 131 132 3224 | repeat* 253 250 3225 | repetit* 253 250 3226 | replace* 251 250 3227 | replacing 251 250 3228 | replie* 121 3229 | reply* 121 3230 | report* 354 3231 | repress* 125 127 128 131 137 3232 | request* 121 3233 | requir* 131 137 354 355 3234 | research* 354 3235 | resent* 125 127 129 3236 | reserved 131 137 3237 | residen* 357 3238 | resign* 125 127 130 3239 | resolu* 131 132 3240 | resolv* 125 126 131 132 355 3241 | resource 354 3242 | resourceful* 355 3243 | 
resources 354 3244 | resourcing 354 3245 | respect 125 126 3246 | respectively 250 253 3247 | respond* 121 3248 | response* 131 133 3249 | responsib* 131 137 354 355 3250 | rest 1 20 3251 | restau* 146 150 356 3252 | resting 356 3253 | restless* 125 127 128 3254 | restrain* 131 137 3255 | restrict* 131 137 3256 | restructur* 131 132 3257 | result* 131 133 3258 | resume 354 3259 | retail* 358 3260 | retain* 131 137 3261 | rethink* 131 132 3262 | retina* 146 147 3263 | retire* 354 3264 | retiring 354 3265 | return* 253 250 3266 | reveal* 131 132 3267 | revelat* 131 132 3268 | revenge* 125 127 129 3269 | revenue* 358 3270 | review* 354 3271 | revigor* 125 126 3272 | reward* 125 126 355 3273 | rhetor* 354 3274 | rhythm* 253 250 3275 | rib 146 147 3276 | ribs 146 147 3277 | rich* 125 126 358 3278 | ridden 251 250 3279 | ride 251 250 3280 | rides 251 250 3281 | ridicul* 125 127 129 3282 | riding 251 250 3283 | right 250 252 3284 | rigid* 125 127 128 131 137 3285 | ring 140 142 3286 | ringing 140 142 3287 | rings 140 142 3288 | rise* 251 250 3289 | rising 251 250 3290 | risk* 125 127 128 3291 | ritalin 146 148 3292 | rite 359 3293 | rites 359 3294 | ritual* 359 3295 | road* 252 250 3296 | rock 356 3297 | rocks 356 3298 | rode 251 250 3299 | rofl 125 126 462 3300 | role* 121 3301 | roller* 356 3302 | romanc* 125 126 3303 | romantic* 125 126 3304 | room 252 250 357 3305 | roomate* 121 123 252 250 357 3306 | roomed 121 252 250 357 3307 | roomie* 121 123 252 250 357 3308 | rooming 121 252 250 357 3309 | roommate* 121 123 252 250 357 3310 | rooms 252 250 357 3311 | root* 131 133 3312 | rosaries 359 3313 | rosary 359 3314 | roshashan* 359 3315 | rotten 140 125 127 3316 | rough* 140 143 3317 | round* 140 141 143 3318 | rowing 356 3319 | rr* 464 3320 | rub 140 143 3321 | rubbed 140 143 3322 | rubbing 140 143 3323 | ruble* 358 3324 | rubs 140 143 3325 | rude* 125 127 129 3326 | rug 357 3327 | rugby* 356 3328 | rugs 357 3329 | ruin* 125 127 130 3330 | rum 356 3331 | rumor* 121 3332 
| rumour* 121 3333 | run 251 250 3334 | runner* 251 250 356 3335 | running 251 250 356 3336 | runs 251 250 3337 | rupee* 358 3338 | rush* 251 250 3339 | rx 146 148 3340 | sabbath* 359 3341 | saccharine 140 3342 | sacred 359 3343 | sacrific* 359 3344 | sad 125 127 130 3345 | sadde* 125 127 130 3346 | sadly 125 127 130 3347 | sadness 125 127 130 3348 | safe* 125 126 131 137 3349 | said 11 13 121 140 142 3350 | saint* 359 3351 | salad* 146 150 3352 | salar* 354 358 3353 | sale 358 3354 | sales 358 3355 | saliv* 146 147 140 3356 | salsa 356 3357 | salt* 140 3358 | salvation 359 3359 | same 131 1 20 3360 | sampl* 140 3361 | sand 140 143 3362 | sands 140 143 3363 | sandwich* 146 150 3364 | sandy 140 143 3365 | sang 356 140 142 3366 | sarcas* 125 127 129 3367 | sat 11 13 3368 | satan* 359 3369 | satisf* 125 126 3370 | saturday* 253 250 3371 | savage* 125 127 129 3372 | save 125 126 131 137 3373 | saving* 358 3374 | savor* 140 3375 | savour* 140 3376 | saw 11 13 140 141 3377 | say* 121 140 142 3378 | scab* 146 148 3379 | scalp 146 147 3380 | scan 140 141 3381 | scann* 140 141 3382 | scans 140 141 3383 | scare* 125 127 128 3384 | scaring 125 127 128 3385 | scary 125 127 128 3386 | scent* 140 3387 | sceptic* 125 127 129 3388 | schedul* 253 250 3389 | schizophren* 146 148 3390 | scholar 354 3391 | scholaring 354 3392 | scholarly 354 3393 | scholars 354 3394 | scholarship* 354 358 3395 | scholastic* 354 3396 | school* 354 3397 | scien* 354 3398 | scrabble 356 3399 | scrapbook* 356 3400 | scrape* 146 148 3401 | scratch* 140 143 3402 | scream* 125 127 140 142 3403 | screen 140 141 3404 | screw* 125 127 129 146 149 22 3405 | scriptur* 359 3406 | scrumptious* 140 125 126 3407 | sculpt* 356 3408 | season* 253 250 3409 | second 1 21 3410 | seconds 253 250 3411 | secret 121 131 132 3412 | secretar* 354 3413 | secretive* 121 3414 | secrets 121 131 132 3415 | sect 359 3416 | sectarian 359 3417 | section 250 252 1 20 3418 | sector* 354 3419 | sects 359 3420 | secur* 125 126 131 137 3421 
| seduc* 146 149 3422 | see 11 14 140 141 3423 | seeing 140 141 3424 | seem 11 14 131 132 135 3425 | seemed 11 13 131 132 135 3426 | seeming* 131 132 135 3427 | seems 11 14 131 132 135 3428 | seen 11 13 140 141 3429 | seer 140 141 3430 | sees 11 14 140 141 3431 | segment 250 252 20 3432 | seizure* 146 148 3433 | selection 20 3434 | self 121 124 3435 | selfish* 125 127 3436 | sell 358 3437 | seller* 358 3438 | selling 358 3439 | sells 358 3440 | semester* 354 3441 | seminar* 359 3442 | send* 121 250 251 3443 | senior* 253 250 354 3444 | sensation 146 147 3445 | sensations 146 147 3446 | sense 131 132 3447 | sensed 11 13 131 132 3448 | senses 131 132 3449 | sensing 131 132 3450 | sent 121 250 251 11 13 3451 | sentimental* 125 126 3452 | separat* 250 252 3453 | september* 253 250 3454 | sequen* 253 250 3455 | series 20 3456 | serious 125 127 3457 | seriously 1 16 125 127 3458 | seriousness 125 127 3459 | servic* 354 3460 | servings 146 150 3461 | session* 354 3462 | set 250 3463 | seven* 1 21 3464 | several 1 20 3465 | severe* 125 127 3466 | sex* 146 149 3467 | shake* 125 127 128 251 250 3468 | shaki* 125 127 128 131 135 3469 | shaky 125 127 128 131 135 3470 | shall 11 1 12 15 3471 | shame* 125 127 128 3472 | shan't 11 1 12 19 3473 | shant 11 1 12 19 3474 | shape* 252 250 3475 | shaping* 252 250 3476 | share 121 125 126 3477 | shared 11 13 121 125 126 3478 | shares 121 125 126 3479 | sharing 121 125 126 3480 | sharp* 140 143 3481 | she 1 2 3 7 121 3482 | she'd 1 2 3 7 11 12 121 3483 | she'll 1 2 3 7 11 12 15 121 3484 | she's 1 2 3 7 11 12 14 121 3485 | shes 1 2 3 7 11 12 14 121 3486 | shi'* 359 3487 | shiite* 359 3488 | shilling* 358 3489 | shine 140 141 3490 | shini* 140 141 3491 | shiny 140 141 3492 | shirt* 146 147 3493 | shit* 125 127 129 146 147 22 3494 | shock* 125 127 3495 | shoe* 146 147 3496 | shook 125 127 128 251 250 3497 | shop 356 358 3498 | shopaholic* 356 358 3499 | shopp* 356 358 3500 | shops 356 358 3501 | short* 252 250 3502 | should 11 1 12 15 131 
134 3503 | should'nt 11 1 12 15 19 131 134 3504 | should've 11 1 12 13 15 131 134 3505 | shoulder* 146 147 3506 | shouldn't 11 1 12 15 19 131 134 3507 | shouldnt 11 1 12 15 19 131 134 3508 | shoulds 131 134 3509 | shouldve 11 1 12 13 15 131 134 3510 | shout* 140 142 3511 | show 356 3512 | showed 11 13 3513 | shower* 357 3514 | shows 356 3515 | shrine* 359 3516 | shut 250 252 3517 | shy* 125 127 128 3518 | sick 146 148 3519 | sickday* 354 146 148 3520 | sicken* 125 127 128 3521 | sicker 146 148 3522 | sickest 146 148 3523 | sickleave* 354 146 148 3524 | sickly 146 148 3525 | sickness* 146 148 3526 | side 252 250 3527 | sides 252 250 3528 | siding 252 250 3529 | sigh 463 125 3530 | sighed 125 3531 | sighing 125 3532 | sighs 125 3533 | sight* 140 141 3534 | sign 131 3535 | significant 20 3536 | sikh* 359 3537 | silen* 140 142 3538 | silk* 140 143 3539 | silli* 125 126 3540 | silly 125 126 3541 | simple* 20 3542 | simply 1 16 3543 | simultaneous* 253 250 3544 | sin 125 127 359 3545 | since 1 17 131 133 3546 | sincer* 125 126 3547 | sing 356 3548 | singing 356 3549 | singl* 1 20 3550 | sings 356 3551 | sinister 125 127 129 3552 | sinn* 359 3553 | sins 125 127 359 3554 | sinus* 146 148 3555 | sir 121 124 3556 | sis 121 122 3557 | sister* 121 122 3558 | sit 250 3559 | site 250 252 3560 | sitting 250 3561 | six* 1 21 3562 | skat* 356 3563 | skelet* 146 147 3564 | skeptic* 125 127 129 3565 | ski 356 3566 | skied 356 3567 | skier* 356 3568 | skiing 356 3569 | skill 355 3570 | skilled 355 3571 | skills 355 3572 | skin 140 143 146 147 3573 | skin'* 140 143 146 147 3574 | skinni* 146 147 150 3575 | skinny* 146 150 3576 | skis 356 3577 | skull 146 147 3578 | sky* 252 250 3579 | slaughter* 360 3580 | sleep* 146 147 3581 | slender* 146 147 3582 | slept 146 147 11 13 3583 | slid 251 250 3584 | slide 251 250 3585 | slides 251 250 3586 | sliding 251 250 3587 | slow* 253 250 3588 | slut* 125 127 146 149 3589 | small* 252 250 3590 | smart* 125 126 3591 | smell* 140 3592 | smil* 125 126 
3593 | smok* 146 150 3594 | smooth* 140 143 3595 | smother* 125 127 129 3596 | smug* 125 127 3597 | snack* 146 150 3598 | sniff* 140 3599 | snob* 125 127 129 3600 | snort* 140 3601 | so 1 16 18 3602 | soaps 356 3603 | sob 125 127 130 22 3604 | sobbed 125 127 130 3605 | sobbing 125 127 130 3606 | sobs 125 127 130 3607 | soccer* 356 3608 | sociab* 125 126 3609 | social* 121 3610 | societ* 121 121 124 3611 | soda* 146 150 3612 | sofa* 357 3613 | soft* 140 143 3614 | sold 11 13 358 3615 | solemn* 125 127 130 3616 | solitaire 356 3617 | solution* 131 132 133 355 3618 | solve 131 132 133 355 3619 | solved 131 133 355 132 3620 | solves 131 132 133 355 3621 | solving 131 132 133 355 3622 | some 1 20 131 135 3623 | somebod* 1 2 9 121 131 135 3624 | somehow 1 16 131 135 3625 | someone* 1 2 9 121 131 135 3626 | something* 1 2 9 131 135 139 3627 | sometime 131 135 139 253 250 3628 | sometimes 131 135 253 250 3629 | somewhat 131 135 1 20 3630 | somewhere 250 252 1 2 9 131 135 3631 | son 121 122 3632 | son's 121 122 3633 | song* 356 140 142 3634 | sonofa* 22 3635 | sons 121 122 3636 | soon 1 16 253 250 3637 | soone* 253 250 3638 | sooo* 1 16 3639 | sophom* 354 3640 | sore* 146 148 3641 | sororit* 354 3642 | sorrow* 125 127 130 3643 | sorry 125 127 3644 | sort 131 135 3645 | sorta 131 135 3646 | sortof 131 135 3647 | sorts 131 135 3648 | sortsa 131 135 3649 | soul 359 3650 | soulmate* 121 123 125 126 3651 | souls 359 3652 | sound* 140 142 3653 | sour 140 3654 | source* 131 133 3655 | soure* 140 3656 | souri* 140 3657 | sours 140 3658 | soury 140 3659 | south* 252 250 3660 | spa 356 3661 | space 252 250 3662 | spaced 252 250 3663 | spaces 252 250 3664 | spaci* 252 250 3665 | span 252 250 3666 | spann* 252 250 3667 | spas 356 3668 | spat 146 147 3669 | speak 121 140 142 3670 | speaker* 140 142 3671 | speaking 121 140 142 3672 | speaks 121 140 142 3673 | special 125 126 3674 | sped 253 250 3675 | speech* 140 142 3676 | speed* 253 250 3677 | spend 358 3678 | spender 358 3679 | 
spending 358 3680 | spends 358 3681 | spent 11 13 358 3682 | spice 140 3683 | spiced 140 3684 | spices 140 3685 | spicy 140 3686 | spinal 146 147 3687 | spine 146 147 3688 | spirit* 359 3689 | spit 146 147 3690 | spite* 125 127 129 3691 | spits 146 147 3692 | spitting 146 147 3693 | splend* 125 126 3694 | spoke* 11 13 121 140 142 3695 | sport* 356 3696 | spose 131 135 3697 | spous* 121 122 3698 | spring 253 250 3699 | squar* 140 141 3700 | squeez* 140 143 3701 | staff* 354 3702 | stage 250 3703 | stair* 250 252 3704 | stammer* 125 127 3705 | stand 250 3706 | standard 131 3707 | standup 356 3708 | stank 140 125 127 3709 | stapl* 354 3710 | stare* 140 141 3711 | staring 140 141 3712 | start 11 14 253 250 3713 | started 11 13 253 250 3714 | starter* 253 250 3715 | starting 253 250 3716 | startl* 125 127 128 3717 | starts 11 14 253 250 3718 | startup* 253 250 3719 | starve* 146 150 3720 | starving 146 150 3721 | state 250 3722 | statement* 131 132 3723 | stay* 250 3724 | stayed 11 13 3725 | std* 146 148 149 3726 | steal* 125 127 3727 | stench* 140 125 127 3728 | step 251 250 3729 | stepchild* 121 122 3730 | stepfat* 121 122 3731 | stepkid* 121 122 3732 | stepmot* 121 122 3733 | stepp* 251 250 3734 | steps 251 250 3735 | stereo 356 3736 | stereos 356 3737 | stiff* 131 137 146 148 3738 | still 1 16 253 250 3739 | stimul* 131 133 3740 | stink* 140 125 127 3741 | stipend* 354 358 3742 | stock 354 3743 | stocked 354 3744 | stocker 354 3745 | stocks 354 358 3746 | stomach* 146 147 3747 | stoned 356 3748 | stood 11 13 3749 | stop 131 137 253 250 3750 | stopped 11 13 131 137 253 250 3751 | stopper* 131 137 251 250 253 250 3752 | stopping 131 137 253 250 3753 | stops 131 137 253 250 3754 | store 358 3755 | stories 121 131 3756 | story 121 131 3757 | stove* 357 3758 | straight 250 252 3759 | strain* 125 127 128 3760 | strange 125 127 3761 | strateg* 355 3762 | street* 252 250 3763 | strength* 125 126 355 3764 | strept* 146 148 3765 | stress* 125 127 128 3766 | stretch* 252 250 
3767 | striv* 355 3768 | stroke* 146 148 140 143 3769 | stroki* 140 143 3770 | strong* 125 126 355 3771 | struggl* 125 127 128 3772 | stubborn* 125 127 129 131 137 3773 | stuck 11 13 3774 | stud 146 149 3775 | student* 354 3776 | studied 11 13 354 3777 | studies 354 3778 | studio 357 3779 | studios 357 3780 | studious 354 3781 | study* 354 3782 | stuff 1 2 9 3783 | stuffed 146 150 3784 | stunk 140 125 127 3785 | stunned 125 127 128 3786 | stuns 125 127 128 3787 | stupid* 125 127 129 3788 | stutter* 125 127 3789 | subdue* 131 137 3790 | submissive* 125 127 3791 | subsequen* 253 250 3792 | succeed* 125 126 354 355 3793 | success* 125 126 354 355 3794 | such 1 16 3795 | suck 11 14 125 127 129 22 3796 | sucked 11 13 125 127 129 22 3797 | sucker* 125 127 129 3798 | sucks 11 14 125 127 129 22 3799 | sucky 125 127 129 3800 | sudden* 253 250 3801 | suffer 125 127 130 3802 | suffered 11 13 125 127 130 3803 | sufferer* 125 127 130 3804 | suffering 125 127 130 3805 | suffers 125 127 130 3806 | sugar* 146 150 140 3807 | suggest* 121 3808 | suicid* 360 3809 | summer* 253 250 3810 | sumptuous* 140 3811 | sunbath* 356 3812 | sunburn* 146 148 3813 | sunday* 253 250 3814 | sunli* 140 141 3815 | sunni 359 3816 | sunnier 125 126 3817 | sunniest 125 126 3818 | sunnis 359 3819 | sunny 125 126 3820 | sunshin* 125 126 140 141 3821 | super 125 126 355 3822 | superb* 355 3823 | superior* 125 126 3824 | supervis* 354 3825 | supper* 146 150 3826 | support 11 14 125 126 3827 | supported 11 13 125 126 3828 | supporter* 125 126 3829 | supporting 125 126 3830 | supportive* 125 126 3831 | supports 11 14 125 126 3832 | suppose 11 14 131 135 3833 | supposed 11 13 131 135 3834 | supposes 11 14 131 135 3835 | supposing 131 135 3836 | supposition* 131 135 3837 | suppress* 131 137 3838 | suprem* 125 126 3839 | sure* 125 126 131 136 3840 | surfac* 252 250 3841 | surgeon* 146 148 3842 | surger* 146 148 3843 | surpris* 125 126 3844 | surround* 252 250 3845 | surviv* 355 3846 | suspect* 131 132 3847 | 
suspicio* 125 127 128 131 132 3848 | swallow* 146 150 3849 | swam 251 250 3850 | sweat* 146 147 3851 | sweep* 357 3852 | sweet 125 126 140 3853 | sweetheart* 121 123 125 126 3854 | sweetie* 121 123 125 126 3855 | sweetly 125 126 3856 | sweetness* 140 125 126 3857 | sweets 140 125 126 3858 | swelling 146 148 3859 | swim* 251 250 356 3860 | swollen 146 148 3861 | syllabus* 354 3862 | symphon* 356 3863 | symptom* 146 148 3864 | synch* 253 250 3865 | syndrome* 146 148 3866 | syphili* 146 148 149 3867 | system* 250 3868 | taboo* 131 137 3869 | take 11 14 3870 | taken 11 13 3871 | takes 11 14 3872 | taking 11 14 3873 | talent* 125 126 3874 | talk 121 3875 | talkative* 121 3876 | talked 11 13 121 3877 | talker* 121 3878 | talking 121 3879 | talks 121 3880 | tall 252 250 3881 | taller 252 250 3882 | tallest 252 250 3883 | tang 140 3884 | tangy 140 3885 | tanning 356 3886 | tantrum* 125 127 129 3887 | tart 140 3888 | tast* 146 150 140 3889 | taught 11 13 354 3890 | tax 354 358 3891 | taxa* 354 358 3892 | taxed 354 358 3893 | taxes 354 358 3894 | taxing 354 358 3895 | tea 146 150 3896 | teach* 354 3897 | team* 121 354 355 356 3898 | tears 125 127 130 3899 | teas* 121 125 127 129 3900 | techno 356 3901 | teeth* 146 147 3902 | tehe 125 126 3903 | telephon* 121 3904 | television* 356 3905 | tell 121 3906 | telling 121 3907 | tells 121 3908 | temper 125 127 129 3909 | tempers 125 127 129 3910 | temple* 359 3911 | tempora* 131 135 253 250 3912 | ten 1 21 3913 | tenant* 357 3914 | tend 11 14 3915 | tended 11 13 3916 | tender* 125 126 146 148 3917 | tendon 146 147 3918 | tendoni* 146 148 3919 | tendons 146 147 3920 | tends 11 14 3921 | tennis* 356 3922 | tense* 125 127 128 3923 | tensing 125 127 128 3924 | tension* 125 127 128 3925 | tentativ* 131 135 3926 | tenth 1 21 3927 | tenure* 354 3928 | tequila 356 3929 | term 250 253 20 3930 | terminat* 250 253 3931 | terribl* 125 127 3932 | terrific* 125 126 3933 | terrified 125 127 128 3934 | terrifies 125 127 128 3935 | terrify 125 127 
128 129 3936 | terrifying 125 127 128 3937 | territor* 252 250 3938 | terror* 125 127 128 3939 | test 354 3940 | testament* 359 3941 | tested 354 3942 | testing 354 3943 | testosterone* 146 148 3944 | tests 354 3945 | textbook* 354 3946 | than 1 17 3947 | thank 11 14 125 126 3948 | thanked 11 13 125 126 3949 | thankf* 125 126 3950 | thanks 11 14 125 126 3951 | that 1 2 9 3952 | that'd 1 2 9 11 12 3953 | that'll 1 2 9 11 12 15 3954 | that's 1 2 9 11 12 14 3955 | thatd 1 2 9 11 12 3956 | thatll 1 2 9 11 12 15 3957 | thats 1 2 9 11 12 14 3958 | the 1 10 3959 | theat* 356 3960 | thee 1 2 3 6 121 3961 | their* 1 2 3 8 121 3962 | them 1 2 3 8 121 3963 | themselves 1 2 3 8 121 3964 | then 1 18 253 250 3965 | theolog* 359 3966 | theor* 131 135 3967 | therap* 146 148 3968 | there 1 16 3969 | there's 11 1 12 14 16 3970 | therefor* 131 133 3971 | theres 11 1 12 14 16 3972 | thermometer* 146 148 3973 | these 1 2 9 3974 | theses 354 3975 | thesis 354 3976 | they 1 2 3 8 121 3977 | they'd 1 2 3 8 11 12 121 3978 | they'll 1 2 3 8 11 12 15 121 3979 | they're 11 1 12 14 121 3980 | they've 1 2 3 8 11 12 14 121 3981 | theyd 1 2 3 8 11 12 121 3982 | theyll 1 2 3 8 11 12 15 121 3983 | theyre 11 1 12 14 121 3984 | theyve 1 2 3 8 11 12 14 121 3985 | thick* 252 250 140 143 3986 | thief 125 127 3987 | thieve* 125 127 3988 | thigh* 146 147 3989 | thin 252 250 140 143 3990 | thine 1 2 3 6 121 3991 | thing* 1 2 9 3992 | think 11 14 131 132 3993 | thinker* 131 132 3994 | thinking* 131 132 3995 | thinks 11 14 131 132 3996 | thinly 252 250 3997 | thinn* 252 250 140 143 3998 | third 1 21 3999 | thirst* 146 147 150 4000 | thirt* 1 21 4001 | this 1 2 9 4002 | tho 1 16 18 4003 | those 1 2 9 4004 | thou 1 2 3 6 121 4005 | though 1 16 18 4006 | thought 11 13 131 132 4007 | thoughtful* 125 126 4008 | thoughts 131 132 4009 | thousand* 1 21 4010 | thoust 1 2 3 6 121 4011 | threat* 125 127 129 4012 | three 1 21 4013 | threw 11 13 251 250 4014 | thrift* 358 4015 | thrill* 125 126 4016 | throat* 146 147 
4017 | throb* 146 148 4018 | through* 1 17 4019 | throw* 251 250 4020 | thru 1 17 4021 | thunder* 140 142 4022 | thursday* 253 250 4023 | thus 131 133 4024 | thy 1 2 3 6 121 4025 | thyroid* 146 148 4026 | ticked 125 127 129 4027 | ticket* 356 4028 | tidi* 131 137 4029 | tidy 131 137 4030 | tight* 131 137 140 143 4031 | til 1 17 18 253 250 4032 | till 1 17 18 253 250 4033 | time* 253 250 4034 | timid* 125 127 128 4035 | timing 253 250 4036 | tingl* 146 148 4037 | tinier 252 250 4038 | tiniest 252 250 4039 | tiny 252 250 4040 | tire* 146 148 4041 | tiring 146 148 4042 | tit 146 147 149 22 4043 | tits 146 147 149 22 4044 | titties 146 147 149 22 4045 | titty 146 147 149 22 4046 | tivo* 356 4047 | to 1 17 4048 | toaster* 357 4049 | today* 253 250 4050 | toe 146 147 4051 | toefl 354 4052 | toenail* 146 147 4053 | toes 146 147 4054 | together 252 250 4055 | told 11 13 121 4056 | toleran* 125 126 4057 | tomb* 360 4058 | tomorrow* 253 250 4059 | ton 1 20 4060 | tongue* 146 147 140 4061 | tonight* 253 250 4062 | tons 1 20 4063 | tonsils 146 147 4064 | too 1 16 4065 | took 11 13 251 250 4066 | tooth* 146 147 4067 | top 252 250 355 4068 | torah 359 4069 | tortur* 125 127 129 4070 | total 131 136 1 20 4071 | totally 1 16 131 136 4072 | touch* 140 143 4073 | tough* 125 127 4074 | toward* 1 17 252 250 4075 | town 250 252 4076 | tox* 146 148 4077 | toy* 356 4078 | trade* 354 358 4079 | trading 354 358 4080 | traged* 125 127 130 4081 | tragic* 125 127 130 4082 | tranquil* 125 126 4083 | transact* 250 251 121 4084 | transcript* 354 4085 | transfer* 354 4086 | transport* 251 250 4087 | trauma* 125 127 4088 | travel* 251 250 356 4089 | treasur* 125 126 4090 | treat 125 126 4091 | trembl* 125 127 128 4092 | triang* 140 141 4093 | triathl* 356 4094 | tricep* 146 147 4095 | trick* 125 127 129 4096 | tried 11 13 355 4097 | tries 11 14 355 4098 | trigger* 131 133 4099 | trillion* 1 21 4100 | trip 251 250 4101 | tripl* 1 20 4102 | tripped 251 250 4103 | tripping 251 250 4104 | trips 251 
250 4105 | trite 125 127 4106 | triumph* 125 126 355 4107 | trivi* 125 127 4108 | troubl* 125 127 4109 | true 131 136 125 126 4110 | trueness 125 126 4111 | truer 125 126 4112 | truest 125 126 131 136 4113 | truly 1 16 125 126 131 136 4114 | trust* 125 126 4115 | truth* 125 126 131 136 4116 | try 11 14 355 4117 | trying 355 4118 | tuesday* 253 250 4119 | tuition 358 4120 | tumo* 146 148 4121 | turmoil 125 127 128 4122 | turn 11 14 4123 | turned 11 13 4124 | turns 11 14 4125 | tutor* 354 4126 | tv* 356 4127 | twel* 1 21 4128 | twent* 1 21 4129 | twice 1 21 4130 | twitch* 146 148 4131 | two 1 21 4132 | tylenol 146 148 4133 | type* 354 4134 | typically 131 135 4135 | typing 354 4136 | ugh 125 127 4137 | ugl* 125 127 129 4138 | uh 463 4139 | uhhu* 462 4140 | uhuh 1 19 462 4141 | ulcer* 146 148 4142 | um 463 4143 | umm* 463 4144 | unable 355 4145 | unaccept* 131 132 4146 | unambigu* 131 136 4147 | unattractive 125 127 4148 | unaware* 131 132 4149 | unbeat* 355 4150 | uncertain* 125 127 128 131 135 4151 | uncle 121 122 4152 | uncle's 121 122 4153 | unclear* 131 135 4154 | uncles 121 122 4155 | uncomfortabl* 125 127 128 4156 | uncontrol* 125 127 128 4157 | undecided* 131 135 4158 | undeniab* 131 136 4159 | under 1 17 252 250 4160 | undergrad* 354 4161 | underneath 1 17 252 250 4162 | underpaid 354 358 4163 | undersid* 252 250 4164 | understand 11 14 131 132 4165 | understandab* 131 132 4166 | understanding* 131 132 4167 | understands 11 14 131 132 4168 | understood 11 13 131 132 4169 | undesire* 131 134 4170 | undetermin* 131 135 4171 | undo 131 134 4172 | undoubt* 131 136 4173 | uneas* 125 127 128 4174 | unemploy* 354 4175 | unfortunate* 125 127 4176 | unfriendly 125 127 4177 | ungrateful* 125 127 4178 | unhapp* 125 127 130 4179 | unhealth* 146 148 4180 | unimportant 125 127 130 4181 | unimpress* 125 127 4182 | unique 1 20 4183 | universe* 252 250 4184 | universit* 354 4185 | unkind 125 127 4186 | unknow* 131 135 4187 | unless 1 17 18 131 139 4188 | unlikel* 131 135 4189 
| unlov* 125 127 4190 | unluck* 131 135 4191 | unneccess* 131 134 4192 | unneed* 131 134 4193 | unpleasant 125 127 4194 | unproduc* 354 355 4195 | unprotected 125 127 4196 | unquestion* 131 136 4197 | unrelat* 131 132 4198 | unresolv* 131 135 4199 | unsavo* 140 125 127 4200 | unsettl* 131 135 4201 | unsuccessful* 125 127 130 355 4202 | unsure* 125 127 128 131 135 4203 | until 1 17 18 253 250 4204 | unto 1 17 4205 | unwant* 131 134 4206 | unwelcom* 125 127 4207 | unwind* 356 4208 | up 1 17 252 250 4209 | updat* 250 253 4210 | upon 1 17 252 250 4211 | upper 252 250 4212 | upperclass* 354 4213 | uppermost 252 250 4214 | upset* 125 127 128 4215 | upstairs 252 250 4216 | uptight* 125 127 128 131 137 4217 | ur 121 4218 | urin* 146 147 4219 | urn* 360 4220 | us 1 2 3 5 121 4221 | use 11 14 131 133 4222 | used 11 13 131 133 4223 | useful* 125 126 4224 | useless* 125 127 130 4225 | uses 11 14 131 133 4226 | using 11 14 131 133 4227 | usual 253 250 4228 | usually 1 16 131 135 253 250 4229 | uter* 146 147 4230 | vacation* 356 4231 | vacuum* 357 4232 | vagina* 146 147 149 4233 | vague* 131 135 4234 | vain 125 127 4235 | valuabl* 125 126 4236 | value 125 126 358 4237 | valued 125 126 4238 | values 125 126 4239 | valuing 125 126 4240 | vanity 125 127 4241 | variab* 131 135 4242 | varies 131 135 4243 | various 1 20 4244 | varsit* 354 4245 | vary 131 135 4246 | vast* 252 250 4247 | vatican* 359 4248 | vcr* 356 4249 | vd 146 149 4250 | veget* 146 150 4251 | veggie* 146 150 4252 | veil* 359 4253 | vein* 146 147 4254 | verg* 252 250 4255 | version* 20 4256 | versus 131 139 4257 | vertical* 252 250 4258 | vertigo 146 148 4259 | very 1 16 4260 | veto 131 137 4261 | viagra 146 148 4262 | vicious* 125 127 129 4263 | vicodin 146 148 4264 | victim* 125 127 129 4265 | victor* 355 4266 | video* 356 4267 | view 140 141 4268 | viewed 11 13 4269 | viewer* 140 141 4270 | viewing* 140 141 4271 | views 140 141 4272 | vigor* 125 126 4273 | vigour* 125 126 4274 | vile 125 127 129 4275 | villain* 125 
127 129 4276 | violat* 125 127 129 4277 | violent* 125 127 129 4278 | virgin* 146 149 4279 | virtue* 125 126 4280 | virtuo* 125 126 4281 | visa* 358 4282 | visit* 121 251 250 4283 | vita 354 4284 | vital* 125 126 4285 | vitamin* 146 148 4286 | vitas 354 4287 | vivid* 140 141 4288 | vocation* 354 4289 | vodka* 356 4290 | voic* 140 142 4291 | volleyb* 356 4292 | vomit* 146 148 4293 | vp* 354 4294 | vs 131 139 4295 | vulnerab* 125 127 128 4296 | vulture* 125 127 4297 | waft* 140 4298 | wage 354 358 4299 | wager* 358 4300 | wages 354 358 4301 | waist* 146 147 150 4302 | wait 11 14 131 137 4303 | waited 11 13 131 137 4304 | waiting 131 137 4305 | waits 11 14 131 137 4306 | wake 146 147 4307 | walk 251 250 4308 | walked 11 13 251 250 4309 | walking 251 250 4310 | walks 251 250 4311 | wall 252 250 4312 | walling 252 250 4313 | walls 252 250 4314 | wanker* 22 4315 | wanna 1 17 131 134 4316 | want 11 14 131 134 4317 | wanted 11 13 131 134 4318 | wanting 131 134 4319 | wants 11 14 131 134 4320 | war 125 127 129 360 4321 | warehous* 354 4322 | warfare* 125 127 129 4323 | wariness 131 137 4324 | warm* 125 126 140 143 4325 | warred 125 127 129 4326 | warring 125 127 129 4327 | wars 125 127 129 4328 | wart 146 148 4329 | warts 146 148 4330 | wary 131 137 4331 | was 11 1 12 13 4332 | wash 146 148 4333 | wasn't 11 1 12 13 19 4334 | wasnt 11 1 12 13 19 4335 | watch* 140 141 4336 | water 146 150 4337 | way 250 4338 | we 1 2 3 5 121 131 138 4339 | we'd 1 2 3 5 11 12 121 4340 | we'll 1 2 3 5 11 12 15 121 4341 | we're 1 2 3 5 11 14 121 4342 | we've 1 2 3 5 11 12 13 121 4343 | weak* 125 127 146 148 4344 | wealth* 125 126 358 4345 | weapon* 125 127 129 4346 | wear 146 147 4347 | weary 146 148 4348 | wed 121 4349 | wedding* 121 4350 | wednesday* 253 250 4351 | weds 121 4352 | week 253 250 4353 | week'* 253 250 4354 | weekend* 253 250 356 4355 | weekl* 253 250 4356 | weeks 253 250 4357 | weep* 125 127 130 4358 | weigh 146 150 4359 | weighed 146 150 4360 | weighing* 146 150 4361 | weighs 
146 150 4362 | weight 146 150 140 143 4363 | weighted 140 143 4364 | weighting 140 143 4365 | weightless* 140 143 4366 | weightlift* 356 140 143 4367 | weights 356 140 143 4368 | weird* 125 127 4369 | welcom* 121 125 126 4370 | welfare 354 4371 | well 1 16 463 125 126 4372 | went 11 13 251 250 4373 | wept 125 127 130 4374 | were 11 1 12 13 4375 | weren't 11 1 12 13 19 4376 | west* 252 250 4377 | wet 140 143 4378 | wetly 140 143 4379 | weve 1 2 3 5 11 12 13 121 4380 | what 1 2 9 4381 | what's 1 2 9 11 12 14 4382 | whatever 1 2 9 4383 | whats 1 2 9 11 12 14 4384 | wheez* 146 148 4385 | when 1 18 253 250 16 4386 | whenever 1 16 18 253 250 4387 | where 1 16 252 250 4388 | where's 11 1 12 14 252 250 4389 | whereas 1 18 4390 | wheres 11 1 12 14 252 250 4391 | wherever 252 250 4392 | whether 1 18 131 139 4393 | which 1 2 9 4394 | whichever 1 2 9 4395 | whiff* 140 4396 | while 1 18 253 250 4397 | whilst 253 250 4398 | whine* 125 127 130 4399 | whining 125 127 130 4400 | whiskey* 146 150 356 4401 | whisky* 146 150 356 4402 | whisper* 140 142 4403 | white* 140 141 4404 | whitish* 140 141 4405 | who 1 2 9 121 4406 | who'd 1 2 9 11 12 121 4407 | who'll 1 2 9 11 12 15 121 4408 | who's 11 14 121 4409 | whod 1 2 9 11 12 121 4410 | whole 1 20 4411 | wholl 1 2 9 11 12 15 121 4412 | wholly 131 136 4413 | whom 1 2 9 121 4414 | whore* 125 127 146 149 4415 | whos 11 14 121 4416 | whose 1 2 9 121 4417 | why 131 133 4418 | wicked* 125 127 129 4419 | wide* 252 250 4420 | width* 252 250 4421 | wife* 121 122 4422 | will 11 1 12 15 4423 | willing 125 121 4424 | wimp* 125 127 4425 | win 125 126 355 4426 | window* 357 4427 | wine 146 150 356 4428 | wines 146 150 356 4429 | winn* 125 126 355 4430 | wins 125 126 355 4431 | winter* 253 250 4432 | wisdom 125 126 4433 | wise* 125 126 4434 | wish 11 14 131 134 4435 | wished 11 13 131 134 4436 | wishes 11 14 131 134 4437 | wishing 131 134 4438 | witch 125 127 4439 | with 1 17 131 138 4440 | withdrawal 146 148 4441 | withheld 131 137 4442 | withhold* 
131 137 4443 | within 1 17 252 250 4444 | without 1 17 19 131 139 4445 | wive* 121 122 4446 | woe* 125 127 130 4447 | woke 11 13 4448 | woken 11 13 4449 | woman 121 124 4450 | woman's 121 124 4451 | womanhood 121 4452 | womanly 121 4453 | womb* 146 147 149 4454 | women* 121 124 4455 | won 11 13 125 126 355 4456 | won't 11 1 12 15 19 4457 | wonder 11 14 131 132 135 4458 | wondered 11 13 131 132 135 4459 | wonderf* 125 126 4460 | wondering 131 132 135 4461 | wonders 131 132 135 4462 | wont 11 1 12 15 19 4463 | word* 131 121 4464 | wore 11 13 4465 | work 354 355 4466 | workabl* 354 355 4467 | worked 11 13 354 355 4468 | worker* 354 355 4469 | working* 354 355 4470 | workout* 356 4471 | works 354 355 4472 | world 250 252 4473 | worn 11 13 4474 | worr* 125 127 128 4475 | worse* 125 127 4476 | worship* 125 126 359 4477 | worst 125 127 1 20 4478 | worth 358 4479 | worthless* 125 127 130 4480 | worthwhile 125 126 4481 | would 11 1 12 15 131 134 4482 | would've 11 1 12 13 15 131 134 4483 | wouldn't 11 1 12 15 19 131 134 4484 | wouldnt 11 1 12 15 19 131 134 4485 | wouldve 11 1 12 13 15 131 134 4486 | wound* 146 148 4487 | wow* 125 126 4488 | wrist* 146 147 4489 | write 121 131 4490 | writing 121 131 4491 | written 11 13 4492 | wrong* 125 127 4493 | wrote 11 13 4494 | wrote 121 131 11 13 4495 | xanax 146 148 4496 | xbox* 356 4497 | xerox* 354 4498 | xray* 146 148 4499 | y'all 1 2 3 6 121 4500 | ya 1 2 3 6 121 4501 | yah 462 4502 | yakn* 464 4503 | yall 1 2 3 6 121 4504 | yard 356 357 4505 | yawn* 146 148 4506 | yay 125 126 462 4507 | yays 125 126 4508 | ye 1 2 3 6 121 4509 | yea 462 4510 | yeah 462 4511 | year 253 250 4512 | yearly 253 250 4513 | yearn* 125 127 130 131 134 4514 | years 253 250 4515 | yell 140 142 4516 | yelled 140 142 4517 | yelling 140 142 4518 | yellow* 140 141 4519 | yells 140 142 4520 | yen 358 4521 | yep* 462 4522 | yes 462 4523 | yesterday* 253 250 4524 | yet 1 16 253 250 4525 | yiddish 359 4526 | yield* 131 137 4527 | ykn* 464 4528 | yoga 356 4529 | 
you 1 2 3 6 121 4530 | you'd 1 2 3 6 11 12 121 4531 | you'll 1 2 3 6 11 12 15 121 4532 | you're 1 2 3 6 11 12 14 121 4533 | you've 1 2 3 6 11 12 14 121 4534 | youd 1 2 3 6 11 12 121 4535 | youknow* 464 4536 | youll 1 2 3 6 11 12 15 121 4537 | young* 253 250 4538 | your 1 2 3 6 121 4539 | youre 1 2 3 6 11 12 14 121 4540 | yours 1 2 3 6 121 4541 | youth* 253 250 4542 | youve 1 2 3 6 11 12 14 121 4543 | yuan 358 4544 | yum* 140 4545 | yup 462 4546 | zen 359 4547 | zero 1 21 4548 | zillion* 1 21 4549 | zion* 359 4550 | zit 146 148 4551 | zits 146 148 4552 | zoloft 146 148 4553 | zz* 463 --------------------------------------------------------------------------------