├── .gitignore ├── LICENSE ├── README.md ├── bayes ├── __init__.py ├── data_routines.py ├── explanations.py ├── models.py └── regression.py ├── citation.bib ├── data ├── __init__.py ├── compas-scores-two-years.csv ├── diego │ └── diego.png ├── german_processed.csv ├── mnist │ ├── __init__.py │ ├── mnist_cnn.pt │ └── mnist_model.py └── posteriors_fig_1.png ├── experiments ├── calibration.py ├── plotting │ ├── PTG Plots.ipynb │ └── Stability Plots.ipynb ├── ptg.py └── stability.py ├── requirements.txt └── visualization ├── diego.gif ├── image_posterior.py └── image_posterior_example.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .idea/ 3 | 4 | # Created by https://www.gitignore.io/api/macos,linux,django,python,pycharm 5 | 6 | ### Django ### 7 | *.log 8 | *.pot 9 | *.pyc 10 | __pycache__/ 11 | local_settings.py 12 | db.sqlite3 13 | media 14 | 15 | ### Linux ### 16 | *~ 17 | 18 | # temporary files which can be created if a process still has a handle open of a deleted file 19 | .fuse_hidden* 20 | 21 | # KDE directory preferences 22 | .directory 23 | 24 | # Linux trash folder which might appear on any partition or disk 25 | .Trash-* 26 | 27 | # .nfs files are created when an open file is removed but is still being accessed 28 | .nfs* 29 | 30 | ### macOS ### 31 | *.DS_Store 32 | .AppleDouble 33 | .LSOverride 34 | 35 | # Icon must end with two \r 36 | Icon 37 | 38 | # Thumbnails 39 | ._* 40 | 41 | # Files that might appear in the root of a volume 42 | .DocumentRevisions-V100 43 | .fseventsd 44 | .Spotlight-V100 45 | .TemporaryItems 46 | .Trashes 47 | .VolumeIcon.icns 48 | .com.apple.timemachine.donotpresent 49 | 50 | # Directories potentially created on remote AFP share 51 | .AppleDB 52 | .AppleDesktop 53 | Network Trash Folder 54 | Temporary Items 55 | .apdisk 56 | 57 | ### PyCharm ### 58 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 59 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 60 | 61 | # User-specific stuff: 62 | .idea/**/workspace.xml 63 | .idea/**/tasks.xml 64 | .idea/dictionaries 65 | 66 | # Sensitive or high-churn files: 67 | .idea/**/dataSources/ 68 | .idea/**/dataSources.ids 69 | .idea/**/dataSources.xml 70 | .idea/**/dataSources.local.xml 71 | .idea/**/sqlDataSources.xml 72 | .idea/**/dynamic.xml 73 | .idea/**/uiDesigner.xml 74 | 75 | # Gradle: 76 | .idea/**/gradle.xml 77 | .idea/**/libraries 78 | 79 | # CMake 80 | cmake-build-debug/ 81 | 82 | # Mongo Explorer plugin: 83 | .idea/**/mongoSettings.xml 84 | 85 | ## File-based project format: 86 | *.iws 87 | 88 | ## Plugin-specific files: 89 | 90 | # IntelliJ 91 | /out/ 92 | 93 | # mpeltonen/sbt-idea plugin 94 | .idea_modules/ 95 | 96 | # JIRA plugin 97 | atlassian-ide-plugin.xml 98 | 99 | # Cursive Clojure plugin 100 | .idea/replstate.xml 101 | 102 | # Crashlytics plugin (for Android Studio and IntelliJ) 103 | com_crashlytics_export_strings.xml 104 | crashlytics.properties 105 | crashlytics-build.properties 106 | fabric.properties 107 | 108 | ### PyCharm Patch ### 109 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 110 | 111 | # *.iml 112 | # modules.xml 113 | # .idea/misc.xml 114 | # *.ipr 115 | 116 | # Sonarlint plugin 117 | .idea/sonarlint 118 | 119 | ### Python ### 120 | # Byte-compiled / optimized / DLL files 121 | *.py[cod] 122 | *$py.class 123 | 124 | # C extensions 125 | *.so 126 | 127 | # Distribution / packaging 128 | .Python 
129 | env/ 130 | build/ 131 | develop-eggs/ 132 | dist/ 133 | downloads/ 134 | eggs/ 135 | .eggs/ 136 | lib/ 137 | lib64/ 138 | parts/ 139 | sdist/ 140 | var/ 141 | wheels/ 142 | *.egg-info/ 143 | .installed.cfg 144 | *.egg 145 | 146 | # PyInstaller 147 | # Usually these files are written by a python script from a template 148 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 149 | *.manifest 150 | *.spec 151 | 152 | # Installer logs 153 | pip-log.txt 154 | pip-delete-this-directory.txt 155 | 156 | # Unit test / coverage reports 157 | htmlcov/ 158 | .tox/ 159 | .coverage 160 | .coverage.* 161 | .cache 162 | nosetests.xml 163 | coverage.xml 164 | *,cover 165 | .hypothesis/ 166 | 167 | # Translations 168 | *.mo 169 | 170 | # Django stuff: 171 | 172 | # Flask stuff: 173 | instance/ 174 | .webassets-cache 175 | 176 | # Scrapy stuff: 177 | .scrapy 178 | 179 | # Sphinx documentation 180 | docs/_build/ 181 | 182 | # PyBuilder 183 | target/ 184 | 185 | # Jupyter Notebook 186 | .ipynb_checkpoints 187 | 188 | # pyenv 189 | .python-version 190 | 191 | # celery beat schedule file 192 | celerybeat-schedule 193 | 194 | # SageMath parsed files 195 | *.sage.py 196 | 197 | # dotenv 198 | .env 199 | 200 | # virtualenv 201 | .venv 202 | venv/ 203 | ENV/ 204 | 205 | # Spyder project settings 206 | .spyderproject 207 | .spyproject 208 | 209 | # Rope project settings 210 | .ropeproject 211 | 212 | # mkdocs documentation 213 | /site 214 | 215 | # End of https://www.gitignore.io/api/macos,linux,django,python,pycharm 216 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Dylan Slack 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Reliable Post hoc Explanations: Modeling Uncertainty in Explainability 2 | 3 | Welcome to the code for our paper, Reliable Post hoc Explanations: Modeling Uncertainty in Explainability, published at NeurIPS 2021. We encourage you to read the [full paper](https://arxiv.org/abs/2008.05030). 4 | 5 | Visualizing the posteriors of BayesLIME explanations on an image of a dog and COMPAS: 6 | 7 |

[figure omitted in this dump: BayesLIME posterior visualizations — see visualization/diego.gif and data/posteriors_fig_1.png in this repository]

11 | 
12 | ## Citation
13 | 
14 | If you found this work useful, please cite us:
15 | 
16 | ```
17 | @inproceedings{reliableposthoc:neurips21,
18 |  author = {Dylan Slack and Sophie Hilgard and Sameer Singh and Himabindu Lakkaraju},
19 |  title = { {Reliable Post hoc Explanations Modeling Uncertainty in Explainability} },
20 |  booktitle = {Neural Information Processing Systems (NeurIPS)},
21 |  year = {2021}
22 | }
23 | ```
24 | 
25 | ## Examples
26 | 
27 | An example usage of the explainer is provided in `./visualization/image_posterior_example.py`, where we visualize the posterior of a BayesLIME explanation on an image of the first author's dog. A minimal tabular usage sketch is also included at the end of this README.
28 | 
29 | ## Experiments
30 | 
31 | ### Data
32 | 
33 | #### Tabular Data
34 | 
35 | The German Credit and COMPAS datasets are included in the `./data` folder. Within experiments, the German Credit dataset is selected with `--dataset german` and COMPAS with `--dataset compas`.
36 | 
37 | #### MNIST
38 | 
39 | The MNIST data downloads automatically on the first run.
40 | 
41 | Wherever the MNIST data is accepted via the `--dataset` flag, it is possible to select the digit on which to run the experiment, for example, `--dataset mnist_1` for the 1 digit or `--dataset mnist_3` for the 3 digit, and so on.
42 | 
43 | #### ImageNet
44 | 
45 | To download the ImageNet data, use [this script](https://github.com/mf1024/ImageNet-Datasets-Downloader), selecting the appropriate class indices (e.g., n02108915 is the French Bulldog class used in the paper). For example, to download the French Bulldog data, run:
46 | 
47 | ```bash
48 | python ./downloader.py \
49 |     -data_root ./data/imagenet/frenchbulldog \
50 |     -use_class_list True \
51 |     -class_list n02108915 \
52 |     -images_per_class 100
53 | ```
54 | 
55 | Once the ImageNet dataset is downloaded, it can be selected with `--dataset imagenet_classname`, where `classname` is the name of the folder where the data is stored (for instance, `frenchbulldog` when running the script above).
56 | 
57 | ### Models
58 | 
59 | The tabular models are trained when they are called in experiments. The pre-trained MNIST model is provided in the `./data/mnist` subfolder. The VGG16 ImageNet model will be downloaded when it is called.
60 | 
61 | ### Experiments
62 | 
63 | Code to run the experiments from the paper is included in the `./experiments` directory within the project.
64 | 
65 | ### Hardware Requirements
66 | 
67 | For the image experiments, GPU/TPU acceleration is recommended. I ran most of the experiments for this paper with a single NVIDIA 2080 Ti and a few with an NVIDIA Titan RTX.
68 | 
69 | The tabular experiments can be run on CPU. I tested this using a 1.4 GHz Intel Core i5 on a 2019 MacBook Pro, and it seemed to work fine. Where multithreading is used in the experiments (`--n_threads`), be careful to use a value less than the available cores on your CPU. I noticed that setting the `--n_threads` value too high on the MacBook caused it to freeze.
70 | 
71 | ### Questions
72 | 
73 | You can reach out to [dslack@uci.edu](mailto:dslack@uci.edu) with any questions.
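
### Minimal Usage Sketch

Here is a minimal, hypothetical sketch of the tabular API (`BayesLocalExplanations` and `explain` are the repo's API from `bayes/explanations.py`; the synthetic data, the random forest, and all parameter values below are illustrative assumptions, not the paper's experimental setup):

```python
import numpy as np
from sklearn.ensemble import RandomForestClassifier

from bayes.explanations import BayesLocalExplanations

# Hypothetical synthetic data standing in for a real tabular dataset.
xtrain = np.random.normal(size=(500, 5))
ytrain = (xtrain[:, 0] > 0).astype(int)
xtest = np.random.normal(size=(10, 5))

# Any classifier exposing predict_proba with output shape (N, num_labels) works.
model = RandomForestClassifier(n_estimators=100).fit(xtrain, ytrain)

explainer = BayesLocalExplanations(training_data=xtrain,
                                   data="tabular",
                                   kernel="lime",
                                   categorical_features=[],
                                   verbose=True)

# Explain the first test instance for label 1; cred_width bounds the
# width of the resulting credible intervals (0.1 is an arbitrary choice here).
explanation = explainer.explain(data=xtest[0],
                                classifier_f=model.predict_proba,
                                label=1,
                                cred_width=0.1,
                                focus_sample=False)

print("Feature importances:", explanation["coef"])
print("Credible interval widths:", explanation["blr"].creds)
```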
74 | 
75 | 
--------------------------------------------------------------------------------
/bayes/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/bayes/__init__.py
--------------------------------------------------------------------------------
/bayes/data_routines.py:
--------------------------------------------------------------------------------
1 | """Routines for processing data."""
2 | import numpy as np
3 | import os
4 | import pandas as pd
5 | from PIL import Image
6 | from skimage.segmentation import slic, mark_boundaries
7 | 
8 | import torch
9 | from torchvision import datasets, transforms
10 | 
11 | # The number of segments to use for the images
12 | NSEGMENTS = 20
13 | PARAMS = {
14 |     'protected_class': 1,
15 |     'unprotected_class': 0,
16 |     'positive_outcome': 1,
17 |     'negative_outcome': 0
18 | }
19 | IMAGENET_LABELS = {
20 |     'french_bulldog': 245,
21 |     'scuba_diver': 983,
22 |     'corn': 987,
23 |     'broccoli': 927
24 | }
25 | 
26 | def get_and_preprocess_compas_data():
27 |     """Handle processing of COMPAS according to: https://github.com/propublica/compas-analysis
28 | 
29 |     Returns
30 |     ----------
31 |     dict
32 |         A dict containing the processed data X, labels y, the column names, and
33 |         the indices of the categorical features.
34 | 
35 |     """
36 |     PROTECTED_CLASS = PARAMS['protected_class']
37 |     UNPROTECTED_CLASS = PARAMS['unprotected_class']
38 |     POSITIVE_OUTCOME = PARAMS['positive_outcome']
39 |     NEGATIVE_OUTCOME = PARAMS['negative_outcome']
40 | 
41 |     compas_df = pd.read_csv("../data/compas-scores-two-years.csv", index_col=0)
42 |     compas_df = compas_df.loc[(compas_df['days_b_screening_arrest'] <= 30) &
43 |                               (compas_df['days_b_screening_arrest'] >= -30) &
44 |                               (compas_df['is_recid'] != -1) &
45 |                               (compas_df['c_charge_degree'] != "O") &
46 |                               (compas_df['score_text'] != "NA")]
47 | 
48 |     compas_df['length_of_stay'] = (pd.to_datetime(compas_df['c_jail_out']) - pd.to_datetime(compas_df['c_jail_in'])).dt.days
49 |     X = compas_df[['age', 'two_year_recid','c_charge_degree', 'race', 'sex', 'priors_count', 'length_of_stay']]
50 | 
51 |     # if person has high score give them the _negative_ model outcome
52 |     y = np.array([NEGATIVE_OUTCOME if score == 'High' else POSITIVE_OUTCOME for score in compas_df['score_text']])
53 |     sens = X.pop('race')
54 | 
55 |     # assign African-American as the protected class
56 |     X = pd.get_dummies(X)
57 |     sensitive_attr = np.array(pd.get_dummies(sens).pop('African-American'))
58 |     X['race'] = sensitive_attr
59 | 
60 |     # make sure everything is lining up
61 |     assert all((sens == 'African-American') == (X['race'] == PROTECTED_CLASS))
62 |     cols = [col for col in X]
63 | 
64 |     categorical_features = [1, 4, 5, 6, 7, 8]
65 | 
66 |     output = {
67 |         "X": X.values,
68 |         "y": y,
69 |         "column_names": cols,
70 |         "cat_indices": categorical_features
71 |     }
72 | 
73 |     return output
74 | 
75 | def get_and_preprocess_german():
76 |     """Handle processing of German. We use a preprocessed version of German from Ustun et al.
77 |     https://arxiv.org/abs/1809.06514. Thanks Berk!
78 |     Returns:
79 |     ----------
80 |     dict
81 |         A dict containing the processed data X, labels y, the column names, and
82 |         the indices of the categorical features.
83 | 
84 |     """
85 |     PROTECTED_CLASS = PARAMS['protected_class']
86 |     UNPROTECTED_CLASS = PARAMS['unprotected_class']
87 |     POSITIVE_OUTCOME = PARAMS['positive_outcome']
88 |     NEGATIVE_OUTCOME = PARAMS['negative_outcome']
89 | 
90 |     X = pd.read_csv("../data/german_processed.csv")
91 |     y = X["GoodCustomer"]
92 | 
93 |     X = X.drop(["GoodCustomer", "PurposeOfLoan"], axis=1)
94 |     X['Gender'] = [1 if v == "Male" else 0 for v in X['Gender'].values]
95 | 
96 |     y = np.array([POSITIVE_OUTCOME if p == 1 else NEGATIVE_OUTCOME for p in y.values])
97 |     categorical_features = [0, 1, 2] + list(range(9, X.shape[1]))
98 | 
99 |     output = {
100 |         "X": X.values,
101 |         "y": y,
102 |         "column_names": [c for c in X],
103 |         "cat_indices": categorical_features,
104 |     }
105 | 
106 |     return output
107 | 
108 | def get_PIL_transf():
109 |     """Gets the PIL image transformation."""
110 |     transf = transforms.Compose([
111 |         transforms.Resize((256, 256)),
112 |         transforms.CenterCrop(224)
113 |     ])
114 |     return transf
115 | 
116 | def load_image(path):
117 |     """Loads an image by path."""
118 |     with open(os.path.abspath(path), 'rb') as f:
119 |         with Image.open(f) as img:
120 |             return img.convert('RGB')
121 | 
122 | def get_imagenet(name, get_label=True):
123 |     """Gets the imagenet data.
124 | 
125 |     Arguments:
126 |         name: The name of the imagenet dataset
127 |         get_label: Whether to attach the known imagenet label to the data """
128 |     images_paths = []
129 | 
130 |     # Store all the paths of the images
131 |     data_dir = os.path.join("../data", name)
132 |     for (dirpath, dirnames, filenames) in os.walk(data_dir):
133 |         for fn in filenames:
134 |             if fn != ".DS_Store":
135 |                 images_paths.append(os.path.join(dirpath, fn))
136 | 
137 |     # Load & do transforms for the images
138 |     pil_transf = get_PIL_transf()
139 |     images, segs = [], []
140 |     for img_path in images_paths:
141 |         img = load_image(img_path)
142 |         PIL_transformed_image = np.array(pil_transf(img))
143 |         segments = slic(PIL_transformed_image, n_segments=NSEGMENTS, compactness=100, sigma=1)
144 | 
145 |         images.append(PIL_transformed_image)
146 |         segs.append(segments)
147 | 
148 |     images = np.array(images)
149 | 
150 |     if get_label:
151 |         assert name in IMAGENET_LABELS, "Get label set to True but name not in known imagenet labels"
152 |         y = np.ones(images.shape[0]) * IMAGENET_LABELS[name]
153 |     else:
154 |         y = np.ones(images.shape[0]) * -1
155 | 
156 |     segs = np.array(segs)
157 | 
158 |     output = {
159 |         "X": images,
160 |         "y": y,
161 |         "segments": segs
162 |     }
163 | 
164 |     return output
165 | 
166 | 
167 | def get_mnist(num):
168 |     """Gets the MNIST data for a certain digit.
169 | 
170 |     Arguments:
171 |         num: The mnist digit to get
172 |     """
173 | 
174 |     # Get the mnist data
175 |     test_loader = torch.utils.data.DataLoader(datasets.MNIST('../data/mnist',
176 |                                               train=False,
177 |                                               download=True,
178 |                                               transform=transforms.Compose([transforms.ToTensor(),
179 |                                                                             transforms.Normalize((0.1307,), (0.3081,))
180 |                                               ])),
181 |                                               batch_size=1,
182 |                                               shuffle=False)
183 | 
184 |     all_test_mnist_of_label_num, all_test_segments_of_label_num = [], []
185 | 
186 |     # Get all instances of label num
187 |     for data, y in test_loader:
188 |         if y[0] == num:
189 |             # Apply segmentation
190 |             sample = np.squeeze(data.numpy().astype('double'),axis=0)
191 |             segments = slic(sample.reshape(28,28,1), n_segments=NSEGMENTS, compactness=1, sigma=0.1).reshape(1,28,28)
192 |             all_test_mnist_of_label_num.append(sample)
193 |             all_test_segments_of_label_num.append(segments)
194 | 
195 |     all_test_mnist_of_label_num = np.array(all_test_mnist_of_label_num)
196 |     all_test_segments_of_label_num = np.array(all_test_segments_of_label_num)
197 | 
198 |     output = {
199 |         "X": all_test_mnist_of_label_num,
200 |         "y": np.ones(all_test_mnist_of_label_num.shape[0]) * num,
201 |         "segments": all_test_segments_of_label_num
202 |     }
203 | 
204 |     return output
205 | 
206 | def get_dataset_by_name(name, get_label=True):
207 |     if name == "compas":
208 |         d = get_and_preprocess_compas_data()
209 |     elif name == "german":
210 |         d = get_and_preprocess_german()
211 |     elif "mnist" in name:
212 |         d = get_mnist(int(name[-1]))
213 |     elif "imagenet" in name:
214 |         d = get_imagenet(name[9:], get_label=get_label)
215 |     else:
216 |         raise NameError(f"Unknown dataset {name}")
217 |     d['name'] = name
218 |     return d
219 | 
--------------------------------------------------------------------------------
/bayes/explanations.py:
--------------------------------------------------------------------------------
1 | """Bayesian Local Explanations.
2 | 
3 | This code implements bayesian local explanations. The code supports the LIME & SHAP
4 | kernels. Along with the LIME & SHAP feature importances, bayesian local explanations
5 | also support uncertainty expression over the feature importances.
6 | """
7 | import logging
8 | 
9 | from copy import deepcopy
10 | from functools import reduce
11 | from multiprocessing import Pool
12 | import numpy as np
13 | import operator as op
14 | from tqdm import tqdm
15 | 
16 | import sklearn
17 | import sklearn.preprocessing
18 | from sklearn.linear_model import Ridge, Lasso
19 | from lime import lime_image, lime_tabular
20 | 
21 | from bayes.regression import BayesianLinearRegression
22 | 
23 | LDATA, LINVERSE, LSCALED, LDISTANCES, LY = list(range(5))
24 | SDATA, SINVERSE, SY = list(range(3))
25 | 
26 | class BayesLocalExplanations:
27 |     """Bayesian Local Explanations.
28 | 
29 |     This class implements the bayesian local explanations.
30 |     """
31 |     def __init__(self,
32 |                  training_data,
33 |                  data="image",
34 |                  kernel="lime",
35 |                  credible_interval=95,
36 |                  mode="classification",
37 |                  categorical_features=[],
38 |                  discretize_continuous=True,
39 |                  save_logs=False,
40 |                  log_file_name="bayes.log",
41 |                  width=0.75,
42 |                  verbose=False):
43 |         """Initialize the local explanations.
44 | 
45 |         Arguments:
46 |             training_data: The data used to initialize the underlying LIME explainer, typically the training set.
47 |             data: The type of data, either "image" or "tabular"
48 |             kernel: The kernel to use, either "lime" or "shap"
49 |             credible_interval: The % credible interval to use for the feature importance
50 |                                uncertainty.
51 |             mode: Whether to run with classification or regression. Note, only classification is currently implemented.
52 |             categorical_features: The indices of the categorical features, for tabular data.
53 |             save_logs: Whether to save logs from the run.
54 |             log_file_name: The name of the log file.
55 |         """
56 | 
57 |         assert kernel in ["lime", "shap"], f"Kernel must be one of lime or shap, not {kernel}"
58 |         assert data in ["image", "tabular"], f"Data must be one of image or tabular, not {data}"
59 |         assert mode in ["classification"], "Other modes, like regression, are not implemented"
60 | 
61 |         if save_logs:
62 |             logging.basicConfig(filename=log_file_name,
63 |                                 filemode='a',
64 |                                 level=logging.INFO)
65 | 
66 |         logging.info("==============================================")
67 |         logging.info("Initializing Bayes%s %s explanations", kernel, data)
68 |         logging.info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
69 | 
70 |         self.cred_int = credible_interval
71 |         self.data = data
72 |         self.kernel = kernel
73 |         self.mode = mode
74 |         self.categorical_features = categorical_features
75 |         self.discretize_continuous = discretize_continuous
76 |         self.verbose = verbose
77 |         self.width = width * np.sqrt(training_data.shape[1])
78 | 
79 |         logging.info("Setting mode to %s", mode)
80 |         logging.info("Credible interval set to %s", self.cred_int)
81 | 
82 |         if kernel == "shap" and data == "tabular":
83 |             logging.info("Setting discretize_continuous to True, due to shapley sampling")
84 |             self.discretize_continuous = True
85 | 
86 |         self.training_data = training_data
87 |         self._run_init(training_data)
88 | 
89 |     def _run_init(self, training_data):
90 |         if self.kernel == "lime":
91 |             lime_tab_exp = lime_tabular.LimeTabularExplainer(training_data,
92 |                                                              mode=self.mode,
93 |                                                              categorical_features=self.categorical_features,
94 |                                                              discretize_continuous=self.discretize_continuous)
95 |             self.lime_info = lime_tab_exp
96 |         elif self.kernel == "shap":
97 |             # Discretization forcibly set to true for shap sampling on initialization
98 |             shap_tab_exp = lime_tabular.LimeTabularExplainer(training_data,
99 |                                                              mode=self.mode,
100 |                                                              categorical_features=self.categorical_features,
101 |                                                              discretize_continuous=self.discretize_continuous)
102 |             self.shap_info = shap_tab_exp
103 |         else:
104 |             raise NotImplementedError
105 | 
106 |     def _log_args(self, args):
107 |         """Logs arguments to function."""
108 |         logging.info(args)
109 | 
110 |     def _shap_tabular_perturb_n_samples(self,
111 |                                         data,
112 |                                         n_samples,
113 |                                         max_coefs=None):
114 |         """Generates n_samples shap perturbations."""
115 |         if max_coefs is None:
116 |             max_coefs = np.arange(data.shape[0])
117 |         pre_rdata, pre_inverse = self.shap_info._LimeTabularExplainer__data_inverse(data_row=data,
118 |                                                                                     num_samples=n_samples)
119 |         rdata = pre_rdata[:, max_coefs]
120 |         inverse = np.tile(data, (n_samples, 1))
121 |         inverse[:, max_coefs] = pre_inverse[:, max_coefs]
122 |         return rdata, inverse
123 | 
124 |     def _lime_tabular_perturb_n_samples(self,
125 |                                         data,
126 |                                         n_samples):
127 |         """Generates n_samples perturbations for LIME."""
128 |         rdata, inverse = self.lime_info._LimeTabularExplainer__data_inverse(data_row=data,
129 |                                                                             num_samples=n_samples)
130 |         scaled_data = (rdata - self.lime_info.scaler.mean_) / self.lime_info.scaler.scale_
131 |         distances = sklearn.metrics.pairwise_distances(
132 |             scaled_data,
133 |             scaled_data[0].reshape(1, -1),
134 |             metric='euclidean'
135 |         ).ravel()
136 |         return rdata, inverse, scaled_data, distances
137 | 
138 |     def _stack_tabular_return(self, existing_return, perturb_return):
139 |         """Stacks data from new tabular return to existing return."""
140 |         if len(existing_return) == 0:
141 |             return perturb_return
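        # Otherwise, concatenate each new array (data, inverse, scaled data, distances) onto its existing counterpart along the sample axis.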
142 |         new_return = []
143 |         for i, item in enumerate(existing_return):
144 |             new_return.append(np.concatenate((item, perturb_return[i]), axis=0))
145 |         return new_return
146 | 
147 |     def _select_indices_from_data(self, perturb_return, indices, predictions):
148 |         """Gets each element from the perturb return according to indices, then appends the predictions."""
149 |         # Previously had this set to range(4)
150 |         temp = [perturb_return[i][indices] for i in range(len(perturb_return))]
151 |         temp.append(predictions)
152 |         return temp
153 | 
154 |     def shap_tabular_focus_sample(self,
155 |                                   data,
156 |                                   classifier_f,
157 |                                   label,
158 |                                   n_samples,
159 |                                   focus_sample_batch_size,
160 |                                   focus_sample_initial_points,
161 |                                   to_consider=10_000,
162 |                                   temperature=1e-2,
163 |                                   enumerate_initial=True):
164 |         """Focus sample n_samples perturbations for shap tabular."""
165 |         assert focus_sample_initial_points > 0, "Initial focusing sample points cannot be <= 0"
166 |         current_n_perturbations = 0
167 | 
168 |         # Get 1's coalitions, if requested
169 |         if enumerate_initial:
170 |             enumerate_init_p = self._enumerate_initial_shap(data)
171 |             current_n_perturbations += enumerate_init_p[0].shape[0]
172 |         else:
173 |             enumerate_init_p = None
174 | 
175 |         if self.verbose:
176 |             pbar = tqdm(total=n_samples)
177 |             pbar.update(current_n_perturbations)
178 | 
179 |         # Get initial points
180 |         if current_n_perturbations < focus_sample_initial_points:
181 |             initial_perturbations = self._shap_tabular_perturb_n_samples(data, focus_sample_initial_points - current_n_perturbations)
182 | 
183 |             if enumerate_init_p is not None:
184 |                 current_perturbations = self._stack_tabular_return(enumerate_init_p, initial_perturbations)
185 |             else:
186 |                 current_perturbations = initial_perturbations
187 | 
188 |             current_n_perturbations += initial_perturbations[0].shape[0]
189 |         else:
190 |             current_perturbations, initial_perturbations = enumerate_init_p, None  # no additional initial draws needed
191 | 
192 |         current_perturbations = list(current_perturbations)
193 | 
194 |         # Store initial predictions
195 |         current_perturbations.append(classifier_f(current_perturbations[SINVERSE])[:, label])
196 |         if self.verbose and initial_perturbations is not None:
197 |             pbar.update(initial_perturbations[0].shape[0])
198 | 
199 |         while current_n_perturbations < n_samples:
200 |             current_batch_size = min(focus_sample_batch_size, n_samples - current_n_perturbations)
201 | 
202 |             # Init current BLR
203 |             blr = BayesianLinearRegression(percent=self.cred_int)
204 |             weights = self._get_shap_weights(current_perturbations[SDATA], current_perturbations[SDATA].shape[1])
205 |             blr.fit(current_perturbations[SDATA], current_perturbations[-1], weights, compute_creds=False)
206 | 
207 |             candidate_perturbations = self._shap_tabular_perturb_n_samples(data, to_consider)
208 |             _, var = blr.predict(candidate_perturbations[SDATA])  # predict in the binary coalition space the BLR was fit on
209 | 
210 |             # Get sampling weighting
211 |             var /= temperature
212 |             exp_var = np.exp(var)
213 |             all_exp = np.sum(exp_var)
214 |             temperature_scaled_weights = exp_var / all_exp
215 | 
216 |             # Get sampled indices
217 |             least_confident_sample = np.random.choice(len(var), size=current_batch_size, p=temperature_scaled_weights, replace=True)
218 | 
219 |             # Get predictions
220 |             cy = classifier_f(candidate_perturbations[SINVERSE][least_confident_sample])[:, label]
221 | 
222 |             new_perturbations = self._select_indices_from_data(candidate_perturbations, least_confident_sample, cy)
223 |             current_perturbations = self._stack_tabular_return(current_perturbations, new_perturbations)
224 |             current_n_perturbations += new_perturbations[0].shape[0]
225 | 
226 |             if self.verbose:
227 | 
pbar.update(new_perturbations[0].shape[0])
228 | 
229 |         return current_perturbations
230 | 
231 |     def lime_tabular_focus_sample(self,
232 |                                   data,
233 |                                   classifier_f,
234 |                                   label,
235 |                                   n_samples,
236 |                                   focus_sample_batch_size,
237 |                                   focus_sample_initial_points,
238 |                                   to_consider=10_000,
239 |                                   temperature=5e-4,
240 |                                   existing_data=[]):
241 |         """Focus sample n_samples perturbations for lime tabular."""
242 |         current_n_perturbations = 0
243 | 
244 |         # Get initial focus sampling batch
245 |         if len(existing_data) < focus_sample_initial_points:
246 |             # If there's existing data, make sure we only sample up to existing_data points
247 |             initial_perturbations = self._lime_tabular_perturb_n_samples(data, focus_sample_initial_points - len(existing_data))
248 |             current_perturbations = self._stack_tabular_return(existing_data, initial_perturbations)
249 |         else:
250 |             current_perturbations, initial_perturbations = existing_data, None  # enough existing data, no initial draws needed
251 | 
252 |         if self.verbose:
253 |             pbar = tqdm(total=n_samples)
254 | 
255 |         current_perturbations = list(current_perturbations)
256 |         current_n_perturbations += initial_perturbations[0].shape[0] if initial_perturbations is not None else existing_data[LDATA].shape[0]
257 | 
258 |         # Store predictions on initial data
259 |         current_perturbations.append(classifier_f(current_perturbations[LINVERSE])[:, label])
260 |         if self.verbose and initial_perturbations is not None:
261 |             pbar.update(initial_perturbations[0].shape[0])
262 | 
263 |         # Sample up to n_samples
264 |         while current_n_perturbations < n_samples:
265 | 
266 |             # If batch size would exceed n_samples, only sample enough to reach n_samples
267 |             current_batch_size = min(focus_sample_batch_size, n_samples - current_n_perturbations)
268 | 
269 |             # Init current BLR
270 |             blr = BayesianLinearRegression(percent=self.cred_int)
271 |             # Get weights on current distances
272 |             weights = self._lime_kernel(current_perturbations[LDISTANCES], self.width)
273 |             # Fit blr on current perturbations & data
274 |             blr.fit(current_perturbations[LDATA], current_perturbations[LY], weights)
275 | 
276 |             # Get set of perturbations to consider labeling
277 |             candidate_perturbations = self._lime_tabular_perturb_n_samples(data, to_consider)
278 |             _, var = blr.predict(candidate_perturbations[LDATA])
279 | 
280 |             # Reweight
281 |             var /= temperature
282 |             exp_var = np.exp(var)
283 |             all_exp = np.sum(exp_var)
284 |             temperature_scaled_weights = exp_var / all_exp
285 | 
286 |             # Get sampled indices
287 |             least_confident_sample = np.random.choice(len(var), size=current_batch_size, p=temperature_scaled_weights, replace=False)
288 | 
289 |             # Get predictions
290 |             cy = classifier_f(candidate_perturbations[LINVERSE][least_confident_sample])[:, label]
291 | 
292 |             new_perturbations = self._select_indices_from_data(candidate_perturbations, least_confident_sample, cy)
293 |             current_perturbations = self._stack_tabular_return(current_perturbations, new_perturbations)
294 |             current_n_perturbations += new_perturbations[0].shape[0]
295 | 
296 |             if self.verbose:
297 |                 pbar.update(new_perturbations[0].shape[0])
298 | 
299 |         return current_perturbations
300 | 
301 |     def _lime_kernel(self, d, kernel_width):
302 |         return np.sqrt(np.exp(-(d ** 2) / kernel_width ** 2))
303 | 
304 |     def _explain_bayes_lime(self,
305 |                             data,
306 |                             classifier_f,
307 |                             label,
308 |                             focus_sample,
309 |                             cred_width,
310 |                             n_samples,
311 |                             max_n_samples,
312 |                             focus_sample_batch_size,
313 |                             focus_sample_initial_points,
314 |                             ptg_initial_points,
315 |                             to_consider):
316 |         """Computes the bayeslime tabular explanations."""
317 | 
318 |         # Case where only n_samples is specified and not focused sampling
319 |         if n_samples is not None and not focus_sample:
320 | 
logging.info("Generating bayeslime explanation with %s samples", n_samples) 321 | 322 | # Generate perturbations 323 | rdata, inverse, scaled_data, distances = self._lime_tabular_perturb_n_samples(data, n_samples) 324 | weights = self._lime_kernel(distances, self.width) 325 | y = classifier_f(inverse)[:, label] 326 | blr = BayesianLinearRegression(percent=self.cred_int) 327 | blr.fit(rdata, y, weights) 328 | # Focus sampling 329 | elif focus_sample: 330 | logging.info("Starting focused sampling") 331 | if n_samples: 332 | logging.info("n_samples preset, running focused sampling up to %s samples", n_samples) 333 | logging.info("using batch size %s with %s initial points", focus_sample_batch_size, focus_sample_initial_points) 334 | focused_sampling_output = self.lime_tabular_focus_sample(data, 335 | classifier_f, 336 | label, 337 | n_samples, 338 | focus_sample_batch_size, 339 | focus_sample_initial_points, 340 | to_consider=to_consider, 341 | existing_data=[]) 342 | rdata = focused_sampling_output[LDATA] 343 | distances = focused_sampling_output[LDISTANCES] 344 | y = focused_sampling_output[LY] 345 | 346 | blr = BayesianLinearRegression(percent=self.cred_int) 347 | weights = self._lime_kernel(distances, self.width) 348 | blr.fit(rdata, y, weights) 349 | else: 350 | # Use ptg to get the number of samples, then focus sample 351 | # Note, this isn't used in the paper, this case currently isn't implemented 352 | raise NotImplementedError 353 | 354 | else: 355 | # PTG Step 1, get initial 356 | rdata, inverse, scaled_data, distances = self._lime_tabular_perturb_n_samples(data, ptg_initial_points) 357 | weights = self._lime_kernel(distances, self.width) 358 | y = classifier_f(inverse)[:, label] 359 | blr = BayesianLinearRegression(percent=self.cred_int) 360 | blr.fit(rdata, y, weights) 361 | 362 | # PTG Step 2, get additional points needed 363 | n_needed = int(np.ceil(blr.get_ptg(cred_width))) 364 | if self.verbose: 365 | tqdm.write(f"Additional Number of perturbations needed is {n_needed}") 366 | ptg_rdata, ptg_inverse, ptg_scaled_data, ptg_distances = self._lime_tabular_perturb_n_samples(data, n_needed - ptg_initial_points) 367 | ptg_weights = self._lime_kernel(ptg_distances, self.width) 368 | 369 | rdata = np.concatenate((rdata, ptg_rdata), axis=0) 370 | inverse = np.concatenate((inverse, ptg_inverse), axis=0) 371 | scaled_data = np.concatenate((scaled_data, ptg_scaled_data), axis=0) 372 | distances = np.concatenate((distances, ptg_distances), axis=0) 373 | 374 | # Run final model 375 | ptgy = classifier_f(ptg_inverse)[:, label] 376 | y = np.concatenate((y, ptgy), axis=0) 377 | blr = BayesianLinearRegression(percent=self.cred_int) 378 | blr.fit(rdata, y, self._lime_kernel(distances, self.width)) 379 | 380 | # Format output for returning 381 | output = { 382 | "data": rdata, 383 | "y": y, 384 | "distances": distances, 385 | "blr": blr, 386 | "coef": blr.coef_, 387 | "max_coefs": None # Included for consistency purposes w/ bayesshap 388 | } 389 | 390 | return output 391 | 392 | def _get_shap_weights(self, data, M): 393 | """Gets shap weights. 
This assumes data is binary.""" 394 | nonzero = np.count_nonzero(data, axis=1) 395 | weights = [] 396 | for nz in nonzero: 397 | denom = (nCk(M, nz) * nz * (M - nz)) 398 | # Stabilize kernel 399 | if denom == 0: 400 | weight = 1.0 401 | else: 402 | weight = ((M - 1) / denom) 403 | weights.append(weight) 404 | return weights 405 | 406 | def _enumerate_initial_shap(self, data, max_coefs=None): 407 | """Enumerate 1's for stability.""" 408 | if max_coefs is None: 409 | data = np.eye(data.shape[0]) 410 | inverse = self.shap_info.discretizer.undiscretize(data) 411 | return data, inverse 412 | else: 413 | data = np.zeros((max_coefs.shape[0], data.shape[0])) 414 | for i in range(max_coefs.shape[0]): 415 | data[i, max_coefs[i]] = 1 416 | inverse = self.shap_info.discretizer.undiscretize(data) 417 | return data[:, max_coefs], inverse 418 | 419 | def _explain_bayes_shap(self, 420 | data, 421 | classifier_f, 422 | label, 423 | focus_sample, 424 | cred_width, 425 | n_samples, 426 | max_n_samples, 427 | focus_sample_batch_size, 428 | focus_sample_initial_points, 429 | ptg_initial_points, 430 | to_consider, 431 | feature_select_num_points=1_000, 432 | n_features=10, 433 | l2=True, 434 | enumerate_initial=True, 435 | feature_selection=True, 436 | max_coefs=None): 437 | """Computes the bayesshap tabular explanations.""" 438 | if feature_selection and max_coefs is None: 439 | n_features = min(n_features, data.shape[0]) 440 | _, feature_select_inverse = self._shap_tabular_perturb_n_samples(data, feature_select_num_points) 441 | lr = Ridge().fit(feature_select_inverse, classifier_f(feature_select_inverse)[:, label]) 442 | max_coefs = np.argsort(np.abs(lr.coef_))[-1 * n_features:] 443 | elif feature_selection and max_coefs is not None: 444 | pass 445 | else: 446 | max_coefs = None 447 | 448 | # Case without focused sampling 449 | if n_samples is not None and not focus_sample: 450 | logging.info("Generating bayesshap explanation with %s samples", n_samples) 451 | 452 | # Enumerate single coalitions, if requested 453 | if enumerate_initial: 454 | data_init, inverse_init = self._enumerate_initial_shap(data, max_coefs) 455 | n_more = n_samples - inverse_init.shape[0] 456 | else: 457 | n_more = n_samples 458 | 459 | rdata, inverse = self._shap_tabular_perturb_n_samples(data, n_more, max_coefs) 460 | 461 | if enumerate_initial: 462 | rdata = np.concatenate((data_init, rdata), axis=0) 463 | inverse = np.concatenate((inverse_init, inverse), axis=0) 464 | 465 | y = classifier_f(inverse)[:, label] 466 | weights = self._get_shap_weights(rdata, M=rdata.shape[1]) 467 | 468 | blr = BayesianLinearRegression(percent=self.cred_int) 469 | blr.fit(rdata, y, weights) 470 | elif focus_sample: 471 | if feature_selection: 472 | raise NotImplementedError 473 | 474 | logging.info("Starting focused sampling") 475 | if n_samples: 476 | logging.info("n_samples preset, running focused sampling up to %s samples", n_samples) 477 | logging.info("using batch size %s with %s initial points", focus_sample_batch_size, focus_sample_initial_points) 478 | focused_sampling_output = self.shap_tabular_focus_sample(data, 479 | classifier_f, 480 | label, 481 | n_samples, 482 | focus_sample_batch_size, 483 | focus_sample_initial_points, 484 | to_consider=to_consider, 485 | enumerate_initial=enumerate_initial) 486 | rdata = focused_sampling_output[SDATA] 487 | y = focused_sampling_output[SY] 488 | weights = self._get_shap_weights(rdata, rdata.shape[1]) 489 | blr = BayesianLinearRegression(percent=self.cred_int, l2=l2) 490 | blr.fit(rdata, y, weights) 491 
| else:
492 |                 # Use ptg to get the number of samples, then focus sample
493 |                 # Note, this case isn't used in the paper and currently isn't implemented
494 |                 raise NotImplementedError
495 |         else:
496 |             # Use PTG to get initial samples
497 | 
498 |             # Enumerate initial points if requested
499 |             if enumerate_initial:
500 |                 data_init, inverse_init = self._enumerate_initial_shap(data, max_coefs)
501 |                 n_more = ptg_initial_points - inverse_init.shape[0]
502 |             else:
503 |                 n_more = ptg_initial_points
504 | 
505 |             # Perturb using initial samples
506 |             rdata, inverse = self._shap_tabular_perturb_n_samples(data, n_more, max_coefs)
507 |             if enumerate_initial:
508 |                 rdata = np.concatenate((data_init, rdata), axis=0)
509 |                 inverse = np.concatenate((inverse_init, inverse), axis=0)
510 | 
511 |             # Get labels
512 |             y = classifier_f(inverse)[:, label]
513 | 
514 |             # Fit BLR
515 |             weights = self._get_shap_weights(rdata, M=rdata.shape[1])
516 |             blr = BayesianLinearRegression(percent=self.cred_int, l2=l2)
517 |             blr.fit(rdata, y, weights)
518 | 
519 |             # Compute PTG number needed, clamped to [ptg_initial_points, max_n_samples]
520 |             n_needed = min(max(int(np.ceil(blr.get_ptg(cred_width))), ptg_initial_points), max_n_samples)
521 |             ptg_rdata, ptg_inverse = self._shap_tabular_perturb_n_samples(data,
522 |                                                                           n_needed - ptg_initial_points,
523 |                                                                           max_coefs)
524 | 
525 |             if self.verbose:
526 |                 tqdm.write(f"{n_needed} total samples needed")
527 | 
528 |             rdata = np.concatenate((rdata, ptg_rdata), axis=0)
529 |             inverse = np.concatenate((inverse, ptg_inverse), axis=0)
530 | 
531 |             weights = self._get_shap_weights(rdata, M=rdata.shape[1])
532 | 
533 |             # Run final model
534 |             ptgy = classifier_f(ptg_inverse)[:, label]
535 |             y = np.concatenate((y, ptgy), axis=0)
536 |             blr = BayesianLinearRegression(percent=self.cred_int, l2=l2)
537 |             blr.fit(rdata, y, weights)
538 | 
539 |         # Format output for returning
540 |         output = {
541 |             "data": rdata,
542 |             "y": y,
543 |             "distances": weights,
544 |             "blr": blr,
545 |             "coef": blr.coef_,
546 |             "max_coefs": max_coefs
547 |         }
548 | 
549 |         return output
550 | 
551 |     def explain(self,
552 |                 data,
553 |                 classifier_f,
554 |                 label,
555 |                 cred_width=1e-2,
556 |                 focus_sample=True,
557 |                 n_samples=None,
558 |                 max_n_samples=10_000,
559 |                 focus_sample_batch_size=2_500,
560 |                 focus_sample_initial_points=100,
561 |                 ptg_initial_points=200,
562 |                 to_consider=10_000,
563 |                 feature_selection=True,
564 |                 n_features=15,
565 |                 tag=None,
566 |                 only_coef=False,
567 |                 only_blr=False,
568 |                 enumerate_initial=True,
569 |                 max_coefs=None,
570 |                 l2=True):
571 |         """Explain an instance.
572 | 
573 |         As opposed to other model-agnostic explanations, the bayes explanations
574 |         accept a credible interval width instead of a number of perturbations
575 |         value.
576 | 
577 |         If the credible interval is set to 95% (as is the default), the bayesian
578 |         explanations will generate feature importances that are +/- width/2
579 |         95% of the time.
580 | 
581 | 
582 |         Arguments:
583 |             data: The data instance to explain
584 |             classifier_f: The classification function. This function should return
585 |                           probabilities for each label, where if there are M labels
586 |                           and N instances, the output is of shape (N, M).
587 |             label: The label index to explain.
588 |             cred_width: The width of the credible interval of the resulting explanation. Note,
589 |                         this serves as an upper bound in the implementation; the final credible
590 |                         intervals may be tighter, because PTG is a bit approximate.
Also, be
591 |                         aware that for kernelshap, it is sometimes possible to compute the kernelshap values exactly
592 |                         by enumerating all the coalitions.
593 |             focus_sample: Whether to use uncertainty sampling.
594 |             n_samples: If specified, n_samples will override the width setting feature
595 |                        and compute the explanation with n_samples.
596 |             max_n_samples: The maximum number of samples to use. If the width is set to
597 |                            a very small value and many samples are required, this serves
598 |                            as a point to stop sampling.
599 |             focus_sample_batch_size: The batch size of focus sampling.
600 |             focus_sample_initial_points: The number of perturbations to collect before starting
601 |                                          focused sampling.
602 |             ptg_initial_points: The number of perturbations to collect before computing the ptg estimate.
603 |             to_consider: The number of perturbations to consider in focused sampling.
604 |             feature_selection: Whether to do feature selection using Ridge regression. Note, currently
605 |                                only implemented for BayesSHAP.
606 |             n_features: The number of features to use in feature selection.
607 |             tag: A tag to add to the explanation.
608 |             only_coef: Only return the explanation means.
609 |             only_blr: Only return the bayesian regression object.
610 |             enumerate_initial: Whether to enumerate a set of initial shap coalitions.
611 |             l2: Whether to fit with l2 regression. Turning off the l2 regression can be useful for the shapley value estimation.
612 |         Returns:
613 |             explanation: The resulting feature importances, credible intervals, and bayes regression
614 |                          object.
615 |         """
616 |         assert isinstance(data, np.ndarray), "Data must be numpy array. Note, this means that classifier_f \
617 |                                               must accept numpy arrays."
618 |         self._log_args(locals())
619 | 
620 |         if self.kernel == "lime" and self.data in ["tabular", "image"]:
621 |             output = self._explain_bayes_lime(data,
622 |                                               classifier_f,
623 |                                               label,
624 |                                               focus_sample,
625 |                                               cred_width,
626 |                                               n_samples,
627 |                                               max_n_samples,
628 |                                               focus_sample_batch_size,
629 |                                               focus_sample_initial_points,
630 |                                               ptg_initial_points,
631 |                                               to_consider)
632 |         elif self.kernel == "shap" and self.data in ["tabular", "image"]:
633 |             output = self._explain_bayes_shap(data,
634 |                                               classifier_f,
635 |                                               label,
636 |                                               focus_sample,
637 |                                               cred_width,
638 |                                               n_samples,
639 |                                               max_n_samples,
640 |                                               focus_sample_batch_size,
641 |                                               focus_sample_initial_points,
642 |                                               ptg_initial_points,
643 |                                               to_consider,
644 |                                               feature_selection=feature_selection,
645 |                                               n_features=n_features,
646 |                                               enumerate_initial=enumerate_initial,
647 |                                               max_coefs=max_coefs,
648 |                                               l2=l2)
649 |         else:
650 |             raise NotImplementedError
651 | 
652 |         output['tag'] = tag
653 | 
654 |         if only_coef:
655 |             return output['coef']
656 | 
657 |         if only_blr:
658 |             return output['blr']
659 | 
660 |         return output
661 | 
662 | 
663 | def nCk(n, r):
664 |     """n choose r
665 | 
666 |     From: https://stackoverflow.com/questions/4941753/is-there-a-math-ncr-function-in-python"""
667 |     r = min(r, n-r)
668 |     numer = reduce(op.mul, range(n, n-r, -1), 1)
669 |     denom = reduce(op.mul, range(1, r+1), 1)
670 |     return numer / denom
671 | 
672 | 
673 | def do_exp(args):
674 |     """Supporting function for the explanations."""
675 |     i, data, init_kwargs, exp_kwargs, labels, max_coefs, pass_args = args
676 |     def do(data_i, label):
677 | 
678 |         if pass_args is not None and pass_args.balance_background_dataset:
679 |             init_kwargs['training_data'] = np.concatenate((data_i[None, :], np.zeros((1, data_i.shape[0]))), axis=0)
680 | 
681 |         exp = BayesLocalExplanations(**init_kwargs)
682 |         exp_kwargs['tag'] = i
683 |         exp_kwargs['label'] = label
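        # Forward this instance's precomputed feature-selection indices (max_coefs) to the explanation, when provided.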
684 |         if max_coefs is not None:
685 |             exp_kwargs['max_coefs'] = max_coefs[i]
686 |         e = deepcopy(exp.explain(data_i, **exp_kwargs))
687 |         return e
688 |     if labels is not None:
689 |         return do(data[i], labels[i])
690 |     else:
691 |         return do(data[i], exp_kwargs['label'])
692 | 
693 | 
694 | def explain_many(all_data, init_kwargs, exp_kwargs, pool_size=1, verbose=False, labels=None, max_coefs=None, args=None):
695 |     """Parallel explanations."""
696 |     with Pool(pool_size) as p:
697 |         if verbose:
698 |             results = list(tqdm(p.imap(do_exp, [(i, all_data, init_kwargs, exp_kwargs, labels, max_coefs, args) for i in range(all_data.shape[0])])))
699 |         else:
700 |             results = p.map(do_exp, [(i, all_data, init_kwargs, exp_kwargs, labels, max_coefs, args) for i in range(all_data.shape[0])])
701 |     return results
702 | 
--------------------------------------------------------------------------------
/bayes/models.py:
--------------------------------------------------------------------------------
1 | """Routines that implement processing data & getting models.
2 | 
3 | This file includes various routines for processing & acquiring models, for
4 | later use in the code. The tabular data preprocessing is straightforward. We
5 | first apply scaling to the data and fit a random forest classifier.
6 | 
7 | The processing of the image data is a bit more complex. To simplify the construction
8 | of the explanations, the explanations don't accept images. Instead, for image explanations,
9 | it is necessary to define a function that accepts an array of 0's and 1's, corresponding to
10 | segments of a particular image being either excluded or included, respectively. The explanation
11 | is performed on this array.
12 | """
13 | import numpy as np
14 | from copy import deepcopy
15 | 
16 | from sklearn.ensemble import RandomForestClassifier
17 | from sklearn.preprocessing import StandardScaler
18 | from sklearn.model_selection import train_test_split
19 | 
20 | import torch
21 | from torchvision import models, transforms
22 | 
23 | from data.mnist.mnist_model import Net
24 | 
25 | def get_xtrain(segs):
26 |     """A function to get the mock training data to use in the image explanations.
27 | 
28 |     This function returns a dataset containing a single instance of ones and
29 |     another of zeros to represent the training data for the explanation. The idea
30 |     is that the explanation will use this data to compute the perturbations, which
31 |     will then be fed into the wrapped model.
32 | 33 | Arguments: 34 | segs: The current segments array 35 | """ 36 | n_segs = len(np.unique(segs)) 37 | xtrain = np.concatenate((np.ones((1, n_segs)), np.zeros((1, n_segs))), axis=0) 38 | return xtrain 39 | 40 | def process_imagenet_get_model(data): 41 | """Gets wrapped imagenet model.""" 42 | 43 | # Get the vgg16 model, used in the experiments 44 | model = models.vgg16(pretrained=True) 45 | model.eval() 46 | model.cuda() 47 | 48 | xtest = data['X'] 49 | ytest = data['y'].astype(int) 50 | xtest_segs = data['segments'] 51 | 52 | softmax = torch.nn.Softmax(dim=1) 53 | 54 | # Transforms 55 | normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], 56 | std=[0.229, 0.224, 0.225]) 57 | transf = transforms.Compose([ 58 | transforms.ToTensor(), 59 | normalize 60 | ]) 61 | 62 | t_xtest = transf(xtest[0])[None, :]#.cuda() 63 | 64 | # Define the wrapped model 65 | def get_wrapped_model(instance, segments, background=0, batch_size=64): 66 | def wrapped_model(data): 67 | perturbed_images = [] 68 | for d in data: 69 | perturbed_image = deepcopy(instance) 70 | for i, is_on in enumerate(d): 71 | if is_on == 0: 72 | perturbed_image[segments==i, 0] = background 73 | perturbed_image[segments==i, 1] = background 74 | perturbed_image[segments==i, 2] = background 75 | perturbed_images.append(transf(perturbed_image)[None, :]) 76 | perturbed_images = torch.from_numpy(np.concatenate(perturbed_images, axis=0)).float().cuda() 77 | predictions = [] 78 | for q in range(0, perturbed_images.shape[0], batch_size): 79 | predictions.append(softmax(model(perturbed_images[q:q+batch_size])).cpu().detach().numpy()) 80 | predictions = np.concatenate(predictions, axis=0) 81 | return predictions 82 | return wrapped_model 83 | 84 | output = { 85 | "model": get_wrapped_model, 86 | "xtest": xtest, 87 | "ytest": ytest, 88 | "xtest_segs": xtest_segs, 89 | "label": data['y'][0] 90 | } 91 | 92 | return output 93 | 94 | def process_mnist_get_model(data): 95 | """Gets wrapped mnist model.""" 96 | xtest = data['X'] 97 | ytest = data['y'].astype(int) 98 | xtest_segs = data['segments'] 99 | 100 | model = Net() 101 | model.load_state_dict(torch.load("../data/mnist/mnist_cnn.pt")) 102 | model.eval() 103 | model.cuda() 104 | 105 | softmax = torch.nn.Softmax(dim=1) 106 | def get_wrapped_model(instance, segments, background=-0.4242, batch_size=100): 107 | def wrapped_model(data): 108 | perturbed_images = [] 109 | data = torch.from_numpy(data).float().cuda() 110 | for d in data: 111 | perturbed_image = deepcopy(instance) 112 | for i, is_on in enumerate(d): 113 | if is_on == 0: 114 | a = segments==i 115 | perturbed_image[0, segments[0]==i] = background 116 | perturbed_images.append(perturbed_image[:, None]) 117 | perturbed_images = torch.from_numpy(np.concatenate(perturbed_images, axis=0)).float().cuda() 118 | 119 | # Batch predictions if necessary 120 | if perturbed_images.shape[0] > batch_size: 121 | predictions = [] 122 | for q in range(0, perturbed_images.shape[0], batch_size): 123 | predictions.append(softmax(model(perturbed_images[q:q+batch_size])).cpu().detach().numpy()) 124 | predictions = np.concatenate(predictions, axis=0) 125 | else: 126 | predictions = softmax(model(perturbed_images)).cpu().detach().numpy() 127 | return np.array(predictions) 128 | return wrapped_model 129 | 130 | output = { 131 | "model": get_wrapped_model, 132 | "xtest": xtest, 133 | "ytest": ytest, 134 | "xtest_segs": xtest_segs, 135 | "label": data['y'][0], 136 | } 137 | 138 | return output 139 | 140 | def process_tabular_data_get_model(data): 141 | 
"""Processes tabular data + trains random forest classifier.""" 142 | X = data['X'] 143 | y = data['y'] 144 | 145 | xtrain,xtest,ytrain,ytest = train_test_split(X,y,test_size=0.2) 146 | ss = StandardScaler().fit(xtrain) 147 | xtrain = ss.transform(xtrain) 148 | xtest = ss.transform(xtest) 149 | rf = RandomForestClassifier(n_estimators=100).fit(xtrain,ytrain) 150 | 151 | output = { 152 | "model": rf, 153 | "xtrain": xtrain, 154 | "xtest": xtest, 155 | "ytrain": ytrain, 156 | "ytest": ytest, 157 | "label": 1, 158 | "model_score": rf.score(xtest, ytest) 159 | } 160 | 161 | print(f"Model Score: {output['model_score']}") 162 | 163 | return output -------------------------------------------------------------------------------- /bayes/regression.py: -------------------------------------------------------------------------------- 1 | """Bayesian regression. 2 | 3 | A class the implements the Bayesian Regression. 4 | """ 5 | import operator as op 6 | from functools import reduce 7 | import copy 8 | import collections 9 | 10 | import numpy as np 11 | from scipy.stats import invgamma 12 | from scipy.stats import multivariate_normal 13 | 14 | class BayesianLinearRegression: 15 | def __init__(self, percent=95, l2=True, prior=None): 16 | if prior is not None: 17 | raise NameError("Currently only support uninformative prior, set to None plz.") 18 | 19 | self.percent = percent 20 | self.l2 = l2 21 | 22 | def fit(self, xtrain, ytrain, sample_weight, compute_creds=True): 23 | """ 24 | Fit the bayesian linear regression. 25 | 26 | Arguments: 27 | xtrain: the training data 28 | ytrain: the training labels 29 | sample_weight: the weights for fitting the regression 30 | """ 31 | 32 | # store weights 33 | weights = sample_weight 34 | 35 | # add intercept 36 | xtrain = np.concatenate((np.ones(xtrain.shape[0])[:,None], xtrain), axis=1) 37 | diag_pi_z = np.zeros((len(weights), len(weights))) 38 | np.fill_diagonal(diag_pi_z, weights) 39 | 40 | if self.l2: 41 | V_Phi = np.linalg.inv(xtrain.transpose().dot(diag_pi_z).dot(xtrain) \ 42 | + np.eye(xtrain.shape[1])) 43 | else: 44 | V_Phi = np.linalg.inv(xtrain.transpose().dot(diag_pi_z).dot(xtrain)) 45 | 46 | Phi_hat = V_Phi.dot(xtrain.transpose()).dot(diag_pi_z).dot(ytrain) 47 | 48 | N = xtrain.shape[0] 49 | Y_m_Phi_hat = ytrain - xtrain.dot(Phi_hat) 50 | 51 | s_2 = (1.0 / N) * (Y_m_Phi_hat.dot(diag_pi_z).dot(Y_m_Phi_hat) \ 52 | + Phi_hat.transpose().dot(Phi_hat)) 53 | 54 | self.score = s_2 55 | 56 | self.s_2 = s_2 57 | self.N = N 58 | self.V_Phi = V_Phi 59 | self.Phi_hat = Phi_hat 60 | self.coef_ = Phi_hat[1:] 61 | self.intercept_ = Phi_hat[0] 62 | self.weights = weights 63 | 64 | if compute_creds: 65 | self.creds = self.get_creds(percent=self.percent) 66 | else: 67 | self.creds = "NA" 68 | 69 | self.crit_params = { 70 | "s_2": self.s_2, 71 | "N": self.N, 72 | "V_Phi": self.V_Phi, 73 | "Phi_hat": self.Phi_hat, 74 | "creds": self.creds 75 | } 76 | 77 | return self 78 | 79 | def predict(self, data): 80 | """ 81 | The predictive distribution. 
82 | 83 | Arguments: 84 | data: The data to predict 85 | """ 86 | q_1 = np.eye(data.shape[0]) 87 | data_ones = np.concatenate((np.ones(data.shape[0])[:,None], data), axis=1) 88 | 89 | # Get response 90 | response = np.matmul(data, self.coef_) 91 | response += self.intercept_ 92 | 93 | # Compute var 94 | temp = np.matmul(data_ones, self.V_Phi) 95 | mat = np.matmul(temp, data_ones.transpose()) 96 | var = self.s_2 * (q_1 + mat) 97 | diag = np.diagonal(var) 98 | 99 | return response, np.sqrt(diag) 100 | 101 | def get_ptg(self, desired_width): 102 | """ 103 | Compute the ptg perturbations. 104 | """ 105 | cert = (desired_width / 1.96) ** 2 106 | S = self.coef_.shape[0] * self.s_2 107 | T = np.mean(self.weights) 108 | return 4 * S / (self.coef_.shape[0] * T * cert) 109 | 110 | def get_creds(self, percent=95, n_samples=10_000, get_intercept=False): 111 | """ 112 | Get the credible intervals. 113 | 114 | Arguments: 115 | percent: the percent cutoff for the credible interval, i.e., 95 is 95% credible interval 116 | n_samples: the number of samples to compute the credible interval 117 | get_intercept: whether to include the intercept in the credible interval 118 | """ 119 | samples = self.draw_posterior_samples(n_samples, get_intercept=get_intercept) 120 | creds = np.percentile(np.abs(samples - (self.Phi_hat if get_intercept else self.coef_)), 121 | percent, 122 | axis=0) 123 | return creds 124 | 125 | def draw_posterior_samples(self, num_samples, get_intercept=False): 126 | """ 127 | Sample from the posterior. 128 | 129 | Arguments: 130 | num_samples: number of samples to draw from the posterior 131 | get_intercept: whether to include the intercept 132 | """ 133 | 134 | sigma_2 = invgamma.rvs(self.N / 2, scale=(self.N * self.s_2) / 2, size=num_samples) 135 | 136 | phi_samples = [] 137 | for sig in sigma_2: 138 | sample = multivariate_normal.rvs(mean=self.Phi_hat, 139 | cov=self.V_Phi * sig, 140 | size=1) 141 | phi_samples.append(sample) 142 | 143 | phi_samples = np.vstack(phi_samples) 144 | 145 | if get_intercept: 146 | return phi_samples 147 | else: 148 | return phi_samples[:, 1:] -------------------------------------------------------------------------------- /citation.bib: -------------------------------------------------------------------------------- 1 | @inproceedings{reliable:neurips21, 2 | author = {Dylan Slack and Sophie Hilgard and Sameer Singh and Himabindu Lakkaraju}, 3 | title = { {Reliable Post hoc Explanations Modeling Uncertainty in Explainability} }, 4 | booktitle = {Neural Information Processing Systems (NeurIPS)}, 5 | year = {2021} 6 | } 7 | -------------------------------------------------------------------------------- /data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/data/__init__.py -------------------------------------------------------------------------------- /data/diego/diego.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/data/diego/diego.png -------------------------------------------------------------------------------- /data/german_processed.csv: -------------------------------------------------------------------------------- 1 | 
GoodCustomer,Gender,ForeignWorker,Single,Age,LoanDuration,PurposeOfLoan,LoanAmount,LoanRateAsPercentOfIncome,YearsAtCurrentHome,NumberOfOtherLoansAtBank,NumberOfLiableIndividuals,HasTelephone,CheckingAccountBalance_geq_0,CheckingAccountBalance_geq_200,SavingsAccountBalance_geq_100,SavingsAccountBalance_geq_500,MissedPayments,NoCurrentLoan,CriticalAccountOrLoansElsewhere,OtherLoansAtBank,OtherLoansAtStore,HasCoapplicant,HasGuarantor,OwnsHouse,RentsHouse,Unemployed,YearsAtCurrentJob_lt_1,YearsAtCurrentJob_geq_4,JobClassIsSkilled 2 | 1,Male,0,1,67,6,Electronics,1169,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 3 | -1,Female,0,0,22,48,Electronics,5951,2,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 4 | 1,Male,0,1,49,12,Education,2096,2,3,1,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 5 | 1,Male,0,1,45,42,Furniture,7882,2,4,1,2,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,1 6 | -1,Male,0,1,53,24,NewCar,4870,3,4,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 7 | 1,Male,0,1,35,36,Education,9055,2,4,1,2,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0 8 | 1,Male,0,1,53,24,Furniture,2835,3,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 9 | 1,Male,0,1,35,36,UsedCar,6948,2,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 10 | 1,Male,0,0,61,12,Electronics,3059,2,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,0 11 | -1,Male,0,0,28,30,NewCar,5234,4,2,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 12 | -1,Female,0,0,25,12,NewCar,1295,3,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 13 | -1,Female,0,0,24,48,Business,4308,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 14 | 1,Female,0,0,22,12,Electronics,1567,1,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 15 | -1,Male,0,1,60,24,NewCar,1199,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 16 | 1,Female,0,0,28,15,NewCar,1403,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 17 | -1,Female,0,0,32,24,Electronics,1282,4,2,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0 18 | 1,Male,0,1,53,24,Electronics,2424,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 19 | 1,Male,0,1,25,30,Business,8072,2,3,3,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,1 20 | -1,Female,0,0,44,24,UsedCar,12579,4,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 21 | 1,Male,0,1,31,24,Electronics,3430,3,2,1,2,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 22 | 1,Male,0,1,48,9,NewCar,2134,4,4,3,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 23 | 1,Male,0,1,44,6,Electronics,2647,2,3,1,2,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1 24 | 1,Male,1,1,48,10,NewCar,2241,1,3,2,2,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0 25 | 1,Male,0,1,44,12,UsedCar,1804,3,4,1,1,0,1,0,1,0,1,0,1,0,0,0,0,1,0,0,1,0,1 26 | 1,Male,1,0,26,10,Furniture,2069,2,1,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 27 | 1,Male,0,1,36,6,Furniture,1374,1,2,1,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0 28 | 1,Male,0,0,39,6,Electronics,426,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 29 | 1,Female,0,0,42,12,Electronics,409,3,3,2,1,0,1,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,1 30 | 1,Male,0,1,34,7,Electronics,2415,3,2,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 31 | -1,Male,0,1,63,60,Business,6836,3,4,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 32 | 1,Male,0,0,36,18,Business,1913,3,3,1,1,1,1,0,1,1,1,0,0,1,0,0,0,1,0,0,1,0,1 33 | 1,Male,0,1,27,24,Furniture,4020,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 34 | 1,Male,0,1,30,18,NewCar,5866,2,2,2,1,1,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1 35 | 1,Male,0,1,57,12,Business,1264,4,4,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,0 36 | 1,Female,0,0,33,12,Furniture,1474,4,1,1,1,1,1,1,0,0,1,0,0,1,0,0,0,1,0,0,1,0,1 37 | -1,Male,0,1,25,45,Electronics,4746,4,2,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0 38 | 1,Male,0,1,31,48,Education,6110,1,3,1,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,1 39 | 
-1,Male,0,1,37,18,Electronics,2100,4,2,1,1,0,1,1,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1 40 | 1,Male,0,1,37,10,HomeAppliances,1225,2,2,1,1,1,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 41 | 1,Male,0,1,24,9,Electronics,458,4,3,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 42 | 1,Male,0,1,30,30,Electronics,2333,4,2,1,1,0,0,0,1,1,1,0,0,1,0,0,0,1,0,0,0,1,1 43 | 1,Male,0,0,26,12,Electronics,1158,3,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 44 | 1,Male,0,1,44,18,Repairs,6204,2,4,1,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0 45 | 1,Male,0,0,24,30,UsedCar,6187,1,4,2,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,1,1 46 | -1,Female,0,0,58,48,UsedCar,6143,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,0 47 | 1,Female,0,0,35,11,NewCar,1393,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 48 | 1,Male,0,1,39,36,Electronics,2299,4,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 49 | 1,Female,0,0,23,6,UsedCar,1352,1,2,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0 50 | 1,Male,0,1,39,11,NewCar,7228,1,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 51 | 1,Female,0,0,28,12,Electronics,2073,4,2,1,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0,0,1 52 | 1,Male,0,1,29,24,Furniture,2333,4,2,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,0 53 | 1,Male,0,1,30,27,UsedCar,5965,1,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 54 | 1,Male,0,1,25,12,Electronics,1262,3,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 55 | 1,Male,0,1,31,18,UsedCar,3378,2,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 56 | -1,Male,0,1,57,36,NewCar,2225,4,4,2,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1 57 | 1,Male,0,1,26,6,NewCar,783,1,2,1,2,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,0,0,0 58 | -1,Male,0,1,52,12,Electronics,6468,2,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 59 | 1,Female,0,0,31,36,Electronics,9566,2,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 60 | 1,Female,0,0,23,18,NewCar,1961,3,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 61 | -1,Female,0,0,23,36,Furniture,6229,4,4,2,1,1,0,0,0,0,1,0,1,0,0,1,0,0,1,0,1,0,0 62 | 1,Male,0,0,27,9,Business,1391,2,1,1,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 63 | 1,Male,0,1,50,15,Electronics,1537,4,4,2,1,1,1,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,1 64 | -1,Male,0,1,61,36,Business,1953,4,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 65 | -1,Male,0,1,25,48,Business,14421,2,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 66 | 1,Female,0,0,26,24,Electronics,3181,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 67 | 1,Male,0,1,48,27,Repairs,5190,4,4,4,2,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 68 | 1,Female,0,0,29,12,Electronics,2171,2,2,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,1 69 | 1,Male,0,0,22,12,NewCar,1007,4,1,1,1,0,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 70 | -1,Male,0,1,37,36,Education,1819,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 71 | 1,Female,0,0,25,36,Electronics,2394,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 72 | 1,Female,0,0,30,36,UsedCar,8133,1,2,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 73 | 1,Male,0,1,46,7,Electronics,730,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,0 74 | 1,Male,0,1,51,8,Other,1164,3,4,2,2,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,1 75 | 1,Female,0,0,41,42,Business,5954,2,1,2,1,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,0 76 | -1,Male,0,1,40,36,Education,1977,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 77 | 1,Male,0,1,66,12,UsedCar,1526,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 78 | -1,Male,0,1,34,42,Electronics,3965,4,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 79 | 1,Male,0,1,51,11,Electronics,4771,2,4,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 80 | 1,Male,0,1,39,54,UsedCar,9436,2,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 81 | 1,Male,0,0,22,30,Furniture,3832,2,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 82 | 
-1,Female,0,0,44,24,Electronics,5943,1,1,2,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 83 | 1,Male,0,1,47,15,Electronics,1213,4,3,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 84 | 1,Female,0,0,24,18,Business,1568,3,4,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0 85 | 1,Female,0,0,58,24,Other,1755,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0 86 | 1,Male,0,1,52,10,Electronics,2315,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 87 | 1,Female,0,0,29,12,Business,1412,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,0,1 88 | 1,Female,0,0,27,18,Furniture,1295,4,1,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 89 | -1,Male,0,1,47,36,Education,12612,1,4,1,2,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1 90 | 1,Male,0,1,30,18,NewCar,2249,4,3,1,2,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 91 | -1,Male,0,1,28,12,Repairs,1108,4,3,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 92 | 1,Male,0,1,56,12,Electronics,618,4,4,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 93 | 1,Male,0,1,54,12,UsedCar,1409,4,3,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 94 | -1,Female,0,0,33,12,Electronics,797,4,3,1,2,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,0 95 | 1,Male,0,1,20,24,Furniture,3617,4,4,2,1,0,1,1,0,0,1,0,1,0,0,1,0,0,1,0,0,1,1 96 | 1,Male,0,1,54,12,NewCar,1318,4,4,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 97 | -1,Male,0,1,58,54,Business,15945,3,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 98 | 1,Female,0,0,61,12,Education,2012,4,2,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 99 | 1,Male,0,1,34,18,Business,2622,4,4,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1 100 | 1,Male,0,1,36,36,Electronics,2337,4,4,1,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 101 | 1,Male,0,1,36,20,UsedCar,7057,3,4,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,1,1 102 | 1,Male,0,0,41,24,NewCar,1469,4,4,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1,0 103 | 1,Male,0,1,24,36,Electronics,2323,4,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 104 | 1,Female,0,0,24,6,Electronics,932,3,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 105 | 1,Male,0,1,35,9,Furniture,1919,4,3,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 106 | 1,Male,0,0,26,12,UsedCar,2445,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 107 | -1,Male,0,1,39,24,Other,11938,2,3,2,2,1,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1 108 | -1,Male,0,1,39,18,NewCar,6458,2,4,2,2,1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,1,1 109 | 1,Male,0,1,32,12,NewCar,6078,2,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 110 | 1,Female,1,0,30,24,Furniture,7721,1,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 111 | 1,Male,0,0,35,14,Business,1410,1,2,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 112 | 1,Male,0,0,31,6,Business,1449,1,2,2,2,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,1 113 | 1,Female,0,0,23,15,Education,392,4,4,1,1,1,1,1,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 114 | 1,Male,0,1,28,18,NewCar,6260,3,3,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0 115 | -1,Female,0,0,25,36,NewCar,7855,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 116 | 1,Male,0,0,35,12,Electronics,1680,3,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 117 | 1,Male,0,1,47,48,Electronics,3578,4,1,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 118 | -1,Female,0,0,30,42,Electronics,7174,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 119 | 1,Female,1,0,27,10,Furniture,2132,2,3,2,1,0,0,0,0,0,1,0,1,0,0,1,0,0,1,0,1,0,1 120 | -1,Female,0,0,23,33,Furniture,4281,1,4,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 121 | 1,Male,0,0,36,12,NewCar,2366,3,3,1,1,1,1,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 122 | -1,Female,0,0,25,21,Electronics,1835,3,2,2,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 123 | 1,Female,0,0,41,24,UsedCar,3868,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 124 | 1,Male,0,1,24,12,Furniture,1768,3,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 125 | 
1,Male,0,1,63,10,NewCar,781,4,4,2,1,1,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 126 | -1,Female,0,0,27,18,Furniture,1924,4,3,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 127 | 1,Male,0,1,30,12,NewCar,2121,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 128 | 1,Male,0,0,40,12,Electronics,701,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 129 | -1,Male,0,1,30,12,Repairs,639,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 130 | 1,Male,0,1,34,12,UsedCar,1860,4,2,2,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 131 | -1,Female,0,0,29,12,NewCar,3499,3,2,2,1,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1 132 | 1,Female,0,0,24,48,NewCar,8487,1,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 133 | -1,Male,0,1,29,36,Education,6887,4,3,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 134 | 1,Male,0,1,27,15,Furniture,2708,2,3,2,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,0 135 | 1,Male,0,1,47,18,Furniture,1984,4,4,2,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1 136 | 1,Female,0,0,21,60,Electronics,10144,2,4,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 137 | 1,Female,0,0,38,12,Electronics,1240,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 138 | 1,Male,0,1,27,27,UsedCar,8613,2,2,2,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1 139 | -1,Male,0,1,66,12,Electronics,766,4,3,1,1,0,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0 140 | 1,Male,0,1,35,15,Electronics,2728,4,2,3,1,1,1,0,0,0,1,0,1,1,0,0,1,1,0,0,0,1,1 141 | 1,Female,0,0,44,12,Electronics,1881,2,2,1,1,1,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 142 | 1,Male,1,0,27,6,NewCar,709,2,2,1,1,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,1,0,0 143 | 1,Female,0,0,30,36,Electronics,4795,4,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 144 | 1,Male,0,1,27,27,Electronics,3416,3,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 145 | -1,Male,0,1,22,18,Furniture,2462,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 146 | 1,Female,0,0,23,21,Furniture,2288,4,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 147 | 1,Male,0,1,30,48,Business,3566,4,2,1,1,0,1,0,1,0,1,1,0,0,0,0,0,1,0,0,0,1,1 148 | 1,Female,0,0,39,6,NewCar,860,1,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 149 | 1,Female,0,0,51,12,NewCar,682,4,3,2,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1 150 | 1,Male,0,1,28,36,Furniture,5371,3,2,2,1,0,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,0,1 151 | 1,Male,0,1,46,18,Electronics,1582,4,4,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 152 | 1,Male,0,1,42,6,Electronics,1346,2,4,1,2,1,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1 153 | 1,Male,1,1,38,10,Electronics,1924,1,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 154 | 1,Male,0,1,24,36,Electronics,5848,4,1,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 155 | 1,Female,0,0,29,24,UsedCar,7758,2,4,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,1,0,0,1,1 156 | 1,Male,0,1,36,24,Business,6967,4,4,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1 157 | -1,Female,0,0,20,12,Furniture,1282,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 158 | 1,Male,1,1,48,9,Repairs,1288,3,4,2,2,0,0,0,1,0,1,0,1,0,0,0,1,1,0,0,0,1,1 159 | 1,Male,0,0,45,12,Retraining,339,4,1,1,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,1,0 160 | 1,Male,0,1,38,24,NewCar,3512,2,3,2,1,1,1,0,1,0,1,0,0,1,0,0,0,1,0,0,0,1,1 161 | 1,Male,0,1,34,6,Electronics,1898,1,2,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 162 | 1,Male,0,1,36,24,Electronics,2872,3,4,1,2,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1 163 | 1,Female,0,0,30,18,NewCar,1055,4,1,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 164 | 1,Male,0,1,36,15,HomeAppliances,1262,4,3,2,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 165 | 1,Male,0,1,70,10,NewCar,7308,2,4,1,1,1,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,1 166 | 1,Male,0,1,36,36,NewCar,909,4,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 167 | 1,Male,0,1,32,6,Furniture,2978,1,2,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 168 | 
-1,Female,0,0,33,18,Furniture,1131,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 169 | 1,Female,0,0,20,11,Furniture,1577,4,1,1,1,0,1,0,1,1,1,0,0,0,0,0,0,1,0,0,1,0,1 170 | 1,Female,0,0,25,24,Furniture,3972,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 171 | -1,Male,0,0,31,24,Business,1935,4,4,2,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 172 | -1,Male,0,1,33,15,NewCar,950,4,3,2,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 173 | 1,Female,0,0,26,12,Furniture,763,4,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 174 | -1,Female,0,0,34,24,Furniture,2064,3,2,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1 175 | 1,Male,1,1,33,8,Electronics,1414,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 176 | -1,Male,0,1,26,21,Education,3414,2,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1 177 | -1,Female,0,0,53,30,UsedCar,7485,4,1,1,1,1,0,0,0,0,1,1,0,1,0,0,0,1,0,1,0,0,1 178 | 1,Male,0,0,42,12,Furniture,2577,2,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 179 | 1,Male,0,1,52,6,Electronics,338,4,4,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 180 | 1,Male,0,1,31,12,Electronics,1963,4,2,2,2,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 181 | 1,Male,0,1,65,21,NewCar,571,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 182 | -1,Male,0,0,28,36,Business,9572,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1 183 | -1,Male,0,0,30,36,Business,4455,2,2,2,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 184 | -1,Male,0,1,40,21,NewCar,1647,4,2,2,2,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0 185 | 1,Male,0,1,50,24,Furniture,3777,4,4,1,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 186 | -1,Male,0,1,36,18,NewCar,884,4,4,1,2,1,1,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,1 187 | 1,Male,0,1,31,15,Electronics,1360,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 188 | -1,Female,0,0,74,9,UsedCar,5129,2,4,1,2,1,1,0,0,0,1,1,0,1,0,0,0,0,0,0,0,1,1 189 | 1,Male,0,1,68,16,NewCar,1175,2,3,3,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0 190 | -1,Male,0,0,20,12,Electronics,674,4,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 191 | 1,Female,0,0,33,18,Furniture,3244,1,4,2,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 192 | -1,Male,0,1,54,24,Business,4591,2,3,3,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 193 | -1,Male,0,1,34,48,Business,3844,4,4,1,2,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0 194 | -1,Male,0,1,36,27,Business,3915,4,2,1,2,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 195 | 1,Male,0,0,29,6,Electronics,2108,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 196 | -1,Male,0,1,21,45,Electronics,3031,4,4,1,1,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1 197 | -1,Female,0,0,34,9,Education,1501,2,3,2,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 198 | 1,Female,0,0,28,6,Electronics,1382,1,1,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 199 | -1,Female,0,0,27,12,Furniture,951,4,4,4,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1 200 | 1,Male,0,1,36,24,UsedCar,2760,4,4,1,1,1,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 201 | -1,Male,0,0,40,18,Furniture,4297,4,3,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 202 | 1,Male,0,1,52,9,Education,936,4,2,2,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 203 | 1,Male,0,0,27,12,NewCar,1168,4,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 204 | 1,Male,0,1,26,27,Business,5117,3,4,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 205 | -1,Male,0,0,21,12,Retraining,902,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 206 | 1,Male,0,1,38,12,NewCar,1495,4,1,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 207 | 1,Male,0,1,38,30,UsedCar,10623,3,4,3,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 208 | 1,Male,0,1,43,12,Furniture,1935,4,4,3,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 209 | 1,Male,0,1,26,12,HomeAppliances,1424,4,3,1,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 210 | 1,Male,0,0,21,24,Business,6568,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 211 | 
1,Male,1,1,55,12,UsedCar,1413,3,2,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 212 | 1,Male,0,1,33,9,Electronics,3074,1,2,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 213 | 1,Female,0,0,45,36,Electronics,3835,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 214 | -1,Male,0,1,50,27,Business,5293,2,4,2,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 215 | -1,Male,0,1,66,30,Business,1908,4,4,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 216 | 1,Male,0,1,51,36,Electronics,3342,4,2,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 217 | 1,Female,0,0,39,6,Retraining,932,1,3,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 218 | 1,Male,0,1,31,18,Business,3104,3,1,1,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 219 | 1,Male,0,1,23,36,Electronics,3913,2,2,1,1,1,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 220 | 1,Male,0,0,24,24,Furniture,3021,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 221 | 1,Female,0,0,64,10,NewCar,1364,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 222 | 1,Male,0,0,26,12,Electronics,625,4,1,1,1,0,1,0,0,0,1,0,0,1,0,0,1,1,0,0,1,0,0 223 | 1,Female,0,0,23,12,Education,1200,4,4,1,1,1,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,1 224 | 1,Male,0,1,30,12,Electronics,707,4,2,2,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 225 | 1,Male,0,1,32,24,Business,2978,4,4,2,2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 226 | 1,Male,0,1,30,15,UsedCar,4657,3,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 227 | 1,Male,0,1,27,36,Repairs,2613,4,2,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 228 | -1,Male,0,1,27,48,Electronics,10961,1,2,2,1,1,1,0,1,1,1,0,0,1,0,1,0,1,0,0,0,1,1 229 | -1,Male,0,1,53,12,Furniture,7865,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 230 | -1,Male,0,1,22,9,Electronics,1478,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 231 | 1,Male,0,1,22,24,Furniture,3149,4,1,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1 232 | -1,Male,0,1,26,36,Electronics,4210,4,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 233 | 1,Male,0,1,51,9,NewCar,2507,2,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0 234 | 1,Male,0,1,35,12,Electronics,2141,3,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 235 | 1,Male,0,0,25,18,Electronics,866,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0 236 | 1,Male,0,1,42,4,Electronics,1544,2,1,3,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 237 | -1,Male,0,1,30,24,Electronics,1823,4,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 238 | -1,Male,0,1,23,6,NewCar,14555,1,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0 239 | -1,Male,0,0,61,21,Business,2767,4,2,2,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0 240 | 1,Female,0,0,35,12,Electronics,1291,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 241 | 1,Male,0,1,39,30,Electronics,2522,1,3,1,2,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1 242 | -1,Female,0,0,29,24,NewCar,915,4,2,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 243 | 1,Male,0,1,51,6,Electronics,1595,3,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 244 | -1,Male,0,1,24,48,UsedCar,4605,3,4,2,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 245 | 1,Female,0,0,27,12,Business,1185,3,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 246 | 1,Female,0,0,35,12,Retraining,3447,4,3,1,2,0,0,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,0 247 | 1,Male,0,1,25,24,Business,1258,4,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 248 | 1,Male,0,1,52,12,Electronics,717,4,4,3,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 249 | 1,Male,1,1,35,6,NewCar,1204,4,1,1,1,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1 250 | 1,Male,0,1,26,24,Furniture,1925,2,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 251 | -1,Female,0,0,22,18,Electronics,433,3,4,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,1,1,0,0,1 252 | 1,Female,0,0,39,6,NewCar,666,3,4,2,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,0 253 | 1,Female,0,0,46,12,Furniture,2251,1,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 254 | 
-1,Female,0,0,24,30,NewCar,2150,4,2,1,1,0,1,0,0,0,1,0,0,1,0,0,1,1,0,0,0,0,1 255 | 1,Male,0,1,35,24,Furniture,4151,2,3,2,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,1 256 | 1,Male,0,1,24,9,Furniture,2030,2,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 257 | 1,Male,0,1,27,60,Electronics,7418,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0 258 | 1,Male,0,1,35,24,Electronics,2684,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 259 | -1,Male,0,0,29,12,Electronics,2149,4,1,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1 260 | 1,Female,0,0,23,15,UsedCar,3812,1,4,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,1 261 | 1,Female,0,0,57,11,Electronics,1154,4,4,3,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,1,0,0,0 262 | 1,Male,0,1,27,12,Furniture,1657,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 263 | 1,Female,0,0,55,24,Electronics,1603,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 264 | 1,Male,0,1,36,18,NewCar,5302,2,4,3,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 265 | 1,Female,0,0,57,12,Education,2748,2,4,3,1,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0 266 | 1,Male,1,1,32,10,NewCar,1231,3,4,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 267 | -1,Male,0,1,37,15,Electronics,802,4,3,1,2,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 268 | 1,Male,0,1,36,36,Business,6304,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 269 | 1,Female,0,0,38,24,Electronics,1533,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 270 | -1,Male,1,0,45,14,NewCar,8978,1,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 271 | 1,Male,0,1,25,24,Electronics,999,4,2,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 272 | 1,Male,1,1,32,18,NewCar,2662,4,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 273 | 1,Female,0,0,37,12,Furniture,1402,3,4,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,0,0,1,1 274 | 1,Male,0,1,36,48,NewCar,12169,4,4,1,1,1,1,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,1 275 | -1,Male,0,1,28,48,Electronics,3060,4,4,2,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 276 | -1,Male,0,0,34,30,Repairs,11998,1,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 277 | 1,Male,0,1,32,9,Electronics,2697,1,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 278 | 1,Female,0,0,26,18,Electronics,2404,2,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 279 | 1,Male,0,0,49,12,Furniture,1262,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 280 | -1,Female,0,0,32,6,Furniture,4611,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 281 | 1,Male,0,1,29,24,Electronics,1901,4,4,1,1,1,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1 282 | 1,Male,0,1,23,15,UsedCar,3368,3,4,2,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,0,0,1,1 283 | 1,Male,0,1,50,12,Furniture,1574,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 284 | 1,Male,0,1,49,18,Electronics,1445,4,4,1,1,0,1,1,0,0,1,1,0,1,0,0,0,1,0,0,0,1,0 285 | 1,Male,0,1,63,15,Furniture,1520,4,4,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 286 | 1,Male,0,0,37,24,NewCar,3878,4,2,1,1,1,1,0,1,0,1,0,1,0,0,0,0,1,0,0,1,0,1 287 | 1,Female,0,0,35,47,NewCar,10722,1,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 288 | 1,Male,0,1,26,48,UsedCar,4788,4,3,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 289 | 1,Male,0,1,31,48,Other,7582,2,4,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1 290 | 1,Female,0,0,49,12,Electronics,1092,4,4,2,1,1,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 291 | -1,Male,0,0,48,24,Electronics,1024,4,4,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1 292 | 1,Male,1,0,26,12,Business,1076,2,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 293 | -1,Male,0,0,28,36,UsedCar,9398,1,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 294 | 1,Female,0,0,44,24,UsedCar,6419,2,4,2,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 295 | 1,Male,0,1,56,42,UsedCar,4796,4,4,1,1,0,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 296 | 1,Male,0,0,46,48,Business,7629,4,2,2,2,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,1 297 | 
-1,Female,0,0,26,48,Furniture,9960,1,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 298 | 1,Female,0,0,20,12,UsedCar,4675,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 299 | 1,Male,1,1,45,10,NewCar,1287,4,2,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0 300 | 1,Male,0,1,43,18,Furniture,2515,3,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 301 | 1,Male,0,1,32,21,Furniture,2745,3,2,2,1,1,1,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 302 | 1,Female,0,0,54,6,NewCar,672,1,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0 303 | -1,Female,0,0,42,36,Electronics,3804,4,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 304 | -1,Male,0,1,37,24,NewCar,1344,4,2,2,2,0,1,1,0,0,1,0,1,1,0,0,0,1,0,0,0,1,0 305 | 1,Male,0,1,49,10,NewCar,1038,4,3,2,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,1,1 306 | -1,Male,0,1,44,48,NewCar,10127,2,2,1,1,0,0,0,1,1,1,0,1,1,0,0,0,0,0,0,0,0,1 307 | 1,Male,0,0,33,6,Furniture,1543,4,2,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 308 | 1,Female,0,0,24,30,UsedCar,4811,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0 309 | -1,Male,0,0,33,12,Electronics,727,4,3,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0 310 | -1,Female,0,0,24,8,Furniture,1237,3,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 311 | 1,Male,0,0,22,9,NewCar,276,4,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 312 | 1,Male,0,1,40,48,Other,5381,3,4,1,1,1,1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0 313 | 1,Male,0,1,25,24,Furniture,5511,4,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1 314 | 1,Female,0,0,26,24,Furniture,3749,2,4,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 315 | -1,Male,0,0,25,12,NewCar,685,2,3,1,1,0,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,0 316 | 1,Male,1,1,29,4,NewCar,1494,1,2,1,2,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 317 | -1,Male,0,1,31,36,Furniture,2746,4,4,1,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,1,1 318 | 1,Male,0,1,38,12,Furniture,708,2,3,1,2,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0 319 | 1,Female,0,0,48,24,Furniture,4351,1,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 320 | 1,Male,0,1,32,12,Education,701,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 321 | 1,Female,0,0,27,15,Furniture,3643,1,4,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0 322 | -1,Male,0,0,28,30,NewCar,4249,4,2,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 323 | -1,Male,0,0,32,24,Electronics,1938,4,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 324 | 1,Male,0,1,34,24,UsedCar,2910,2,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 325 | 1,Male,0,1,28,18,Furniture,2659,4,2,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 326 | 1,Female,0,0,36,18,NewCar,1028,4,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 327 | 1,Male,1,1,39,8,NewCar,3398,1,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 328 | 1,Male,0,1,49,12,Furniture,5801,2,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 329 | 1,Female,0,0,34,24,NewCar,1525,4,3,1,2,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 330 | 1,Male,0,1,31,36,Electronics,4473,4,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 331 | 1,Male,0,1,28,6,Electronics,1068,4,4,1,2,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 332 | 1,Male,0,1,75,24,UsedCar,6615,2,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,1 333 | -1,Female,0,0,30,18,Education,1864,4,2,2,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,1 334 | -1,Female,0,0,24,60,NewCar,7408,4,2,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,1 335 | -1,Female,0,0,24,48,UsedCar,11590,2,4,2,1,0,0,0,1,0,1,0,1,1,0,0,0,0,1,0,0,0,0 336 | -1,Male,0,1,23,24,Furniture,4110,3,4,2,2,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,1,1 337 | -1,Male,0,0,44,6,Furniture,3384,1,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1 338 | 1,Female,0,0,23,13,Electronics,2101,2,4,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0 339 | -1,Female,0,0,24,15,HomeAppliances,1275,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 340 | 
1,Male,0,1,28,24,Furniture,4169,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 341 | 1,Male,0,0,31,10,Furniture,1521,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 342 | 1,Female,0,0,24,24,Education,5743,2,4,2,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1 343 | 1,Female,0,0,26,21,Furniture,3599,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0 344 | 1,Male,0,0,25,18,Electronics,3213,1,3,1,1,0,1,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0,1 345 | 1,Male,0,1,33,18,Business,4439,1,1,1,1,1,1,0,0,0,1,0,0,1,0,1,0,1,0,0,0,1,1 346 | 1,Male,0,1,37,10,NewCar,3949,1,1,1,2,0,1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0 347 | 1,Female,0,0,43,15,Electronics,1459,4,2,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 348 | 1,Male,0,1,23,13,Electronics,882,4,4,2,1,0,1,0,0,0,1,0,1,0,0,0,1,1,0,0,1,0,1 349 | 1,Female,0,0,23,24,Electronics,3758,1,4,1,1,0,1,0,1,1,1,0,0,0,0,0,0,0,1,1,0,0,0 350 | 1,Male,0,1,34,6,Business,1743,1,2,2,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0 351 | -1,Male,0,1,32,9,Education,1136,4,3,2,2,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,1,1 352 | 1,Female,0,0,23,9,HomeAppliances,1236,1,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 353 | -1,Female,1,0,29,9,Furniture,959,1,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 354 | 1,Male,0,1,38,18,UsedCar,3229,2,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 355 | -1,Male,0,1,28,12,Electronics,6199,4,2,2,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 356 | 1,Male,0,1,46,10,Education,727,4,4,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1 357 | -1,Male,0,1,23,24,NewCar,1246,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 358 | 1,Male,0,1,49,12,Electronics,2331,1,4,1,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,1,1 359 | -1,Male,0,1,26,36,Electronics,4463,4,2,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 360 | 1,Male,0,0,28,12,Electronics,776,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 361 | -1,Female,0,0,23,30,Furniture,2406,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 362 | 1,Male,0,1,61,18,Education,1239,4,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 363 | 1,Male,0,1,37,12,Electronics,3399,2,3,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 364 | 1,Female,0,0,36,12,NewCar,2247,2,2,2,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 365 | 1,Male,0,0,21,6,Furniture,1766,1,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 366 | -1,Male,0,1,25,18,Furniture,2473,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0 367 | 1,Male,0,1,36,12,Business,1542,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 368 | 1,Male,0,1,27,18,UsedCar,3850,3,1,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 369 | 1,Female,0,0,22,18,Furniture,3650,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 370 | -1,Male,0,1,42,36,Furniture,3446,4,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 371 | 1,Female,0,0,40,18,Furniture,3001,2,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 372 | 1,Male,0,1,36,36,NewCar,3079,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 373 | 1,Male,0,1,33,18,Electronics,6070,3,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 374 | 1,Female,0,0,23,10,Furniture,2146,1,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1 375 | 1,Male,0,1,63,60,NewCar,13756,2,4,1,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,1 376 | -1,Female,0,0,60,60,Other,14782,3,4,2,1,1,1,0,1,0,1,1,0,1,0,0,0,0,0,0,0,1,1 377 | -1,Female,0,0,37,48,Business,7685,2,4,1,1,0,0,0,0,0,1,1,0,0,0,0,1,0,1,0,0,1,1 378 | 1,Male,0,0,34,18,Electronics,2320,2,3,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1 379 | 1,Male,0,1,36,7,Electronics,846,3,4,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1 380 | -1,Male,0,1,57,36,NewCar,14318,4,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 381 | 1,Female,0,0,52,6,NewCar,362,4,4,2,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0 382 | 1,Male,0,1,39,20,Furniture,2212,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 383 | 
-1,Female,0,0,38,18,UsedCar,12976,3,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1 384 | 1,Female,0,0,25,22,NewCar,1283,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 385 | 1,Male,0,1,26,12,NewCar,1330,4,1,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 386 | 1,Male,0,1,26,30,Business,4272,2,2,2,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0 387 | 1,Female,0,0,25,18,Electronics,2238,2,1,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 388 | 1,Female,0,0,21,18,Electronics,1126,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 389 | 1,Male,0,1,40,18,Furniture,7374,4,4,2,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 390 | 1,Male,0,1,27,15,Business,2326,2,4,1,1,0,1,0,1,1,1,0,1,1,0,0,0,1,0,0,0,0,1 391 | 1,Female,0,0,27,9,Business,1449,3,2,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 392 | 1,Male,0,0,30,18,NewCar,1820,2,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 393 | 1,Female,0,0,19,12,Furniture,983,1,4,1,1,0,1,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0,0 394 | 1,Male,0,1,39,36,NewCar,3249,2,4,1,2,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 395 | 1,Female,0,0,31,6,Electronics,1957,1,4,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 396 | 1,Male,0,1,31,9,Furniture,2406,2,3,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 397 | 1,Male,0,1,32,39,Education,11760,2,3,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1 398 | 1,Female,0,0,55,12,Furniture,2578,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1 399 | 1,Male,0,0,46,36,Furniture,2348,3,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 400 | -1,Male,0,0,46,12,NewCar,1223,1,1,2,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 401 | 1,Female,0,0,43,24,Electronics,1516,4,1,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0 402 | 1,Male,0,0,39,18,Electronics,1473,3,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 403 | 1,Male,0,0,28,18,Business,1887,4,4,2,1,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,1 404 | -1,Male,0,1,27,24,Business,8648,2,2,2,1,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,1,0,1 405 | 1,Male,0,1,27,14,NewCar,802,4,2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0 406 | 1,Male,0,1,43,18,NewCar,2899,4,4,1,2,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 407 | -1,Male,0,0,22,24,Electronics,2039,1,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 408 | 1,Male,0,1,43,24,UsedCar,2197,4,4,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 409 | 1,Male,1,0,27,15,Electronics,1053,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 410 | 1,Male,0,0,26,24,Electronics,3235,3,2,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 411 | -1,Male,0,0,28,12,NewCar,939,4,2,3,1,1,1,1,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 412 | 1,Female,0,0,20,24,Electronics,1967,4,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 413 | 1,Male,0,1,35,33,UsedCar,7253,3,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 414 | -1,Male,0,1,42,12,Business,2292,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,1 415 | 1,Male,1,1,40,10,NewCar,1597,3,2,1,2,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0 416 | -1,Female,0,0,35,24,NewCar,1381,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 417 | 1,Male,0,1,35,36,UsedCar,5842,2,2,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 418 | -1,Male,0,1,33,12,NewCar,2579,4,1,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 419 | 1,Female,0,0,23,18,Education,8471,1,2,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1 420 | 1,Female,0,0,31,21,NewCar,2782,1,2,1,1,0,0,0,1,1,1,0,0,1,0,0,0,1,0,0,0,1,1 421 | -1,Female,0,0,33,18,NewCar,1042,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 422 | 1,Female,0,0,20,15,NewCar,3186,2,3,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,1,1 423 | 1,Male,0,1,30,12,UsedCar,2028,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 424 | 1,Male,0,1,47,12,NewCar,958,2,3,2,2,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 425 | 1,Male,0,1,34,21,Furniture,1591,4,3,2,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,1 426 | 
-1,Female,0,0,25,12,Furniture,2762,1,2,1,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 427 | 1,Male,0,0,21,18,UsedCar,2779,1,3,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 428 | 1,Male,0,1,29,28,Electronics,2743,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 429 | 1,Male,0,1,46,18,Electronics,1149,4,3,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 430 | 1,Male,0,1,20,9,Furniture,1313,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 431 | -1,Female,0,0,55,18,Repairs,1190,2,4,3,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0 432 | 1,Male,0,1,74,5,Business,3448,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 433 | -1,Male,0,1,29,24,Other,11328,2,3,2,1,1,1,0,0,0,1,0,0,1,0,1,0,1,0,0,0,0,1 434 | 1,Male,0,1,36,6,Furniture,1872,4,4,3,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,1 435 | 1,Male,0,0,33,24,Repairs,2058,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 436 | 1,Male,0,1,25,9,Furniture,2136,3,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 437 | -1,Male,0,0,25,12,Electronics,1484,2,1,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 438 | 1,Male,0,0,23,6,Repairs,660,2,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,1,0 439 | 1,Female,0,0,37,24,NewCar,1287,4,4,2,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 440 | 1,Male,0,1,65,42,Repairs,3394,4,4,2,1,0,0,0,0,0,1,0,1,0,0,1,0,1,0,1,0,0,0 441 | -1,Female,0,0,26,12,Business,609,4,1,1,1,0,1,1,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0 442 | 1,Male,0,1,39,12,NewCar,1884,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 443 | 1,Female,0,0,30,12,Furniture,1620,2,3,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1 444 | 1,Male,0,1,29,20,Other,2629,2,3,2,1,1,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1 445 | -1,Male,0,1,41,12,Education,719,4,4,1,2,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,0 446 | -1,Female,0,0,30,48,Furniture,5096,2,3,1,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 447 | 1,Female,0,0,41,9,Education,1244,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,0 448 | -1,Female,0,0,34,36,NewCar,1842,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 449 | 1,Male,0,1,35,7,Electronics,2576,2,2,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 450 | 1,Female,0,0,55,12,Furniture,1424,3,4,1,1,1,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 451 | -1,Male,0,0,61,15,Repairs,1512,3,3,2,1,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1 452 | 1,Male,0,1,30,36,UsedCar,11054,4,2,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 453 | 1,Female,0,0,29,6,Electronics,518,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 454 | 1,Male,0,1,34,12,Furniture,2759,2,4,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 455 | 1,Male,0,1,35,24,UsedCar,2670,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 456 | -1,Male,0,1,31,24,NewCar,4817,2,3,1,1,1,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,1 457 | 1,Female,0,0,29,24,UsedCar,2679,4,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 458 | 1,Male,0,1,36,11,NewCar,3905,2,2,2,2,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1 459 | -1,Male,0,1,35,12,UsedCar,3386,3,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 460 | 1,Female,0,0,27,6,HomeAppliances,343,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 461 | 1,Male,0,1,32,18,Electronics,4594,3,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 462 | 1,Male,0,1,37,36,Furniture,3620,1,2,1,2,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 463 | 1,Male,0,1,36,15,NewCar,1721,2,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 464 | 1,Female,0,0,34,12,Furniture,3017,3,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 465 | 1,Male,0,1,38,12,Retraining,754,4,4,2,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 466 | 1,Male,0,1,34,18,Business,1950,4,1,2,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 467 | 1,Male,0,1,63,24,UsedCar,2924,3,4,1,2,1,0,0,0,0,1,0,0,1,0,0,1,1,0,0,0,0,1 468 | -1,Female,0,0,29,24,Electronics,1659,4,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0 469 | 
1,Male,0,1,32,48,Electronics,7238,3,3,2,2,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,1 470 | 1,Female,0,0,26,33,Business,2764,2,2,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 471 | 1,Male,0,1,35,24,UsedCar,4679,3,3,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0 472 | -1,Male,0,0,22,24,Electronics,3092,3,2,1,1,1,1,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1 473 | -1,Female,0,0,23,6,Education,448,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 474 | -1,Male,0,1,28,9,NewCar,654,4,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 475 | 1,Male,0,1,36,6,Retraining,1238,4,4,1,2,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 476 | -1,Male,0,0,33,18,Electronics,1245,4,2,1,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 477 | -1,Female,0,0,26,18,Furniture,3114,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 478 | 1,Male,0,1,24,39,UsedCar,2569,4,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 479 | 1,Male,0,1,25,24,Electronics,5152,4,2,1,1,0,1,1,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 480 | 1,Male,0,1,39,12,Business,1037,3,4,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0 481 | 1,Male,0,1,44,15,Furniture,1478,4,4,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 482 | 1,Female,0,0,23,12,Electronics,3573,1,1,1,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 483 | 1,Male,0,1,26,24,NewCar,1201,4,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 484 | 1,Female,0,0,57,30,Furniture,3622,4,4,2,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,1,1 485 | 1,Female,0,0,30,15,Furniture,960,3,2,2,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1 486 | 1,Male,0,1,44,12,NewCar,1163,4,4,1,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 487 | -1,Male,0,1,47,6,NewCar,1209,4,4,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1 488 | 1,Male,0,1,52,12,Electronics,3077,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 489 | 1,Female,0,0,62,24,NewCar,3757,4,4,1,1,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,1 490 | 1,Male,1,1,35,10,NewCar,1418,3,2,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0 491 | 1,Male,0,1,26,6,NewCar,3518,2,3,1,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1 492 | 1,Male,0,1,26,12,Electronics,1934,2,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 493 | -1,Female,0,0,42,27,Business,8318,2,4,2,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 494 | 1,Female,0,0,27,6,Electronics,1237,1,1,2,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,1 495 | 1,Male,0,1,38,6,Electronics,368,4,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 496 | 1,Male,1,1,39,12,NewCar,2122,3,2,2,2,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0 497 | -1,Male,0,0,20,24,Furniture,2996,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 498 | -1,Male,0,1,29,36,Furniture,9034,4,1,1,1,1,1,0,1,0,1,0,0,0,0,1,0,0,1,0,1,0,1 499 | 1,Male,0,1,40,24,Furniture,1585,4,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 500 | 1,Male,0,0,32,18,Electronics,1301,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0 501 | 1,Male,0,0,28,6,NewCar,1323,2,4,2,2,1,1,1,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1 502 | -1,Female,0,0,27,24,NewCar,3123,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 503 | 1,Male,0,1,42,36,UsedCar,5493,2,4,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 504 | 1,Male,0,0,49,9,Electronics,1126,2,4,1,1,0,1,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 505 | -1,Male,0,1,38,24,Electronics,1216,4,4,2,2,0,1,0,1,0,1,0,1,1,0,0,0,1,0,0,1,0,1 506 | -1,Female,0,0,24,24,NewCar,1207,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 507 | -1,Male,0,1,27,10,NewCar,1309,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0 508 | 1,Male,0,1,36,15,UsedCar,2360,2,2,1,1,1,1,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 509 | -1,Male,0,1,34,15,NewCar,6850,1,2,1,2,1,1,0,1,0,1,1,0,0,0,0,0,1,0,1,0,0,1 510 | 1,Male,0,0,28,24,Electronics,1413,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 511 | 1,Male,0,1,45,39,UsedCar,8588,4,2,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 512 | 
-1,Male,0,1,26,12,NewCar,759,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 513 | 1,Male,0,1,32,36,UsedCar,4686,2,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 514 | 1,Male,0,1,26,15,Business,2687,2,4,1,1,1,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 515 | 1,Male,0,0,20,12,Electronics,585,4,4,2,1,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,1 516 | 1,Male,0,1,54,24,NewCar,2255,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 517 | 1,Female,1,0,37,6,NewCar,609,4,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 518 | 1,Male,1,1,40,6,NewCar,1361,2,4,1,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0 519 | -1,Female,0,0,23,36,Furniture,7127,2,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1 520 | 1,Male,0,1,43,6,NewCar,1203,3,2,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 521 | 1,Male,0,1,36,6,Electronics,700,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 522 | 1,Male,0,1,44,24,Repairs,5507,3,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 523 | -1,Female,0,0,24,18,Electronics,3190,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 524 | -1,Male,0,1,53,48,Furniture,7119,3,4,2,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 525 | 1,Female,0,0,23,24,UsedCar,3488,3,4,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 526 | 1,Female,0,0,26,18,Electronics,1113,4,4,1,2,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0 527 | 1,Male,0,1,30,26,UsedCar,7966,2,3,2,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 528 | 1,Female,0,0,31,15,Education,1532,4,3,1,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,1 529 | 1,Male,0,1,42,4,Electronics,1503,2,1,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 530 | -1,Male,0,0,31,36,Electronics,2302,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 531 | 1,Male,0,1,41,6,NewCar,662,3,4,1,2,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 532 | 1,Male,0,1,32,36,Education,2273,3,1,2,2,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 533 | -1,Female,0,0,28,15,NewCar,2631,2,4,2,1,1,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1 534 | 1,Male,0,0,41,12,UsedCar,1503,4,4,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1 535 | 1,Male,0,0,26,24,Electronics,1311,4,3,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 536 | 1,Male,0,1,25,24,Electronics,3105,4,2,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 537 | -1,Male,0,0,33,21,Education,2319,2,1,1,1,0,1,1,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1 538 | 1,Female,0,0,75,6,NewCar,1374,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 539 | 1,Female,0,0,37,18,Furniture,3612,3,4,1,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 540 | -1,Male,0,1,42,48,NewCar,7763,4,4,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 541 | 1,Female,0,0,45,18,Furniture,3049,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 542 | -1,Male,0,0,23,12,Electronics,1534,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 543 | 1,Male,0,1,60,24,NewCar,2032,4,4,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1 544 | -1,Male,0,1,31,30,Furniture,6350,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 545 | -1,Male,0,1,34,18,Furniture,2864,2,1,1,2,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 546 | 1,Male,0,1,61,12,NewCar,1255,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 547 | -1,Male,0,1,43,24,NewCar,1333,4,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1 548 | 1,Female,0,0,37,24,NewCar,2022,4,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 549 | 1,Male,0,1,32,24,Electronics,1552,3,1,1,2,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 550 | -1,Female,0,0,24,12,Electronics,626,4,4,1,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,0 551 | 1,Male,0,1,35,48,UsedCar,8858,2,1,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 552 | 1,Female,0,0,23,12,Repairs,996,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 553 | 1,Male,0,1,45,6,Electronics,1750,2,4,1,2,0,0,0,1,1,1,1,0,1,0,0,0,1,0,0,0,1,0 554 | -1,Male,0,0,34,48,Electronics,6999,1,1,2,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1 555 | 
1,Male,0,1,27,12,NewCar,1995,4,1,1,1,0,1,0,1,0,1,0,1,0,0,0,0,1,0,0,1,0,1 556 | 1,Female,0,0,67,9,Education,1199,4,4,2,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 557 | -1,Male,0,1,22,12,Electronics,1331,2,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 558 | -1,Female,0,0,28,18,NewCar,2278,3,3,2,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,1 559 | -1,Female,0,0,29,21,NewCar,5003,1,4,2,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 560 | -1,Male,0,1,27,24,Furniture,3552,3,4,1,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,1,1 561 | -1,Male,0,1,31,18,Furniture,1928,2,2,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0 562 | 1,Male,0,1,49,24,UsedCar,2964,4,4,1,2,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 563 | -1,Male,0,1,24,24,Electronics,1546,4,4,1,1,0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,1,0 564 | 1,Female,0,0,29,6,Electronics,683,2,1,1,1,0,1,1,0,0,0,0,0,1,0,0,0,1,0,0,1,0,1 565 | -1,Male,0,1,37,36,NewCar,12389,1,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 566 | 1,Male,0,1,37,24,Business,4712,4,2,2,1,1,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1 567 | 1,Female,0,0,23,24,Electronics,1553,3,2,2,1,1,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1 568 | -1,Male,0,0,36,12,NewCar,1372,2,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 569 | 1,Male,0,1,34,24,Electronics,2578,2,2,1,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 570 | 1,Male,0,1,41,48,Electronics,3979,4,1,2,2,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 571 | -1,Female,0,0,31,48,Electronics,6758,3,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 572 | -1,Female,0,0,23,24,Furniture,3234,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0 573 | 1,Male,0,1,38,30,Electronics,5954,3,2,1,1,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,1,1 574 | 1,Female,0,0,26,24,UsedCar,5433,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,1 575 | 1,Female,0,0,22,15,Business,806,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 576 | 1,Male,0,1,27,9,Electronics,1082,4,4,2,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 577 | 1,Female,0,0,24,15,Furniture,2788,2,3,2,1,0,0,0,0,0,1,0,1,1,0,1,0,1,0,0,0,1,1 578 | 1,Female,0,0,27,12,Electronics,2930,2,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 579 | 1,Female,0,0,33,24,Education,1927,3,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 580 | -1,Male,0,0,27,36,NewCar,2820,4,4,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 581 | 1,Male,0,0,27,24,Retraining,937,4,3,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 582 | -1,Male,0,1,30,18,NewCar,1056,3,3,2,1,0,1,0,0,0,1,0,1,1,0,0,1,1,0,0,0,1,1 583 | 1,Male,0,1,49,12,NewCar,3124,1,3,2,2,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,1,0,0 584 | 1,Female,0,0,26,9,Furniture,1388,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 585 | -1,Male,0,1,33,36,Repairs,2384,4,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0 586 | 1,Female,0,0,52,12,NewCar,2133,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 587 | -1,Female,0,0,20,18,Furniture,2039,1,4,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,1 588 | 1,Male,0,1,36,9,NewCar,2799,2,2,2,2,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1 589 | 1,Male,0,1,21,12,Furniture,1289,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0 590 | -1,Male,0,0,47,18,HomeAppliances,1217,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 591 | -1,Male,0,1,60,12,Furniture,2246,3,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 592 | 1,Female,0,0,58,12,Electronics,385,4,3,4,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 593 | 1,Female,0,0,42,24,NewCar,1965,4,4,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1 594 | 1,Female,0,0,36,21,Business,1572,4,4,1,1,0,0,0,1,1,1,0,0,1,0,0,0,1,0,0,0,1,0 595 | -1,Female,0,0,20,24,NewCar,2718,3,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 596 | -1,Male,0,1,40,24,Other,1358,4,3,1,1,1,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,1,1 597 | -1,Female,0,0,32,6,NewCar,931,1,1,1,1,0,1,0,1,0,1,1,0,0,0,0,0,1,0,0,1,0,0 598 | 
-1,Female,0,0,23,24,NewCar,1442,4,4,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 599 | -1,Male,0,1,36,24,Business,4241,1,4,3,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 600 | -1,Male,0,1,31,18,NewCar,2775,2,2,2,1,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,1,1 601 | 1,Male,0,1,32,24,Business,3863,1,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 602 | 1,Female,0,0,45,7,Electronics,2329,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,1 603 | -1,Female,0,0,30,9,Furniture,918,4,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 604 | -1,Female,0,0,34,24,Education,1837,4,4,1,1,0,1,0,0,0,1,1,0,1,0,0,0,0,0,0,0,1,0 605 | -1,Female,0,0,28,36,Furniture,3349,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 606 | 1,Female,0,0,23,10,Furniture,1275,4,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 607 | 1,Male,0,1,22,24,Furniture,2828,4,4,1,1,1,0,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1 608 | 1,Male,0,1,74,24,Business,4526,3,2,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 609 | -1,Female,0,0,50,36,Electronics,2671,4,4,1,1,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1 610 | 1,Male,0,1,33,18,Electronics,2051,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 611 | 1,Male,0,1,45,15,UsedCar,1300,4,4,1,2,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 612 | -1,Female,0,0,22,12,HomeAppliances,741,4,3,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0,1 613 | -1,Female,0,0,48,10,NewCar,1240,1,4,1,2,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0 614 | 1,Female,0,0,29,21,Electronics,3357,4,2,1,1,0,0,0,1,1,1,0,0,1,0,0,0,1,0,0,1,0,1 615 | 1,Female,1,0,22,24,UsedCar,3632,1,4,1,1,0,0,0,0,0,1,1,0,1,0,0,1,0,1,0,0,0,1 616 | -1,Female,0,0,22,18,Furniture,1808,4,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 617 | 1,Male,0,1,48,48,Business,12204,2,2,1,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 618 | 1,Male,0,1,27,60,Electronics,9157,2,2,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 619 | 1,Male,0,1,37,6,NewCar,3676,1,3,3,2,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1 620 | -1,Female,0,0,21,30,Furniture,3441,2,4,1,1,0,1,0,1,0,1,0,0,0,0,1,0,0,1,0,0,0,1 621 | 1,Male,0,0,49,12,NewCar,640,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 622 | 1,Male,0,1,27,21,Business,3652,2,3,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 623 | -1,Male,0,1,32,18,NewCar,1530,3,2,2,1,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,1 624 | -1,Male,0,0,38,48,Business,3914,4,2,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 625 | 1,Female,0,0,22,12,Furniture,1858,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 626 | -1,Male,0,1,65,18,Electronics,2600,4,4,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 627 | 1,Male,0,1,35,15,Electronics,1979,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 628 | 1,Male,0,1,41,6,Furniture,2116,2,2,1,1,1,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 629 | -1,Male,0,1,29,9,NewCar,1437,2,3,1,1,0,1,0,1,0,1,1,0,0,0,0,0,1,0,0,0,1,1 630 | 1,Male,0,1,36,42,Furniture,4042,4,4,2,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 631 | 1,Male,0,1,64,9,Education,3832,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 632 | 1,Female,0,0,28,24,Electronics,3660,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 633 | -1,Male,0,1,44,18,Furniture,1553,4,3,1,1,0,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,0,1 634 | 1,Male,0,1,23,15,Electronics,1444,4,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 635 | -1,Female,0,0,19,9,Furniture,1980,2,2,2,1,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,1,0,1 636 | -1,Female,0,0,25,24,NewCar,1355,3,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 637 | 1,Male,0,1,47,12,Education,1393,4,4,3,2,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 638 | 1,Female,0,0,28,24,Electronics,1376,4,1,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 639 | 1,Male,0,1,21,60,Electronics,15653,2,4,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 640 | 
1,Female,0,0,34,12,Electronics,1493,4,3,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 641 | -1,Male,0,1,26,42,Electronics,4370,3,2,2,2,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,1 642 | -1,Female,0,0,27,18,Education,750,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0 643 | 1,Male,0,1,38,15,Repairs,1308,4,4,2,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 644 | -1,Male,0,1,40,15,Education,4623,3,2,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1 645 | 1,Male,0,0,33,24,Electronics,1851,4,2,2,1,1,0,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,1 646 | 1,Male,0,0,32,18,Electronics,1880,4,1,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 647 | -1,Male,0,1,27,36,Business,7980,4,4,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1 648 | 1,Male,0,0,32,30,Furniture,4583,2,2,2,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 649 | -1,Female,0,0,26,12,NewCar,1386,2,2,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 650 | -1,Male,0,1,38,24,NewCar,947,4,3,1,2,0,1,1,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 651 | -1,Male,0,1,40,12,Education,684,4,4,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 652 | 1,Male,0,1,50,48,Education,7476,4,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 653 | -1,Male,0,1,37,12,Furniture,1922,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 654 | -1,Male,0,1,45,24,NewCar,2303,4,1,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,1 655 | -1,Male,0,1,42,36,NewCar,8086,2,4,4,1,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,1 656 | 1,Male,0,1,35,24,UsedCar,2346,4,3,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 657 | 1,Male,0,1,22,14,NewCar,3973,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1 658 | -1,Male,0,1,41,12,NewCar,888,4,4,1,2,0,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,0 659 | 1,Male,0,1,37,48,Electronics,10222,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 660 | 1,Female,0,0,28,30,Business,4221,2,1,2,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 661 | 1,Male,0,1,41,18,Furniture,6361,2,1,1,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 662 | 1,Male,0,0,23,12,Electronics,1297,3,4,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 663 | -1,Male,0,0,23,12,NewCar,900,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 664 | 1,Male,0,1,50,21,Furniture,2241,4,2,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 665 | 1,Male,0,1,35,6,Furniture,1050,4,1,2,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,1 666 | 1,Female,0,0,50,6,Education,1047,2,4,1,1,0,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 667 | 1,Male,0,1,27,24,Other,6314,4,2,2,1,1,0,0,0,0,1,0,1,1,0,1,0,1,0,1,0,0,1 668 | 1,Male,0,1,34,30,Furniture,3496,4,2,1,2,1,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1 669 | 1,Female,0,0,27,48,Business,3609,1,1,1,1,0,0,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,1 670 | -1,Male,0,1,43,12,NewCar,4843,3,4,2,1,1,0,0,0,0,1,0,1,0,0,1,0,0,1,0,0,1,1 671 | 1,Male,0,1,47,30,Electronics,3017,4,4,1,1,0,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 672 | 1,Male,0,1,27,24,Business,4139,3,3,2,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0 673 | 1,Male,0,1,31,36,Business,5742,2,2,2,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 674 | 1,Male,0,1,42,60,NewCar,10366,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 675 | 1,Male,0,0,24,6,NewCar,2080,1,2,1,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 676 | -1,Male,0,1,41,21,Business,2580,4,2,1,2,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,1,0,0 677 | 1,Female,0,0,26,30,Electronics,4530,4,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 678 | 1,Male,0,1,33,24,Furniture,5150,4,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 679 | -1,Male,0,0,24,72,Electronics,5595,2,2,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,1 680 | 1,Male,0,1,64,24,Electronics,2384,4,4,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,1,0 681 | 1,Female,0,0,26,18,Electronics,1453,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 682 | 1,Female,0,0,56,6,Education,1538,1,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 683 | 
1,Male,0,1,37,12,Electronics,2279,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 684 | 1,Male,0,0,33,15,Electronics,1478,4,3,2,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1 685 | 1,Male,0,0,47,24,Electronics,5103,3,3,3,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1 686 | 1,Male,0,1,31,36,Business,9857,1,3,2,2,1,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0 687 | 1,Male,0,1,34,60,NewCar,6527,4,4,1,2,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 688 | 1,Male,0,1,27,10,Electronics,1347,4,2,2,1,1,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 689 | 1,Male,0,1,30,36,NewCar,2862,4,3,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1 690 | 1,Male,0,1,35,9,Electronics,2753,3,4,1,1,1,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0,1,1 691 | 1,Male,0,1,31,12,NewCar,3651,1,3,1,2,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 692 | 1,Male,0,0,25,15,Furniture,975,2,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 693 | 1,Female,0,0,25,15,Repairs,2631,3,2,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0 694 | 1,Male,0,1,29,24,Electronics,2896,2,1,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,1 695 | 1,Male,0,1,44,6,NewCar,4716,1,3,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0 696 | 1,Male,0,1,28,24,Electronics,2284,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 697 | 1,Male,0,1,50,6,UsedCar,1236,2,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1 698 | 1,Male,1,1,29,12,Electronics,1103,4,3,2,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1 699 | 1,Female,0,0,38,12,NewCar,926,1,2,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0 700 | 1,Male,0,1,24,18,Electronics,1800,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 701 | 1,Male,0,1,40,15,Education,1905,4,4,1,1,1,1,1,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 702 | -1,Female,0,0,29,12,Furniture,1123,4,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0 703 | -1,Male,0,1,46,48,UsedCar,6331,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 704 | 1,Female,0,0,47,24,Electronics,1377,4,2,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1 705 | 1,Male,0,1,41,30,Business,2503,4,2,2,1,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,0,1,1 706 | 1,Female,0,0,32,27,Business,2528,4,1,1,2,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 707 | 1,Female,0,0,35,15,NewCar,5324,1,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1 708 | -1,Male,0,1,24,48,NewCar,6560,3,2,1,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 709 | -1,Female,0,0,25,12,Furniture,2969,4,3,2,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 710 | 1,Female,0,0,25,9,Electronics,1206,4,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 711 | 1,Male,0,1,37,9,Electronics,2118,2,2,1,2,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 712 | 1,Male,0,1,32,18,Electronics,629,4,3,2,1,1,0,0,1,1,1,0,1,1,0,0,0,1,0,0,0,1,1 713 | -1,Female,0,0,35,6,Education,1198,4,4,1,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,1 714 | 1,Male,0,1,46,21,UsedCar,2476,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 715 | 1,Male,0,1,25,9,Electronics,1138,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 716 | -1,Male,0,1,27,60,NewCar,14027,4,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 717 | 1,Male,0,1,63,30,UsedCar,7596,1,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 718 | 1,Male,0,1,40,30,Electronics,3077,3,2,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 719 | 1,Male,0,1,32,18,Electronics,1505,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 720 | 1,Male,0,1,31,24,Electronics,3148,3,2,2,1,1,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 721 | 1,Male,0,0,31,20,UsedCar,6148,3,4,2,1,1,1,0,1,0,1,0,0,1,0,0,0,1,0,0,0,1,1 722 | -1,Male,0,1,34,9,Electronics,1337,4,2,2,1,1,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 723 | -1,Female,0,0,24,6,Education,433,4,2,1,2,0,1,0,1,1,1,1,0,1,0,0,0,0,1,0,1,0,1 724 | -1,Female,0,0,24,12,NewCar,1228,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 725 | 1,Female,0,0,66,9,Electronics,790,4,3,1,1,0,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0 726 | 
-1,Female,0,0,21,27,NewCar,2570,3,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 727 | 1,Female,0,0,41,6,NewCar,250,2,2,2,1,0,0,0,1,1,1,0,1,1,0,0,0,1,0,0,0,0,0 728 | 1,Male,0,0,47,15,Electronics,1316,2,2,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0 729 | -1,Female,0,0,25,18,Electronics,1882,4,4,2,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,1 730 | -1,Female,0,0,59,48,Business,6416,4,3,1,1,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0,1,1 731 | 1,Male,0,0,36,24,Business,1275,2,4,2,1,1,1,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 732 | 1,Male,0,1,33,24,Electronics,6403,1,2,1,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,1 733 | -1,Male,0,1,21,24,Electronics,1987,2,4,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0 734 | 1,Female,0,0,44,8,Electronics,760,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,0 735 | 1,Female,0,0,28,24,UsedCar,2603,2,4,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1 736 | 1,Female,0,0,37,4,NewCar,3380,1,1,1,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 737 | 1,Female,0,0,29,36,HomeAppliances,3990,3,2,1,1,0,1,0,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0 738 | -1,Female,0,0,23,24,UsedCar,11560,1,4,2,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 739 | 1,Male,0,1,35,18,NewCar,4380,3,4,1,2,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0 740 | 1,Male,0,1,45,6,NewCar,6761,1,3,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 741 | -1,Female,0,0,26,30,Business,4280,4,4,2,1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0 742 | 1,Male,0,1,32,24,NewCar,2325,2,3,1,1,0,0,0,1,0,1,1,0,1,0,0,0,1,0,0,0,1,1 743 | 1,Male,0,1,23,10,Electronics,1048,4,4,1,1,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,0,0,0 744 | 1,Male,0,1,41,21,Electronics,3160,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 745 | 1,Male,0,1,22,24,Furniture,2483,4,4,1,1,1,0,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1 746 | 1,Male,0,1,30,39,Furniture,14179,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 747 | 1,Male,0,1,28,13,Business,1797,3,1,2,1,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,1,0,0 748 | 1,Female,0,0,23,15,NewCar,2511,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,1 749 | -1,Female,0,0,37,12,NewCar,1274,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 750 | 1,Male,0,1,26,21,UsedCar,5248,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 751 | 1,Male,0,1,33,15,UsedCar,3029,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 752 | 1,Female,0,0,49,6,Furniture,428,2,1,1,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 753 | -1,Female,0,0,23,18,NewCar,976,1,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 754 | 1,Female,0,0,23,12,Business,841,2,4,1,1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,1,0 755 | 1,Female,0,0,25,30,Electronics,5771,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 756 | -1,Male,0,1,55,12,Repairs,1555,4,4,2,2,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,1,1 757 | -1,Female,0,0,32,24,NewCar,1285,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 758 | 1,Male,1,1,74,6,NewCar,1299,1,1,3,2,0,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 759 | -1,Male,0,1,39,15,Electronics,1271,3,4,2,1,1,1,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1 760 | 1,Male,0,1,31,24,NewCar,1393,2,2,1,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 761 | -1,Male,0,1,35,12,NewCar,691,4,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 762 | 1,Female,0,0,59,15,NewCar,5045,1,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 763 | -1,Female,0,0,24,18,Furniture,2124,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1 764 | 1,Male,0,1,24,12,Electronics,2214,4,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 765 | -1,Male,0,1,30,21,NewCar,12680,4,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 766 | 1,Male,0,0,27,24,NewCar,2463,4,3,2,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1 767 | 1,Male,0,0,40,12,Electronics,1155,3,3,2,1,0,1,0,0,0,1,0,0,1,0,0,1,1,0,0,0,1,0 768 | -1,Male,0,0,31,30,Furniture,3108,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 769 | 
1,Female,0,0,31,10,UsedCar,2901,1,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 770 | 1,Male,0,1,28,12,Furniture,3617,1,4,3,1,1,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 771 | 1,Male,0,1,63,12,Electronics,1655,2,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 772 | 1,Female,0,0,26,24,UsedCar,2812,2,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 773 | -1,Female,0,0,25,36,Education,8065,3,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 774 | 1,Male,0,1,36,21,UsedCar,3275,1,4,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 775 | 1,Male,0,1,52,24,Electronics,2223,4,4,2,1,0,0,0,1,0,1,0,1,1,0,0,0,1,0,0,0,1,1 776 | 1,Male,0,1,66,12,NewCar,1480,2,4,3,1,0,1,1,1,1,1,0,1,1,0,0,0,0,0,1,0,0,0 777 | -1,Female,0,0,25,24,NewCar,1371,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 778 | 1,Male,0,1,37,36,NewCar,3535,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 779 | 1,Female,0,0,25,18,Electronics,3509,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,1,1 780 | 1,Male,0,1,38,36,UsedCar,5711,4,2,2,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 781 | 1,Female,0,0,67,18,Repairs,3872,2,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 782 | -1,Male,0,1,25,39,Electronics,4933,2,2,2,1,0,1,0,0,0,1,0,1,0,0,0,1,1,0,0,0,1,1 783 | 1,Male,0,1,60,24,NewCar,1940,4,4,1,1,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 784 | 1,Male,0,1,31,12,Retraining,1410,2,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 785 | -1,Female,0,0,23,12,NewCar,836,4,2,1,1,0,1,0,1,0,1,0,0,1,0,0,0,1,0,0,1,0,0 786 | 1,Male,0,0,60,20,UsedCar,6468,1,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 787 | 1,Male,0,1,35,18,Business,1941,4,2,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0 788 | 1,Male,0,1,40,22,Electronics,2675,3,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 789 | 1,Male,0,1,38,48,UsedCar,2751,4,3,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 790 | -1,Male,0,1,50,48,Education,6224,4,4,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1 791 | -1,Male,0,1,27,40,Education,5998,4,3,1,1,1,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,1 792 | -1,Female,0,0,39,21,Business,1188,2,4,1,2,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 793 | 1,Male,0,1,41,24,UsedCar,6313,3,4,1,2,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 794 | 1,Male,0,0,27,6,Furniture,1221,1,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 795 | 1,Male,0,0,51,24,Furniture,2892,3,4,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 796 | 1,Male,0,1,32,24,Furniture,3062,4,3,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,1,1 797 | 1,Female,0,0,22,9,Furniture,2301,2,4,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1 798 | -1,Male,0,1,51,18,UsedCar,7511,1,4,1,2,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 799 | 1,Female,0,0,22,12,Furniture,1258,2,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0 800 | 1,Male,0,0,54,24,NewCar,717,4,4,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 801 | 1,Male,0,1,35,9,NewCar,1549,4,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 802 | 1,Male,0,1,54,24,Education,1597,4,4,2,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 803 | 1,Female,0,0,48,18,Electronics,1795,3,4,2,1,1,1,0,0,0,1,0,1,1,0,0,1,0,1,0,0,1,0 804 | 1,Female,0,0,24,20,Furniture,4272,1,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 805 | 1,Male,0,1,35,12,Electronics,976,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 806 | 1,Female,0,0,24,12,NewCar,7472,1,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0 807 | -1,Male,0,1,24,36,NewCar,9271,2,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 808 | 1,Male,1,0,26,6,Electronics,590,3,3,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 809 | 1,Male,0,1,65,12,Electronics,930,4,4,4,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 810 | 1,Male,0,1,55,42,UsedCar,9283,1,2,1,1,1,1,0,0,0,1,1,0,1,0,0,0,0,0,1,0,0,1 811 | -1,Female,0,0,26,15,NewCar,1778,2,1,2,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0 812 | 
1,Male,0,0,26,8,Business,907,3,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 813 | 1,Male,0,0,28,6,Electronics,484,3,3,1,1,0,1,0,0,0,1,0,0,1,0,0,1,1,0,0,0,1,0 814 | -1,Male,0,1,24,36,UsedCar,9629,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 815 | -1,Male,0,1,54,48,HomeAppliances,3051,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 816 | -1,Male,0,1,46,48,NewCar,3931,4,4,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 817 | 1,Female,0,0,54,36,NewCar,7432,2,2,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1 818 | 1,Male,0,0,62,6,HomeAppliances,1338,1,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,1 819 | 1,Female,0,0,24,6,Electronics,1554,1,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 820 | 1,Male,0,0,43,36,Other,15857,2,3,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,1,0,0,1 821 | -1,Male,0,0,26,18,Electronics,1345,4,3,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 822 | 1,Male,0,0,27,12,NewCar,1101,3,2,2,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 823 | 1,Male,0,0,24,12,Electronics,3016,3,1,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 824 | -1,Male,0,1,41,36,Furniture,2712,2,2,1,2,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 825 | 1,Male,0,1,47,8,NewCar,731,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 826 | 1,Male,0,0,35,18,Furniture,3780,3,2,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 827 | 1,Male,0,0,30,21,NewCar,1602,4,3,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 828 | -1,Female,0,0,33,18,NewCar,3966,1,4,3,1,1,0,0,0,0,1,0,1,1,0,0,0,0,1,0,0,1,1 829 | -1,Male,0,1,36,18,Business,4165,2,2,2,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 830 | -1,Male,0,1,47,36,UsedCar,8335,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 831 | 1,Male,0,1,38,48,Business,6681,4,4,1,2,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 832 | 1,Male,0,1,44,24,Business,2375,4,2,2,2,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,1 833 | -1,Female,0,0,23,18,NewCar,1216,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 834 | -1,Male,0,1,29,45,Business,11816,2,4,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 835 | 1,Female,0,0,42,24,Electronics,5084,2,4,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 836 | -1,Female,0,0,25,15,Electronics,2327,2,3,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 837 | -1,Male,0,1,48,12,NewCar,1082,4,4,2,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 838 | 1,Female,0,0,21,12,Electronics,886,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 839 | 1,Female,0,0,23,4,Furniture,601,1,3,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0 840 | 1,Male,0,1,63,24,UsedCar,2957,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 841 | 1,Male,0,0,46,24,Electronics,2611,4,3,2,1,0,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,1,1 842 | -1,Male,0,1,29,36,Furniture,5179,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 843 | 1,Male,0,1,28,21,UsedCar,2993,3,2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0 844 | -1,Female,0,0,23,18,Repairs,1943,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 845 | 1,Male,0,1,50,24,Business,1559,4,4,1,1,1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,0,1,1 846 | 1,Male,0,1,47,18,Furniture,3422,4,4,3,2,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 847 | 1,Male,0,1,35,21,Furniture,3976,2,3,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 848 | -1,Male,0,1,68,18,NewCar,6761,2,4,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 849 | 1,Male,0,0,28,24,NewCar,1249,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 850 | 1,Male,0,1,59,9,Electronics,1364,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 851 | -1,Male,0,1,57,12,Electronics,709,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 852 | -1,Male,1,0,33,20,NewCar,2235,4,2,2,1,0,0,0,0,0,1,0,1,1,0,0,1,0,1,0,0,0,1 853 | 1,Male,0,1,43,24,UsedCar,4042,3,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 854 | 1,Male,0,1,35,15,Electronics,1471,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1 855 | 
-1,Male,0,1,32,18,NewCar,1442,4,4,2,2,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0 856 | 1,Male,0,1,45,36,NewCar,10875,2,2,2,2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 857 | 1,Male,0,0,33,24,NewCar,1474,4,3,1,1,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,1,0,1 858 | 1,Female,0,0,40,10,Retraining,894,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 859 | 1,Male,0,1,28,15,Furniture,3343,4,2,1,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1 860 | -1,Female,0,0,29,15,NewCar,3959,3,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 861 | 1,Male,1,1,26,9,NewCar,3577,1,2,1,2,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1 862 | 1,Male,0,1,27,24,UsedCar,5804,4,2,2,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 863 | -1,Male,0,0,28,18,Business,2169,4,2,1,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 864 | -1,Female,0,0,35,24,Electronics,2439,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 865 | 1,Male,0,1,32,27,Furniture,4526,4,2,2,2,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,1,0,0 866 | -1,Male,0,1,25,10,Furniture,2210,2,2,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0 867 | 1,Female,0,0,20,15,Furniture,2221,2,4,1,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0,1 868 | 1,Female,0,0,27,18,Electronics,2389,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 869 | 1,Male,0,1,42,12,Furniture,3331,2,4,1,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 870 | 1,Male,0,1,37,36,Business,7409,3,2,2,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 871 | 1,Female,0,0,24,12,Furniture,652,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 872 | 1,Female,0,0,40,36,Furniture,7678,2,4,2,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1 873 | 1,Male,1,1,46,6,NewCar,1343,1,4,2,2,0,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 874 | 1,Male,0,1,26,24,Business,1382,4,1,2,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,1 875 | 1,Female,0,0,24,15,HomeAppliances,874,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 876 | 1,Male,0,1,29,12,Furniture,3590,2,2,1,2,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,0 877 | 1,Female,0,0,40,11,NewCar,1322,4,4,2,1,0,1,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,1 878 | 1,Male,0,1,36,18,Electronics,1940,3,4,1,1,1,0,0,0,0,1,1,0,1,0,1,0,0,0,0,1,0,1 879 | 1,Male,0,1,28,36,Electronics,3595,4,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 880 | -1,Male,0,1,27,9,NewCar,1422,3,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1 881 | 1,Male,0,1,36,30,Electronics,6742,2,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 882 | 1,Male,0,1,38,24,UsedCar,7814,3,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 883 | 1,Male,0,0,48,24,UsedCar,9277,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 884 | 1,Male,0,1,36,30,NewCar,2181,4,4,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 885 | 1,Female,0,0,65,18,Electronics,1098,4,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0 886 | -1,Male,0,0,43,24,Furniture,4057,3,3,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 887 | -1,Female,0,0,53,12,Education,795,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 888 | 1,Male,0,1,34,24,Business,2825,4,3,2,2,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 889 | -1,Male,0,1,23,48,Business,15672,2,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 890 | 1,Male,0,1,34,36,NewCar,6614,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 891 | 1,Male,0,1,40,28,UsedCar,7824,3,4,2,2,1,0,0,0,0,1,1,0,1,0,0,1,0,1,0,1,0,1 892 | 1,Male,0,1,43,27,Business,2442,4,4,4,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 893 | 1,Male,0,1,46,15,Electronics,1829,4,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 894 | 1,Male,1,1,38,12,NewCar,2171,4,4,2,1,0,0,0,0,0,1,0,1,1,0,0,0,1,0,0,0,0,0 895 | 1,Male,0,1,34,36,UsedCar,5800,3,4,2,1,1,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 896 | 1,Male,0,1,29,18,Electronics,1169,4,3,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 897 | 1,Male,0,1,31,36,UsedCar,8947,3,2,1,2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 898 | 
1,Female,0,0,28,21,Electronics,2606,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 899 | 1,Female,1,0,35,12,Furniture,1592,3,2,1,1,0,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 900 | 1,Female,0,0,33,15,Furniture,2186,1,4,1,1,0,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,1,0 901 | -1,Male,0,1,42,18,Furniture,4153,2,3,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,0,1 902 | -1,Male,0,1,43,16,NewCar,2625,2,4,1,1,1,0,0,0,0,1,0,1,1,0,0,1,0,1,0,0,1,1 903 | 1,Male,0,0,44,20,NewCar,3485,2,4,2,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,1 904 | 1,Male,0,1,42,36,UsedCar,10477,2,4,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 905 | 1,Male,0,0,40,15,Electronics,1386,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 906 | 1,Male,0,1,36,24,Electronics,1278,4,1,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 907 | 1,Male,0,1,20,12,Electronics,1107,2,2,1,2,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1 908 | 1,Male,1,1,24,21,NewCar,3763,2,2,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,0,1,0 909 | 1,Male,0,0,27,36,Education,3711,2,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 910 | 1,Female,0,0,46,15,UsedCar,3594,1,2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0 911 | 1,Female,0,0,33,9,NewCar,3195,1,2,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 912 | 1,Female,0,0,34,36,Electronics,4454,4,4,2,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 913 | -1,Female,0,0,25,24,Furniture,4736,2,4,1,1,0,1,0,0,0,1,0,1,1,0,0,0,1,0,0,1,0,0 914 | 1,Female,0,0,25,30,Electronics,2991,2,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 915 | 1,Male,0,0,28,11,Business,2142,1,2,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0,1,1 916 | -1,Male,0,1,31,24,Business,3161,4,2,1,1,1,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0,0,1 917 | -1,Female,1,0,32,48,Other,18424,1,2,1,1,1,1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 918 | 1,Male,0,1,32,10,UsedCar,2848,1,2,1,2,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0,0,1 919 | -1,Male,0,1,68,6,NewCar,14896,1,4,1,1,1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,1,1 920 | -1,Male,0,0,33,24,Furniture,2359,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0,1 921 | -1,Male,0,1,39,24,Furniture,3345,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 922 | 1,Female,0,0,28,18,Furniture,1817,4,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 923 | 1,Male,0,1,37,48,Electronics,12749,4,1,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,1,1 924 | -1,Female,0,0,22,9,Electronics,1366,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 925 | 1,Male,0,1,30,12,NewCar,2002,3,4,1,2,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 926 | -1,Male,0,0,55,24,Furniture,6872,2,1,1,1,1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,1,0,1 927 | -1,Male,0,1,46,12,NewCar,697,4,2,2,1,1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,1,0,1 928 | 1,Female,0,0,21,18,Furniture,1049,4,4,1,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1 929 | -1,Male,0,1,39,48,UsedCar,10297,4,4,3,2,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1 930 | 1,Male,0,1,58,30,Electronics,1867,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 931 | 1,Male,0,1,43,12,NewCar,1344,4,2,2,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0 932 | 1,Male,1,1,24,24,Furniture,1747,4,1,1,1,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,0 933 | -1,Female,0,0,22,9,Electronics,1670,4,2,1,1,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 934 | 1,Male,0,1,30,9,NewCar,1224,3,1,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 935 | 1,Male,0,1,42,12,Electronics,522,4,4,2,2,1,0,0,1,1,1,0,1,0,0,0,0,1,0,0,0,1,1 936 | 1,Female,0,0,23,12,Electronics,1498,4,1,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,0,0,1 937 | -1,Male,0,1,30,30,Electronics,1919,4,3,2,1,0,1,0,1,0,0,0,0,0,0,0,0,1,0,0,1,0,1 938 | -1,Female,0,0,28,9,Electronics,745,3,2,1,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 939 | 1,Male,0,0,30,6,Electronics,2063,4,3,1,1,1,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1 940 | -1,Male,0,1,42,60,Education,6288,4,4,1,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 941 
| 1,Male,0,1,46,24,UsedCar,6842,2,4,2,2,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 942 | 1,Male,0,1,45,12,NewCar,3527,2,3,1,2,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 943 | 1,Male,1,1,31,10,NewCar,1546,3,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 944 | 1,Male,0,1,31,24,Furniture,929,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 945 | 1,Male,0,1,42,4,NewCar,1455,2,1,3,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 946 | 1,Female,0,0,46,15,Furniture,1845,4,1,1,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1 947 | 1,Female,0,0,30,48,NewCar,8358,1,1,2,1,0,1,0,1,1,1,0,0,0,0,0,0,1,0,0,1,0,1 948 | -1,Male,0,1,30,24,Furniture,3349,4,4,1,2,1,0,0,1,1,1,1,0,0,0,0,0,0,0,0,1,0,1 949 | 1,Male,0,1,38,12,NewCar,2859,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 950 | -1,Male,0,0,43,18,Furniture,1533,4,1,1,2,0,0,0,0,0,1,0,0,0,0,1,0,1,0,0,1,0,0 951 | -1,Male,0,1,31,24,Electronics,3621,2,4,2,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,1,1 952 | 1,Male,0,0,40,18,Business,3590,3,3,3,2,1,1,0,0,0,1,0,1,0,0,0,0,1,0,1,0,0,0 953 | -1,Male,0,1,24,36,Business,2145,2,1,2,1,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 954 | -1,Female,0,0,28,24,UsedCar,4113,3,4,1,1,0,1,0,1,1,1,0,0,0,0,0,0,0,1,0,1,0,1 955 | -1,Female,0,0,26,36,Furniture,10974,4,2,2,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 956 | 1,Female,0,0,29,12,NewCar,1893,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 957 | 1,Female,0,0,57,24,Electronics,1231,4,4,2,1,1,0,0,1,1,1,0,1,0,0,0,0,0,1,0,0,1,1 958 | 1,Male,0,1,49,30,Electronics,3656,4,4,2,1,0,1,1,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 959 | 1,Male,0,1,37,9,Electronics,1154,2,4,3,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 960 | -1,Male,0,1,45,28,NewCar,4006,3,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 961 | 1,Male,0,1,30,24,Furniture,3069,4,4,1,1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,1 962 | 1,Male,0,0,30,6,Electronics,1740,2,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,1 963 | 1,Male,0,0,47,21,NewCar,2353,1,4,2,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1 964 | 1,Male,0,1,29,15,NewCar,3556,3,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 965 | -1,Male,0,1,35,24,Electronics,2397,3,2,2,1,1,0,0,1,1,1,0,0,1,0,0,0,1,0,0,0,1,1 966 | 1,Male,0,0,22,6,Repairs,454,3,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,1,0,0 967 | 1,Female,0,0,26,30,Electronics,1715,4,1,1,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 968 | -1,Male,0,1,23,27,Electronics,2520,4,2,2,1,0,1,0,1,1,1,0,1,0,0,0,0,1,0,0,0,0,0 969 | 1,Female,0,0,54,15,Electronics,3568,4,2,1,1,1,0,0,0,0,1,0,0,1,0,0,0,0,1,0,0,1,1 970 | 1,Male,0,0,29,42,Electronics,7166,2,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,1 971 | 1,Male,0,1,40,11,NewCar,3939,1,2,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0 972 | 1,Male,0,1,22,15,Repairs,1514,4,2,1,1,0,1,0,1,0,1,0,0,0,0,0,1,1,0,0,0,0,1 973 | 1,Male,0,1,43,24,NewCar,7393,1,4,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0 974 | -1,Female,0,0,29,24,NewCar,1193,1,4,2,1,0,0,0,0,0,1,1,0,0,0,1,0,0,1,1,0,0,0 975 | -1,Male,0,1,36,60,Business,7297,4,4,1,1,0,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,1,1 976 | 1,Female,0,0,33,30,Electronics,2831,4,2,1,1,1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1 977 | 1,Female,0,0,57,24,Electronics,1258,3,3,1,1,0,1,1,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0 978 | 1,Female,0,0,64,6,Electronics,753,2,3,1,1,0,1,0,0,0,1,0,0,0,0,0,1,1,0,0,0,0,1 979 | 1,Male,0,1,42,18,Business,2427,4,2,2,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,1 980 | -1,Male,0,1,47,24,NewCar,2538,4,4,2,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0 981 | -1,Male,0,0,25,15,NewCar,1264,2,2,1,1,0,1,0,1,0,1,1,0,0,0,0,0,0,1,0,0,0,1 982 | -1,Male,0,1,49,30,Furniture,8386,2,2,1,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,1 983 | -1,Male,0,1,33,48,Business,4844,3,2,1,1,1,0,0,0,0,1,0,0,1,0,0,0,0,1,1,0,0,1 984 | 
1,Female,0,0,28,21,NewCar,2923,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,0,1,0,0,0,0,1 985 | -1,Male,0,1,26,36,UsedCar,8229,2,2,1,2,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 986 | 1,Male,0,1,30,24,Furniture,2028,2,2,2,1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 987 | 1,Female,0,0,25,15,Furniture,1433,4,3,2,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1 988 | 1,Male,0,0,33,42,Business,6289,2,1,2,1,0,1,1,0,0,1,0,0,0,0,0,0,1,0,0,1,0,1 989 | 1,Female,0,0,64,13,Electronics,1409,2,4,1,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,1,0,0,1 990 | 1,Male,0,1,29,24,UsedCar,6579,4,2,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,1 991 | 1,Male,0,1,48,24,Electronics,1743,4,2,2,1,0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0 992 | 1,Male,0,1,37,12,Education,3565,2,1,2,2,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0 993 | 1,Male,0,1,34,15,Electronics,1569,4,4,1,2,0,0,0,1,0,1,1,0,1,0,0,0,1,0,0,0,1,0 994 | 1,Male,0,0,23,18,Electronics,1936,2,4,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,1,0 995 | 1,Male,0,1,30,36,Furniture,3959,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,1 996 | 1,Male,0,1,50,12,NewCar,2390,4,3,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 997 | 1,Female,0,0,31,12,Furniture,1736,3,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,0 998 | 1,Male,0,0,40,30,UsedCar,3857,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,1 999 | 1,Male,0,1,38,12,Electronics,804,4,4,1,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0,0,0,1,1 1000 | -1,Male,0,1,23,45,Electronics,1845,4,4,1,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 1001 | 1,Male,0,1,27,45,UsedCar,4576,3,4,1,1,0,1,0,1,0,1,0,1,0,0,0,0,1,0,1,0,0,1 1002 | -------------------------------------------------------------------------------- /data/mnist/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/data/mnist/__init__.py -------------------------------------------------------------------------------- /data/mnist/mnist_cnn.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/data/mnist/mnist_cnn.pt -------------------------------------------------------------------------------- /data/mnist/mnist_model.py: -------------------------------------------------------------------------------- 1 | """The code used to train the MNIST model. 
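Running it with --save-model writes the trained weights to mnist_cnn.pt (the checkpoint kept in this repo at data/mnist/mnist_cnn.pt).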
2 | 3 | This code comes from https://github.com/pytorch/examples/blob/master/mnist/main.py 4 | """ 5 | from __future__ import print_function 6 | import argparse 7 | import torch 8 | import torch.nn as nn 9 | import torch.nn.functional as F 10 | import torch.optim as optim 11 | from torchvision import datasets, transforms 12 | from torch.optim.lr_scheduler import StepLR 13 | 14 | 15 | class Net(nn.Module): 16 | def __init__(self): 17 | super(Net, self).__init__() 18 | self.conv1 = nn.Conv2d(1, 32, 3, 1) 19 | self.conv2 = nn.Conv2d(32, 64, 3, 1) 20 | self.dropout1 = nn.Dropout2d(0.25) 21 | self.dropout2 = nn.Dropout2d(0.5) 22 | self.fc1 = nn.Linear(9216, 128) 23 | self.fc2 = nn.Linear(128, 10) 24 | 25 | def forward(self, x): 26 | x = self.conv1(x) 27 | x = F.relu(x) 28 | x = self.conv2(x) 29 | x = F.relu(x) 30 | x = F.max_pool2d(x, 2) 31 | x = self.dropout1(x) 32 | x = torch.flatten(x, 1) 33 | x = self.fc1(x) 34 | x = F.relu(x) 35 | x = self.dropout2(x) 36 | x = self.fc2(x) 37 | output = F.log_softmax(x, dim=1) 38 | return output 39 | 40 | def train(args, model, device, train_loader, optimizer, epoch): 41 | model.train() 42 | for batch_idx, (data, target) in enumerate(train_loader): 43 | data, target = data.to(device), target.to(device) 44 | optimizer.zero_grad() 45 | output = model(data) 46 | loss = F.nll_loss(output, target) 47 | loss.backward() 48 | optimizer.step() 49 | if batch_idx % args.log_interval == 0: 50 | print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format( 51 | epoch, batch_idx * len(data), len(train_loader.dataset), 52 | 100. * batch_idx / len(train_loader), loss.item())) 53 | 54 | 55 | def test(model, device, test_loader): 56 | model.eval() 57 | test_loss = 0 58 | correct = 0 59 | with torch.no_grad(): 60 | for data, target in test_loader: 61 | data, target = data.to(device), target.to(device) 62 | output = model(data) 63 | test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss 64 | pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability 65 | correct += pred.eq(target.view_as(pred)).sum().item() 66 | 67 | test_loss /= len(test_loader.dataset) 68 | 69 | print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format( 70 | test_loss, correct, len(test_loader.dataset), 71 | 100. 
* correct / len(test_loader.dataset))) 72 | 73 | 74 | def main(): 75 | # Training settings 76 | parser = argparse.ArgumentParser(description='PyTorch MNIST Example') 77 | parser.add_argument('--batch-size', type=int, default=64, metavar='N', 78 | help='input batch size for training (default: 64)') 79 | parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N', 80 | help='input batch size for testing (default: 1000)') 81 | parser.add_argument('--epochs', type=int, default=14, metavar='N', 82 | help='number of epochs to train (default: 14)') 83 | parser.add_argument('--lr', type=float, default=1.0, metavar='LR', 84 | help='learning rate (default: 1.0)') 85 | parser.add_argument('--gamma', type=float, default=0.7, metavar='M', 86 | help='Learning rate step gamma (default: 0.7)') 87 | parser.add_argument('--no-cuda', action='store_true', default=False, 88 | help='disables CUDA training') 89 | parser.add_argument('--seed', type=int, default=1, metavar='S', 90 | help='random seed (default: 1)') 91 | parser.add_argument('--log-interval', type=int, default=10, metavar='N', 92 | help='how many batches to wait before logging training status') 93 | 94 | parser.add_argument('--save-model', action='store_true', default=False, 95 | help='For Saving the current Model') 96 | args = parser.parse_args() 97 | use_cuda = not args.no_cuda and torch.cuda.is_available() 98 | 99 | torch.manual_seed(args.seed) 100 | 101 | device = torch.device("cuda" if use_cuda else "cpu") 102 | 103 | kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {} 104 | train_loader = torch.utils.data.DataLoader( 105 | datasets.MNIST('../data', train=True, download=True, 106 | transform=transforms.Compose([ 107 | transforms.ToTensor(), 108 | transforms.Normalize((0.1307,), (0.3081,)) 109 | ])), 110 | batch_size=args.batch_size, shuffle=True, **kwargs) 111 | test_loader = torch.utils.data.DataLoader( 112 | datasets.MNIST('../data', train=False, transform=transforms.Compose([ 113 | transforms.ToTensor(), 114 | transforms.Normalize((0.1307,), (0.3081,)) 115 | ])), 116 | batch_size=args.test_batch_size, shuffle=True, **kwargs) 117 | 118 | model = Net().to(device) 119 | optimizer = optim.Adadelta(model.parameters(), lr=args.lr) 120 | 121 | scheduler = StepLR(optimizer, step_size=1, gamma=args.gamma) 122 | for epoch in range(1, args.epochs + 1): 123 | train(args, model, device, train_loader, optimizer, epoch) 124 | test(model, device, test_loader) 125 | scheduler.step() 126 | 127 | if args.save_model: 128 | torch.save(model.state_dict(), "mnist_cnn.pt") 129 | 130 | 131 | if __name__ == '__main__': 132 | main() 133 | -------------------------------------------------------------------------------- /data/posteriors_fig_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/data/posteriors_fig_1.png -------------------------------------------------------------------------------- /experiments/calibration.py: -------------------------------------------------------------------------------- 1 | """Calibration experiments.""" 2 | import argparse 3 | import os 4 | from os.path import exists, dirname 5 | import logging 6 | import warnings 7 | import sys 8 | 9 | from matplotlib import pyplot as plt 10 | import numpy as np 11 | import pickle as pkl 12 | import tqdm 13 | 14 | import lime.lime_tabular as baseline_lime_tabular 15 | import shap 16 | 17 | # Make sure we 
can get bayes explanations 18 | parent_dir = dirname(os.path.abspath(os.getcwd())) 19 | sys.path.append(parent_dir) 20 | 21 | from bayes.explanations import BayesLocalExplanations, explain_many 22 | from bayes.data_routines import get_dataset_by_name 23 | from bayes.models import * 24 | 25 | 26 | parser = argparse.ArgumentParser() 27 | parser.add_argument("--kernel", required=True, help="The kernel, i.e., lime or shap.") 28 | parser.add_argument("--dataset", required=True, help="The dataset to run on.") 29 | parser.add_argument("--n_initial", default=100, type=int, help="The initial points used to compute the calibration.") 30 | parser.add_argument("--n_true", default=10_000, type=int, help="The number of perturbations used to compute the converged explanation.") 31 | parser.add_argument("--n_threads", default=1, type=int, help="The number of threads to launch during the experiment.") 32 | parser.add_argument("--num", type=int, default=None, help="The number of instances to run on. Leave set to None to run on all test instances.") 33 | parser.add_argument("--verbose", action="store_true", help="Verbose output.") 34 | parser.add_argument("--balance_background_dataset", action="store_true", help="Whether to balance the background sampling. This helps with tabular calibration.") 35 | parser.add_argument("--seed", default=0, type=int) 36 | 37 | 38 | def get_creds(initial, final, total_init=0.0, inside_init=0.0): 39 |     """Computes the calibration from the initial and pseudo ground truth feature importances.""" 40 |     total, inside = total_init, inside_init 41 |     for q, item in tqdm.tqdm(enumerate(initial)): 42 |         creds = item.creds 43 |         init_coef = item.coef_ 44 |         for i, c in enumerate(item.coef_): 45 |             total += 1.0 46 |             if final[q][i] <= (init_coef[i] + creds[i]) and final[q][i] >= (init_coef[i] - creds[i]): 47 |                 inside += 1 48 |     return inside / total, total, inside 49 | 50 | 51 | def run_calibration(args): 52 |     """Runs the calibration experiment.""" 53 | 54 |     # Get data and model 55 |     data = get_dataset_by_name(args.dataset) 56 | 57 |     if args.dataset in ["compas", "german"]: 58 |         image_dataset = False 59 |         model_and_data = process_tabular_data_get_model(data) 60 |     elif args.dataset[:5] in ["mnist"]: 61 |         image_dataset = True 62 |         model_and_data = process_mnist_get_model(data) 63 |     elif args.dataset[:8] == "imagenet": 64 |         image_dataset = True 65 |         model_and_data = process_imagenet_get_model(data) 66 |     else: 67 |         raise NotImplementedError 68 | 69 |     if image_dataset: 70 |         xtest = model_and_data["xtest"] 71 |         ytest = model_and_data["ytest"] 72 |         segs = model_and_data["xtest_segs"] 73 |         get_model = model_and_data["model"] 74 |         label = model_and_data["label"] 75 | 76 |         if args.num is None: 77 |             args.num = xtest.shape[0] 78 | 79 |         total, inside = 0.0, 0.0 80 |         for i in tqdm.tqdm(range(args.num)): 81 |             instance = xtest[i] 82 |             segments = segs[i] 83 |             cur_model = get_model(instance, segments) 84 |             xtrain = get_xtrain(segments) 85 | 86 |             # Get initial 87 |             exp_init = BayesLocalExplanations(training_data=xtrain, 88 |                                               data="image", 89 |                                               kernel=args.kernel, 90 |                                               categorical_features=np.arange(xtrain.shape[1]), 91 |                                               verbose=args.verbose) 92 |             rout = exp_init.explain(classifier_f=cur_model, 93 |                                     data=np.ones_like(xtrain[0]), 94 |                                     label=ytest[i], 95 |                                     n_samples=args.n_initial, 96 |                                     focus_sample=False, 97 |                                     only_coef=False, 98 |                                     only_blr=False) 99 | 100 |             out = rout['blr'] 101 |             max_coef = rout['max_coefs'] 102 | 103 |             # Get 'ground truth' 104 |             exp_final = BayesLocalExplanations(training_data=xtrain, 105 |                                                data="image",
106 |                                                kernel=args.kernel, 107 |                                                categorical_features=np.arange(xtrain.shape[1]), 108 |                                                verbose=args.verbose) 109 |             out_final = exp_final.explain(classifier_f=cur_model, 110 |                                           data=np.ones_like(xtrain[0]), 111 |                                           label=ytest[i], 112 |                                           n_samples=args.n_true, 113 |                                           focus_sample=False, 114 |                                           only_coef=True, 115 |                                           max_coefs=max_coef) 116 | 117 |             out = [out] 118 |             out_final = [out_final] 119 | 120 |             pct, total, inside = get_creds(out, out_final, total, inside) 121 |             tqdm.tqdm.write(f"Calibration over {i+1} instances is {np.round(pct, 4)}") 122 |         print(f"Final Calibration {pct} for {args.kernel} {args.dataset}") 123 |     else: 124 |         # For table datasets, we use explain_many here to allow parallel runs through n_threads 125 |         xtrain = model_and_data["xtrain"] 126 |         xtest = model_and_data["xtest"] 127 |         model = model_and_data["model"] 128 |         feature_names = data["column_names"] 129 |         categorical_indices = data["cat_indices"] 130 |         label = model_and_data["label"] 131 | 132 |         if args.num is None: 133 |             args.num = xtest.shape[0] 134 | 135 |         print(f"Running calibration for {args.num} test instances...") 136 | 137 |         init_kwargs = { 138 |             "training_data": xtrain, 139 |             "data": "tabular", 140 |             "kernel": args.kernel, 141 |             "discretize_continuous": True, 142 |             "verbose": True, 143 |             "categorical_features": categorical_indices, 144 |         } 145 |         exp_kwargs = { 146 |             "classifier_f": model.predict_proba, 147 |             "label": label, 148 |             "n_samples": args.n_initial, 149 |             "focus_sample": False, 150 |             "only_coef": False, 151 |             "only_blr": False 152 |         } 153 | 154 |         labels = model.predict(xtest[:args.num]) 155 |         initial = explain_many(xtest[:args.num], init_kwargs, exp_kwargs, pool_size=args.n_threads, verbose=args.verbose, labels=labels, args=args) 156 |         max_coefs = [explanation['max_coefs'] for explanation in initial] 157 |         init_blrs = [explanation['blr'] for explanation in initial] 158 | 159 |         exp_kwargs = { 160 |             "classifier_f": model.predict_proba, 161 |             "label": label, 162 |             "n_samples": args.n_true, 163 |             "focus_sample": False, 164 |             "only_coef": True, 165 |             "only_blr": False 166 |         } 167 | 168 |         labels = model.predict(xtest[:args.num]) 169 |         final = explain_many(xtest[:args.num], init_kwargs, exp_kwargs, pool_size=args.n_threads, verbose=args.verbose, labels=labels, max_coefs=max_coefs, args=args) 170 |         pct_included, _, _ = get_creds(init_blrs, final) 171 |         print(f"Final Calibration: {pct_included} for {args.kernel} {args.dataset}") 172 | 173 | if __name__ == "__main__": 174 |     args = parser.parse_args() 175 |     np.random.seed(args.seed) 176 |     run_calibration(args) -------------------------------------------------------------------------------- /experiments/plotting/PTG Plots.ipynb: -------------------------------------------------------------------------------- 1 | { 2 |  "cells": [ 3 |   { 4 |    "cell_type": "markdown", 5 |    "metadata": {}, 6 |    "source": [ 7 |     "## PTG Plotting Routines\n", 8 |     "\n", 9 |     "Create a plot like Figure 2 in the paper." 10 |    ] 11 |   }, 12 |   { 13 |    "cell_type": "code", 14 |    "execution_count": 2, 15 |    "metadata": {}, 16 |    "outputs": [], 17 |    "source": [ 18 |     "import matplotlib\n", 19 |     "from matplotlib.pyplot import figure\n", 20 |     "from matplotlib import pyplot as plt\n", 21 |     "import numpy as np\n", 22 |     "import pickle as pkl" 23 |    ] 24 |   }, 25 |   { 26 |    "cell_type": "code", 27 |    "execution_count": null, 28 |    "metadata": {}, 29 |    "outputs": [], 30 |    "source": [ 31 |     "# File where ptg info is stored\n", 32 |     "filename = #########\n", 33 |     "# The desired credible interval widths for the experiment\n",
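    "# (Added note, based on experiments/ptg.py in this repo: each width below is\n",
    "# passed as cred_width to BayesLocalExplanations.explain, so the plot compares\n",
    "# these desired widths against the observed posterior credible widths.)\n",
34 | 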
"widths = [5e-3, 6e-3, 7e-3, 8e-3, 9e-3, 1e-2]\n", 35 | "widths_str = [\"5e-3\", \"6e-3\", \"7e-3\", \"8e-3\", \"9e-3\", \"1e-2\"]\n", 36 | "# The initial perturbations used in the experiment\n", 37 | "inital = \"200\"" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "# Load data\n", 47 | "with open(filename, 'rb') as f:\n", 48 | " data = pkl.load(f)\n", 49 | "data_np = np.array(data)" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "# Plots\n", 59 | "matplotlib.rcParams.update({'font.size': 35})\n", 60 | "figure(figsize=(13,10))\n", 61 | "plt.rc('xtick',labelsize=35)\n", 62 | "plt.rc('ytick',labelsize=35)\n", 63 | "ax = plt.gca() \n", 64 | "\n", 65 | "# Plot boxplots \n", 66 | "ax.boxplot(data, labels=widths_str, notch=False, widths=[.6] * len(widths), showfliers=False)\n", 67 | "\n", 68 | "# Setup yticks\n", 69 | "ax.set_yticklabels([\"5e-3\", \"7e-3\", \"9e-3\", \"1.1e-2\", \"1.3e-2\"][::-1])\n", 70 | "ax.set_yticks([5e-3, 7e-3, 9e-3, 1.1e-2, 1.3e-2][::-1])\n", 71 | "\n", 72 | "# Plot along x axis\n", 73 | "ax.plot([i+1 for i in range(len(widths))], widths ,c=\"#92DCE5\",linewidth=5.0)\n", 74 | "ax.scatter([i+1 for i in range(len(widths))], widths, s=75)\n", 75 | "\n", 76 | "# Setup labels\n", 77 | "ax.set_title(\"Bayes{}, 200 Initial Perturbations\".format(\"LIME\"), fontsize=35)\n", 78 | "ax.set_ylabel(\"Observed W\")\n", 79 | "ax.set_xlabel(\"Desired W\")\n", 80 | "plt.tight_layout()\n", 81 | "plt.savefig(\"ptg.pdf\")" 82 | ] 83 | } 84 | ], 85 | "metadata": { 86 | "kernelspec": { 87 | "display_name": "Python 3", 88 | "language": "python", 89 | "name": "python3" 90 | }, 91 | "language_info": { 92 | "codemirror_mode": { 93 | "name": "ipython", 94 | "version": 3 95 | }, 96 | "file_extension": ".py", 97 | "mimetype": "text/x-python", 98 | "name": "python", 99 | "nbconvert_exporter": "python", 100 | "pygments_lexer": "ipython3", 101 | "version": "3.7.4" 102 | } 103 | }, 104 | "nbformat": 4, 105 | "nbformat_minor": 2 106 | } 107 | -------------------------------------------------------------------------------- /experiments/plotting/Stability Plots.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pickle as pkl\n", 10 | "import numpy as np\n", 11 | "from matplotlib import pyplot as plt\n", 12 | "from scipy import stats\n", 13 | "import seaborn as sns\n", 14 | "import pandas as pd" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "BASELINE, BAYES = 0, 1\n", 24 | "BONCORRECT_COEF = 2\n", 25 | "names = [[\"ImageNet\"]*40, [\"MNIST\"]*40, [\"German\"]*40, [\"Compas\"]*40]" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "lime_stability = []\n", 35 | "with open(\"imagenet_blime_stability\", \"rb\") as f:\n", 36 | " lime_stability.append(pkl.load(f))\n", 37 | "with open(\"mnist_4_blime_stability\", \"rb\") as f:\n", 38 | " lime_stability.append(pkl.load(f))\n", 39 | "with open(\"german_blime_stability\", \"rb\") as f:\n", 40 | " lime_stability.append(pkl.load(f))\n", 41 | "with open(\"compas_blime_stability\", \"rb\") as f:\n", 42 | " 
lime_stability.append(pkl.load(f))\n", 43 |     "    \n", 44 |     "shap_stability = []\n", 45 |     "with open(\"imagenet_bshap_stability\", \"rb\") as f:\n", 46 |     "    shap_stability.append(pkl.load(f))\n", 47 |     "with open(\"mnist_4_bshap_stability\", \"rb\") as f:\n", 48 |     "    shap_stability.append(pkl.load(f))\n", 49 |     "with open(\"german_bshap_stability\", \"rb\") as f:\n", 50 |     "    shap_stability.append(pkl.load(f))\n", 51 |     "with open(\"compas_bshap_stability\", \"rb\") as f:\n", 52 |     "    shap_stability.append(pkl.load(f))\n", 53 |     "lime_stability = np.array(lime_stability)\n", 54 |     "shap_stability = np.array(shap_stability)" 55 |    ] 56 |   }, 57 |   { 58 |    "cell_type": "code", 59 |    "execution_count": null, 60 |    "metadata": {}, 61 |    "outputs": [], 62 |    "source": [ 63 |     "def get_pct_change(baseline, bayes):\n", 64 |     "    baseline, bayes = baseline.tolist(), bayes.tolist()\n", 65 |     "    pcts = []\n", 66 |     "    for bs, by in zip(baseline, bayes):\n", 67 |     "        dif = bs - by\n", 68 |     "        abs_dif = abs(dif)\n", 69 |     "        if dif > 0:\n", 70 |     "            pcts.append(abs_dif / bs)\n", 71 |     "        else:\n", 72 |     "            pcts.append(-1 * abs_dif / bs)\n", 73 |     "    return np.array(pcts) * 100" 74 |    ] 75 |   }, 76 |   { 77 |    "cell_type": "code", 78 |    "execution_count": null, 79 |    "metadata": {}, 80 |    "outputs": [], 81 |    "source": [ 82 |     "# LIME \n", 83 |     "lime_pct_changes = []\n", 84 |     "for i in range(lime_stability.shape[0]):\n", 85 |     "    lime_pct_changes.append(get_pct_change(lime_stability[i][BASELINE], lime_stability[i][BAYES]))\n", 86 |     "    print(\"Wilcoxon (alternative: Bayes < baseline):\", BONCORRECT_COEF * stats.wilcoxon(lime_stability[i][BASELINE], lime_stability[i][BAYES], alternative='greater')[1])\n", 87 |     "    print(\"Wilcoxon (two-sided):\", BONCORRECT_COEF * stats.wilcoxon(lime_stability[i][BASELINE], lime_stability[i][BAYES])[1])" 88 |    ] 89 |   }, 90 |   { 91 |    "cell_type": "code", 92 |    "execution_count": null, 93 |    "metadata": {}, 94 |    "outputs": [], 95 |    "source": [ 96 |     "# SHAP\n", 97 |     "shap_pct_changes = []\n", 98 |     "for i in range(shap_stability.shape[0]):\n", 99 |     "    shap_pct_changes.append(get_pct_change(shap_stability[i][BASELINE], shap_stability[i][BAYES]))\n", 100 |     "    print(\"Wilcoxon (alternative: Bayes < baseline):\", BONCORRECT_COEF * stats.wilcoxon(shap_stability[i][BASELINE], shap_stability[i][BAYES], alternative='greater')[1])\n", 101 |     "    print(\"Wilcoxon (two-sided):\", BONCORRECT_COEF * stats.wilcoxon(shap_stability[i][BASELINE], shap_stability[i][BAYES])[1])" 102 |    ] 103 |   }, 104 |   { 105 |    "cell_type": "code", 106 |    "execution_count": null, 107 |    "metadata": {}, 108 |    "outputs": [], 109 |    "source": [ 110 |     "lime_df = pd.DataFrame()\n", 111 |     "lime_df['names'] = np.concatenate(names)\n", 112 |     "lime_df['BayesLIME % Increase in Stability'] = np.concatenate(lime_pct_changes)\n", 113 |     "\n", 114 |     "plt.cla()\n", 115 |     "sns.set(font_scale=2.2, style=\"whitegrid\")\n", 116 |     "plt.figure(figsize=(10,4))\n", 117 |     "\n", 118 |     "ax = sns.boxplot(x='BayesLIME % Increase in Stability', y='names', data=lime_df, color='skyblue', fliersize=0)\n", 119 |     "ax = sns.stripplot(x='BayesLIME % Increase in Stability', y='names', data=lime_df, color='b', size=5, alpha=0.8)\n", 120 |     "\n", 121 |     "plt.plot([0,0],[-5,5], color='r', linestyle=\"--\", linewidth=3.0)\n", 122 |     "ax.set(ylabel=None)\n", 123 |     "ax.grid(False)\n", 124 |     "ax.set(ylim=[-1,4])\n", 125 |     "plt.tight_layout()\n", 126 |     "plt.savefig(f'stability_lime.pdf')" 127 |    ] 128 |   }, 129 |   { 130 |    "cell_type": "code", 131 |    "execution_count": null, 132 |    "metadata": {}, 133 |    "outputs": [], 134 |    "source": [ 135 |     "shap_df = 
pd.DataFrame()\n", 136 | "shap_df['names'] = np.concatenate(names)\n", 137 | "shap_df['BayesSHAP % Increase in Stability'] = np.concatenate(shap_pct_changes)\n", 138 | "\n", 139 | "plt.cla()\n", 140 | "sns.set(font_scale=2.2, style=\"whitegrid\")\n", 141 | "plt.figure(figsize=(10,4))\n", 142 | "\n", 143 | "ax = sns.boxplot(x='BayesSHAP % Increase in Stability', y='names', data=shap_df, color='skyblue', fliersize=0)\n", 144 | "ax = sns.stripplot(x='BayesSHAP % Increase in Stability', y='names', data=shap_df, color='b', size=5, alpha=0.8)\n", 145 | "\n", 146 | "plt.plot([0,0],[-5,5], color='r', linestyle=\"--\", linewidth=3.0)\n", 147 | "ax.set(ylabel=None)\n", 148 | "ax.grid(False)\n", 149 | "ax.set(ylim=[-1,4])\n", 150 | "plt.tight_layout()\n", 151 | "plt.savefig(f'stability_shap.pdf')" 152 | ] 153 | } 154 | ], 155 | "metadata": { 156 | "kernelspec": { 157 | "display_name": "Python 3", 158 | "language": "python", 159 | "name": "python3" 160 | }, 161 | "language_info": { 162 | "codemirror_mode": { 163 | "name": "ipython", 164 | "version": 3 165 | }, 166 | "file_extension": ".py", 167 | "mimetype": "text/x-python", 168 | "name": "python", 169 | "nbconvert_exporter": "python", 170 | "pygments_lexer": "ipython3", 171 | "version": "3.7.4" 172 | } 173 | }, 174 | "nbformat": 4, 175 | "nbformat_minor": 2 176 | } 177 | -------------------------------------------------------------------------------- /experiments/ptg.py: -------------------------------------------------------------------------------- 1 | """PTG experiments.""" 2 | import argparse 3 | import os 4 | from os.path import exists, dirname 5 | import logging 6 | import warnings 7 | import sys 8 | 9 | import numpy as np 10 | import pickle as pkl 11 | import tqdm 12 | 13 | import lime.lime_tabular as baseline_lime_tabular 14 | import shap 15 | 16 | parent_dir = dirname(os.path.abspath(os.getcwd())) 17 | sys.path.append(parent_dir) 18 | 19 | from bayes.explanations import BayesLocalExplanations, explain_many 20 | from bayes.data_routines import get_dataset_by_name 21 | from bayes.models import process_tabular_data_get_model, process_mnist_get_model, get_xtrain 22 | 23 | parser = argparse.ArgumentParser() 24 | parser.add_argument("--kernel", required=True) 25 | parser.add_argument("--dataset", required=True) 26 | parser.add_argument("--n_initial", default=200, type=int) 27 | parser.add_argument("--n_threads", default=1, type=int) 28 | parser.add_argument("--num", type=int, default=None) 29 | parser.add_argument("--verbose", type=bool, default=False) 30 | parser.add_argument("--datatype", type=str, default="tabular") 31 | parser.add_argument("--save_loc", type=str, default="results") 32 | parser.add_argument("--widths", type=str, default="5e-3 6e-3 7e-3 8e-3 9e-3 1e-2") 33 | parser.add_argument("--seed", default=0, type=int) 34 | 35 | 36 | def run_ptg(args): 37 | """Runs the ptg experiment.""" 38 | assert not exists(args.save_loc), f"Save location {args.save_loc} already has data" 39 | 40 | # Get data and model 41 | data = get_dataset_by_name(args.dataset) 42 | 43 | if args.dataset in ["compas", "german"]: 44 | image_dataset = False 45 | model_and_data = process_tabular_data_get_model(data) 46 | elif args.dataset[:5] in ["mnist"]: 47 | image_dataset = True 48 | model_and_data = process_mnist_get_model(data) 49 | else: 50 | raise NotImplementedError 51 | 52 | desired_widths = [float(w) for w in args.widths.split()] 53 | print(f"Using widths {desired_widths}") 54 | 55 | if image_dataset: 56 | 57 | # Get data 58 | xtest = model_and_data["xtest"] 59 
| ytest = model_and_data["ytest"] 60 |         segs = model_and_data["xtest_segs"] 61 |         get_model = model_and_data["model"] 62 |         label = model_and_data["label"] 63 |         if args.num is None: 64 |             args.num = xtest.shape[0] 65 | 66 |         # Compute ptg for images 67 |         results = [] 68 |         for w in tqdm.tqdm(desired_widths): 69 |             tqdm.tqdm.write(f"Running with width {w}") 70 |             cur_creds = [] 71 |             for i in tqdm.tqdm(range(args.num)): 72 | 73 |                 # Wrap the image model for current 74 |                 # instance + segments 75 |                 instance = xtest[i] 76 |                 segments = segs[i] 77 |                 model = get_model(instance, segments) 78 |                 xtrain = get_xtrain(segments) 79 | 80 |                 # Compute the explanation 81 |                 exp_init = BayesLocalExplanations(training_data=xtrain, 82 |                                                   data=args.datatype, 83 |                                                   kernel=args.kernel, 84 |                                                   categorical_features=np.arange(xtrain.shape[1]), 85 |                                                   verbose=args.verbose) 86 |                 out = exp_init.explain(classifier_f=model, 87 |                                        data=np.ones_like(xtrain[0]), 88 |                                        label=ytest[i], 89 |                                        cred_width=w, 90 |                                        focus_sample=False, 91 |                                        ptg_initial_points=args.n_initial) 92 | 93 |                 cur_creds.extend(out['blr'].creds) 94 | 95 |                 # Print out update for the current median observed widths 96 |                 c_median = np.round(np.median(cur_creds), 8) 97 |                 tqdm.tqdm.write(f"Median observed width is {c_median} for desired width {w}.") 98 |             results.append(cur_creds) 99 |     else: 100 |         xtrain = model_and_data["xtrain"] 101 |         xtest = model_and_data["xtest"] 102 |         model = model_and_data["model"] 103 |         categorical_indices = data["cat_indices"] 104 |         label = model_and_data["label"] 105 | 106 |         if args.num is None: 107 |             args.num = xtest.shape[0] 108 | 109 |         print(f"Running ptg for {args.num} test instances...") 110 |         labels = model.predict(xtest[:args.num]) 111 |         results = [] 112 |         for w in tqdm.tqdm(desired_widths): 113 |             tqdm.tqdm.write(f"Running with width {w}") 114 |             cur_creds = [] 115 |             for i in tqdm.tqdm(range(args.num)): 116 |                 exp_init = BayesLocalExplanations(training_data=xtrain, 117 |                                                   data=args.datatype, 118 |                                                   kernel=args.kernel, 119 |                                                   categorical_features=categorical_indices, 120 |                                                   discretize_continuous=True, 121 |                                                   verbose=args.verbose) 122 |                 out = exp_init.explain(classifier_f=model.predict_proba, 123 |                                        data=xtest[i], 124 |                                        label=labels[i], 125 |                                        cred_width=w, 126 |                                        focus_sample=False) 127 |                 cur_creds.extend(out['blr'].creds) 128 |             results.append(cur_creds) 129 | 130 |     with open(args.save_loc, "wb") as f: 131 |         pkl.dump(results, f) 132 | 133 | 134 | if __name__ == "__main__": 135 |     args = parser.parse_args() 136 |     np.random.seed(args.seed) 137 |     run_ptg(args) 138 | -------------------------------------------------------------------------------- /experiments/stability.py: -------------------------------------------------------------------------------- 1 | """Stability experiments.""" 2 | import argparse 3 | from copy import deepcopy 4 | import logging 5 | import os 6 | from os.path import exists, dirname 7 | import sys 8 | import warnings 9 | 10 | import numpy as np 11 | import pickle as pkl 12 | import tqdm 13 | 14 | import lime.lime_tabular as baseline_lime_tabular 15 | import shap 16 | 17 | parent_dir = dirname(os.path.abspath(os.getcwd())) 18 | sys.path.append(parent_dir) 19 | 20 | from bayes.explanations import BayesLocalExplanations, explain_many 21 | from bayes.data_routines import get_dataset_by_name 22 | from bayes.models import process_tabular_data_get_model, process_mnist_get_model, get_xtrain, process_imagenet_get_model 23 | 24 | import torch 25 | 26 | parser = argparse.ArgumentParser() 27 | parser.add_argument("--kernel", required=True) 28 | 
parser.add_argument("--dataset", required=True) 29 | parser.add_argument("--save_loc", required=True) 30 | parser.add_argument("--n_examples", required=True, type=int, default=10) 31 | parser.add_argument("--n_threads", type=int, default=2, help="Number of threads to start while generating explanations.") 32 | parser.add_argument("--n_samples", default=5_000, type=int) 33 | parser.add_argument("--batch_size", default=2_500, type=int) 34 | parser.add_argument("--overwrite_save", default=False, type=bool) 35 | parser.add_argument("--verbose", action='store_true') 36 | parser.add_argument("--seed", default=0, type=int) 37 | 38 | 39 | def get_epsilon_tightness(args): 40 |     """Gets the epsilon tightness for stability experiments.""" 41 |     if args.dataset == "compas": 42 |         eps = 0.1 43 |     elif args.dataset == "german": 44 |         eps = 0.1 45 |     else: 46 |         raise NotImplementedError 47 |     return eps 48 | 49 | 50 | def get_all_points_less_than_eps(epsilon, X, n_examples=100): 51 |     """Gets all points in the data closer than l2 distance of epsilon.""" 52 |     assert n_examples <= X.shape[0], f"n_examples is {n_examples} but data only contains {X.shape[0]} instances." 53 | 54 |     neighbors = [] 55 |     pbar = tqdm.tqdm(total=n_examples) 56 |     i = 0 57 | 58 |     while len(neighbors) < n_examples: 59 |         c_epsilon = epsilon 60 |         c_x = X[i] 61 |         c_neighbors = [] 62 |         while len(c_neighbors) == 0: 63 |             for j in range(X.shape[0]): 64 |                 if i == j or all(c_x == X[j]): 65 |                     continue 66 |                 if np.linalg.norm(c_x - X[j]) < c_epsilon: 67 |                     c_neighbors.append(X[j]) 68 |             # If we can't find neighbors within the radius, increment it. 69 |             # Alvarez-Melis et al. seem to leave this at 0.1, but that 70 |             # doesn't always include enough points, so increment the 71 |             # radius until we get at least one. 72 |             c_epsilon += 0.1 73 |         neighbors.append(c_neighbors) 74 | 75 |         if i == X.shape[0]: 76 |             raise RuntimeError("Couldn't find enough points with neighbors") 77 | 78 |         i += 1 79 |         pbar.update(1) 80 | 81 |     return neighbors 82 | 83 | 84 | def map_to_list(exp, label): 85 |     exp = exp.local_exp[label] 86 |     return np.array([item[1] for item in exp]) 87 | 88 | 89 | def get_baseline_explanation(model, instance, args, X, feature_names, categorical_indices, label): 90 |     """Gets either the lime or shap baseline.""" 91 |     if args.kernel == "shap": 92 |         # Make sure that discretization is the same as in bayesshap 93 |         get_discretizer = BayesLocalExplanations(X, 94 |                                                  data="tabular", 95 |                                                  kernel=args.kernel, 96 |                                                  discretize_continuous=True, 97 |                                                  verbose=False, 98 |                                                  categorical_features=categorical_indices) 99 |         discretized_instance = get_discretizer.shap_info.discretizer.discretize(instance) 100 | 101 |         # Prediction function 102 |         def predict(arr): 103 |             substituted_instances = deepcopy(arr) 104 |             for i in range(substituted_instances.shape[0]): 105 |                 substituted_instances[i, substituted_instances[i] == 1] = discretized_instance[0, substituted_instances[i] == 1] 106 |             final = get_discretizer.shap_info.discretizer.undiscretize(substituted_instances) 107 |             return model(final) 108 | 109 |         explainer = shap.KernelExplainer(predict, np.zeros_like(discretized_instance)) 110 |         exp = explainer.shap_values(discretized_instance, n_samples=args.n_samples, l1_reg="num_features({})".format(X.shape[1])) 111 |         original_mean = exp[label][0] 112 |     else: 113 |         explainer = baseline_lime_tabular.LimeTabularExplainer(X, 114 |                                                                discretize_continuous=True, 115 |                                                                categorical_features=categorical_indices) 116 |         exp = explainer.explain_instance(instance[0], 117 |                                          model, 118 |                                          num_samples=args.n_samples, 119 |                                          
num_features=X.shape[1], 120 |                                          labels=(label,)) 121 |         original_mean = map_to_list(exp, label) 122 |     return original_mean 123 | 124 | 125 | def add_image_epsilon(X, n_examples=100, mnist=False): 126 |     """Performs the epsilon perturbation for n_examples images.""" 127 |     X_to_eps = X[:n_examples] 128 | 129 |     if mnist: 130 |         n_p = np.random.normal(loc=0, scale=1e-2, size=(n_examples, 5, 1, 28, 28)) 131 |     else: 132 |         n_p = np.random.normal(loc=0, scale=1, size=(n_examples, 5, 224, 224, 3)) 133 | 134 |     # Perturb instances 135 |     neighbors = [] 136 |     for i in range(n_examples): 137 |         neighbors.append(X_to_eps[i] + n_p[i]) 138 |     neighbors = np.array(neighbors) 139 | 140 |     return neighbors 141 | 142 | 143 | def calculate_lip(neighbor_means, neighbors_points, original_mean, original_point):  # max local Lipschitz (LIP) estimate over the neighborhood 144 |     max_lip = 0 145 |     for m, p in zip(neighbor_means, neighbors_points): 146 |         lip = np.linalg.norm(original_mean - m) / np.linalg.norm(original_point - p) 147 |         if lip > 1e30: 148 |             warnings.warn("LIP overflow") 149 |             continue 150 |         max_lip = max(lip, max_lip) 151 |     return max_lip 152 | 153 | 154 | def run_stability(args): 155 |     """Runs the stability experiment.""" 156 |     if not args.overwrite_save: 157 |         assert not exists(args.save_loc), f"Save location {args.save_loc} already has data" 158 | 159 |     # Get data and model 160 |     data = get_dataset_by_name(args.dataset) 161 | 162 |     if args.dataset in ["compas", "german"]: 163 |         image_dataset = False 164 |         model_and_data = process_tabular_data_get_model(data) 165 |     elif args.dataset[:5] in ["mnist"]: 166 |         image_dataset = True 167 |         mnist = True 168 |         model_and_data = process_mnist_get_model(data) 169 |     elif args.dataset[:8] == "imagenet": 170 |         image_dataset = True 171 |         mnist = False 172 |         model_and_data = process_imagenet_get_model(data) 173 |     else: 174 |         raise NotImplementedError 175 | 176 |     if image_dataset: 177 | 178 |         xtest = model_and_data["xtest"] 179 |         ytest = model_and_data["ytest"] 180 |         segs = model_and_data["xtest_segs"] 181 |         get_model = model_and_data["model"] 182 |         label = model_and_data["label"] 183 | 184 |         all_neighborhoods = add_image_epsilon(xtest, n_examples=args.n_examples, mnist=mnist) 185 | 186 |         ########### Baseline LIP ########### 187 |         logging.info(f"Running {args.kernel} stability baselines on {args.dataset}") 188 |         baseline_max_lips = [] 189 | 190 |         for i in tqdm.tqdm(range(args.n_examples)): 191 | 192 |             # Wrap model for current instance and segments 193 |             instance = xtest[i] 194 |             segments = segs[i] 195 |             xtrain = get_xtrain(segments) 196 |             model = get_model(instance, segments) 197 | 198 |             # Get baseline explanation on original instance 199 |             original_mean = get_baseline_explanation(model=model, 200 |                                                      instance=np.ones_like(xtrain[:1]), 201 |                                                      args=args, 202 |                                                      X=xtrain, 203 |                                                      feature_names=None, 204 |                                                      categorical_indices=np.arange(xtrain.shape[1]), 205 |                                                      label=ytest[i]) 206 |             # Get explanation on all neighbors 207 |             neighbor_means = [] 208 |             neighbors = [] 209 |             for q in range(len(all_neighborhoods[i])): 210 |                 instance = all_neighborhoods[i][q] 211 |                 segments = segs[i] 212 |                 neighbor_model = get_model(instance, segments) 213 |                 neighbor_mean = get_baseline_explanation(model=neighbor_model, 214 |                                                          instance=np.ones_like(xtrain[:1]), 215 |                                                          args=args, 216 |                                                          X=xtrain, 217 |                                                          feature_names=None, 218 |                                                          categorical_indices=np.arange(xtrain.shape[1]), 219 |                                                          label=ytest[i]) 220 |                 neighbors.append(all_neighborhoods[i][q]) 221 |                 neighbor_means.append(neighbor_mean) 222 |             baseline_max_lips.append(calculate_lip(neighbor_means, neighbors, 
original_mean, xtest[i])) 223 | print(baseline_max_lips) 224 | bayes_max_lips = [] 225 | ########### Bayes LIP ########### 226 | for i in tqdm.tqdm(range(args.n_examples)): 227 | instance = xtest[i] 228 | segments = segs[i] 229 | model = get_model(instance, segments) 230 | exp_init = BayesLocalExplanations(training_data=xtrain, 231 | data="image", 232 | kernel=args.kernel, 233 | categorical_features=np.arange(xtrain.shape[1]), 234 | verbose=args.verbose) 235 | out = exp_init.explain(classifier_f=model, 236 | data=np.ones_like(xtrain[0]), 237 | label=ytest[i], 238 | n_samples=args.n_samples, 239 | focus_sample=True, 240 | focus_sample_batch_size=args.batch_size, 241 | feature_selection=False) 242 | bayes_original_mean = out['blr'].coef_ 243 | 244 | bayes_neighbor_means = [] 245 | bayes_neighbors = [] 246 | for q in range(len(all_neighborhoods[i])): 247 | instance = all_neighborhoods[i][q] 248 | neighbor_model = get_model(instance, segments) 249 | out = exp_init.explain(classifier_f=neighbor_model, 250 | data=np.ones_like(xtrain[0]), 251 | label=ytest[i], 252 | n_samples=args.n_samples, 253 | focus_sample=True, 254 | focus_sample_batch_size=args.batch_size, 255 | feature_selection=False) 256 | bayes_neighbors.append(all_neighborhoods[i][q]) 257 | bayes_neighbor_means.append(out['blr'].coef_) 258 | bayes_max_lips.append(calculate_lip(bayes_neighbor_means, bayes_neighbors, bayes_original_mean, xtest[i])) 259 | print(bayes_max_lips) 260 | else: 261 | epsilon = get_epsilon_tightness(args) 262 | 263 | xtrain = model_and_data["xtrain"] 264 | xtest = model_and_data["xtest"] 265 | model = model_and_data["model"] 266 | feature_names = data["column_names"] 267 | categorical_indices = data["cat_indices"] 268 | label = model_and_data["label"] 269 | 270 | # Get neighbors 271 | all_neighborhoods = get_all_points_less_than_eps(epsilon, xtest, n_examples=args.n_examples) 272 | 273 | ########### Baseline LIP ########### 274 | logging.info(f"Running {args.kernel} stability baselines on {args.dataset}") 275 | baseline_max_lips = [] 276 | for i in tqdm.tqdm(range(args.n_examples)): 277 | original_mean = get_baseline_explanation(model=model.predict_proba, 278 | instance=xtest[i:i+1], 279 | args=args, 280 | X=xtrain, 281 | feature_names=feature_names, 282 | categorical_indices=categorical_indices, 283 | label=label) 284 | neighbor_means = [] 285 | neighbors = [] 286 | for q in range(len(all_neighborhoods[i])): 287 | neighbor_mean = get_baseline_explanation(model=model.predict_proba, 288 | instance=all_neighborhoods[i][q].reshape(1, -1), 289 | args=args, 290 | X=xtrain, 291 | feature_names=feature_names, 292 | categorical_indices=categorical_indices, 293 | label=label) 294 | 295 | 296 | neighbors.append(all_neighborhoods[i][q]) 297 | neighbor_means.append(neighbor_mean) 298 | baseline_max_lips.append(calculate_lip(neighbor_means, neighbors, original_mean, xtest[i])) 299 | print(baseline_max_lips) 300 | ################################### 301 | 302 | ########### Focused LIP ########### 303 | logging.info(f"Running {args.kernel} stability focused sampling on {args.dataset}") 304 | bayes_max_lips = [] 305 | for i in tqdm.tqdm(range(args.n_examples)): 306 | neighbor_means = [] 307 | neighbors = [] 308 | 309 | init_kwargs = { 310 | "training_data": xtrain, 311 | "data": "tabular", 312 | "kernel": args.kernel, 313 | "discretize_continuous": True, 314 | "verbose": True, 315 | "categorical_features": categorical_indices 316 | } 317 | 318 | exp_kwargs = { 319 | "classifier_f": model.predict_proba, 320 | "label": label, 321 
| "n_samples": args.n_samples, 322 | "focus_sample": True, 323 | "only_coef": True, 324 | "feature_selection": False, 325 | "focus_sample_batch_size": args.batch_size, 326 | "enumerate_initial": True, 327 | } 328 | 329 | data_to_explain = np.concatenate((xtest[i].reshape(1, -1), np.array(all_neighborhoods[i])), axis=0) 330 | output = explain_many(data_to_explain, init_kwargs, exp_kwargs, pool_size=args.n_threads) 331 | original_mean = output[0] 332 | neighbor_means = output[1:] 333 | bayes_max_lips.append(calculate_lip(neighbor_means, all_neighborhoods[i], original_mean, xtest[i])) 334 | print(bayes_max_lips) 335 | 336 | with open(args.save_loc, "wb") as f: 337 | pkl.dump([baseline_max_lips, bayes_max_lips], f) 338 | 339 | 340 | if __name__ == "__main__": 341 | args = parser.parse_args() 342 | np.random.seed(args.seed) 343 | run_stability(args) 344 | 345 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2022.6.15 2 | charset-normalizer==2.1.1 3 | cloudpickle==2.1.0 4 | cycler==0.11.0 5 | fonttools==4.37.1 6 | idna==3.3 7 | imageio==2.21.1 8 | importlib-metadata==4.12.0 9 | joblib==1.1.0 10 | kiwisolver==1.4.4 11 | lime==0.2.0.1 12 | llvmlite==0.39.0 13 | matplotlib==3.5.3 14 | networkx==2.6.3 15 | numba==0.56.0 16 | numpy==1.21.6 17 | packaging==21.3 18 | pandas==1.3.5 19 | Pillow==9.2.0 20 | pip==22.1.2 21 | pyparsing==3.0.9 22 | python-dateutil==2.8.2 23 | pytz==2022.2.1 24 | PyWavelets==1.3.0 25 | requests==2.28.1 26 | scikit-image==0.19.3 27 | scikit-learn==1.0.2 28 | scipy==1.7.3 29 | setuptools==63.4.1 30 | shap==0.41.0 31 | six==1.16.0 32 | slicer==0.0.7 33 | threadpoolctl==3.1.0 34 | tifffile==2021.11.2 35 | torch==1.12.1 36 | torchvision==0.13.1 37 | tqdm==4.64.0 38 | typing_extensions==4.3.0 39 | urllib3==1.26.12 40 | wheel==0.37.1 41 | zipp==3.8.1 42 | -------------------------------------------------------------------------------- /visualization/diego.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dylan-slack/Modeling-Uncertainty-Local-Explainability/72c4330bc7d29150b55018b0946f85f7df88e107/visualization/diego.gif -------------------------------------------------------------------------------- /visualization/image_posterior.py: -------------------------------------------------------------------------------- 1 | """Create a gif sampling from the posterior from an image. 2 | 3 | The file includes routines to create gifs of posterior samples for image 4 | explanations. To create the gif, we sample a number of draws from the posterior, 5 | plot the explanation and the image, and repeat this to stitch together a gif. 6 | 7 | The interpretation is that regions of the image that more frequency show up as 8 | green are more likely to positively impact the prediction. Similarly, regions that 9 | more frequently show up as red are more likey to negatively impact the prediction. 
10 | """ 11 | import os 12 | from os.path import exists, dirname 13 | import sys 14 | 15 | import imageio 16 | import matplotlib.pyplot as plt 17 | import numpy as np 18 | from skimage.segmentation import mark_boundaries 19 | import tempfile 20 | from tqdm import tqdm 21 | 22 | import lime.lime_tabular as baseline_lime_tabular 23 | import shap 24 | 25 | # Make sure we can get bayes explanations 26 | parent_dir = dirname(os.path.abspath(os.getcwd())) 27 | sys.path.append(parent_dir) 28 | 29 | from bayes.explanations import BayesLocalExplanations, explain_many 30 | from bayes.data_routines import get_dataset_by_name 31 | from bayes.models import * 32 | 33 | def fill_segmentation(values, segmentation, image, n_max=5): 34 | max_segs = np.argsort(abs(values))[-n_max:] 35 | out = np.zeros((224, 224)) 36 | c_image = np.zeros(image.shape) 37 | for i in range(len(values)): 38 | if i in max_segs: 39 | out[segmentation == i] = 1 if values[i] > 0 else -1 40 | c = 1 if values[i] > 0 else 0 41 | c_image[segmentation == i, c] = np.max(image) 42 | return c_image.astype(int), out.astype(int) 43 | 44 | def create_gif(explanation_blr, segments, image, save_loc, n_images=20, n_max=5): 45 | """Create the gif corresponding to the image explanation. 46 | 47 | Arguments: 48 | explanation_coefficients: The explanation blr object. 49 | segments: The image segmentation. 50 | image: The image for which to compute the explantion. 51 | save_loc: The location to save the gif. 52 | n_images: Number of images to create the gif with. 53 | n_max: The number of superpixels to draw on the image. 54 | """ 55 | draws = explanation_blr.draw_posterior_samples(n_images) 56 | # Setup temporary directory to store paths in 57 | with tempfile.TemporaryDirectory() as tmpdirname: 58 | paths = [] 59 | for i, d in tqdm(enumerate(draws)): 60 | c_image, filled_segs = fill_segmentation(d, segments, image, n_max=n_max) 61 | plt.cla() 62 | plt.axis('off') 63 | plt.imshow(mark_boundaries(image, filled_segs)) 64 | plt.imshow(c_image, alpha=0.3) 65 | paths.append(os.path.join(tmpdirname, f"{i}.png")) 66 | plt.savefig(paths[-1]) 67 | 68 | # Save to gif 69 | # https://stackoverflow.com/questions/61716066/creating-an-animation-out-of-matplotlib-pngs 70 | print(f"Saving gif to {save_loc}") 71 | ims = [imageio.imread(f) for f in paths] 72 | imageio.mimwrite(save_loc, ims) 73 | 74 | -------------------------------------------------------------------------------- /visualization/image_posterior_example.py: -------------------------------------------------------------------------------- 1 | """An example of generating a gif explanation for an image of my dog.""" 2 | import argparse 3 | import os 4 | from os.path import exists, dirname 5 | import sys 6 | 7 | parent_dir = dirname(os.path.abspath(os.getcwd())) 8 | sys.path.append(parent_dir) 9 | 10 | from bayes.explanations import BayesLocalExplanations, explain_many 11 | from bayes.data_routines import get_dataset_by_name 12 | from bayes.models import * 13 | from image_posterior import create_gif 14 | 15 | parser = argparse.ArgumentParser() 16 | parser.add_argument("--cred_width", type=float, default=0.1) 17 | parser.add_argument("--save_loc", type=str, required=True) 18 | parser.add_argument("--n_top_segs", type=int, default=5) 19 | parser.add_argument("--n_gif_images", type=int, default=20) 20 | 21 | IMAGE_NAME = "imagenet_diego" 22 | BLENHEIM_SPANIEL_CLASS = 156 23 | 24 | 25 | def get_image_data(): 26 | """Gets the image data and model.""" 27 | puppy_image = get_dataset_by_name(IMAGE_NAME, 
27 |     puppy_image = get_dataset_by_name(IMAGE_NAME, get_label=False)
28 |     model_and_data = process_imagenet_get_model(puppy_image)
29 |     return puppy_image, model_and_data
30 | 
31 | 
32 | def main(args):
33 |     puppy_image, model_and_data = get_image_data()
34 | 
35 |     # Unpack data
36 |     xtest = model_and_data["xtest"]
37 |     ytest = model_and_data["ytest"]
38 |     segs = model_and_data["xtest_segs"]
39 |     get_model = model_and_data["model"]
40 |     label = model_and_data["label"]
41 | 
42 |     # Unpack instance and segments
43 |     instance = xtest[0]
44 |     segments = segs[0]
45 | 
46 |     # Get wrapped model
47 |     cur_model = get_model(instance, segments)
48 | 
49 |     # Get background data
50 |     xtrain = get_xtrain(segments)
51 | 
52 |     prediction = np.argmax(cur_model(xtrain[:1]), axis=1)  # sanity-check the wrapped model
53 |     assert prediction == BLENHEIM_SPANIEL_CLASS, f"Prediction is {prediction} not {BLENHEIM_SPANIEL_CLASS}"
54 | 
55 |     # Compute explanation
56 |     exp_init = BayesLocalExplanations(training_data=xtrain,
57 |                                       data="image",
58 |                                       kernel="lime",
59 |                                       categorical_features=np.arange(xtrain.shape[1]),
60 |                                       verbose=True)
61 |     rout = exp_init.explain(classifier_f=cur_model,
62 |                             data=np.ones_like(xtrain[0]),
63 |                             label=BLENHEIM_SPANIEL_CLASS,
64 |                             cred_width=args.cred_width,
65 |                             focus_sample=False,
66 |                             l2=False)
67 | 
68 |     # Create the gif of the explanation
69 |     create_gif(rout['blr'], segments, instance, args.save_loc, args.n_gif_images, args.n_top_segs)
70 | 
71 | 
72 | 
73 | if __name__ == "__main__":
74 |     args = parser.parse_args()
75 |     main(args)
76 | 
--------------------------------------------------------------------------------
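Usage note: a hedged sketch of how the example script above might be invoked, run from inside the visualization/ directory so the `image_posterior` import resolves. Only --save_loc is required; the other flags are shown with their defaults from the argparse setup, and the output filename is illustrative:

    cd visualization
    python image_posterior_example.py --save_loc diego_posterior.gif --cred_width 0.1 --n_top_segs 5 --n_gif_images 20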