├── .gitignore
├── .idea
├── .gitignore
├── XBNet.iml
├── inspectionProfiles
│ ├── Project_Default.xml
│ └── profiles_settings.xml
├── misc.xml
├── modules.xml
└── vcs.xml
├── CODE-OF-CONDUCT.md
├── CONTRIBUTING.md
├── License.txt
├── Modelly
├── app.py
├── results_graph.png
├── static
│ ├── images
│ │ └── Training_graphs.png
│ └── index.css
├── templates
│ ├── base.html
│ ├── index.html
│ ├── layers.html
│ ├── partials
│ │ └── _sawo.html
│ ├── results.html
│ ├── treesinp.html
│ └── upload.html
└── xbnet_testAccuracy_66.7_trainAccuracy_63.4.pt
├── READ.md
├── README.md
├── Research_Paper
└── XBNET_paper.pdf
├── Untitled.png
├── XBNet
├── Seq.py
├── __init__.py
├── __pycache__
│ ├── __init__.cpython-37.pyc
│ └── run.cpython-37.pyc
├── main.py
├── models.py
├── run.py
└── training_utils.py
├── index.rst
├── screenshots
├── Results_metrics.png
└── results_graph.png
├── setup.py
└── test
├── Iris (1).csv
├── data (2).csv
├── diabetes.csv
├── digit_recognizer.py
├── digits.py
├── german.data
├── gui_test.py
├── test.py
├── titanic.csv
├── titanic_test.csv
├── train.csv
└── wine.py
/.gitignore:
--------------------------------------------------------------------------------
1 | Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # pytype static type analyzer
135 | .pytype/
136 |
137 | # Cython debug symbols
138 | cython_debug/
139 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/.idea/XBNet.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/Project_Default.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/CODE-OF-CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Code of Conduct
2 |
3 | Our ethics and morals guide us in our day-to-day interactions and decision-making. Our open source projects are no exception. Trust, respect, collaboration and transparency are core values we believe should live and breathe within our projects. Our community welcomes participants from around the world with different experiences, unique perspectives, and great ideas to share.
4 |
5 | ## Our Pledge
6 |
7 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
8 |
9 | ## Our Standards
10 |
11 | Examples of behavior that contributes to creating a positive environment include:
12 |
13 | - Using welcoming and inclusive language
14 | - Being respectful of differing viewpoints and experiences
15 | - Gracefully accepting constructive criticism
16 | - Attempting collaboration before conflict
17 | - Focusing on what is best for the community
18 | - Showing empathy towards other community members
19 |
20 | Examples of unacceptable behavior by participants include:
21 |
22 | - Violence, threats of violence, or inciting others to commit self-harm
23 | - The use of sexualized language or imagery and unwelcome sexual attention or advances
24 | - Trolling, intentionally spreading misinformation, insulting/derogatory comments, and personal or political attacks
25 | - Public or private harassment
26 | - Publishing others' private information, such as a physical or electronic address, without explicit permission
27 | - Abuse of the reporting process to intentionally harass or exclude others
28 | - Advocating for, or encouraging, any of the above behavior
29 | - Other conduct which could reasonably be considered inappropriate in a professional setting
30 |
31 | ## Our Responsibilities
32 |
33 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
34 |
35 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
36 |
37 | ## Scope
38 |
39 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
40 |
41 | ## Enforcement
42 |
43 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting us anonymously through [this form](https://forms.gle/tPsrKs6Q3TEVRDG19). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident.
44 |
45 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
46 |
47 | If you are unsure whether an incident is a violation, or whether the space where the incident took place is covered by our Code of Conduct, **we encourage you to still report it**. We would prefer to have a few extra reports where we decide to take no action, than to let an incident go unnoticed and unresolved, which may result in an individual or group feeling like they can no longer participate in the community. Reports deemed as not a violation will also allow us to improve our Code of Conduct and processes surrounding it. If you witness a dangerous situation or someone in distress, we encourage you to report even if you are only an observer.
48 |
49 | ## Attribution
50 |
51 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
52 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to XBNet
2 |
3 | A big welcome and thank you for considering contributing to XBNet! It’s people like you that make it a reality for users in our community.
4 |
5 | Reading and following these guidelines will help us make the contribution process easy and effective for everyone involved. It also communicates that you agree to respect the time of the developers managing and developing these open source projects. In return, we will reciprocate that respect by addressing your issue, assessing changes, and helping you finalize your pull requests.
6 |
7 | ## Quicklinks
8 |
9 | * [Code of Conduct](#code-of-conduct)
10 | * [Getting Started](#getting-started)
11 | * [Issues](#issues)
12 | * [Pull Requests](#pull-requests)
13 | * [Getting Help](#getting-help)
14 |
15 | ## Code of Conduct
16 |
17 | We take our open source community seriously and hold ourselves and other contributors to high standards of communication. By participating and contributing to this project, you agree to uphold our [Code of Conduct](https://github.com/tusharsarkar3/XBNet/blob/master/CODE-OF-CONDUCT.md).
18 |
19 | ## Getting Started
20 |
21 | Contributions are made to this repo via Issues and Pull Requests (PRs). A few general guidelines that cover both:
22 |
23 | - Search for existing Issues and PRs before creating your own.
24 | - We work hard to make sure issues are handled in a timely manner but, depending on the impact, it could take a while to investigate the root cause. A friendly ping in the comment thread to the submitter or a contributor can help draw attention if your issue is blocking.
25 | - If you've never contributed before, see [the first timer's guide on our blog](https://auth0.com/blog/a-first-timers-guide-to-an-open-source-project/) for resources and tips on how to get started.
26 |
27 | ### Issues
28 |
29 | Issues should be used to report problems with the library, request a new feature, or to discuss potential changes before a PR is created. When you create a new Issue, a template will be loaded that will guide you through collecting and providing the information we need to investigate.
30 |
31 | If you find an Issue that addresses the problem you're having, please add your own reproduction information to the existing issue rather than creating a new one. Adding a [reaction](https://github.blog/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) can also help by indicating to our maintainers that a particular problem is affecting more than just the reporter.
32 |
33 | ### Pull Requests
34 |
35 | PRs to our libraries are always welcome and can be a quick way to get your fix or improvement slated for the next release. In general, PRs should:
36 |
37 | - Only fix/add the functionality in question **OR** address wide-spread whitespace/style issues, not both.
38 | - Add unit or integration tests for fixed or changed functionality (if a test suite already exists).
39 | - Address a single concern in the least number of changed lines as possible.
40 | - Include documentation in the repo or on our [docs site](https://xbnet.readthedocs.io/en/latest/).
41 | - Be accompanied by a complete Pull Request template (loaded automatically when a PR is created).
42 |
43 | For changes that address core functionality or would require breaking changes (e.g. a major release), it's best to open an Issue to discuss your proposal first. This is not required but can save time creating and reviewing changes.
44 |
45 | In general, we follow the ["fork-and-pull" Git workflow](https://github.com/susam/gitpr)
46 |
47 | 1. Fork the repository to your own Github account
48 | 2. Clone the project to your machine
49 | 3. Create a branch locally with a succinct but descriptive name
50 | 4. Commit changes to the branch
51 | 5. Follow any formatting and testing guidelines specific to this repo
52 | 6. Push changes to your fork
53 | 7. Open a PR in our repository and follow the PR template so that we can efficiently review the changes.
54 |
55 | ## Getting Help
56 |
57 | Join us in the [Analytica Community](https://t.me/analyticadata) and post your question there in the correct category with a descriptive tag.
58 |
--------------------------------------------------------------------------------
/License.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 The Python Packaging Authority
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 |
--------------------------------------------------------------------------------
/Modelly/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, render_template, request, send_file
2 | import csv
3 | import pandas as pd
4 | from xgboost import XGBClassifier
5 | from sklearn.ensemble import RandomForestClassifier
6 | from sklearn.tree import DecisionTreeClassifier
7 | from lightgbm import LGBMClassifier
8 | import torch
9 | import numpy as np
10 | from sklearn.preprocessing import LabelEncoder
11 | from sklearn.model_selection import train_test_split
12 | from XBNet.training_utils import training, predict
13 | from XBNet.models import XBNETClassifier
14 | from XBNet.run import run_XBNET
15 | import matplotlib.pyplot as plt
16 | import os
17 | import shutil
18 | import pickle
19 |
# Flask application object; the route handlers below register against it.
app = Flask(__name__)
21 |
22 |
@app.route('/')
def uploady_file():
    # Landing page: serves the CSV-upload / model-selection form.
    return render_template('index.html')
26 |
27 |
@app.route('/uploader', methods=['GET', 'POST'])
def upload_file():
    """Receive the uploaded CSV plus model choice and route the user to
    the matching hyperparameter-input page.

    Stores the parsed dataframe and the form selections in module-level
    globals that the later routes (/layers, /default, /predict) read.
    """
    global df
    global model_name
    global layers
    global target, n_layers_boosted
    if request.method == 'POST':
        df = pd.read_csv(request.files['csvfile'])
        model_name = request.form['model'].lower()
        layers = request.form['num_layers']
        target = request.form['target']

        if len(layers) > 0:
            layers = int(layers)
        if model_name == "xbnet":
            n_layers_boosted = 1
            layers = [i + 1 for i in range(int(layers))]
            process_input()
            return render_template('layers.html', layers=layers)
        # BUG FIX: the original condition ended with `or "Logistic
        # Regression"`, a non-empty string literal that is always truthy,
        # so this branch matched EVERY model name and made the
        # "neural network" branch below unreachable.
        elif model_name in ("xgboost", "randomforest", "decision tree",
                            "lightgbm", "logistic regression"):
            process_input()
            return render_template('treesinp.html', layers=layers)
        elif model_name == "neural network":
            n_layers_boosted = 0
            layers = [i + 1 for i in range(int(layers))]
            process_input()
            return render_template('layers.html', layers=layers)
65 |
66 |
@app.route('/layers', methods=['GET', 'POST'])
def getlayers():
    """Collect per-layer dimensions (or tree hyperparameters) from the
    form, train the chosen model, and render the results page.

    Side effects: deletes stale saved models, trains via train(), and
    moves the training-graph image under static/images for the template.
    """
    global layers_dims
    # Remove model artifacts from a previous run so /download always
    # serves the file produced by THIS run.
    for ext in (".pt", ".pkl"):
        stale = [f for f in os.listdir(os.getcwd()) if f.endswith(ext)]
        if stale:
            os.remove(stale[0])
    layers_dims = []
    if request.method == 'POST':
        if model_name.lower() in ("xbnet", "neural network"):
            # Form fields are named "i<n>"/"o<n>" per layer: input size
            # followed by output size.
            for i in layers:
                layers_dims.append(int(request.form["i" + str(i)]))
                layers_dims.append(int(request.form["o" + str(i)]))
            print(layers_dims)
            train()
            # PORTABILITY FIX: the original used backslash string
            # literals ("static\images\Training_graphs.png"), which are
            # Windows-only and contain invalid escape sequences; build
            # the path with os.path.join instead.
            path = os.path.join("static/", "images")
            print(path)
            if not os.path.isdir(path):
                os.mkdir(path)
            graph_dest = os.path.join("static", "images", "Training_graphs.png")
            if os.path.isfile(graph_dest):
                os.remove(graph_dest)
            shutil.move("Training_graphs.png", path)
            return render_template("results.html", info={"training_acc": acc[-1],
                                                         "testing_acc": val_ac[-1],
                                                         "img": True})

        elif model_name in ("xgboost", "randomforest", "decision tree", "lightgbm"):
            for field in request.form.keys():
                try:
                    layers_dims.append(int(request.form[field]))
                except ValueError:
                    # Non-integer fields (e.g. learning rate, subsample).
                    # The original used a bare `except:`, which would also
                    # swallow unrelated errors.
                    layers_dims.append(float(request.form[field]))
            print(layers_dims)
            train()

            return render_template("results.html", info={"training_acc": training_acc * 100,
                                                         "testing_acc": testing_acc * 100, "img": False})
108 |
109 |
@app.route('/default', methods=['GET', 'POST'])
def default():
    """Train the selected tree model with default hyperparameters.

    The original declared `global layers_dims` twice; once is enough.
    """
    global layers_dims
    # Remove model artifacts from a previous run so /download always
    # serves the file produced by THIS run.
    for ext in (".pt", ".pkl"):
        stale = [f for f in os.listdir(os.getcwd()) if f.endswith(ext)]
        if stale:
            os.remove(stale[0])
    # Default order matches train(): [n_estimators, max_depth,
    # learning_rate, subsample, colsample].
    layers_dims = [100, 6, 0.3, 1, 1]
    train()
    return render_template("results.html", info={"training_acc": training_acc * 100,
                                                 "testing_acc": testing_acc * 100,
                                                 "img": False})
126 |
127 |
@app.route('/download', methods=['GET', 'POST'])
def download():
    """Serve the most recently saved model file for download.

    Neural models (XBNet / plain network) are saved as .pt, tree models
    as .pkl; pick the extension accordingly and send the first match.
    """
    neural = model_name.lower() in ("xbnet", "neural network")
    wanted_ext = ".pt" if neural else ".pkl"
    saved = [name for name in os.listdir(os.getcwd()) if name.endswith(wanted_ext)][0]
    return send_file(saved)
138 |
139 |
def process_input():
    """Clean the uploaded dataframe and build the training matrices.

    - Drops columns with >= 15% missing values.
    - Imputes remaining missing values with the column mode.
    - Label-encodes low-cardinality object columns; drops
      high-cardinality ones (presumably IDs / free text).
    - Label-encodes the target column when it is non-numeric.

    Publishes x_data, y_data, imputations, encoders and the final
    feature-column list through module-level globals that train() and
    predict_results() consume.
    """
    global x_data, y_data, label_encoded, imputations, label_y, columns_finally_used, y_label_encoder
    column_to_predict = target
    data = df
    n_df = len(data)
    label_encoded = {}
    imputations = {}
    for col in data.columns:
        # BUG FIX: the original stored/filled with data[col].mode() (a
        # Series); Series.fillna aligns by index, so effectively nothing
        # was imputed. Use the scalar mode instead. An all-NaN column has
        # an empty mode -- fall back to None (it gets dropped below).
        mode_vals = data[col].mode()
        imputations[col] = mode_vals.iloc[0] if not mode_vals.empty else None
        missing_frac = data[col].isnull().sum() / n_df
        if missing_frac >= 0.15:
            data.drop(col, axis=1, inplace=True)
        elif missing_frac > 0:
            data[col].fillna(imputations[col], inplace=True)
    columns_object = list(data.dtypes[data.dtypes == object].index)
    for col in columns_object:
        if col != column_to_predict:
            if data[col].nunique() / n_df < 0.4:
                le = LabelEncoder()
                data[col] = le.fit_transform(data[col])
                label_encoded[col] = le
            else:
                # Too many distinct values to one-hot/label encode usefully.
                data.drop(col, axis=1, inplace=True)

    x_data = data.drop(column_to_predict, axis=1).to_numpy()
    columns_finally_used = data.drop(column_to_predict, axis=1).columns

    y_data = data[column_to_predict].to_numpy()
    label_y = False
    if y_data.dtype == object:
        label_y = True
        y_label_encoder = LabelEncoder()
        y_data = y_label_encoder.fit_transform(y_data)
    print("Number of features are: " + str(x_data.shape[1]) +
          " classes are: " + str(len(np.unique(y_data))))
175 |
176 |
def train():
    """Train the model chosen at upload time and record its accuracy.

    Reads the globals prepared by process_input() (x_data, y_data) and
    the GUI-collected hyperparameters (layers_dims). Neural models are
    saved as a .pt file; tree models are pickled to .pkl. Accuracy
    figures are published via globals for the results page.

    NOTE(review): the route handlers that call train() ignore its return
    value; the render_template in the neural branch appears vestigial.
    """
    global model_tree, model_trained, acc, val_ac, training_acc, testing_acc
    # Fixed 70/30 split with a fixed seed so repeated runs are comparable.
    X_train, X_test, y_train, y_test = train_test_split(x_data, y_data,
                                                        test_size=0.3, random_state=0)
    if model_name == "xbnet" or model_name == "neural network":
        m = model_name
        print(layers)
        print(layers_dims, n_layers_boosted)
        # layers_dims holds an (input_size, output_size) pair per layer,
        # hence num_layers = len(layers) / 2.
        model = XBNETClassifier(X_train, y_train, num_layers=int(len(layers) / 2), num_layers_boosted=n_layers_boosted,
                                input_through_cmd=True, inputs_for_gui=layers_dims,
                                )
        criterion = torch.nn.CrossEntropyLoss()
        optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

        # Batch size 32, 300 epochs; save=True also produces the
        # Training_graphs.png image the results page displays.
        model_trained, acc, lo, val_ac, val_lo = run_XBNET(X_train, X_test, y_train, y_test, model,
                                                           criterion, optimizer, 32, 300, save=True)
        model_trained.save(m + "_testAccuracy_" + str(max(val_ac))[:4] + "_trainAccuracy_" +
                           str(max(acc))[:4] + ".pt", )
        # toast("Test Accuracy is: " +str(max(val_ac))[:4] +" and Training Accuracy is: " +
        #       str(max(acc))[:4] + " and model is saved.",duration= 10)
        return render_template("results.html", info={"training_acc": acc, "testing_acc": val_ac})

    elif (model_name == "xgboost" or model_name == "randomforest"
          or model_name == "decision tree" or model_name == "lightgbm"):
        # layers_dims order: [n_estimators, max_depth, learning_rate,
        # subsample, colsample] -- not every estimator uses all of them.
        if model_name == "xgboost":
            model_tree = XGBClassifier(n_estimators=layers_dims[0],
                                       max_depth=layers_dims[1],
                                       learning_rate=layers_dims[2],
                                       subsample=layers_dims[3],
                                       colsample_bylevel=layers_dims[4],
                                       random_state=0, n_jobs=-1,
                                       )
            model_tree.fit(X_train, y_train, eval_metric="mlogloss")
            training_acc = model_tree.score(X_train, y_train)
            testing_acc = model_tree.score(X_test, y_test)
        elif model_name == "randomforest":
            model_tree = RandomForestClassifier(n_estimators=layers_dims[0],
                                                max_depth=layers_dims[1],
                                                random_state=0, n_jobs=-1)
            model_tree.fit(X_train, y_train)
            training_acc = model_tree.score(X_train, y_train)
            testing_acc = model_tree.score(X_test, y_test)
        elif model_name == "decision tree":
            model_tree = DecisionTreeClassifier(max_depth=layers_dims[1], random_state=0)
            model_tree.fit(X_train, y_train)
            training_acc = model_tree.score(X_train, y_train)
            testing_acc = model_tree.score(X_test, y_test)
        elif model_name == "lightgbm":
            model_tree = LGBMClassifier(n_estimators=layers_dims[0],
                                        max_depth=layers_dims[1],
                                        learning_rate=layers_dims[2],
                                        subsample=layers_dims[3],
                                        colsample_bylevel=layers_dims[4],
                                        random_state=0, n_jobs=-1, )
            model_tree.fit(X_train, y_train, eval_metric="mlogloss")
            training_acc = model_tree.score(X_train, y_train)
            testing_acc = model_tree.score(X_test, y_test)
        print("Training and Testing accuracies are " + str(training_acc * 100)
              + " " + str(testing_acc * 100) + " respectively and model is stored")
        # Persist the fitted tree model for the /download route.
        with open(model_name + "_testAccuracy_" + str(testing_acc)[:4] + "_trainAccuracy_" +
                  str(training_acc)[:4] + ".pkl", 'wb') as outfile:
            pickle.dump(model_tree, outfile)
239 |
240 |
@app.route('/predict', methods=['GET', 'POST'])
def predict_results():
    """Run the trained model on a freshly uploaded CSV and return the
    augmented file (original rows plus a predicted target column)."""
    df_predict = pd.read_csv(request.files["csvpredictfile"])
    print(list(columns_finally_used))
    # Restrict the new data to exactly the columns used at training time.
    data = df_predict[list(columns_finally_used)]
    for col in data.columns:
        if data[col].isnull().sum() > 0:
            data[col].fillna(imputations[col], inplace=True)
        if col in label_encoded.keys():
            # Re-apply the encoder fitted on the training data.
            data[col] = label_encoded[col].transform(data[col])
    tree_models = ("xgboost", "randomforest", "decision tree", "lightgbm")
    if model_name in tree_models:
        predictions = model_tree.predict(data.to_numpy())
    else:
        predictions = predict(model_trained, data.to_numpy())
    # Map encoded class indices back to the original labels if the
    # target was label-encoded during preprocessing.
    if label_y:
        df_predict[target] = y_label_encoder.inverse_transform(predictions)
    else:
        df_predict[target] = predictions
    df_predict.to_csv("Predicted_Results.csv", index=False)
    return send_file("Predicted_Results.csv")
    # toast(text="Predicted_Results.csv in this directory has the results",
    #       duration = 10)
264 |
265 |
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
--------------------------------------------------------------------------------
/Modelly/results_graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/Modelly/results_graph.png
--------------------------------------------------------------------------------
/Modelly/static/images/Training_graphs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/Modelly/static/images/Training_graphs.png
--------------------------------------------------------------------------------
/Modelly/static/index.css:
--------------------------------------------------------------------------------
1 | *{
2 | font-family:sans-serif;
3 | }
4 |
5 |
6 | .intro{
7 | background-color:rgba(13, 13, 77, 0.644);
8 | height:100%;
9 | color: white;
10 | vertical-align:middle;
11 | margin-top: 0;
12 |
13 | }
14 | .desc{
15 | font-weight:10;
16 |
17 | }
18 | .xbnet{
19 | color:#ec660c;
20 | }
21 |
22 |
23 | body{
24 | background-image: url(https://freedesignfile.com/upload/2017/06/Origami-black-background-vectors.jpg);
25 | background-position: 100%;
26 | background-size: cover;
27 |
28 | }
29 |
30 | main {
31 | height:100%;
32 | }
33 |
34 | .mybts{
35 | background-color: #ec660c;
36 | color: white;
37 | }
38 |
39 | .mybts:hover{
40 | background-color: #ec660c8a;
41 | }
42 |
43 | .landingMain {
44 | width: 100%;
45 | height: 100%;
46 | overflow-x: hidden;
47 | }
48 |
49 | .heading {
50 |
51 | display: flex;
52 | flex-direction: row;
53 | }
54 |
55 | .form-card {
56 |
57 | margin-left: 5%;
58 | height: 300px;
59 | align-items: center;
60 | justify-content: right;
61 |
62 | }
63 | .card {
64 | align-content: center;
65 | color: #ffffff;
66 | font-weight: bolder;
67 | background-color: rgba(255, 255, 255, 0.192);
68 | }
69 | .card-title{
70 | text-align: center;
71 | }
72 |
73 | html,body,.container {
74 | height:100%;
75 | }
76 | .container {
77 | justify-content: center;
78 | display:table;
79 | width: 100%;
80 | padding: 3% 0 5% 0;
81 | box-sizing: border-box;
82 | }
83 |
84 | .row {
85 | height: 100%;
86 | display: table-row;
87 | }
88 |
89 | .row .no-float {
90 | display: table-cell;
91 | float: none;
92 | }
--------------------------------------------------------------------------------
/Modelly/templates/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | Model Maker
9 |
15 |
16 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
Modelly
30 |
31 | Make Your Models
32 |
33 |
Your one stop destination for the state of the art machine and deep learning models.
34 |
35 |
36 | {% block content %} {% endblock %}
37 |
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/Modelly/templates/index.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 |
54 |
55 |
56 |
61 |
62 |
63 | {% endblock %}
--------------------------------------------------------------------------------
/Modelly/templates/layers.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 |
45 |
46 |
47 |
48 |
49 |
54 |
55 |
56 | {% endblock %}
--------------------------------------------------------------------------------
/Modelly/templates/partials/_sawo.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
27 |
--------------------------------------------------------------------------------
/Modelly/templates/results.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 |
5 |
6 |
7 |
Results
8 |
9 | {% if info['img']==True %}
10 |
11 | {% endif %}
12 |
13 |
21 |
22 |
23 |
24 |
25 |
26 |
27 | {% endblock %}
28 |
--------------------------------------------------------------------------------
/Modelly/templates/treesinp.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block content %}
3 |
4 |
77 |
78 |
79 |
80 |
81 |
86 |
87 |
88 | {% endblock %}
--------------------------------------------------------------------------------
/Modelly/templates/upload.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/Modelly/xbnet_testAccuracy_66.7_trainAccuracy_63.4.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/Modelly/xbnet_testAccuracy_66.7_trainAccuracy_63.4.pt
--------------------------------------------------------------------------------
/READ.md:
--------------------------------------------------------------------------------
1 | # XBNet - Xtremely Boosted Network
2 | ## Boosted neural network for tabular data
3 |
4 | [](https://pytorch.org/ "PyTorch")
5 | [](https://paperswithcode.com/sota/iris-classification-on-iris?p=xbnet-an-extremely-boosted-neural-network)
6 | [](https://paperswithcode.com/sota/diabetes-prediction-on-diabetes?p=xbnet-an-extremely-boosted-neural-network)
7 | [](https://paperswithcode.com/sota/survival-prediction-on-titanic?p=xbnet-an-extremely-boosted-neural-network)
8 | [](https://paperswithcode.com/sota/breast-cancer-detection-on-breast-cancer-1?p=xbnet-an-extremely-boosted-neural-network)
9 | [](https://paperswithcode.com/sota/fraud-detection-on-kaggle-credit-card-fraud?p=xbnet-an-extremely-boosted-neural-network)
10 |
XBNET, which is built on PyTorch, combines tree-based models with neural networks to create a robust architecture that is trained by using
12 | a novel optimization technique, Boosted Gradient Descent for Tabular
13 | Data which increases its interpretability and performance. Boosted Gradient Descent is initialized with the
14 | feature importance of a gradient boosted tree, and it updates the weights of each
15 | layer in the neural network in two steps:
16 | - Update weights by gradient descent.
17 | - Update weights by using feature importance of a gradient boosted tree
18 | in every intermediate layer.
19 |
20 | ## Features
21 |
22 | - Better performance, training stability and interpretability for tabular data.
23 | - Easy to implement with rapid prototyping capabilities
24 | - Minimum Code requirements for creating any neural network with or without boosting
25 |
26 | ---
27 | ### Comparison with XGBOOST
28 | XBNET VS XGBOOST testing accuracy on different datasets with no hyperparameter tuning
29 |
30 | | Dataset | XBNET | XGBOOST |
31 | | ---------------- | ---------------- | ---------------- |
32 | | Iris | 100 | 97.7 |
33 | | Breast Cancer | 96.49 | 96.47 |
34 | | Diabetes | 78.78 | 77.48 |
35 | | Titanic | 79.85 | 80.5 |
36 | | German Credit | 71.33 | 77.66 |
37 |
38 | ---
39 | ### Installation :
40 | ```
41 | pip install --upgrade git+https://github.com/tusharsarkar3/XBNet.git
42 | ```
43 | ---
44 |
45 | ### Example for using
46 | ```
47 | import torch
48 | import numpy as np
49 | import pandas as pd
50 | from sklearn.preprocessing import LabelEncoder
51 | from sklearn.model_selection import train_test_split
52 | from XBNet.training_utils import training,predict
53 | from XBNet.models import XBNETClassifier
54 | from XBNet.run import run_XBNET
55 |
56 | data = pd.read_csv('test\Iris (1).csv')
57 | print(data.shape)
58 | x_data = data[data.columns[:-1]]
59 | print(x_data.shape)
60 | y_data = data[data.columns[-1]]
61 | le = LabelEncoder()
62 | y_data = np.array(le.fit_transform(y_data))
63 | print(le.classes_)
64 |
65 | X_train,X_test,y_train,y_test = train_test_split(x_data.to_numpy(),y_data,test_size = 0.3,random_state = 0)
66 | model = XBNETClassifier(X_train,y_train,2)
67 |
68 | criterion = torch.nn.CrossEntropyLoss()
69 | optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
70 |
71 | m,acc, lo, val_ac, val_lo = run_XBNET(X_train,X_test,y_train,y_test,model,criterion,optimizer,32,300)
72 | print(predict(m,x_data.to_numpy()[0,:]))
73 | ```
74 | ---
75 | ### Output images
76 | 
77 | 
78 | ---
79 | ### Reference
80 | If you make use of this software for your work, we would appreciate it if you would cite us:
81 | ```
82 | @misc{sarkar2021xbnet,
83 | title={XBNet : An Extremely Boosted Neural Network},
84 | author={Tushar Sarkar},
85 | year={2021},
86 | eprint={2106.05239},
87 | archivePrefix={arXiv},
88 | primaryClass={cs.LG}
89 | }
90 | ```
91 | ---
92 | #### Features to be added :
93 | - Metrics for different requirements
94 | - Addition of some other types of layers
95 |
96 | ---
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # XBNet - Xtremely Boosted Network
2 | ## Boosted neural network for tabular data
3 |
4 | [](https://pytorch.org/ "PyTorch")
5 | [](https://paperswithcode.com/sota/iris-classification-on-iris?p=xbnet-an-extremely-boosted-neural-network)
6 | [](https://paperswithcode.com/sota/diabetes-prediction-on-diabetes?p=xbnet-an-extremely-boosted-neural-network)
7 | [](https://paperswithcode.com/sota/survival-prediction-on-titanic?p=xbnet-an-extremely-boosted-neural-network)
8 | [](https://paperswithcode.com/sota/breast-cancer-detection-on-breast-cancer-1?p=xbnet-an-extremely-boosted-neural-network)
9 | [](https://paperswithcode.com/sota/fraud-detection-on-kaggle-credit-card-fraud?p=xbnet-an-extremely-boosted-neural-network)
10 |
11 |
12 | [](https://pepy.tech/project/xbnet)
13 |
15 |
XBNET, which is built on PyTorch, combines tree-based models with neural networks to create a robust architecture that is trained by using
17 | a novel optimization technique, Boosted Gradient Descent for Tabular
18 | Data which increases its interpretability and performance. Boosted Gradient Descent is initialized with the
19 | feature importance of a gradient boosted tree, and it updates the weights of each
20 | layer in the neural network in two steps:
21 | - Update weights by gradient descent.
22 | - Update weights by using feature importance of a gradient boosted tree
23 | in every intermediate layer.
24 |
25 | ## Features
26 |
27 | - Better performance, training stability and interpretability for tabular data.
28 | - Easy to implement with rapid prototyping capabilities
29 | - Minimum Code requirements for creating any neural network with or without boosting
30 | ---
31 | ### Comparison with XGBOOST
32 | XBNET VS XGBOOST testing accuracy on different datasets with no hyperparameter tuning
33 |
34 | | Dataset | XBNET | XGBOOST |
35 | | ---------------- | ---------------- | ---------------- |
36 | | Iris | 100 | 97.7 |
37 | | Breast Cancer | 96.49 | 96.47 |
38 | | Wine | 97.22 | 97.22 |
39 | | Diabetes | 78.78 | 77.48 |
40 | | Titanic | 79.85 | 80.5 |
41 | | German Credit | 71.33 | 77.66 |
| Digit Completion | 86.11 | 85.9 |
43 |
44 | ---
45 | ### Installation :
46 | ```
47 | pip install --upgrade git+https://github.com/tusharsarkar3/XBNet.git
48 | ```
49 | ---
50 |
51 | ### Example for using
52 | ```
53 | import torch
54 | import numpy as np
55 | import pandas as pd
56 | from sklearn.preprocessing import LabelEncoder
57 | from sklearn.model_selection import train_test_split
58 | from XBNet.training_utils import training,predict
59 | from XBNet.models import XBNETClassifier
60 | from XBNet.run import run_XBNET
61 |
62 | data = pd.read_csv('test\Iris (1).csv')
63 | print(data.shape)
64 | x_data = data[data.columns[:-1]]
65 | print(x_data.shape)
66 | y_data = data[data.columns[-1]]
67 | le = LabelEncoder()
68 | y_data = np.array(le.fit_transform(y_data))
69 | print(le.classes_)
70 |
71 | X_train,X_test,y_train,y_test = train_test_split(x_data.to_numpy(),y_data,test_size = 0.3,random_state = 0)
72 | model = XBNETClassifier(X_train,y_train,2)
73 |
74 | criterion = torch.nn.CrossEntropyLoss()
75 | optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
76 |
77 | m,acc, lo, val_ac, val_lo = run_XBNET(X_train,X_test,y_train,y_test,model,criterion,optimizer,32,300)
78 | print(predict(m,x_data.to_numpy()[0,:]))
79 | ```
80 | ---
81 | ### Output images :
82 |
83 | 
84 | 
85 | ---
86 |
87 | ### Reference
88 | If you make use of this software for your work, we would appreciate it if you would cite us:
89 | ```
90 | @misc{sarkar2021xbnet,
91 | title={XBNet : An Extremely Boosted Neural Network},
92 | author={Tushar Sarkar},
93 | year={2021},
94 | eprint={2106.05239},
95 | archivePrefix={arXiv},
96 | primaryClass={cs.LG}
97 | }
98 | ```
99 |
100 | ```
101 | @misc{1aa4d286-fae9-431e-bd08-63c1b9c848e2,
102 | title = {Library XBNet for tabular data which helps you to create a custom extremely boosted neural network},
103 | author = {Tushar Sarkar},
104 | journal = {Software Impacts},
105 | doi = {10.24433/CO.8976286.v1},
106 | howpublished = {\url{https://www.codeocean.com/}},
107 | year = 2021,
108 | month = {6},
109 | version = {v1}
110 | }
111 | ```
112 |
113 | ---
114 | #### Features to be added :
115 | - Metrics for different requirements
116 | - Addition of some other types of layers
117 |
118 | ---
119 |
120 | Developed with :heart: by Tushar Sarkar
121 |
--------------------------------------------------------------------------------
/Research_Paper/XBNET_paper.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/Research_Paper/XBNET_paper.pdf
--------------------------------------------------------------------------------
/Untitled.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/Untitled.png
--------------------------------------------------------------------------------
/XBNet/Seq.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | from collections import OrderedDict
4 |
class Seq(torch.nn.Sequential):
    '''
    Sequential container used by XBNet: besides running the layers, during
    training it re-fits the attached gradient boosted tree on each boosted
    layer's activations and stores the (smoothed) feature importances for the
    boosted weight update performed by the training loop.
    '''
    def give(self, xg, num_layers_boosted, ep=0.001):
        '''
        Saves various information into the object for further usage in the training process
        :param xg(object of XGBClassifier/XGBRegressor): gradient boosted tree refit on activations
        :param num_layers_boosted(int,optional): Number of layers to be boosted in the neural network.
        :param ep(float,optional): Epsilon added to feature importances for smoothing. Default: 0.001
        '''
        self.xg = xg
        self.epsilon = ep
        self.boosted_layers = OrderedDict()
        self.num_layers_boosted = num_layers_boosted

    def forward(self, input, train, l=torch.Tensor([1])):
        # Callers pass (input, labels, train_flag); the swap below restores the
        # intended meaning: `l` = labels, `train` = boolean flag.
        l, train = train, l
        for i, module in enumerate(self):
            input = module(input)
            x0 = input
            if train:
                self.l = l
                if i < self.num_layers_boosted:
                    try:
                        self.boosted_layers[i] = torch.from_numpy(np.array(
                            self.xg.fit(x0.detach().numpy(),
                                        (self.l).detach().numpy(),
                                        eval_metric="mlogloss").feature_importances_) + self.epsilon)
                    except Exception:
                        # Best-effort: the tree fit can fail on some batches
                        # (e.g. a single class present). Training continues
                        # without updating this layer's stored importances.
                        # (Narrowed from a bare `except:` so Ctrl-C/SystemExit
                        # are no longer swallowed.)
                        pass
        return input
--------------------------------------------------------------------------------
/XBNet/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/XBNet/__init__.py
--------------------------------------------------------------------------------
/XBNet/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/XBNet/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/XBNet/__pycache__/run.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/XBNet/__pycache__/run.cpython-37.pyc
--------------------------------------------------------------------------------
/XBNet/main.py:
--------------------------------------------------------------------------------
1 | from kivymd.app import MDApp
2 | from kivy.uix.widget import Widget
3 | from kivy.uix.actionbar import ActionBar
4 | from kivy.uix.scrollview import ScrollView
5 | from kivy.uix.boxlayout import BoxLayout
6 | from kivymd.theming import ThemableBehavior
7 | from kivymd.uix.list import OneLineListItem, MDList, TwoLineListItem, ThreeLineListItem
8 | from kivymd.uix.list import MDList
9 | from kivymd.uix.textfield import MDTextField
10 | from kivy.uix.button import Button
11 | from kivy.lang import Builder
12 | from kivymd.toast import toast
13 | from kivy.uix.screenmanager import Screen, ScreenManager
14 | import time
15 | from kivy.core.window import Window
16 | from kivymd.uix.label import MDLabel
17 | from kivy.uix.modalview import ModalView
18 | from kivymd.uix.filemanager import MDFileManager
19 | from kivymd.theming import ThemeManager
20 | import requests
21 | from kivy.uix.popup import Popup
22 | import os
23 | from xgboost import XGBClassifier
24 | from sklearn.ensemble import RandomForestClassifier
25 | from sklearn.tree import DecisionTreeClassifier
26 | from lightgbm import LGBMClassifier
27 | import torch
28 | import numpy as np
29 | import pandas as pd
30 | from sklearn.preprocessing import LabelEncoder
31 | from sklearn.model_selection import train_test_split
32 | from XBNet.training_utils import training,predict
33 | from XBNet.models import XBNETClassifier
34 | from XBNet.run import run_XBNET
35 | from os import environ
36 | import pickle
37 |
def suppress_qt_warnings():
    """Silence Qt high-DPI warnings by pinning the scale-factor env vars."""
    for var, value in (
        ("QT_DEVICE_PIXEL_RATIO", "0"),
        ("QT_AUTO_SCREEN_SCALE_FACTOR", "1"),
        ("QT_SCREEN_SCALE_FACTORS", "1"),
        ("QT_SCALE_FACTOR", "1"),
    ):
        environ[var] = value
43 |
44 | Login_Page = """
45 | ScreenManager:
46 | LoginPage
47 | ModelDetails
48 | FileManage
49 |
50 | :
51 | name:"Login"
52 | MDFloatLayout:
53 | Image:
54 | id: imageView
55 | source: 'Untitled.png'
56 | allow_stretch: True
57 | halign: 'center'
58 | pos_hint: {"center_x":0.23, "center_y":0.5}
59 |
60 | MDRoundFlatIconButton:
61 | id: filemanage
62 | text: "Select Dataset"
63 | icon: "folder"
64 | pos_hint: {'center_x': .77, 'center_y': .85}
65 | on_release: root.manager.current = "File"
66 |
67 |
68 | MDTextField:
69 | id: modelname
70 | hint_text:"Enter the model name: "
71 | pos_hint:{"center_x":0.77,"center_y":0.7}
72 | current_hint_text_color:0,0,0,1
73 | size_hint_x:0.4
74 | required: True
75 |
76 | MDTextField:
77 | id: layers
78 | hint_text:"Enter number of layers(For XBNet or NN): "
79 | pos_hint:{"center_x":0.77,"center_y":0.55}
80 | current_hint_text_color:0,0,0,1
81 | size_hint_x:0.4
82 |
83 | MDTextField:
84 | id: target
85 | hint_text:"Enter name of target feature: "
86 | pos_hint:{"center_x":0.77,"center_y":0.40}
87 | current_hint_text_color:0,0,0,1
88 | size_hint_x:0.4
89 | required: True
90 |
91 | MDRaisedButton:
92 | text:"Build model"
93 | pos_hint:{"center_x":0.77,"center_y":0.25}
94 | size_hint_x:0.3
95 | on_release: root.manager.current = "Model"
96 | on_press: app.get_model(modelname.text,target.text,layers.text)
97 | theme_text_color:"Custom"
98 | text_color:0,0,0,1
99 |
100 |
101 | :
102 | name:"Model"
103 | MDFloatLayout:
104 | Image:
105 | id: imageView
106 | source: 'Untitled.png'
107 | allow_stretch: True
108 | halign: 'center'
109 | pos_hint: {"center_x":0.23, "center_y":0.5}
110 |
111 | MDRaisedButton:
112 | text:"Train"
113 | pos_hint:{"center_x":0.63,"center_y":0.15}
114 | size_hint_x:0.2
115 | # on_release: root.manager.current = "Model"
116 | on_press: app.get_layers()
117 | theme_text_color:"Custom"
118 | text_color:0,0,0,1
119 |
120 | MDRaisedButton:
121 | text:"Predict"
122 | pos_hint:{"center_x":0.88,"center_y":0.15}
123 | size_hint_x:0.2
124 | # on_release: root.manager.current = "Model"
125 | on_press: app.predict()
126 | theme_text_color:"Custom"
127 | text_color:0,0,0,1
128 |
129 |
130 | :
131 | name:"File"
132 | BoxLayout:
133 | FileChooserListView:
134 | canvas.before:
135 | Color:
136 | rgb: 0.1, 0.2, 0.5
137 | Rectangle:
138 | pos: self.pos
139 | size: self.size
140 | on_selection: app.get_path(*args)
141 |
142 | """
143 |
class LoginPage(Screen):
    '''Initial screen: dataset picker plus model name / layers / target fields (see KV string).'''
    pass
146 |
class ModelDetails(Screen):
    '''Screen that hosts the dynamically-added hyperparameter fields and Train/Predict buttons.'''
    pass
149 |
class CustomDropDown(BoxLayout):
    # NOTE(review): not referenced anywhere in this module — possibly dead code.
    pass
152 |
class FileManage(Screen):
    '''Screen wrapping the FileChooserListView used to pick CSV files.'''
    pass
155 |
# Pre-built ScreenManager with the three screens registered by name.
# NOTE(review): `sm` appears unused — build() loads the KV string (which
# creates its own ScreenManager) instead of returning this one; confirm.
sm = ScreenManager()
sm.add_widget(LoginPage(name="Login"))
sm.add_widget(ModelDetails(name="Model"))
sm.add_widget(FileManage(name="File"))
160 |
class XBNetGUI(MDApp):
    '''
    KivyMD application driving the XBNet GUI workflow: pick a CSV dataset,
    configure one of several models (XBNet, plain neural network, XGBoost,
    random forest, decision tree, LightGBM), train and save it, then run
    batch predictions on a second CSV.
    '''

    def __init__(self):
        super(XBNetGUI, self).__init__()
        # True while the next file chosen in the file manager is a dataset to
        # predict on (rather than one to train on).
        self.predict_phase = False

    class ContentNavigationDrawer(BoxLayout):
        pass

    class DrawerList(ThemableBehavior, MDList):
        pass

    def build(self):
        '''Builds the whole UI from the module-level KV string.'''
        self.theme_cls.primary_palette = "Blue"
        login_page = Builder.load_string(Login_Page)

        return login_page

    def get_layers(self):
        '''
        Collects the values typed into the dynamically created text fields
        (see net_model()/tree_model()) into self.layers_dims, then trains.
        '''
        self.layers_dims = []
        if self.model == "xbnet" or self.model == "neural network":
            # Layer dimensions are whole numbers.
            for i,j in self.fields.items():
                self.layers_dims.append(int(j.text))
                print(j.text)
        elif (self.model == "xgboost" or self.model == "randomforest"
            or self.model == "decision tree" or self.model == "lightgbm"):
            # Tree hyperparameters may be ints (n_estimators, depth) or
            # floats (learning rate, subsample, colsample).
            for i,j in self.fields.items():
                try:
                    self.layers_dims.append(int(j.text))
                except:
                    self.layers_dims.append(float(j.text))

        self.train()

    def process_input(self):
        '''
        Loads self.file_selected with pandas and prepares self.x_data / self.y_data:
        drops columns with >=15% missing values, imputes the rest with the mode,
        label-encodes low-cardinality object columns and (if needed) the target.
        Imputation values and encoders are kept for reuse at prediction time.
        '''
        suppress_qt_warnings()
        column_to_predict = self.target
        data = pd.read_csv(self.file_selected)
        n_df = len(data)
        label_encoded = {}
        imputations = {}
        for i in data.columns:
            imputations[i] = data[i].mode()
            if data[i].isnull().sum() / n_df >= 0.15:
                # Too sparse to impute reliably: drop the whole column.
                data.drop(i, axis=1, inplace=True)
            elif data[i].isnull().sum() / n_df < 0.15 and data[i].isnull().sum() / n_df > 0:
                # NOTE(review): data[i].mode() is a Series, not a scalar, so
                # fillna() aligns it by row index — mode()[0] was likely intended.
                data[i].fillna(data[i].mode(), inplace=True)
                imputations[i] = data[i].mode()
        columns_object = list(data.dtypes[data.dtypes == object].index)
        for i in columns_object:
            if i != column_to_predict:
                if data[i].nunique() / n_df < 0.4:
                    # Low-cardinality categorical: encode and remember the encoder.
                    le = LabelEncoder()
                    data[i] = le.fit_transform(data[i])
                    label_encoded[i] = le
                else:
                    # Mostly-unique text column (e.g. an ID): drop it.
                    data.drop(i, axis=1, inplace=True)

        x_data = data.drop(column_to_predict, axis=1).to_numpy()
        self.columns_finally_used = data.drop(column_to_predict, axis=1).columns

        y_data = data[column_to_predict].to_numpy()
        self.label_y = False
        if y_data.dtype == object:
            # String target: encode it and keep the encoder so predictions can
            # be mapped back to the original labels.
            self.label_y = True
            self.y_label_encoder = LabelEncoder()
            y_data = self.y_label_encoder.fit_transform(y_data)
        self.label_encoded = label_encoded
        self.imputations = imputations
        toast("Number of features are: " + str(x_data.shape[1]) +
              " classes are: "+ str(len(np.unique(y_data))),duration=5)
        self.x_data = x_data
        self.y_data = y_data

    def train(self):
        '''
        Splits the prepared data 70/30 and trains the selected model.
        Neural models go through run_XBNET and are saved as .pt; tree models
        are fitted directly and pickled. Accuracies are reported via toasts.
        '''
        X_train, X_test, y_train, y_test = train_test_split(self.x_data, self.y_data,
                                                            test_size=0.3, random_state=0)
        if self.model == "xbnet" or self.model =="neural network":
            print(self.layers_dims)
            m = self.model
            model = XBNETClassifier( X_train, y_train, self.layers,
                                     input_through_cmd=True, inputs_for_gui=self.layers_dims,
                                     num_layers_boosted=self.n_layers_boosted
                                     )
            criterion = torch.nn.CrossEntropyLoss()
            optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

            # NOTE(review): self.model (the model-name string) is overwritten
            # here with the trained network returned by run_XBNET; later string
            # comparisons on self.model rely on this only happening for the
            # neural path.
            self.model, self.acc, self.lo, self.val_ac, self.val_lo = run_XBNET(X_train, X_test, y_train, y_test, model, criterion, optimizer, 32, 10)
            model.save(m+"_testAccuracy_" +str(max(self.val_ac))[:4] +"_trainAccuracy_" +
                       str(max(self.acc))[:4]+ ".pt",)
            toast("Test Accuracy is: " +str(max(self.val_ac))[:4] +" and Training Accuracy is: " +
                  str(max(self.acc))[:4] + " and model is saved.",duration= 10)

        elif (self.model == "xgboost" or self.model == "randomforest"
              or self.model == "decision tree" or self.model == "lightgbm"):
            # self.layers_dims order (from tree_model fields):
            # [n_estimators, max_depth, learning_rate, subsample, colsample]
            if self.model == "xgboost":
                self.model_tree = XGBClassifier(n_estimators=self.layers_dims[0],
                                                max_depth=self.layers_dims[1],
                                                learning_rate= self.layers_dims[2],
                                                subsample= self.layers_dims[3],
                                                colsample_bylevel = self.layers_dims[4],
                                                random_state=0,n_jobs=-1,
                                                )
                self.model_tree.fit(X_train, y_train,eval_metric="mlogloss")
                training_acc = self.model_tree.score(X_train, y_train)
                testing_acc = self.model_tree.score(X_test,y_test)
            elif self.model == "randomforest":
                self.model_tree = RandomForestClassifier(n_estimators=self.layers_dims[0],
                                                         max_depth=self.layers_dims[1],
                                                         random_state=0,n_jobs=-1)
                self.model_tree.fit(X_train, y_train)
                training_acc = self.model_tree.score(X_train, y_train)
                testing_acc = self.model_tree.score(X_test,y_test)
            elif self.model == "decision tree":
                self.model_tree = DecisionTreeClassifier(max_depth=self.layers_dims[1],random_state=0)
                self.model_tree.fit(X_train, y_train)
                training_acc = self.model_tree.score(X_train, y_train)
                testing_acc = self.model_tree.score(X_test,y_test)
            elif self.model == "lightgbm":
                self.model_tree = LGBMClassifier(n_estimators=self.layers_dims[0],
                                                 max_depth=self.layers_dims[1],
                                                 learning_rate= self.layers_dims[2],
                                                 subsample= self.layers_dims[3],
                                                 colsample_bylevel = self.layers_dims[4],
                                                 random_state=0,n_jobs=-1,)
                self.model_tree.fit(X_train, y_train,eval_metric="mlogloss")
                training_acc = self.model_tree.score(X_train, y_train)
                testing_acc = self.model_tree.score(X_test,y_test)
            toast(text="Training and Testing accuracies are "+str(training_acc*100)
                       +" "+str(testing_acc*100) + " respectively and model is stored",duration=7)
            with open(self.model+"_testAccuracy_" +str(testing_acc)[:4] +"_trainAccuracy_" +
                      str(training_acc)[:4]+ ".pkl", 'wb') as outfile:
                pickle.dump(self.model_tree,outfile)

    def predict(self):
        '''Switches to the file manager so the user can pick a CSV to predict on.'''
        self.predict_phase = True
        self.root.current = "File"

    def predict_results(self):
        '''
        Applies the stored imputations/encoders to the chosen CSV, predicts with
        the trained model, and writes the result to Predicted_Results.csv.
        '''
        df = pd.read_csv(self.file_selected)
        data = df[self.columns_finally_used]
        for i in data.columns:
            if data[i].isnull().sum() > 0:
                # NOTE(review): self.imputations[i] is a mode Series — see
                # process_input(); fillna aligns it by index.
                data[i].fillna(self.imputations[i], inplace=True)
            if i in self.label_encoded.keys():
                data[i] = self.label_encoded[i].transform(data[i])
        if (self.model == "xgboost" or self.model == "randomforest"
            or self.model == "decision tree" or self.model == "lightgbm"):
            predictions = self.model_tree.predict(data.to_numpy())
        else:
            # Neural path: self.model holds the trained network after train().
            predictions = predict(self.model, data.to_numpy())
        if self.label_y == True:
            df[self.target] = self.y_label_encoder.inverse_transform(predictions)
        else:
            df[self.target] = predictions
        df.to_csv("Predicted_Results.csv",index=False)
        toast(text="Predicted_Results.csv in this directory has the results",
              duration = 10)


    def get_model(self,model,target,layers):
        '''
        Stores the chosen model/target, builds the matching hyperparameter form
        on the "Model" screen, then pre-processes the selected dataset.
        :param model(str): model name typed by the user (matched case-insensitively)
        :param target(str): name of the target column
        :param layers(str): number of layers (used only for xbnet / neural network)
        '''
        self.model = model.lower()
        if len(layers) > 0:
            self.layers = int(layers)
        self.target = target
        if self.model.lower() == "xbnet":
            self.n_layers_boosted = 1
            self.net_model()
        elif (self.model == "xgboost" or self.model == "randomforest"
              or self.model == "decision tree" or self.model == "lightgbm"):
            self.tree_model()
        elif self.model.lower() == "neural network":
            self.n_layers_boosted = 0
            self.net_model()

        self.process_input()

    def net_model(self):
        '''
        Adds one input-dimension and one output-dimension text field per layer
        to the "Model" screen, evenly spaced vertically.
        '''
        layout = self.root.get_screen('Model')
        # 2 fields per layer plus margins determine the vertical spacing.
        gap = 1/(2*self.layers+2)
        counter = 1
        self.fields = {}
        for i in range(self.layers):
            lab1 = MDTextField(hint_text="Enter input dimensions of layer "+ str(i+1) +":",
                               pos_hint={"center_x":0.77,"center_y":1-gap*(counter)},
                               size_hint_x=.4, current_hint_text_color=[0,0,0,1] )

            counter+=1
            lab2 = MDTextField(hint_text="Enter output dimensions of layer "+ str(i+1) +":",
                               pos_hint={"center_x":0.77,"center_y":1-gap*(counter)},
                               size_hint_x=.4, current_hint_text_color=[0,0,0,1] )
            counter +=1
            layout.add_widget(lab1)
            layout.add_widget(lab2)
            self.fields["input_"+str(i+1)] = lab1
            self.fields["output_" + str(i+1)] = lab2

    def tree_model(self):
        '''
        Adds the five tree hyperparameter text fields (estimators, depth,
        learning rate, subsample, colsample) to the "Model" screen. The dict
        insertion order here fixes the order expected by train().
        '''
        layout = self.root.get_screen('Model')
        self.fields = {}
        lab1 = MDTextField(hint_text="Enter number of estimators: ",
                           pos_hint={"center_x":0.77,"center_y":0.85},
                           size_hint_x=.4, current_hint_text_color=[0,0,0,1] )

        lab2 = MDTextField(hint_text="Enter depth of trees[default:6](Typical 3-10): ",
                           pos_hint={"center_x":0.77,"center_y":0.7},
                           size_hint_x=.4, current_hint_text_color=[0,0,0,1] )

        # NOTE(review): "forr" typo below is user-visible hint text, preserved as-is.
        lab3 = MDTextField(hint_text="Enter learning rate forr XGBoost(eta)[default:0.3]: ",
                           pos_hint={"center_x":0.77,"center_y":0.55},
                           size_hint_x=.4, current_hint_text_color=[0,0,0,1] )

        lab4 = MDTextField(hint_text="Enter size of subsample[default:1](Typical 0.5-1): ",
                           pos_hint={"center_x":0.77,"center_y":0.4},
                           size_hint_x=.4, current_hint_text_color=[0,0,0,1] )

        lab5 = MDTextField(hint_text="Enter size of colsample_bytree[default:1](Typical 0.5-1): ",
                           pos_hint={"center_x":0.77,"center_y":0.25},
                           size_hint_x=.4, current_hint_text_color=[0,0,0,1] )

        layout.add_widget(lab1)
        layout.add_widget(lab2)
        layout.add_widget(lab3)
        layout.add_widget(lab4)
        layout.add_widget(lab5)
        self.fields["no_trees"] = lab1
        self.fields["depth"] = lab2
        self.fields["learning_rate"] = lab3
        self.fields["subsample"] = lab4
        self.fields["colsample_bytree"] = lab5

    def get_path(self,*args):
        '''
        FileChooser on_selection callback: records the chosen path and routes to
        prediction or back to the login screen depending on predict_phase.
        '''
        print(args)
        self.file_selected = args[1][0]
        print(self.file_selected)
        if self.predict_phase:
            self.root.current = "Model"
            print("hellooo")
            self.predict_results()
        else:
            self.root.current = "Login"
403 | if __name__ == "__main__":
404 | XBNetGUI().run()
--------------------------------------------------------------------------------
/XBNet/models.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | from xgboost import XGBClassifier,XGBRegressor
4 | from collections import OrderedDict
5 | from XBNet.Seq import Seq
6 |
class XBNETClassifier(torch.nn.Module):
    '''
    XBNetClassifier is a model for classification tasks that tries to combine tree-based models with
    neural networks to create a robust architecture.
    :param X_values(numpy array): Features on which model has to be trained
    :param y_values(numpy array): Labels of the features i.e target variable
    :param num_layers(int): Number of layers in the neural network
    :param num_layers_boosted(int,optional): Number of layers to be boosted in the neural network. Default value: 1
    :param input_through_cmd(Boolean): True when layer dimensions are supplied via inputs_for_gui
        instead of interactive prompts
    :param inputs_for_gui(list): Alternating input/output dimensions per layer; used only when
        input_through_cmd is True
    '''
    def __init__(self, X_values, y_values, num_layers, num_layers_boosted=1,
                 input_through_cmd = False,inputs_for_gui=None):
        super(XBNETClassifier, self).__init__()
        self.name = "Classification"
        self.layers = OrderedDict()  # layer index (str) -> torch module
        self.boosted_layers = {}
        self.num_layers = num_layers
        self.num_layers_boosted = num_layers_boosted
        self.X = X_values
        self.y = y_values
        self.gui = input_through_cmd
        self.inputs_layers_gui = inputs_for_gui

        self.take_layers_dim()
        self.base_tree()

        # Seed layer 0's weights with the base tree's feature importances
        # (self.temp stacks the importance vector once per output unit).
        self.layers[str(0)].weight = torch.nn.Parameter(torch.from_numpy(self.temp.T))


        self.xg = XGBClassifier(n_estimators=100)

        self.sequential = Seq(self.layers)
        self.sequential.give(self.xg, self.num_layers_boosted)
        self.feature_importances_ = None

    def get(self, l):
        '''
        Gets the set of current actual outputs of the inputs
        :param l(tensor): Labels of the current set of inputs that are getting processed.
        '''
        self.l = l


    def take_layers_dim(self):
        '''
        Creates the neural network layers, reading dimensions either from
        inputs_layers_gui (GUI mode) or interactively from stdin.
        '''
        if self.gui == True:
            counter = 0
            for i in range(self.num_layers):
                inp = self.inputs_layers_gui[counter]
                counter += 1
                out = self.inputs_layers_gui[counter]
                counter += 1
                set_bias = True
                self.layers[str(i)] = torch.nn.Linear(inp, out, bias=set_bias)
                if i == 0:
                    # Output width of layer 0 drives base_tree's weight stacking.
                    self.input_out_dim = out
                    self.labels = out
        else:
            print("Enter dimensions of linear layers: ")
            for i in range(self.num_layers):
                inp = int(input("Enter input dimensions of layer " + str(i + 1) + ": "))
                out = int(input("Enter output dimensions of layer " + str(i + 1)+ ": "))
                # NOTE(review): bool(input(...)) is True for ANY non-empty answer,
                # including "False" — bias is effectively always on unless the
                # user presses Enter on an empty line.
                set_bias = bool(input("Set bias as True or False: "))
                self.layers[str(i)] = torch.nn.Linear(inp, out, bias=set_bias)
                if i == 0:
                    self.input_out_dim = out
                    self.labels = out
            print("Enter your last layer ")
            self.ch = int(input("1. Sigmoid \n2. Softmax \n3. None \n"))
            if self.ch == 1:
                self.layers[str(self.num_layers)] = torch.nn.Sigmoid()
            elif self.ch == 2:
                dimension = int(input("Enter dimension for Softmax: "))
                self.layers[str(self.num_layers)] = torch.nn.Softmax(dim=dimension)
            else:
                pass

    def base_tree(self):
        '''
        Instantiates and trains an XGBClassifier on the input data so its feature
        importances can seed the weights of the first layer (one identical column
        of importances per output unit).
        '''
        self.temp1 = XGBClassifier(n_estimators=100).fit(self.X, self.y,eval_metric="mlogloss").feature_importances_
        self.temp = self.temp1
        for i in range(1, self.input_out_dim):
            self.temp = np.column_stack((self.temp, self.temp1))

    def forward(self, x, train=True):
        # NOTE: get(l) must be called before forward so self.l (current batch
        # labels) exists; Seq.forward internally swaps its 2nd/3rd arguments.
        x = self.sequential(x, self.l,train)
        return x

    def save(self,path):
        '''
        Saves the entire model in the provided path
        :param path(string): Path where model should be saved
        '''
        torch.save(self,path)
109 |
110 |
111 | class XBNETRegressor(torch.nn.Module):
112 | '''
113 | XBNETRegressor is a model for regression tasks that tries to combine tree-based models with
114 | neural networks to create a robust architecture.
115 | :param X_values(numpy array): Features on which model has to be trained
116 | :param y_values(numpy array): Labels of the features i.e target variable
117 | :param num_layers(int): Number of layers in the neural network
118 | :param num_layers_boosted(int,optional): Number of layers to be boosted in the neural network. Default value: 1
119 | '''
120 | def __init__(self, X_values, y_values, num_layers, num_layers_boosted=1):
121 | super(XBNETRegressor, self).__init__()
122 | self.name = "Regression"
123 | self.layers = OrderedDict()
124 | self.boosted_layers = {}
125 | self.num_layers = num_layers
126 | self.num_layers_boosted = num_layers_boosted
127 | self.X = X_values
128 | self.y = y_values
129 |
130 | self.take_layers_dim()
131 | self.base_tree()
132 |
133 | self.layers[str(0)].weight = torch.nn.Parameter(torch.from_numpy(self.temp.T))
134 |
135 |
136 | self.xg = XGBRegressor(n_estimators=100)
137 |
138 | self.sequential = Seq(self.layers)
139 | self.sequential.give(self.xg, self.num_layers_boosted)
140 | self.sigmoid = torch.nn.Sigmoid()
141 | self.feature_importances_ = None
142 |
143 | def get(self, l):
144 | '''
145 | Gets the set of current actual outputs of the inputs
146 | :param l(tensor): Labels of the current set of inputs that are getting processed.
147 | '''
148 | self.l = l
149 |
150 |
151 | def take_layers_dim(self):
152 | '''
153 | Creates the neural network by taking input from the user
154 | '''
155 | print("Enter dimensions of linear layers: ")
156 | for i in range(self.num_layers):
157 | inp = int(input("Enter input dimensions of layer " + str(i + 1) + ": "))
158 | out = int(input("Enter output dimensions of layer " + str(i + 1)+ ": "))
159 | set_bias = bool(input("Set bias as True or False: "))
160 | self.layers[str(i)] = torch.nn.Linear(inp, out, bias=set_bias)
161 | if i == 0:
162 | self.input_out_dim = out
163 | self.labels = out
164 |
165 | print("Enter your last layer ")
166 | self.ch = int(input("1. Sigmoid \n2. Softmax \n3. None \n"))
167 | if self.ch == 1:
168 | self.layers[str(self.num_layers)] = torch.nn.Sigmoid()
169 | elif self.ch == 2:
170 | dimension = int(input("Enter dimension for Softmax: "))
171 | self.layers[str(self.num_layers)] = torch.nn.Softmax(dim=dimension)
172 | else:
173 | pass
174 |
175 | def base_tree(self):
176 | '''
177 | Instantiates and trains a XGBRegressor on the first layer of the neural network to set its feature importances
178 | as the weights of the layer
179 | '''
180 | self.temp1 = XGBRegressor(n_estimators=100).fit(self.X, self.y,eval_metric="mlogloss").feature_importances_
181 | self.temp = self.temp1
182 | for i in range(1, self.input_out_dim):
183 | self.temp = np.column_stack((self.temp, self.temp1))
184 |
185 | def forward(self, x, train=True):
186 | x = self.sequential(x,self.l,train)
187 | return x
188 |
    def save(self,path):
        '''
        Saves the entire model in the provided path
        :param path(string): Path where model should be saved
        '''
        # Serializes the whole module object (not just state_dict), so loading
        # it back requires the XBNet package to be importable.
        torch.save(self,path)
--------------------------------------------------------------------------------
/XBNet/run.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from torch.utils.data import Dataset,DataLoader
3 | from XBNet.training_utils import training
4 |
class Data(Dataset):
    '''
    Torch Dataset wrapping a pair of numpy arrays so a DataLoader can serve
    them batch-wise.
    :param X(numpy array): array of features
    :param y(numpy array): array of labels of the features
    '''
    def __init__(self, X, y):
        self.X, self.y = torch.from_numpy(X), torch.from_numpy(y)

    def __len__(self):
        return self.X.shape[0]

    def __getitem__(self, idx):
        return self.X[idx], self.y[idx]
20 |
def run_XBNET(X_train,X_test,y_train,y_test,model,
              criterion,optimizer,batch_size = 16,epochs=100, save = False):
    '''
    Wraps the raw numpy splits in DataLoaders and drives the full training /
    validation cycle; metrics are printed and accuracy/loss curves plotted.
    :param X_train(numpy array): Features on which model has to be trained
    :param y_train(numpy array): Labels of X_train i.e target variable
    :param X_test(numpy array): Features on which model has to be validated
    :param y_test(numpy array): Labels of X_test i.e target variable
    :param model(XBNET Classifier/Regressor): model to be trained
    :param criterion(object of loss function): Loss function to be used for training
    :param optimizer(object of Optimizer): Optimizer used for training
    :param batch_size(int,optional): Batch size used for training and validation. Default value: 16
    :param epochs(int,optional): Number of epochs for training the model. Default value: 100
    :param save(bool,optional): Forwarded to training(); save plots to disk instead of showing them.
    :return:
     model object, list of training accuracy, training loss, testing accuracy, testing loss for all the epochs
    '''
    train_loader = DataLoader(Data(X_train, y_train), batch_size=batch_size)
    test_loader = DataLoader(Data(X_test, y_test), batch_size=batch_size)
    train_acc, train_loss, test_acc, test_loss = training(model, train_loader, test_loader,
                                                          criterion, optimizer, epochs, save=save)
    return model, train_acc, train_loss, test_acc, test_loss
--------------------------------------------------------------------------------
/XBNet/training_utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn.metrics import classification_report,r2_score,mean_absolute_error,mean_squared_error,mean_squared_log_error
3 | import matplotlib.pyplot as plt
4 | import torch
5 | from tqdm import tqdm
6 |
7 |
def training(model,trainDataload,testDataload,criterion,optimizer,epochs = 100,save = False):
    '''
    Training function for training the model with the given data
    :param model(XBNET Classifier/Regressor): model to be trained
    :param trainDataload(object of DataLoader): DataLoader with training data
    :param testDataload(object of DataLoader): DataLoader with testing data
    :param criterion(object of loss function): Loss function to be used for training
    :param optimizer(object of Optimizer): Optimizer used for training
    :param epochs(int,optional): Number of epochs for training the model. Default value: 100
    :param save(bool,optional): If True the accuracy/loss plots are saved to
        "Training_graphs.png" instead of being shown. Default value: False
    :return:
     list of training accuracy, training loss, testing accuracy, testing loss for all the epochs
    '''
    accuracy = []
    lossing = []
    val_acc = []
    val_loss = []
    # NOTE(review): the loop variable deliberately reuses (shadows) the
    # `epochs` parameter; inside the loop it is the current epoch index.
    for epochs in tqdm(range(epochs),desc="Percentage training completed: "):
        running_loss = 0
        predictions = []
        act = []
        correct = 0
        total = 0
        loss = None
        for inp, out in trainDataload:
            # Drop a trailing singleton label dimension if present
            # (shape (N,1) -> (N,)); the bare except leaves 1-D labels as-is.
            try:
                if out.shape[0] >= 1:
                    out = torch.squeeze(out, 1)
            except:
                pass
            # Hand the batch labels to the model so the boosted layers can use them.
            model.get(out.float())
            y_pred = model(inp.float())
            if model.labels == 1:
                # Single-output task: criterion expects a float column vector.
                loss = criterion(y_pred, out.view(-1, 1).float())
            else:
                # Multi-class task: criterion expects integer class indices.
                loss = criterion(y_pred, out.long())
            running_loss += loss.item()
            loss.backward()
            optimizer.step()
            optimizer.zero_grad()
            # Boosted Gradient Descent step: add the tree-derived importance
            # vectors to the gradients of the first num_layers_boosted
            # parameter tensors, scaled by 10**log|min grad| to roughly match
            # the gradient magnitude.
            # NOTE(review): this runs AFTER optimizer.zero_grad(), so the
            # injected values accumulate into the NEXT batch's backward pass —
            # confirm this ordering is intended.
            for i, p in enumerate(model.parameters()):
                if i < model.num_layers_boosted:
                    l0 = torch.unsqueeze(model.sequential.boosted_layers[i], 1)
                    lMin = torch.min(p.grad)
                    lPower = torch.log(torch.abs(lMin))
                    if lMin != 0:
                        l0 = l0 * 10 ** lPower
                        p.grad += l0
                    else:
                        pass
                else:
                    pass
            # Re-run the batch with train=False to measure training metrics.
            outputs = model(inp.float(),train = False)
            predicted = outputs
            total += out.float().size(0)
            if model.name == "Regression":
                pass
            else:
                if model.labels == 1:
                    # Binary classification: threshold outputs at 0.5 in place.
                    for i in range(len(predicted)):
                        if predicted[i] < torch.Tensor([0.5]):
                            predicted[i] = 0
                        else:
                            predicted[i] =1

                        if predicted[i].type(torch.LongTensor) == out[i]:
                            correct += 1
                else:
                    # Multi-class: argmax over class scores.
                    _, predicted = torch.max(outputs.data, 1)
                    correct += (predicted == out.long()).sum().item()

            predictions.extend(predicted.detach().numpy())
            act.extend(out.detach().numpy())
        lossing.append(running_loss/len(trainDataload))
        if model.name == "Classification":
            accuracy.append(100 * correct / total)
            print("Training Loss after epoch {} is {} and Accuracy is {}".format(epochs + 1,
                                                                                 running_loss / len(trainDataload),
                                                                                 100 * correct / total))
        else:
            # NOTE(review): this r2_score is computed on the LAST batch only
            # (out/predicted from the final loop iteration), unlike the final
            # report below which uses all batches — confirm intended.
            accuracy.append(100*r2_score(out.detach().numpy(),predicted.detach().numpy()))
            print("Training Loss after epoch {} is {} and Accuracy is {}".format(epochs+1,running_loss/len(trainDataload),accuracy[-1]))
        v_l,v_a = validate(model,testDataload,criterion,epochs)
        val_acc.extend(v_a)
        val_loss.extend(v_l)
    # Final metric report over the last epoch's accumulated predictions.
    if model.name == "Classification":
        print(classification_report(np.array(act),np.array(predictions)))
    else:
        print("R_2 Score: ", r2_score(np.array(act),np.array(predictions)))
        print("Mean Absolute error Score: ", mean_absolute_error(np.array(act),np.array(predictions)))
        print("Mean Squared error Score: ", mean_squared_error(np.array(act),np.array(predictions)))
        print("Root Mean Squared error Score: ", np.sqrt(mean_squared_error(np.array(act),np.array(predictions))))
    # One last validation pass with last=True to print the full report.
    validate(model,testDataload,criterion,epochs,True)

    # Expose a softmax over one row of layer 0's weights as the model's
    # feature-importance estimate.
    model.feature_importances_ = torch.nn.Softmax(dim=0)(model.layers["0"].weight[1]).detach().numpy()

    figure, axis = plt.subplots(2)
    figure.suptitle('Performance of XBNET')

    axis[0].plot(accuracy, label="Training Accuracy")
    axis[0].plot(val_acc, label="Testing Accuracy")
    axis[0].set_xlabel('Epochs')
    axis[0].set_ylabel('Accuracy')
    axis[0].set_title("XBNet Accuracy ")
    axis[0].legend()


    axis[1].plot(lossing, label="Training Loss")
    axis[1].plot(val_loss, label="Testing Loss")
    axis[1].set_xlabel('Epochs')
    axis[1].set_ylabel('Loss value')
    axis[1].set_title("XBNet Loss")
    axis[1].legend()
    if save == True:
        plt.savefig("Training_graphs.png")
    else:
        plt.show()

    return accuracy,lossing,val_acc,val_loss
126 |
127 |
@torch.no_grad()
def validate(model,testDataload,criterion,epochs,last=False):
    '''
    Function for validating the training on testing/validation data.
    :param model(XBNET Classifier/Regressor): model to be trained
    :param testDataload(object of DataLoader): DataLoader with testing data
    :param criterion(object of loss function): Loss function to be used for training
    :param epochs(int): Index of the current epoch (used only in the printout).
    :param last(Boolean, optional): Checks if the current epoch is the last epoch. Default: False
    :return:
     list of validation loss,accuracy
    '''
    valid_loss = 0
    accuracy = []
    lossing = []
    predictions = []
    act = []
    correct = 0
    total = 0
    for inp, out in testDataload:
        model.get(out.float())
        y_pred = model(inp.float(), train=False)
        if model.labels == 1:
            # Single-output task: criterion expects a float column vector.
            loss = criterion(y_pred, out.view(-1, 1).float())
        else:
            # Multi-class task: criterion expects integer class indices.
            loss = criterion(y_pred, out.long())
        # NOTE(review): accumulates the loss *tensor* (not loss.item()), so
        # the recorded/printed loss is a 0-d tensor — confirm intended.
        valid_loss += loss
        total += out.float().size(0)
        predicted = y_pred
        if model.name == "Regression":
            pass
        else:
            if model.labels == 1:
                # Binary classification: threshold at 0.5 in place, count matches.
                for i in range(len(y_pred)):
                    if y_pred[i] < torch.Tensor([0.5]):
                        y_pred[i] = 0
                    else:
                        y_pred[i] = 1
                    if y_pred[i].type(torch.LongTensor) == out[i]:
                        correct += 1
            else:
                # Multi-class: argmax over class scores.
                _, predicted = torch.max(y_pred.data, 1)
                correct += (predicted == out.long()).sum().item()

        predictions.extend(predicted.detach().numpy())
        act.extend(out.detach().numpy())
    lossing.append(valid_loss / len(testDataload))
    if model.name == "Classification":
        accuracy.append(100 * correct / total)
    else:
        accuracy.append(100 * r2_score(np.array(act), np.array(predictions)))
    if last:
        # Final epoch: print the full metric report.
        if model.name == "Classification":
            print(classification_report(np.array(act), np.array(predictions)))
        else:
            print("R_2 Score: ", r2_score(np.array(act), np.array(predictions)))
            print("Mean Absolute error Score: ", mean_absolute_error(np.array(act), np.array(predictions)))
            print("Mean Squared error Score: ", mean_squared_error(np.array(act), np.array(predictions)))
            print("Root Mean Squared error Score: ", np.sqrt(mean_squared_error(np.array(act), np.array(predictions))))
    if model.name == "Classification":
        print("Validation Loss after epoch {} is {} and Accuracy is {}".format(epochs+1, valid_loss / len(testDataload),
                                                                               100 * correct / total))
    else:
        print("Validation Loss after epoch {} is {} and Accuracy is {}".format(epochs+1, valid_loss / len(testDataload),
                                                                               100*r2_score(np.array(act), np.array(predictions))))
    return lossing, accuracy
194 |
def predict(model,X):
    '''
    Runs inference on the given features and post-processes the raw network
    output according to the model's task type.
    :param model(XBNET Classifier/Regressor): model to be trained
    :param X: Feature for which prediction is required
    :return:
     predicted value(int)
    '''
    y_pred = model(torch.from_numpy(X).float(), train=False)
    if model.name != "Classification":
        # Regression: return the raw scalar prediction.
        return y_pred.detach().numpy()[0]
    if model.labels == 1:
        # Binary classification: threshold the single output at 0.5.
        return 1 if y_pred >= torch.Tensor([0.5]) else 0
    # Multi-class: pick the highest-scoring class per row.
    return np.argmax(y_pred.detach().numpy(), axis=1)
216 |
def predict_proba(model,X):
    '''
    Returns the raw (probability-like) network output for the given features,
    without any thresholding or argmax post-processing.
    :param model(XBNET Classifier/Regressor): model to be trained
    :param X: Feature for which prediction is required
    :return:
     predicted probabilties value(int)
    '''
    features = torch.from_numpy(X).float()
    return model(features, train=False)
--------------------------------------------------------------------------------
/index.rst:
--------------------------------------------------------------------------------
1 | XBNet - Xtremely Boosted Network
2 | ================================
3 |
4 | Boosted neural network for tabular data
5 | ---------------------------------------
6 |
7 | |image0| |PWC| |PWC| |PWC| |PWC| |PWC|
8 |
9 | .. raw:: html
10 |
11 |
13 |
14 | .. raw:: html
15 |
16 |
17 |
18 | |Downloads|
19 |
20 | XBNET that is built on PyTorch combines tree-based models with neural
21 | networks to create a robust architecture that is trained by using a
22 | novel optimization technique, Boosted Gradient Descent for Tabular Data
23 | which increases its interpretability and performance. Boosted Gradient
24 | Descent is initialized with the feature importance of a gradient boosted
25 | tree, and it updates the weights of each layer in the neural network in
26 | two steps: - Update weights by gradient descent. - Update weights by
27 | using feature importance of a gradient boosted tree in every
28 | intermediate layer.
29 |
30 | Features
31 | --------
32 |
33 | - Better performance, training stability and interpretability for
34 | tabular data.
35 | - Easy to implement with rapid prototyping capabilities
36 | - Minimum Code requirements for creating any neural network with or without boosting
37 | ----------------------------------------------------------------------------------
38 |
39 | Comparison with XGBOOST
40 | ~~~~~~~~~~~~~~~~~~~~~~~
41 |
42 | XBNET VS XGBOOST testing accuracy on different datasets with no
43 | hyperparameter tuning
44 |
45 | +--------------------+--------------+-----------+
46 | | Dataset | XBNET | XGBOOST |
47 | +====================+==============+===========+
48 | | Iris | 100 | 97.7 |
49 | +--------------------+--------------+-----------+
50 | | Breast Cancer | 96.49 | 96.47 |
51 | +--------------------+--------------+-----------+
52 | | Wine | 97.22 | 97.22 |
53 | +--------------------+--------------+-----------+
54 | | Diabetes | 78.78 | 77.48 |
55 | +--------------------+--------------+-----------+
56 | | Titanic | 79.85 | 80.5 |
57 | +--------------------+--------------+-----------+
58 | | German Credit | 71.33 | 77.66 |
59 | +--------------------+--------------+-----------+
60 | | Digit Completion | 86.11 85.9 | 77.66 |
61 | +--------------------+--------------+-----------+
62 |
63 | Example usage
64 | ~~~~~~~~~~~~~
65 |
66 | ::
67 |
68 | import torch
69 | import numpy as np
70 | import pandas as pd
71 | from sklearn.preprocessing import LabelEncoder
72 | from sklearn.model_selection import train_test_split
73 | from XBNet.training_utils import training,predict
74 | from XBNet.models import XBNETClassifier
75 | from XBNet.run import run_XBNET
76 |
77 | data = pd.read_csv('test\Iris (1).csv')
78 | print(data.shape)
79 | x_data = data[data.columns[:-1]]
80 | print(x_data.shape)
81 | y_data = data[data.columns[-1]]
82 | le = LabelEncoder()
83 | y_data = np.array(le.fit_transform(y_data))
84 | print(le.classes_)
85 |
86 | X_train,X_test,y_train,y_test = train_test_split(x_data.to_numpy(),y_data,test_size = 0.3,random_state = 0)
87 | model = XBNETClassifier(X_train,y_train,2)
88 |
89 | criterion = torch.nn.CrossEntropyLoss()
90 | optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
91 |
92 | m,acc, lo, val_ac, val_lo = run_XBNET(X_train,X_test,y_train,y_test,model,criterion,optimizer,32,300)
93 | print(predict(m,x_data.to_numpy()[0,:]))
94 |
95 | Reference
96 | ~~~~~~~~~
97 |
98 | If you make use of this software for your work, we would appreciate it
99 | if you would cite us:
100 |
101 | ::
102 |
103 | @misc{sarkar2021xbnet,
104 | title={XBNet : An Extremely Boosted Neural Network},
105 | author={Tushar Sarkar},
106 | year={2021},
107 | eprint={2106.05239},
108 | archivePrefix={arXiv},
109 | primaryClass={cs.LG}
110 | }
111 |
112 | ::
113 |
114 | @misc{1aa4d286-fae9-431e-bd08-63c1b9c848e2,
115 | title = {Library XBNet for tabular data which helps you to create a custom extremely boosted neural network},
116 | author = {Tushar Sarkar},
117 | journal = {Software Impacts},
118 | doi = {10.24433/CO.8976286.v1},
119 | howpublished = {\url{https://www.codeocean.com/}},
120 | year = 2021,
121 | month = {6},
122 | version = {v1}
123 | }
124 |
125 | .. raw:: html
126 |
127 |
128 |
129 | Developed with :heart: by Tushar Sarkar
130 |
131 | .. |image0| image:: https://img.shields.io/badge/Made_with-PyTorch-res?style=for-the-badge&logo=pytorch
132 | :target: https://pytorch.org/
133 | .. |PWC| image:: https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/xbnet-an-extremely-boosted-neural-network/iris-classification-on-iris
134 | :target: https://paperswithcode.com/sota/iris-classification-on-iris?p=xbnet-an-extremely-boosted-neural-network
135 | .. |PWC| image:: https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/xbnet-an-extremely-boosted-neural-network/diabetes-prediction-on-diabetes
136 | :target: https://paperswithcode.com/sota/diabetes-prediction-on-diabetes?p=xbnet-an-extremely-boosted-neural-network
137 | .. |PWC| image:: https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/xbnet-an-extremely-boosted-neural-network/survival-prediction-on-titanic
138 | :target: https://paperswithcode.com/sota/survival-prediction-on-titanic?p=xbnet-an-extremely-boosted-neural-network
139 | .. |PWC| image:: https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/xbnet-an-extremely-boosted-neural-network/breast-cancer-detection-on-breast-cancer-1
140 | :target: https://paperswithcode.com/sota/breast-cancer-detection-on-breast-cancer-1?p=xbnet-an-extremely-boosted-neural-network
141 | .. |PWC| image:: https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/xbnet-an-extremely-boosted-neural-network/fraud-detection-on-kaggle-credit-card-fraud
142 | :target: https://paperswithcode.com/sota/fraud-detection-on-kaggle-credit-card-fraud?p=xbnet-an-extremely-boosted-neural-network
143 | .. |Downloads| image:: https://pepy.tech/badge/xbnet
144 | :target: https://pepy.tech/project/xbnet
145 |
--------------------------------------------------------------------------------
/screenshots/Results_metrics.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/screenshots/Results_metrics.png
--------------------------------------------------------------------------------
/screenshots/results_graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tusharsarkar3/XBNet/2b771d6beb9f15ae48e2a365f46d6f996811086f/screenshots/results_graph.png
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
import setuptools

# Long description is taken from the repo's READ.md (note: not README.md).
# encoding="utf-8" added so the build does not depend on the platform locale.
with open("READ.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="XBNet",

    version="1.4.6",

    author="Tushar Sarkar",

    author_email="tushar.sarkar@somaiya.edu",

    # Small description about the module
    description="XBNet is an open source project which is built with PyTorch that works as a Boosted neural network for tabular data",

    long_description=long_description,
    long_description_content_type="text/markdown",

    url="https://github.com/tusharsarkar3/",
    packages=setuptools.find_packages(),

    # Runtime dependencies pulled in on install.
    # Bug fix: "sklearn" is a deprecated dummy package on PyPI; the real
    # distribution is "scikit-learn".
    install_requires=[
        "scikit-learn",
        "numpy == 1.22.0",
        "pandas == 1.3.3",
        "matplotlib==3.4.3",
        "torch==1.9.0",
        "xgboost==1.4.2",
        "tqdm==4.62.2"
    ],

    license="MIT",

    # Trove classifiers: package targets Python 3, MIT-licensed, OS-independent.
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
--------------------------------------------------------------------------------
/test/Iris (1).csv:
--------------------------------------------------------------------------------
1 | Id,SepalLengthCm,SepalWidthCm,PetalLengthCm,PetalWidthCm,Species
2 | 1,5.1,3.5,1.4,0.2,Iris-setosa
3 | 2,4.9,3.0,1.4,0.2,Iris-setosa
4 | 3,4.7,3.2,1.3,0.2,Iris-setosa
5 | 4,4.6,3.1,1.5,0.2,Iris-setosa
6 | 5,5.0,3.6,1.4,0.2,Iris-setosa
7 | 6,5.4,3.9,1.7,0.4,Iris-setosa
8 | 7,4.6,3.4,1.4,0.3,Iris-setosa
9 | 8,5.0,3.4,1.5,0.2,Iris-setosa
10 | 9,4.4,2.9,1.4,0.2,Iris-setosa
11 | 10,4.9,3.1,1.5,0.1,Iris-setosa
12 | 11,5.4,3.7,1.5,0.2,Iris-setosa
13 | 12,4.8,3.4,1.6,0.2,Iris-setosa
14 | 13,4.8,3.0,1.4,0.1,Iris-setosa
15 | 14,4.3,3.0,1.1,0.1,Iris-setosa
16 | 15,5.8,4.0,1.2,0.2,Iris-setosa
17 | 16,5.7,4.4,1.5,0.4,Iris-setosa
18 | 17,5.4,3.9,1.3,0.4,Iris-setosa
19 | 18,5.1,3.5,1.4,0.3,Iris-setosa
20 | 19,5.7,3.8,1.7,0.3,Iris-setosa
21 | 20,5.1,3.8,1.5,0.3,Iris-setosa
22 | 21,5.4,3.4,1.7,0.2,Iris-setosa
23 | 22,5.1,3.7,1.5,0.4,Iris-setosa
24 | 23,4.6,3.6,1.0,0.2,Iris-setosa
25 | 24,5.1,3.3,1.7,0.5,Iris-setosa
26 | 25,4.8,3.4,1.9,0.2,Iris-setosa
27 | 26,5.0,3.0,1.6,0.2,Iris-setosa
28 | 27,5.0,3.4,1.6,0.4,Iris-setosa
29 | 28,5.2,3.5,1.5,0.2,Iris-setosa
30 | 29,5.2,3.4,1.4,0.2,Iris-setosa
31 | 30,4.7,3.2,1.6,0.2,Iris-setosa
32 | 31,4.8,3.1,1.6,0.2,Iris-setosa
33 | 32,5.4,3.4,1.5,0.4,Iris-setosa
34 | 33,5.2,4.1,1.5,0.1,Iris-setosa
35 | 34,5.5,4.2,1.4,0.2,Iris-setosa
36 | 35,4.9,3.1,1.5,0.1,Iris-setosa
37 | 36,5.0,3.2,1.2,0.2,Iris-setosa
38 | 37,5.5,3.5,1.3,0.2,Iris-setosa
39 | 38,4.9,3.1,1.5,0.1,Iris-setosa
40 | 39,4.4,3.0,1.3,0.2,Iris-setosa
41 | 40,5.1,3.4,1.5,0.2,Iris-setosa
42 | 41,5.0,3.5,1.3,0.3,Iris-setosa
43 | 42,4.5,2.3,1.3,0.3,Iris-setosa
44 | 43,4.4,3.2,1.3,0.2,Iris-setosa
45 | 44,5.0,3.5,1.6,0.6,Iris-setosa
46 | 45,5.1,3.8,1.9,0.4,Iris-setosa
47 | 46,4.8,3.0,1.4,0.3,Iris-setosa
48 | 47,5.1,3.8,1.6,0.2,Iris-setosa
49 | 48,4.6,3.2,1.4,0.2,Iris-setosa
50 | 49,5.3,3.7,1.5,0.2,Iris-setosa
51 | 50,5.0,3.3,1.4,0.2,Iris-setosa
52 | 51,7.0,3.2,4.7,1.4,Iris-versicolor
53 | 52,6.4,3.2,4.5,1.5,Iris-versicolor
54 | 53,6.9,3.1,4.9,1.5,Iris-versicolor
55 | 54,5.5,2.3,4.0,1.3,Iris-versicolor
56 | 55,6.5,2.8,4.6,1.5,Iris-versicolor
57 | 56,5.7,2.8,4.5,1.3,Iris-versicolor
58 | 57,6.3,3.3,4.7,1.6,Iris-versicolor
59 | 58,4.9,2.4,3.3,1.0,Iris-versicolor
60 | 59,6.6,2.9,4.6,1.3,Iris-versicolor
61 | 60,5.2,2.7,3.9,1.4,Iris-versicolor
62 | 61,5.0,2.0,3.5,1.0,Iris-versicolor
63 | 62,5.9,3.0,4.2,1.5,Iris-versicolor
64 | 63,6.0,2.2,4.0,1.0,Iris-versicolor
65 | 64,6.1,2.9,4.7,1.4,Iris-versicolor
66 | 65,5.6,2.9,3.6,1.3,Iris-versicolor
67 | 66,6.7,3.1,4.4,1.4,Iris-versicolor
68 | 67,5.6,3.0,4.5,1.5,Iris-versicolor
69 | 68,5.8,2.7,4.1,1.0,Iris-versicolor
70 | 69,6.2,2.2,4.5,1.5,Iris-versicolor
71 | 70,5.6,2.5,3.9,1.1,Iris-versicolor
72 | 71,5.9,3.2,4.8,1.8,Iris-versicolor
73 | 72,6.1,2.8,4.0,1.3,Iris-versicolor
74 | 73,6.3,2.5,4.9,1.5,Iris-versicolor
75 | 74,6.1,2.8,4.7,1.2,Iris-versicolor
76 | 75,6.4,2.9,4.3,1.3,Iris-versicolor
77 | 76,6.6,3.0,4.4,1.4,Iris-versicolor
78 | 77,6.8,2.8,4.8,1.4,Iris-versicolor
79 | 78,6.7,3.0,5.0,1.7,Iris-versicolor
80 | 79,6.0,2.9,4.5,1.5,Iris-versicolor
81 | 80,5.7,2.6,3.5,1.0,Iris-versicolor
82 | 81,5.5,2.4,3.8,1.1,Iris-versicolor
83 | 82,5.5,2.4,3.7,1.0,Iris-versicolor
84 | 83,5.8,2.7,3.9,1.2,Iris-versicolor
85 | 84,6.0,2.7,5.1,1.6,Iris-versicolor
86 | 85,5.4,3.0,4.5,1.5,Iris-versicolor
87 | 86,6.0,3.4,4.5,1.6,Iris-versicolor
88 | 87,6.7,3.1,4.7,1.5,Iris-versicolor
89 | 88,6.3,2.3,4.4,1.3,Iris-versicolor
90 | 89,5.6,3.0,4.1,1.3,Iris-versicolor
91 | 90,5.5,2.5,4.0,1.3,Iris-versicolor
92 | 91,5.5,2.6,4.4,1.2,Iris-versicolor
93 | 92,6.1,3.0,4.6,1.4,Iris-versicolor
94 | 93,5.8,2.6,4.0,1.2,Iris-versicolor
95 | 94,5.0,2.3,3.3,1.0,Iris-versicolor
96 | 95,5.6,2.7,4.2,1.3,Iris-versicolor
97 | 96,5.7,3.0,4.2,1.2,Iris-versicolor
98 | 97,5.7,2.9,4.2,1.3,Iris-versicolor
99 | 98,6.2,2.9,4.3,1.3,Iris-versicolor
100 | 99,5.1,2.5,3.0,1.1,Iris-versicolor
101 | 100,5.7,2.8,4.1,1.3,Iris-versicolor
102 | 101,6.3,3.3,6.0,2.5,Iris-virginica
103 | 102,5.8,2.7,5.1,1.9,Iris-virginica
104 | 103,7.1,3.0,5.9,2.1,Iris-virginica
105 | 104,6.3,2.9,5.6,1.8,Iris-virginica
106 | 105,6.5,3.0,5.8,2.2,Iris-virginica
107 | 106,7.6,3.0,6.6,2.1,Iris-virginica
108 | 107,4.9,2.5,4.5,1.7,Iris-virginica
109 | 108,7.3,2.9,6.3,1.8,Iris-virginica
110 | 109,6.7,2.5,5.8,1.8,Iris-virginica
111 | 110,7.2,3.6,6.1,2.5,Iris-virginica
112 | 111,6.5,3.2,5.1,2.0,Iris-virginica
113 | 112,6.4,2.7,5.3,1.9,Iris-virginica
114 | 113,6.8,3.0,5.5,2.1,Iris-virginica
115 | 114,5.7,2.5,5.0,2.0,Iris-virginica
116 | 115,5.8,2.8,5.1,2.4,Iris-virginica
117 | 116,6.4,3.2,5.3,2.3,Iris-virginica
118 | 117,6.5,3.0,5.5,1.8,Iris-virginica
119 | 118,7.7,3.8,6.7,2.2,Iris-virginica
120 | 119,7.7,2.6,6.9,2.3,Iris-virginica
121 | 120,6.0,2.2,5.0,1.5,Iris-virginica
122 | 121,6.9,3.2,5.7,2.3,Iris-virginica
123 | 122,5.6,2.8,4.9,2.0,Iris-virginica
124 | 123,7.7,2.8,6.7,2.0,Iris-virginica
125 | 124,6.3,2.7,4.9,1.8,Iris-virginica
126 | 125,6.7,3.3,5.7,2.1,Iris-virginica
127 | 126,7.2,3.2,6.0,1.8,Iris-virginica
128 | 127,6.2,2.8,4.8,1.8,Iris-virginica
129 | 128,6.1,3.0,4.9,1.8,Iris-virginica
130 | 129,6.4,2.8,5.6,2.1,Iris-virginica
131 | 130,7.2,3.0,5.8,1.6,Iris-virginica
132 | 131,7.4,2.8,6.1,1.9,Iris-virginica
133 | 132,7.9,3.8,6.4,2.0,Iris-virginica
134 | 133,6.4,2.8,5.6,2.2,Iris-virginica
135 | 134,6.3,2.8,5.1,1.5,Iris-virginica
136 | 135,6.1,2.6,5.6,1.4,Iris-virginica
137 | 136,7.7,3.0,6.1,2.3,Iris-virginica
138 | 137,6.3,3.4,5.6,2.4,Iris-virginica
139 | 138,6.4,3.1,5.5,1.8,Iris-virginica
140 | 139,6.0,3.0,4.8,1.8,Iris-virginica
141 | 140,6.9,3.1,5.4,2.1,Iris-virginica
142 | 141,6.7,3.1,5.6,2.4,Iris-virginica
143 | 142,6.9,3.1,5.1,2.3,Iris-virginica
144 | 143,5.8,2.7,5.1,1.9,Iris-virginica
145 | 144,6.8,3.2,5.9,2.3,Iris-virginica
146 | 145,6.7,3.3,5.7,2.5,Iris-virginica
147 | 146,6.7,3.0,5.2,2.3,Iris-virginica
148 | 147,6.3,2.5,5.0,1.9,Iris-virginica
149 | 148,6.5,3.0,5.2,2.0,Iris-virginica
150 | 149,6.2,3.4,5.4,2.3,Iris-virginica
151 | 150,5.9,3.0,5.1,1.8,Iris-virginica
152 |
--------------------------------------------------------------------------------
/test/diabetes.csv:
--------------------------------------------------------------------------------
1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
2 | 6,148,72,35,0,33.6,0.627,50,1
3 | 1,85,66,29,0,26.6,0.351,31,0
4 | 8,183,64,0,0,23.3,0.672,32,1
5 | 1,89,66,23,94,28.1,0.167,21,0
6 | 0,137,40,35,168,43.1,2.288,33,1
7 | 5,116,74,0,0,25.6,0.201,30,0
8 | 3,78,50,32,88,31,0.248,26,1
9 | 10,115,0,0,0,35.3,0.134,29,0
10 | 2,197,70,45,543,30.5,0.158,53,1
11 | 8,125,96,0,0,0,0.232,54,1
12 | 4,110,92,0,0,37.6,0.191,30,0
13 | 10,168,74,0,0,38,0.537,34,1
14 | 10,139,80,0,0,27.1,1.441,57,0
15 | 1,189,60,23,846,30.1,0.398,59,1
16 | 5,166,72,19,175,25.8,0.587,51,1
17 | 7,100,0,0,0,30,0.484,32,1
18 | 0,118,84,47,230,45.8,0.551,31,1
19 | 7,107,74,0,0,29.6,0.254,31,1
20 | 1,103,30,38,83,43.3,0.183,33,0
21 | 1,115,70,30,96,34.6,0.529,32,1
22 | 3,126,88,41,235,39.3,0.704,27,0
23 | 8,99,84,0,0,35.4,0.388,50,0
24 | 7,196,90,0,0,39.8,0.451,41,1
25 | 9,119,80,35,0,29,0.263,29,1
26 | 11,143,94,33,146,36.6,0.254,51,1
27 | 10,125,70,26,115,31.1,0.205,41,1
28 | 7,147,76,0,0,39.4,0.257,43,1
29 | 1,97,66,15,140,23.2,0.487,22,0
30 | 13,145,82,19,110,22.2,0.245,57,0
31 | 5,117,92,0,0,34.1,0.337,38,0
32 | 5,109,75,26,0,36,0.546,60,0
33 | 3,158,76,36,245,31.6,0.851,28,1
34 | 3,88,58,11,54,24.8,0.267,22,0
35 | 6,92,92,0,0,19.9,0.188,28,0
36 | 10,122,78,31,0,27.6,0.512,45,0
37 | 4,103,60,33,192,24,0.966,33,0
38 | 11,138,76,0,0,33.2,0.42,35,0
39 | 9,102,76,37,0,32.9,0.665,46,1
40 | 2,90,68,42,0,38.2,0.503,27,1
41 | 4,111,72,47,207,37.1,1.39,56,1
42 | 3,180,64,25,70,34,0.271,26,0
43 | 7,133,84,0,0,40.2,0.696,37,0
44 | 7,106,92,18,0,22.7,0.235,48,0
45 | 9,171,110,24,240,45.4,0.721,54,1
46 | 7,159,64,0,0,27.4,0.294,40,0
47 | 0,180,66,39,0,42,1.893,25,1
48 | 1,146,56,0,0,29.7,0.564,29,0
49 | 2,71,70,27,0,28,0.586,22,0
50 | 7,103,66,32,0,39.1,0.344,31,1
51 | 7,105,0,0,0,0,0.305,24,0
52 | 1,103,80,11,82,19.4,0.491,22,0
53 | 1,101,50,15,36,24.2,0.526,26,0
54 | 5,88,66,21,23,24.4,0.342,30,0
55 | 8,176,90,34,300,33.7,0.467,58,1
56 | 7,150,66,42,342,34.7,0.718,42,0
57 | 1,73,50,10,0,23,0.248,21,0
58 | 7,187,68,39,304,37.7,0.254,41,1
59 | 0,100,88,60,110,46.8,0.962,31,0
60 | 0,146,82,0,0,40.5,1.781,44,0
61 | 0,105,64,41,142,41.5,0.173,22,0
62 | 2,84,0,0,0,0,0.304,21,0
63 | 8,133,72,0,0,32.9,0.27,39,1
64 | 5,44,62,0,0,25,0.587,36,0
65 | 2,141,58,34,128,25.4,0.699,24,0
66 | 7,114,66,0,0,32.8,0.258,42,1
67 | 5,99,74,27,0,29,0.203,32,0
68 | 0,109,88,30,0,32.5,0.855,38,1
69 | 2,109,92,0,0,42.7,0.845,54,0
70 | 1,95,66,13,38,19.6,0.334,25,0
71 | 4,146,85,27,100,28.9,0.189,27,0
72 | 2,100,66,20,90,32.9,0.867,28,1
73 | 5,139,64,35,140,28.6,0.411,26,0
74 | 13,126,90,0,0,43.4,0.583,42,1
75 | 4,129,86,20,270,35.1,0.231,23,0
76 | 1,79,75,30,0,32,0.396,22,0
77 | 1,0,48,20,0,24.7,0.14,22,0
78 | 7,62,78,0,0,32.6,0.391,41,0
79 | 5,95,72,33,0,37.7,0.37,27,0
80 | 0,131,0,0,0,43.2,0.27,26,1
81 | 2,112,66,22,0,25,0.307,24,0
82 | 3,113,44,13,0,22.4,0.14,22,0
83 | 2,74,0,0,0,0,0.102,22,0
84 | 7,83,78,26,71,29.3,0.767,36,0
85 | 0,101,65,28,0,24.6,0.237,22,0
86 | 5,137,108,0,0,48.8,0.227,37,1
87 | 2,110,74,29,125,32.4,0.698,27,0
88 | 13,106,72,54,0,36.6,0.178,45,0
89 | 2,100,68,25,71,38.5,0.324,26,0
90 | 15,136,70,32,110,37.1,0.153,43,1
91 | 1,107,68,19,0,26.5,0.165,24,0
92 | 1,80,55,0,0,19.1,0.258,21,0
93 | 4,123,80,15,176,32,0.443,34,0
94 | 7,81,78,40,48,46.7,0.261,42,0
95 | 4,134,72,0,0,23.8,0.277,60,1
96 | 2,142,82,18,64,24.7,0.761,21,0
97 | 6,144,72,27,228,33.9,0.255,40,0
98 | 2,92,62,28,0,31.6,0.13,24,0
99 | 1,71,48,18,76,20.4,0.323,22,0
100 | 6,93,50,30,64,28.7,0.356,23,0
101 | 1,122,90,51,220,49.7,0.325,31,1
102 | 1,163,72,0,0,39,1.222,33,1
103 | 1,151,60,0,0,26.1,0.179,22,0
104 | 0,125,96,0,0,22.5,0.262,21,0
105 | 1,81,72,18,40,26.6,0.283,24,0
106 | 2,85,65,0,0,39.6,0.93,27,0
107 | 1,126,56,29,152,28.7,0.801,21,0
108 | 1,96,122,0,0,22.4,0.207,27,0
109 | 4,144,58,28,140,29.5,0.287,37,0
110 | 3,83,58,31,18,34.3,0.336,25,0
111 | 0,95,85,25,36,37.4,0.247,24,1
112 | 3,171,72,33,135,33.3,0.199,24,1
113 | 8,155,62,26,495,34,0.543,46,1
114 | 1,89,76,34,37,31.2,0.192,23,0
115 | 4,76,62,0,0,34,0.391,25,0
116 | 7,160,54,32,175,30.5,0.588,39,1
117 | 4,146,92,0,0,31.2,0.539,61,1
118 | 5,124,74,0,0,34,0.22,38,1
119 | 5,78,48,0,0,33.7,0.654,25,0
120 | 4,97,60,23,0,28.2,0.443,22,0
121 | 4,99,76,15,51,23.2,0.223,21,0
122 | 0,162,76,56,100,53.2,0.759,25,1
123 | 6,111,64,39,0,34.2,0.26,24,0
124 | 2,107,74,30,100,33.6,0.404,23,0
125 | 5,132,80,0,0,26.8,0.186,69,0
126 | 0,113,76,0,0,33.3,0.278,23,1
127 | 1,88,30,42,99,55,0.496,26,1
128 | 3,120,70,30,135,42.9,0.452,30,0
129 | 1,118,58,36,94,33.3,0.261,23,0
130 | 1,117,88,24,145,34.5,0.403,40,1
131 | 0,105,84,0,0,27.9,0.741,62,1
132 | 4,173,70,14,168,29.7,0.361,33,1
133 | 9,122,56,0,0,33.3,1.114,33,1
134 | 3,170,64,37,225,34.5,0.356,30,1
135 | 8,84,74,31,0,38.3,0.457,39,0
136 | 2,96,68,13,49,21.1,0.647,26,0
137 | 2,125,60,20,140,33.8,0.088,31,0
138 | 0,100,70,26,50,30.8,0.597,21,0
139 | 0,93,60,25,92,28.7,0.532,22,0
140 | 0,129,80,0,0,31.2,0.703,29,0
141 | 5,105,72,29,325,36.9,0.159,28,0
142 | 3,128,78,0,0,21.1,0.268,55,0
143 | 5,106,82,30,0,39.5,0.286,38,0
144 | 2,108,52,26,63,32.5,0.318,22,0
145 | 10,108,66,0,0,32.4,0.272,42,1
146 | 4,154,62,31,284,32.8,0.237,23,0
147 | 0,102,75,23,0,0,0.572,21,0
148 | 9,57,80,37,0,32.8,0.096,41,0
149 | 2,106,64,35,119,30.5,1.4,34,0
150 | 5,147,78,0,0,33.7,0.218,65,0
151 | 2,90,70,17,0,27.3,0.085,22,0
152 | 1,136,74,50,204,37.4,0.399,24,0
153 | 4,114,65,0,0,21.9,0.432,37,0
154 | 9,156,86,28,155,34.3,1.189,42,1
155 | 1,153,82,42,485,40.6,0.687,23,0
156 | 8,188,78,0,0,47.9,0.137,43,1
157 | 7,152,88,44,0,50,0.337,36,1
158 | 2,99,52,15,94,24.6,0.637,21,0
159 | 1,109,56,21,135,25.2,0.833,23,0
160 | 2,88,74,19,53,29,0.229,22,0
161 | 17,163,72,41,114,40.9,0.817,47,1
162 | 4,151,90,38,0,29.7,0.294,36,0
163 | 7,102,74,40,105,37.2,0.204,45,0
164 | 0,114,80,34,285,44.2,0.167,27,0
165 | 2,100,64,23,0,29.7,0.368,21,0
166 | 0,131,88,0,0,31.6,0.743,32,1
167 | 6,104,74,18,156,29.9,0.722,41,1
168 | 3,148,66,25,0,32.5,0.256,22,0
169 | 4,120,68,0,0,29.6,0.709,34,0
170 | 4,110,66,0,0,31.9,0.471,29,0
171 | 3,111,90,12,78,28.4,0.495,29,0
172 | 6,102,82,0,0,30.8,0.18,36,1
173 | 6,134,70,23,130,35.4,0.542,29,1
174 | 2,87,0,23,0,28.9,0.773,25,0
175 | 1,79,60,42,48,43.5,0.678,23,0
176 | 2,75,64,24,55,29.7,0.37,33,0
177 | 8,179,72,42,130,32.7,0.719,36,1
178 | 6,85,78,0,0,31.2,0.382,42,0
179 | 0,129,110,46,130,67.1,0.319,26,1
180 | 5,143,78,0,0,45,0.19,47,0
181 | 5,130,82,0,0,39.1,0.956,37,1
182 | 6,87,80,0,0,23.2,0.084,32,0
183 | 0,119,64,18,92,34.9,0.725,23,0
184 | 1,0,74,20,23,27.7,0.299,21,0
185 | 5,73,60,0,0,26.8,0.268,27,0
186 | 4,141,74,0,0,27.6,0.244,40,0
187 | 7,194,68,28,0,35.9,0.745,41,1
188 | 8,181,68,36,495,30.1,0.615,60,1
189 | 1,128,98,41,58,32,1.321,33,1
190 | 8,109,76,39,114,27.9,0.64,31,1
191 | 5,139,80,35,160,31.6,0.361,25,1
192 | 3,111,62,0,0,22.6,0.142,21,0
193 | 9,123,70,44,94,33.1,0.374,40,0
194 | 7,159,66,0,0,30.4,0.383,36,1
195 | 11,135,0,0,0,52.3,0.578,40,1
196 | 8,85,55,20,0,24.4,0.136,42,0
197 | 5,158,84,41,210,39.4,0.395,29,1
198 | 1,105,58,0,0,24.3,0.187,21,0
199 | 3,107,62,13,48,22.9,0.678,23,1
200 | 4,109,64,44,99,34.8,0.905,26,1
201 | 4,148,60,27,318,30.9,0.15,29,1
202 | 0,113,80,16,0,31,0.874,21,0
203 | 1,138,82,0,0,40.1,0.236,28,0
204 | 0,108,68,20,0,27.3,0.787,32,0
205 | 2,99,70,16,44,20.4,0.235,27,0
206 | 6,103,72,32,190,37.7,0.324,55,0
207 | 5,111,72,28,0,23.9,0.407,27,0
208 | 8,196,76,29,280,37.5,0.605,57,1
209 | 5,162,104,0,0,37.7,0.151,52,1
210 | 1,96,64,27,87,33.2,0.289,21,0
211 | 7,184,84,33,0,35.5,0.355,41,1
212 | 2,81,60,22,0,27.7,0.29,25,0
213 | 0,147,85,54,0,42.8,0.375,24,0
214 | 7,179,95,31,0,34.2,0.164,60,0
215 | 0,140,65,26,130,42.6,0.431,24,1
216 | 9,112,82,32,175,34.2,0.26,36,1
217 | 12,151,70,40,271,41.8,0.742,38,1
218 | 5,109,62,41,129,35.8,0.514,25,1
219 | 6,125,68,30,120,30,0.464,32,0
220 | 5,85,74,22,0,29,1.224,32,1
221 | 5,112,66,0,0,37.8,0.261,41,1
222 | 0,177,60,29,478,34.6,1.072,21,1
223 | 2,158,90,0,0,31.6,0.805,66,1
224 | 7,119,0,0,0,25.2,0.209,37,0
225 | 7,142,60,33,190,28.8,0.687,61,0
226 | 1,100,66,15,56,23.6,0.666,26,0
227 | 1,87,78,27,32,34.6,0.101,22,0
228 | 0,101,76,0,0,35.7,0.198,26,0
229 | 3,162,52,38,0,37.2,0.652,24,1
230 | 4,197,70,39,744,36.7,2.329,31,0
231 | 0,117,80,31,53,45.2,0.089,24,0
232 | 4,142,86,0,0,44,0.645,22,1
233 | 6,134,80,37,370,46.2,0.238,46,1
234 | 1,79,80,25,37,25.4,0.583,22,0
235 | 4,122,68,0,0,35,0.394,29,0
236 | 3,74,68,28,45,29.7,0.293,23,0
237 | 4,171,72,0,0,43.6,0.479,26,1
238 | 7,181,84,21,192,35.9,0.586,51,1
239 | 0,179,90,27,0,44.1,0.686,23,1
240 | 9,164,84,21,0,30.8,0.831,32,1
241 | 0,104,76,0,0,18.4,0.582,27,0
242 | 1,91,64,24,0,29.2,0.192,21,0
243 | 4,91,70,32,88,33.1,0.446,22,0
244 | 3,139,54,0,0,25.6,0.402,22,1
245 | 6,119,50,22,176,27.1,1.318,33,1
246 | 2,146,76,35,194,38.2,0.329,29,0
247 | 9,184,85,15,0,30,1.213,49,1
248 | 10,122,68,0,0,31.2,0.258,41,0
249 | 0,165,90,33,680,52.3,0.427,23,0
250 | 9,124,70,33,402,35.4,0.282,34,0
251 | 1,111,86,19,0,30.1,0.143,23,0
252 | 9,106,52,0,0,31.2,0.38,42,0
253 | 2,129,84,0,0,28,0.284,27,0
254 | 2,90,80,14,55,24.4,0.249,24,0
255 | 0,86,68,32,0,35.8,0.238,25,0
256 | 12,92,62,7,258,27.6,0.926,44,1
257 | 1,113,64,35,0,33.6,0.543,21,1
258 | 3,111,56,39,0,30.1,0.557,30,0
259 | 2,114,68,22,0,28.7,0.092,25,0
260 | 1,193,50,16,375,25.9,0.655,24,0
261 | 11,155,76,28,150,33.3,1.353,51,1
262 | 3,191,68,15,130,30.9,0.299,34,0
263 | 3,141,0,0,0,30,0.761,27,1
264 | 4,95,70,32,0,32.1,0.612,24,0
265 | 3,142,80,15,0,32.4,0.2,63,0
266 | 4,123,62,0,0,32,0.226,35,1
267 | 5,96,74,18,67,33.6,0.997,43,0
268 | 0,138,0,0,0,36.3,0.933,25,1
269 | 2,128,64,42,0,40,1.101,24,0
270 | 0,102,52,0,0,25.1,0.078,21,0
271 | 2,146,0,0,0,27.5,0.24,28,1
272 | 10,101,86,37,0,45.6,1.136,38,1
273 | 2,108,62,32,56,25.2,0.128,21,0
274 | 3,122,78,0,0,23,0.254,40,0
275 | 1,71,78,50,45,33.2,0.422,21,0
276 | 13,106,70,0,0,34.2,0.251,52,0
277 | 2,100,70,52,57,40.5,0.677,25,0
278 | 7,106,60,24,0,26.5,0.296,29,1
279 | 0,104,64,23,116,27.8,0.454,23,0
280 | 5,114,74,0,0,24.9,0.744,57,0
281 | 2,108,62,10,278,25.3,0.881,22,0
282 | 0,146,70,0,0,37.9,0.334,28,1
283 | 10,129,76,28,122,35.9,0.28,39,0
284 | 7,133,88,15,155,32.4,0.262,37,0
285 | 7,161,86,0,0,30.4,0.165,47,1
286 | 2,108,80,0,0,27,0.259,52,1
287 | 7,136,74,26,135,26,0.647,51,0
288 | 5,155,84,44,545,38.7,0.619,34,0
289 | 1,119,86,39,220,45.6,0.808,29,1
290 | 4,96,56,17,49,20.8,0.34,26,0
291 | 5,108,72,43,75,36.1,0.263,33,0
292 | 0,78,88,29,40,36.9,0.434,21,0
293 | 0,107,62,30,74,36.6,0.757,25,1
294 | 2,128,78,37,182,43.3,1.224,31,1
295 | 1,128,48,45,194,40.5,0.613,24,1
296 | 0,161,50,0,0,21.9,0.254,65,0
297 | 6,151,62,31,120,35.5,0.692,28,0
298 | 2,146,70,38,360,28,0.337,29,1
299 | 0,126,84,29,215,30.7,0.52,24,0
300 | 14,100,78,25,184,36.6,0.412,46,1
301 | 8,112,72,0,0,23.6,0.84,58,0
302 | 0,167,0,0,0,32.3,0.839,30,1
303 | 2,144,58,33,135,31.6,0.422,25,1
304 | 5,77,82,41,42,35.8,0.156,35,0
305 | 5,115,98,0,0,52.9,0.209,28,1
306 | 3,150,76,0,0,21,0.207,37,0
307 | 2,120,76,37,105,39.7,0.215,29,0
308 | 10,161,68,23,132,25.5,0.326,47,1
309 | 0,137,68,14,148,24.8,0.143,21,0
310 | 0,128,68,19,180,30.5,1.391,25,1
311 | 2,124,68,28,205,32.9,0.875,30,1
312 | 6,80,66,30,0,26.2,0.313,41,0
313 | 0,106,70,37,148,39.4,0.605,22,0
314 | 2,155,74,17,96,26.6,0.433,27,1
315 | 3,113,50,10,85,29.5,0.626,25,0
316 | 7,109,80,31,0,35.9,1.127,43,1
317 | 2,112,68,22,94,34.1,0.315,26,0
318 | 3,99,80,11,64,19.3,0.284,30,0
319 | 3,182,74,0,0,30.5,0.345,29,1
320 | 3,115,66,39,140,38.1,0.15,28,0
321 | 6,194,78,0,0,23.5,0.129,59,1
322 | 4,129,60,12,231,27.5,0.527,31,0
323 | 3,112,74,30,0,31.6,0.197,25,1
324 | 0,124,70,20,0,27.4,0.254,36,1
325 | 13,152,90,33,29,26.8,0.731,43,1
326 | 2,112,75,32,0,35.7,0.148,21,0
327 | 1,157,72,21,168,25.6,0.123,24,0
328 | 1,122,64,32,156,35.1,0.692,30,1
329 | 10,179,70,0,0,35.1,0.2,37,0
330 | 2,102,86,36,120,45.5,0.127,23,1
331 | 6,105,70,32,68,30.8,0.122,37,0
332 | 8,118,72,19,0,23.1,1.476,46,0
333 | 2,87,58,16,52,32.7,0.166,25,0
334 | 1,180,0,0,0,43.3,0.282,41,1
335 | 12,106,80,0,0,23.6,0.137,44,0
336 | 1,95,60,18,58,23.9,0.26,22,0
337 | 0,165,76,43,255,47.9,0.259,26,0
338 | 0,117,0,0,0,33.8,0.932,44,0
339 | 5,115,76,0,0,31.2,0.343,44,1
340 | 9,152,78,34,171,34.2,0.893,33,1
341 | 7,178,84,0,0,39.9,0.331,41,1
342 | 1,130,70,13,105,25.9,0.472,22,0
343 | 1,95,74,21,73,25.9,0.673,36,0
344 | 1,0,68,35,0,32,0.389,22,0
345 | 5,122,86,0,0,34.7,0.29,33,0
346 | 8,95,72,0,0,36.8,0.485,57,0
347 | 8,126,88,36,108,38.5,0.349,49,0
348 | 1,139,46,19,83,28.7,0.654,22,0
349 | 3,116,0,0,0,23.5,0.187,23,0
350 | 3,99,62,19,74,21.8,0.279,26,0
351 | 5,0,80,32,0,41,0.346,37,1
352 | 4,92,80,0,0,42.2,0.237,29,0
353 | 4,137,84,0,0,31.2,0.252,30,0
354 | 3,61,82,28,0,34.4,0.243,46,0
355 | 1,90,62,12,43,27.2,0.58,24,0
356 | 3,90,78,0,0,42.7,0.559,21,0
357 | 9,165,88,0,0,30.4,0.302,49,1
358 | 1,125,50,40,167,33.3,0.962,28,1
359 | 13,129,0,30,0,39.9,0.569,44,1
360 | 12,88,74,40,54,35.3,0.378,48,0
361 | 1,196,76,36,249,36.5,0.875,29,1
362 | 5,189,64,33,325,31.2,0.583,29,1
363 | 5,158,70,0,0,29.8,0.207,63,0
364 | 5,103,108,37,0,39.2,0.305,65,0
365 | 4,146,78,0,0,38.5,0.52,67,1
366 | 4,147,74,25,293,34.9,0.385,30,0
367 | 5,99,54,28,83,34,0.499,30,0
368 | 6,124,72,0,0,27.6,0.368,29,1
369 | 0,101,64,17,0,21,0.252,21,0
370 | 3,81,86,16,66,27.5,0.306,22,0
371 | 1,133,102,28,140,32.8,0.234,45,1
372 | 3,173,82,48,465,38.4,2.137,25,1
373 | 0,118,64,23,89,0,1.731,21,0
374 | 0,84,64,22,66,35.8,0.545,21,0
375 | 2,105,58,40,94,34.9,0.225,25,0
376 | 2,122,52,43,158,36.2,0.816,28,0
377 | 12,140,82,43,325,39.2,0.528,58,1
378 | 0,98,82,15,84,25.2,0.299,22,0
379 | 1,87,60,37,75,37.2,0.509,22,0
380 | 4,156,75,0,0,48.3,0.238,32,1
381 | 0,93,100,39,72,43.4,1.021,35,0
382 | 1,107,72,30,82,30.8,0.821,24,0
383 | 0,105,68,22,0,20,0.236,22,0
384 | 1,109,60,8,182,25.4,0.947,21,0
385 | 1,90,62,18,59,25.1,1.268,25,0
386 | 1,125,70,24,110,24.3,0.221,25,0
387 | 1,119,54,13,50,22.3,0.205,24,0
388 | 5,116,74,29,0,32.3,0.66,35,1
389 | 8,105,100,36,0,43.3,0.239,45,1
390 | 5,144,82,26,285,32,0.452,58,1
391 | 3,100,68,23,81,31.6,0.949,28,0
392 | 1,100,66,29,196,32,0.444,42,0
393 | 5,166,76,0,0,45.7,0.34,27,1
394 | 1,131,64,14,415,23.7,0.389,21,0
395 | 4,116,72,12,87,22.1,0.463,37,0
396 | 4,158,78,0,0,32.9,0.803,31,1
397 | 2,127,58,24,275,27.7,1.6,25,0
398 | 3,96,56,34,115,24.7,0.944,39,0
399 | 0,131,66,40,0,34.3,0.196,22,1
400 | 3,82,70,0,0,21.1,0.389,25,0
401 | 3,193,70,31,0,34.9,0.241,25,1
402 | 4,95,64,0,0,32,0.161,31,1
403 | 6,137,61,0,0,24.2,0.151,55,0
404 | 5,136,84,41,88,35,0.286,35,1
405 | 9,72,78,25,0,31.6,0.28,38,0
406 | 5,168,64,0,0,32.9,0.135,41,1
407 | 2,123,48,32,165,42.1,0.52,26,0
408 | 4,115,72,0,0,28.9,0.376,46,1
409 | 0,101,62,0,0,21.9,0.336,25,0
410 | 8,197,74,0,0,25.9,1.191,39,1
411 | 1,172,68,49,579,42.4,0.702,28,1
412 | 6,102,90,39,0,35.7,0.674,28,0
413 | 1,112,72,30,176,34.4,0.528,25,0
414 | 1,143,84,23,310,42.4,1.076,22,0
415 | 1,143,74,22,61,26.2,0.256,21,0
416 | 0,138,60,35,167,34.6,0.534,21,1
417 | 3,173,84,33,474,35.7,0.258,22,1
418 | 1,97,68,21,0,27.2,1.095,22,0
419 | 4,144,82,32,0,38.5,0.554,37,1
420 | 1,83,68,0,0,18.2,0.624,27,0
421 | 3,129,64,29,115,26.4,0.219,28,1
422 | 1,119,88,41,170,45.3,0.507,26,0
423 | 2,94,68,18,76,26,0.561,21,0
424 | 0,102,64,46,78,40.6,0.496,21,0
425 | 2,115,64,22,0,30.8,0.421,21,0
426 | 8,151,78,32,210,42.9,0.516,36,1
427 | 4,184,78,39,277,37,0.264,31,1
428 | 0,94,0,0,0,0,0.256,25,0
429 | 1,181,64,30,180,34.1,0.328,38,1
430 | 0,135,94,46,145,40.6,0.284,26,0
431 | 1,95,82,25,180,35,0.233,43,1
432 | 2,99,0,0,0,22.2,0.108,23,0
433 | 3,89,74,16,85,30.4,0.551,38,0
434 | 1,80,74,11,60,30,0.527,22,0
435 | 2,139,75,0,0,25.6,0.167,29,0
436 | 1,90,68,8,0,24.5,1.138,36,0
437 | 0,141,0,0,0,42.4,0.205,29,1
438 | 12,140,85,33,0,37.4,0.244,41,0
439 | 5,147,75,0,0,29.9,0.434,28,0
440 | 1,97,70,15,0,18.2,0.147,21,0
441 | 6,107,88,0,0,36.8,0.727,31,0
442 | 0,189,104,25,0,34.3,0.435,41,1
443 | 2,83,66,23,50,32.2,0.497,22,0
444 | 4,117,64,27,120,33.2,0.23,24,0
445 | 8,108,70,0,0,30.5,0.955,33,1
446 | 4,117,62,12,0,29.7,0.38,30,1
447 | 0,180,78,63,14,59.4,2.42,25,1
448 | 1,100,72,12,70,25.3,0.658,28,0
449 | 0,95,80,45,92,36.5,0.33,26,0
450 | 0,104,64,37,64,33.6,0.51,22,1
451 | 0,120,74,18,63,30.5,0.285,26,0
452 | 1,82,64,13,95,21.2,0.415,23,0
453 | 2,134,70,0,0,28.9,0.542,23,1
454 | 0,91,68,32,210,39.9,0.381,25,0
455 | 2,119,0,0,0,19.6,0.832,72,0
456 | 2,100,54,28,105,37.8,0.498,24,0
457 | 14,175,62,30,0,33.6,0.212,38,1
458 | 1,135,54,0,0,26.7,0.687,62,0
459 | 5,86,68,28,71,30.2,0.364,24,0
460 | 10,148,84,48,237,37.6,1.001,51,1
461 | 9,134,74,33,60,25.9,0.46,81,0
462 | 9,120,72,22,56,20.8,0.733,48,0
463 | 1,71,62,0,0,21.8,0.416,26,0
464 | 8,74,70,40,49,35.3,0.705,39,0
465 | 5,88,78,30,0,27.6,0.258,37,0
466 | 10,115,98,0,0,24,1.022,34,0
467 | 0,124,56,13,105,21.8,0.452,21,0
468 | 0,74,52,10,36,27.8,0.269,22,0
469 | 0,97,64,36,100,36.8,0.6,25,0
470 | 8,120,0,0,0,30,0.183,38,1
471 | 6,154,78,41,140,46.1,0.571,27,0
472 | 1,144,82,40,0,41.3,0.607,28,0
473 | 0,137,70,38,0,33.2,0.17,22,0
474 | 0,119,66,27,0,38.8,0.259,22,0
475 | 7,136,90,0,0,29.9,0.21,50,0
476 | 4,114,64,0,0,28.9,0.126,24,0
477 | 0,137,84,27,0,27.3,0.231,59,0
478 | 2,105,80,45,191,33.7,0.711,29,1
479 | 7,114,76,17,110,23.8,0.466,31,0
480 | 8,126,74,38,75,25.9,0.162,39,0
481 | 4,132,86,31,0,28,0.419,63,0
482 | 3,158,70,30,328,35.5,0.344,35,1
483 | 0,123,88,37,0,35.2,0.197,29,0
484 | 4,85,58,22,49,27.8,0.306,28,0
485 | 0,84,82,31,125,38.2,0.233,23,0
486 | 0,145,0,0,0,44.2,0.63,31,1
487 | 0,135,68,42,250,42.3,0.365,24,1
488 | 1,139,62,41,480,40.7,0.536,21,0
489 | 0,173,78,32,265,46.5,1.159,58,0
490 | 4,99,72,17,0,25.6,0.294,28,0
491 | 8,194,80,0,0,26.1,0.551,67,0
492 | 2,83,65,28,66,36.8,0.629,24,0
493 | 2,89,90,30,0,33.5,0.292,42,0
494 | 4,99,68,38,0,32.8,0.145,33,0
495 | 4,125,70,18,122,28.9,1.144,45,1
496 | 3,80,0,0,0,0,0.174,22,0
497 | 6,166,74,0,0,26.6,0.304,66,0
498 | 5,110,68,0,0,26,0.292,30,0
499 | 2,81,72,15,76,30.1,0.547,25,0
500 | 7,195,70,33,145,25.1,0.163,55,1
501 | 6,154,74,32,193,29.3,0.839,39,0
502 | 2,117,90,19,71,25.2,0.313,21,0
503 | 3,84,72,32,0,37.2,0.267,28,0
504 | 6,0,68,41,0,39,0.727,41,1
505 | 7,94,64,25,79,33.3,0.738,41,0
506 | 3,96,78,39,0,37.3,0.238,40,0
507 | 10,75,82,0,0,33.3,0.263,38,0
508 | 0,180,90,26,90,36.5,0.314,35,1
509 | 1,130,60,23,170,28.6,0.692,21,0
510 | 2,84,50,23,76,30.4,0.968,21,0
511 | 8,120,78,0,0,25,0.409,64,0
512 | 12,84,72,31,0,29.7,0.297,46,1
513 | 0,139,62,17,210,22.1,0.207,21,0
514 | 9,91,68,0,0,24.2,0.2,58,0
515 | 2,91,62,0,0,27.3,0.525,22,0
516 | 3,99,54,19,86,25.6,0.154,24,0
517 | 3,163,70,18,105,31.6,0.268,28,1
518 | 9,145,88,34,165,30.3,0.771,53,1
519 | 7,125,86,0,0,37.6,0.304,51,0
520 | 13,76,60,0,0,32.8,0.18,41,0
521 | 6,129,90,7,326,19.6,0.582,60,0
522 | 2,68,70,32,66,25,0.187,25,0
523 | 3,124,80,33,130,33.2,0.305,26,0
524 | 6,114,0,0,0,0,0.189,26,0
525 | 9,130,70,0,0,34.2,0.652,45,1
526 | 3,125,58,0,0,31.6,0.151,24,0
527 | 3,87,60,18,0,21.8,0.444,21,0
528 | 1,97,64,19,82,18.2,0.299,21,0
529 | 3,116,74,15,105,26.3,0.107,24,0
530 | 0,117,66,31,188,30.8,0.493,22,0
531 | 0,111,65,0,0,24.6,0.66,31,0
532 | 2,122,60,18,106,29.8,0.717,22,0
533 | 0,107,76,0,0,45.3,0.686,24,0
534 | 1,86,66,52,65,41.3,0.917,29,0
535 | 6,91,0,0,0,29.8,0.501,31,0
536 | 1,77,56,30,56,33.3,1.251,24,0
537 | 4,132,0,0,0,32.9,0.302,23,1
538 | 0,105,90,0,0,29.6,0.197,46,0
539 | 0,57,60,0,0,21.7,0.735,67,0
540 | 0,127,80,37,210,36.3,0.804,23,0
541 | 3,129,92,49,155,36.4,0.968,32,1
542 | 8,100,74,40,215,39.4,0.661,43,1
543 | 3,128,72,25,190,32.4,0.549,27,1
544 | 10,90,85,32,0,34.9,0.825,56,1
545 | 4,84,90,23,56,39.5,0.159,25,0
546 | 1,88,78,29,76,32,0.365,29,0
547 | 8,186,90,35,225,34.5,0.423,37,1
548 | 5,187,76,27,207,43.6,1.034,53,1
549 | 4,131,68,21,166,33.1,0.16,28,0
550 | 1,164,82,43,67,32.8,0.341,50,0
551 | 4,189,110,31,0,28.5,0.68,37,0
552 | 1,116,70,28,0,27.4,0.204,21,0
553 | 3,84,68,30,106,31.9,0.591,25,0
554 | 6,114,88,0,0,27.8,0.247,66,0
555 | 1,88,62,24,44,29.9,0.422,23,0
556 | 1,84,64,23,115,36.9,0.471,28,0
557 | 7,124,70,33,215,25.5,0.161,37,0
558 | 1,97,70,40,0,38.1,0.218,30,0
559 | 8,110,76,0,0,27.8,0.237,58,0
560 | 11,103,68,40,0,46.2,0.126,42,0
561 | 11,85,74,0,0,30.1,0.3,35,0
562 | 6,125,76,0,0,33.8,0.121,54,1
563 | 0,198,66,32,274,41.3,0.502,28,1
564 | 1,87,68,34,77,37.6,0.401,24,0
565 | 6,99,60,19,54,26.9,0.497,32,0
566 | 0,91,80,0,0,32.4,0.601,27,0
567 | 2,95,54,14,88,26.1,0.748,22,0
568 | 1,99,72,30,18,38.6,0.412,21,0
569 | 6,92,62,32,126,32,0.085,46,0
570 | 4,154,72,29,126,31.3,0.338,37,0
571 | 0,121,66,30,165,34.3,0.203,33,1
572 | 3,78,70,0,0,32.5,0.27,39,0
573 | 2,130,96,0,0,22.6,0.268,21,0
574 | 3,111,58,31,44,29.5,0.43,22,0
575 | 2,98,60,17,120,34.7,0.198,22,0
576 | 1,143,86,30,330,30.1,0.892,23,0
577 | 1,119,44,47,63,35.5,0.28,25,0
578 | 6,108,44,20,130,24,0.813,35,0
579 | 2,118,80,0,0,42.9,0.693,21,1
580 | 10,133,68,0,0,27,0.245,36,0
581 | 2,197,70,99,0,34.7,0.575,62,1
582 | 0,151,90,46,0,42.1,0.371,21,1
583 | 6,109,60,27,0,25,0.206,27,0
584 | 12,121,78,17,0,26.5,0.259,62,0
585 | 8,100,76,0,0,38.7,0.19,42,0
586 | 8,124,76,24,600,28.7,0.687,52,1
587 | 1,93,56,11,0,22.5,0.417,22,0
588 | 8,143,66,0,0,34.9,0.129,41,1
589 | 6,103,66,0,0,24.3,0.249,29,0
590 | 3,176,86,27,156,33.3,1.154,52,1
591 | 0,73,0,0,0,21.1,0.342,25,0
592 | 11,111,84,40,0,46.8,0.925,45,1
593 | 2,112,78,50,140,39.4,0.175,24,0
594 | 3,132,80,0,0,34.4,0.402,44,1
595 | 2,82,52,22,115,28.5,1.699,25,0
596 | 6,123,72,45,230,33.6,0.733,34,0
597 | 0,188,82,14,185,32,0.682,22,1
598 | 0,67,76,0,0,45.3,0.194,46,0
599 | 1,89,24,19,25,27.8,0.559,21,0
600 | 1,173,74,0,0,36.8,0.088,38,1
601 | 1,109,38,18,120,23.1,0.407,26,0
602 | 1,108,88,19,0,27.1,0.4,24,0
603 | 6,96,0,0,0,23.7,0.19,28,0
604 | 1,124,74,36,0,27.8,0.1,30,0
605 | 7,150,78,29,126,35.2,0.692,54,1
606 | 4,183,0,0,0,28.4,0.212,36,1
607 | 1,124,60,32,0,35.8,0.514,21,0
608 | 1,181,78,42,293,40,1.258,22,1
609 | 1,92,62,25,41,19.5,0.482,25,0
610 | 0,152,82,39,272,41.5,0.27,27,0
611 | 1,111,62,13,182,24,0.138,23,0
612 | 3,106,54,21,158,30.9,0.292,24,0
613 | 3,174,58,22,194,32.9,0.593,36,1
614 | 7,168,88,42,321,38.2,0.787,40,1
615 | 6,105,80,28,0,32.5,0.878,26,0
616 | 11,138,74,26,144,36.1,0.557,50,1
617 | 3,106,72,0,0,25.8,0.207,27,0
618 | 6,117,96,0,0,28.7,0.157,30,0
619 | 2,68,62,13,15,20.1,0.257,23,0
620 | 9,112,82,24,0,28.2,1.282,50,1
621 | 0,119,0,0,0,32.4,0.141,24,1
622 | 2,112,86,42,160,38.4,0.246,28,0
623 | 2,92,76,20,0,24.2,1.698,28,0
624 | 6,183,94,0,0,40.8,1.461,45,0
625 | 0,94,70,27,115,43.5,0.347,21,0
626 | 2,108,64,0,0,30.8,0.158,21,0
627 | 4,90,88,47,54,37.7,0.362,29,0
628 | 0,125,68,0,0,24.7,0.206,21,0
629 | 0,132,78,0,0,32.4,0.393,21,0
630 | 5,128,80,0,0,34.6,0.144,45,0
631 | 4,94,65,22,0,24.7,0.148,21,0
632 | 7,114,64,0,0,27.4,0.732,34,1
633 | 0,102,78,40,90,34.5,0.238,24,0
634 | 2,111,60,0,0,26.2,0.343,23,0
635 | 1,128,82,17,183,27.5,0.115,22,0
636 | 10,92,62,0,0,25.9,0.167,31,0
637 | 13,104,72,0,0,31.2,0.465,38,1
638 | 5,104,74,0,0,28.8,0.153,48,0
639 | 2,94,76,18,66,31.6,0.649,23,0
640 | 7,97,76,32,91,40.9,0.871,32,1
641 | 1,100,74,12,46,19.5,0.149,28,0
642 | 0,102,86,17,105,29.3,0.695,27,0
643 | 4,128,70,0,0,34.3,0.303,24,0
644 | 6,147,80,0,0,29.5,0.178,50,1
645 | 4,90,0,0,0,28,0.61,31,0
646 | 3,103,72,30,152,27.6,0.73,27,0
647 | 2,157,74,35,440,39.4,0.134,30,0
648 | 1,167,74,17,144,23.4,0.447,33,1
649 | 0,179,50,36,159,37.8,0.455,22,1
650 | 11,136,84,35,130,28.3,0.26,42,1
651 | 0,107,60,25,0,26.4,0.133,23,0
652 | 1,91,54,25,100,25.2,0.234,23,0
653 | 1,117,60,23,106,33.8,0.466,27,0
654 | 5,123,74,40,77,34.1,0.269,28,0
655 | 2,120,54,0,0,26.8,0.455,27,0
656 | 1,106,70,28,135,34.2,0.142,22,0
657 | 2,155,52,27,540,38.7,0.24,25,1
658 | 2,101,58,35,90,21.8,0.155,22,0
659 | 1,120,80,48,200,38.9,1.162,41,0
660 | 11,127,106,0,0,39,0.19,51,0
661 | 3,80,82,31,70,34.2,1.292,27,1
662 | 10,162,84,0,0,27.7,0.182,54,0
663 | 1,199,76,43,0,42.9,1.394,22,1
664 | 8,167,106,46,231,37.6,0.165,43,1
665 | 9,145,80,46,130,37.9,0.637,40,1
666 | 6,115,60,39,0,33.7,0.245,40,1
667 | 1,112,80,45,132,34.8,0.217,24,0
668 | 4,145,82,18,0,32.5,0.235,70,1
669 | 10,111,70,27,0,27.5,0.141,40,1
670 | 6,98,58,33,190,34,0.43,43,0
671 | 9,154,78,30,100,30.9,0.164,45,0
672 | 6,165,68,26,168,33.6,0.631,49,0
673 | 1,99,58,10,0,25.4,0.551,21,0
674 | 10,68,106,23,49,35.5,0.285,47,0
675 | 3,123,100,35,240,57.3,0.88,22,0
676 | 8,91,82,0,0,35.6,0.587,68,0
677 | 6,195,70,0,0,30.9,0.328,31,1
678 | 9,156,86,0,0,24.8,0.23,53,1
679 | 0,93,60,0,0,35.3,0.263,25,0
680 | 3,121,52,0,0,36,0.127,25,1
681 | 2,101,58,17,265,24.2,0.614,23,0
682 | 2,56,56,28,45,24.2,0.332,22,0
683 | 0,162,76,36,0,49.6,0.364,26,1
684 | 0,95,64,39,105,44.6,0.366,22,0
685 | 4,125,80,0,0,32.3,0.536,27,1
686 | 5,136,82,0,0,0,0.64,69,0
687 | 2,129,74,26,205,33.2,0.591,25,0
688 | 3,130,64,0,0,23.1,0.314,22,0
689 | 1,107,50,19,0,28.3,0.181,29,0
690 | 1,140,74,26,180,24.1,0.828,23,0
691 | 1,144,82,46,180,46.1,0.335,46,1
692 | 8,107,80,0,0,24.6,0.856,34,0
693 | 13,158,114,0,0,42.3,0.257,44,1
694 | 2,121,70,32,95,39.1,0.886,23,0
695 | 7,129,68,49,125,38.5,0.439,43,1
696 | 2,90,60,0,0,23.5,0.191,25,0
697 | 7,142,90,24,480,30.4,0.128,43,1
698 | 3,169,74,19,125,29.9,0.268,31,1
699 | 0,99,0,0,0,25,0.253,22,0
700 | 4,127,88,11,155,34.5,0.598,28,0
701 | 4,118,70,0,0,44.5,0.904,26,0
702 | 2,122,76,27,200,35.9,0.483,26,0
703 | 6,125,78,31,0,27.6,0.565,49,1
704 | 1,168,88,29,0,35,0.905,52,1
705 | 2,129,0,0,0,38.5,0.304,41,0
706 | 4,110,76,20,100,28.4,0.118,27,0
707 | 6,80,80,36,0,39.8,0.177,28,0
708 | 10,115,0,0,0,0,0.261,30,1
709 | 2,127,46,21,335,34.4,0.176,22,0
710 | 9,164,78,0,0,32.8,0.148,45,1
711 | 2,93,64,32,160,38,0.674,23,1
712 | 3,158,64,13,387,31.2,0.295,24,0
713 | 5,126,78,27,22,29.6,0.439,40,0
714 | 10,129,62,36,0,41.2,0.441,38,1
715 | 0,134,58,20,291,26.4,0.352,21,0
716 | 3,102,74,0,0,29.5,0.121,32,0
717 | 7,187,50,33,392,33.9,0.826,34,1
718 | 3,173,78,39,185,33.8,0.97,31,1
719 | 10,94,72,18,0,23.1,0.595,56,0
720 | 1,108,60,46,178,35.5,0.415,24,0
721 | 5,97,76,27,0,35.6,0.378,52,1
722 | 4,83,86,19,0,29.3,0.317,34,0
723 | 1,114,66,36,200,38.1,0.289,21,0
724 | 1,149,68,29,127,29.3,0.349,42,1
725 | 5,117,86,30,105,39.1,0.251,42,0
726 | 1,111,94,0,0,32.8,0.265,45,0
727 | 4,112,78,40,0,39.4,0.236,38,0
728 | 1,116,78,29,180,36.1,0.496,25,0
729 | 0,141,84,26,0,32.4,0.433,22,0
730 | 2,175,88,0,0,22.9,0.326,22,0
731 | 2,92,52,0,0,30.1,0.141,22,0
732 | 3,130,78,23,79,28.4,0.323,34,1
733 | 8,120,86,0,0,28.4,0.259,22,1
734 | 2,174,88,37,120,44.5,0.646,24,1
735 | 2,106,56,27,165,29,0.426,22,0
736 | 2,105,75,0,0,23.3,0.56,53,0
737 | 4,95,60,32,0,35.4,0.284,28,0
738 | 0,126,86,27,120,27.4,0.515,21,0
739 | 8,65,72,23,0,32,0.6,42,0
740 | 2,99,60,17,160,36.6,0.453,21,0
741 | 1,102,74,0,0,39.5,0.293,42,1
742 | 11,120,80,37,150,42.3,0.785,48,1
743 | 3,102,44,20,94,30.8,0.4,26,0
744 | 1,109,58,18,116,28.5,0.219,22,0
745 | 9,140,94,0,0,32.7,0.734,45,1
746 | 13,153,88,37,140,40.6,1.174,39,0
747 | 12,100,84,33,105,30,0.488,46,0
748 | 1,147,94,41,0,49.3,0.358,27,1
749 | 1,81,74,41,57,46.3,1.096,32,0
750 | 3,187,70,22,200,36.4,0.408,36,1
751 | 6,162,62,0,0,24.3,0.178,50,1
752 | 4,136,70,0,0,31.2,1.182,22,1
753 | 1,121,78,39,74,39,0.261,28,0
754 | 3,108,62,24,0,26,0.223,25,0
755 | 0,181,88,44,510,43.3,0.222,26,1
756 | 8,154,78,32,0,32.4,0.443,45,1
757 | 1,128,88,39,110,36.5,1.057,37,1
758 | 7,137,90,41,0,32,0.391,39,0
759 | 0,123,72,0,0,36.3,0.258,52,1
760 | 1,106,76,0,0,37.5,0.197,26,0
761 | 6,190,92,0,0,35.5,0.278,66,1
762 | 2,88,58,26,16,28.4,0.766,22,0
763 | 9,170,74,31,0,44,0.403,43,1
764 | 9,89,62,0,0,22.5,0.142,33,0
765 | 10,101,76,48,180,32.9,0.171,63,0
766 | 2,122,70,27,0,36.8,0.34,27,0
767 | 5,121,72,23,112,26.2,0.245,30,0
768 | 1,126,60,0,0,30.1,0.349,47,1
769 | 1,93,70,31,0,30.4,0.315,23,0
--------------------------------------------------------------------------------
/test/digit_recognizer.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | from sklearn.model_selection import train_test_split
4 | from XBNet.training_utils import training,predict
5 | from XBNet.models import XBNETClassifier
6 | from XBNet.run import run_XBNET
7 | import pandas as pd
8 |
# Train XBNet on the Kaggle digit-recognizer dataset and save the trained model.
data = pd.read_csv('test/train.csv')
y = data[['label']].to_numpy()
# Every pixel column (pixel0 .. pixelN) forms the feature matrix.
x = data.loc[:, 'pixel0':].to_numpy()
# random_state must be an int seed; the original passed True, which sklearn
# silently coerces to the seed 1 -- make that explicit.
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.20, random_state=1, stratify=y)
# run_XBNET expects 1-D label arrays, not (n, 1) column vectors.
y_train = y_train.ravel()
y_test = y_test.ravel()
model = XBNETClassifier(x_train, y_train, num_layers=2)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
m, acc, lo, val_ac, val_lo = run_XBNET(x_train, x_test, y_train, y_test, model,
                                       criterion, optimizer, epochs=1, batch_size=32)
model.save("model_dr.pb")
--------------------------------------------------------------------------------
/test/digits.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | from sklearn.model_selection import train_test_split
4 | from XBNet.training_utils import training,predict
5 | from XBNet.models import XBNETClassifier
6 | from XBNet.run import run_XBNET
7 | import pandas as pd
8 |
# Train XBNet on sklearn's bundled 8x8 handwritten-digits dataset.
from sklearn.datasets import load_digits

data = load_digits()
x = data.data
y = data.target
# random_state must be an int seed; the original passed True, which sklearn
# silently coerces to the seed 1 -- make that explicit.
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.20, random_state=1, stratify=y)
model = XBNETClassifier(x_train, y_train, num_layers=2)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
m, acc, lo, val_ac, val_lo = run_XBNET(x_train, x_test, y_train, y_test, model,
                                       criterion, optimizer, epochs=300, batch_size=32)
18 |
--------------------------------------------------------------------------------
/test/gui_test.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | import pandas as pd
4 | from sklearn.preprocessing import LabelEncoder
5 | from sklearn.model_selection import train_test_split
6 | from XBNet.training_utils import training,predict
7 | from XBNet.models import XBNETClassifier
8 | from XBNet.run import run_XBNET
9 |
10 | from os import environ
11 |
def suppress_qt_warnings():
    """Pin Qt's high-DPI scaling environment variables to silence warnings."""
    qt_scale_settings = {
        "QT_DEVICE_PIXEL_RATIO": "0",
        "QT_AUTO_SCREEN_SCALE_FACTOR": "1",
        "QT_SCREEN_SCALE_FACTORS": "1",
        "QT_SCALE_FACTOR": "1",
    }
    for key, value in qt_scale_settings.items():
        environ[key] = value
17 |
suppress_qt_warnings()
column_to_predict = input("Column to classify: ")
data = pd.read_csv(r'test\Iris (1).csv')
n_df = len(data)
label_encoded = {}   # column -> fitted LabelEncoder, reused at predict time
imputations = {}     # column -> scalar fill value, reused at predict time

# Impute or drop columns depending on how sparse they are.
for i in data.columns:
    null_frac = data[i].isnull().sum() / n_df
    if null_frac >= 0.15:
        # Too many missing values: drop the column entirely.
        data.drop(i, axis=1, inplace=True)
    elif null_frac > 0:
        # mode() returns a Series; passing it to fillna() aligns on the row
        # index and fills almost nothing -- use the scalar mode instead.
        mode_value = data[i].mode()[0]
        data[i].fillna(mode_value, inplace=True)
        imputations[i] = mode_value
    else:
        # Record the fill value even for complete columns so unseen data
        # with missing entries can be imputed consistently later.
        imputations[i] = data[i].mode()[0]

# Label-encode low-cardinality object columns; drop high-cardinality ones
# (e.g. free-text fields) that would explode the encoding.
columns_object = list(data.dtypes[data.dtypes == object].index)
for i in columns_object:
    if i != column_to_predict:
        if data[i].nunique() / n_df < 0.4:
            le = LabelEncoder()
            data[i] = le.fit_transform(data[i])
            label_encoded[i] = le
        else:
            data.drop(i, axis=1, inplace=True)

x_data = data.drop(column_to_predict, axis=1).to_numpy()
columns_finally_used = data.drop(column_to_predict, axis=1).columns
print(x_data[0, :])
print("Number of features are: ", x_data.shape[1])

y_data = data[column_to_predict].to_numpy()
if y_data.dtype == object:
    y_label_encoder = LabelEncoder()
    y_data = y_label_encoder.fit_transform(y_data)

print("Number of classes are: ", np.unique(y_data, return_counts=True))

X_train, X_test, y_train, y_test = train_test_split(x_data, y_data, test_size=0.3, random_state=0)
model = XBNETClassifier(X_train, y_train, 2, input_through_cmd=True, inputs_for_gui=[10, 4, 4, 2])

criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

m, acc, lo, val_ac, val_lo = run_XBNET(X_train, X_test, y_train, y_test, model,
                                       criterion, optimizer, 32, 300)

print(predict(m, x_data))
print(model.feature_importances_)
65 |
66 |
67 |
def process_for_predict(df, columns, imputations, encodings):
    """Apply the training-time imputations and encodings to *df*, then print predictions.

    Relies on the module-level trained model ``m`` and on ``predict`` from
    XBNet.training_utils.

    Args:
        df: raw DataFrame to score.
        columns: the feature columns used at training time (order matters).
        imputations: column -> fill value for missing entries.
        encodings: column -> fitted LabelEncoder for categorical columns.
    """
    # Work on a copy: df[columns] is a view, and writing through it triggers
    # pandas' SettingWithCopyWarning and may not modify the data at all.
    data = df[columns].copy()
    for col in data.columns:
        if data[col].isnull().sum() > 0:
            data[col].fillna(imputations[col], inplace=True)
        if col in encodings:
            data[col] = encodings[col].transform(data[col])
    print(predict(m, data.to_numpy()))
77 |
78 | process_for_predict(pd.read_csv(r"test\titanic_test.csv"),columns_finally_used,imputations,label_encoded)
--------------------------------------------------------------------------------
/test/test.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | import pandas as pd
4 | import matplotlib.pyplot as plt
5 | from sklearn.metrics import plot_roc_curve, precision_recall_curve, roc_auc_score, roc_curve
6 | from sklearn.preprocessing import LabelEncoder
7 | from sklearn.model_selection import train_test_split
8 | from XBNet.training_utils import training,predict,predict_proba
9 | from XBNet.models import XBNETClassifier
10 | from XBNet.run import run_XBNET
11 |
12 | from os import environ
13 |
def suppress_qt_warnings():
    """Pin Qt's screen-scaling environment variables to silence DPI warnings."""
    for name, value in (
        ("QT_DEVICE_PIXEL_RATIO", "0"),
        ("QT_AUTO_SCREEN_SCALE_FACTOR", "1"),
        ("QT_SCREEN_SCALE_FACTORS", "1"),
        ("QT_SCALE_FACTOR", "1"),
    ):
        environ[name] = value
19 |
suppress_qt_warnings()
# Forward slashes resolve on every OS; the original r'test\data (2).csv'-style
# backslash path only worked on Windows.
data = pd.read_csv('test/data (2).csv')
print(data.shape)
x_data = data[data.columns[2:-1]]   # feature columns (skips the first two and the last)
print(x_data.shape)
y_data = data[data.columns[1]]      # presumably the label column -- verify against the CSV
le = LabelEncoder()
y_data = np.array(le.fit_transform(y_data))
print(le.classes_)

X_train, X_test, y_train, y_test = train_test_split(x_data.to_numpy(), y_data, test_size=0.3, random_state=0)
model = XBNETClassifier(X_train, y_train, 3)                 # Model initialisation
criterion = torch.nn.BCELoss()                               # Define criterion
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)    # Define optimizer
model, acc, lo, val_ac, val_lo = run_XBNET(X_train, X_test, y_train, y_test, model,
                                           criterion, optimizer, 32, 300)  # Train model
print(predict_proba(model, x_data))     # Prediction with probabilities
print(model.feature_importances_)       # View feature importances
model.save("trained_model.pb")          # Save trained model

print(predict_proba(model, x_data.to_numpy()[0, :]).detach().numpy())

# Hoist the repeated forward pass: the original recomputed
# predict_proba(model, X_test) three separate times below.
test_probs = predict_proba(model, X_test).detach().numpy()

# No-skill baseline predicts the negative class for every sample.
ns_probs = [0 for _ in range(len(y_test))]
ns_auc = roc_auc_score(y_test, ns_probs)
lr_auc = roc_auc_score(y_test, test_probs)
# summarize scores
print('No Skill: ROC AUC=%.3f' % (ns_auc))
print('XBNet: ROC AUC=%.3f' % (lr_auc))
# calculate roc curves
ns_fpr, ns_tpr, _ = roc_curve(y_test, ns_probs)
lr_fpr, lr_tpr, _ = roc_curve(y_test, test_probs)
# plot the roc curve for the model
plt.plot(ns_fpr, ns_tpr, linestyle='--', label='No Skill')
plt.plot(lr_fpr, lr_tpr, marker='.', label='XBNet')
# axis labels
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
# show the legend and the plot
plt.legend()
plt.show()

# Precision-recall curve for the trained model.
precision, recall, thresholds = precision_recall_curve(y_test, test_probs)
fig, ax = plt.subplots()
ax.plot(recall, precision, color='purple')
ax.set_title('Precision-Recall Curve')
ax.set_ylabel('Precision')
ax.set_xlabel('Recall')
plt.show()

print(model.feature_importances_)
--------------------------------------------------------------------------------
/test/titanic.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked,relatives,travelled_alone
2 | 1,0,3,"Braund, Mr. Owen Harris",male,22,1,0,A/5 21171,7.25,,S,1,No
3 | 2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38,1,0,PC 17599,71.2833,C85,C,1,No
4 | 3,1,3,"Heikkinen, Miss. Laina",female,26,0,0,STON/O2. 3101282,7.925,,S,0,Yes
5 | 4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35,1,0,113803,53.1,C123,S,1,No
6 | 5,0,3,"Allen, Mr. William Henry",male,35,0,0,373450,8.05,,S,0,Yes
7 | 6,0,3,"Moran, Mr. James",male,18,0,0,330877,8.4583,,Q,0,Yes
8 | 7,0,1,"McCarthy, Mr. Timothy J",male,54,0,0,17463,51.8625,E46,S,0,Yes
9 | 8,0,3,"Palsson, Master. Gosta Leonard",male,2,3,1,349909,21.075,,S,4,No
10 | 9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27,0,2,347742,11.1333,,S,2,No
11 | 10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14,1,0,237736,30.0708,,C,1,No
12 | 11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4,1,1,PP 9549,16.7,G6,S,2,No
13 | 12,1,1,"Bonnell, Miss. Elizabeth",female,58,0,0,113783,26.55,C103,S,0,Yes
14 | 13,0,3,"Saundercock, Mr. William Henry",male,20,0,0,A/5. 2151,8.05,,S,0,Yes
15 | 14,0,3,"Andersson, Mr. Anders Johan",male,39,1,5,347082,31.275,,S,6,No
16 | 15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14,0,0,350406,7.8542,,S,0,Yes
17 | 16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55,0,0,248706,16.0,,S,0,Yes
18 | 17,0,3,"Rice, Master. Eugene",male,2,4,1,382652,29.125,,Q,5,No
19 | 18,1,2,"Williams, Mr. Charles Eugene",male,27,0,0,244373,13.0,,S,0,Yes
20 | 19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31,1,0,345763,18.0,,S,1,No
21 | 20,1,3,"Masselmani, Mrs. Fatima",female,32,0,0,2649,7.225,,C,0,Yes
22 | 21,0,2,"Fynney, Mr. Joseph J",male,35,0,0,239865,26.0,,S,0,Yes
23 | 22,1,2,"Beesley, Mr. Lawrence",male,34,0,0,248698,13.0,D56,S,0,Yes
24 | 23,1,3,"McGowan, Miss. Anna ""Annie""",female,15,0,0,330923,8.0292,,Q,0,Yes
25 | 24,1,1,"Sloper, Mr. William Thompson",male,28,0,0,113788,35.5,A6,S,0,Yes
26 | 25,0,3,"Palsson, Miss. Torborg Danira",female,8,3,1,349909,21.075,,S,4,No
27 | 26,1,3,"Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson)",female,38,1,5,347077,31.3875,,S,6,No
28 | 27,0,3,"Emir, Mr. Farred Chehab",male,36,0,0,2631,7.225,,C,0,Yes
29 | 28,0,1,"Fortune, Mr. Charles Alexander",male,19,3,2,19950,263.0,C23 C25 C27,S,5,No
30 | 29,1,3,"O'Dwyer, Miss. Ellen ""Nellie""",female,23,0,0,330959,7.8792,,Q,0,Yes
31 | 30,0,3,"Todoroff, Mr. Lalio",male,41,0,0,349216,7.8958,,S,0,Yes
32 | 31,0,1,"Uruchurtu, Don. Manuel E",male,40,0,0,PC 17601,27.7208,,C,0,Yes
33 | 32,1,1,"Spencer, Mrs. William Augustus (Marie Eugenie)",female,28,1,0,PC 17569,146.5208,B78,C,1,No
34 | 33,1,3,"Glynn, Miss. Mary Agatha",female,26,0,0,335677,7.75,,Q,0,Yes
35 | 34,0,2,"Wheadon, Mr. Edward H",male,66,0,0,C.A. 24579,10.5,,S,0,Yes
36 | 35,0,1,"Meyer, Mr. Edgar Joseph",male,28,1,0,PC 17604,82.1708,,C,1,No
37 | 36,0,1,"Holverson, Mr. Alexander Oskar",male,42,1,0,113789,52.0,,S,1,No
38 | 37,1,3,"Mamee, Mr. Hanna",male,41,0,0,2677,7.2292,,C,0,Yes
39 | 38,0,3,"Cann, Mr. Ernest Charles",male,21,0,0,A./5. 2152,8.05,,S,0,Yes
40 | 39,0,3,"Vander Planke, Miss. Augusta Maria",female,18,2,0,345764,18.0,,S,2,No
41 | 40,1,3,"Nicola-Yarred, Miss. Jamila",female,14,1,0,2651,11.2417,,C,1,No
42 | 41,0,3,"Ahlin, Mrs. Johan (Johanna Persdotter Larsson)",female,40,1,0,7546,9.475,,S,1,No
43 | 42,0,2,"Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott)",female,27,1,0,11668,21.0,,S,1,No
44 | 43,0,3,"Kraeff, Mr. Theodor",male,27,0,0,349253,7.8958,,C,0,Yes
45 | 44,1,2,"Laroche, Miss. Simonne Marie Anne Andree",female,3,1,2,SC/Paris 2123,41.5792,,C,3,No
46 | 45,1,3,"Devaney, Miss. Margaret Delia",female,19,0,0,330958,7.8792,,Q,0,Yes
47 | 46,0,3,"Rogers, Mr. William John",male,34,0,0,S.C./A.4. 23567,8.05,,S,0,Yes
48 | 47,0,3,"Lennon, Mr. Denis",male,16,1,0,370371,15.5,,Q,1,No
49 | 48,1,3,"O'Driscoll, Miss. Bridget",female,26,0,0,14311,7.75,,Q,0,Yes
50 | 49,0,3,"Samaan, Mr. Youssef",male,22,2,0,2662,21.6792,,C,2,No
51 | 50,0,3,"Arnold-Franchi, Mrs. Josef (Josefine Franchi)",female,18,1,0,349237,17.8,,S,1,No
52 | 51,0,3,"Panula, Master. Juha Niilo",male,7,4,1,3101295,39.6875,,S,5,No
53 | 52,0,3,"Nosworthy, Mr. Richard Cater",male,21,0,0,A/4. 39886,7.8,,S,0,Yes
54 | 53,1,1,"Harper, Mrs. Henry Sleeper (Myna Haxtun)",female,49,1,0,PC 17572,76.7292,D33,C,1,No
55 | 54,1,2,"Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson)",female,29,1,0,2926,26.0,,S,1,No
56 | 55,0,1,"Ostby, Mr. Engelhart Cornelius",male,65,0,1,113509,61.9792,B30,C,1,No
57 | 56,1,1,"Woolner, Mr. Hugh",male,19,0,0,19947,35.5,C52,S,0,Yes
58 | 57,1,2,"Rugg, Miss. Emily",female,21,0,0,C.A. 31026,10.5,,S,0,Yes
59 | 58,0,3,"Novel, Mr. Mansouer",male,28,0,0,2697,7.2292,,C,0,Yes
60 | 59,1,2,"West, Miss. Constance Mirium",female,5,1,2,C.A. 34651,27.75,,S,3,No
61 | 60,0,3,"Goodwin, Master. William Frederick",male,11,5,2,CA 2144,46.9,,S,7,No
62 | 61,0,3,"Sirayanian, Mr. Orsen",male,22,0,0,2669,7.2292,,C,0,Yes
63 | 62,1,1,"Icard, Miss. Amelie",female,38,0,0,113572,80.0,B28,,0,Yes
64 | 63,0,1,"Harris, Mr. Henry Birkhardt",male,45,1,0,36973,83.475,C83,S,1,No
65 | 64,0,3,"Skoog, Master. Harald",male,4,3,2,347088,27.9,,S,5,No
66 | 65,0,1,"Stewart, Mr. Albert A",male,27,0,0,PC 17605,27.7208,,C,0,Yes
67 | 66,1,3,"Moubarek, Master. Gerios",male,39,1,1,2661,15.2458,,C,2,No
68 | 67,1,2,"Nye, Mrs. (Elizabeth Ramell)",female,29,0,0,C.A. 29395,10.5,F33,S,0,Yes
69 | 68,0,3,"Crease, Mr. Ernest James",male,19,0,0,S.P. 3464,8.1583,,S,0,Yes
70 | 69,1,3,"Andersson, Miss. Erna Alexandra",female,17,4,2,3101281,7.925,,S,6,No
71 | 70,0,3,"Kink, Mr. Vincenz",male,26,2,0,315151,8.6625,,S,2,No
72 | 71,0,2,"Jenkin, Mr. Stephen Curnow",male,32,0,0,C.A. 33111,10.5,,S,0,Yes
73 | 72,0,3,"Goodwin, Miss. Lillian Amy",female,16,5,2,CA 2144,46.9,,S,7,No
74 | 73,0,2,"Hood, Mr. Ambrose Jr",male,21,0,0,S.O.C. 14879,73.5,,S,0,Yes
75 | 74,0,3,"Chronopoulos, Mr. Apostolos",male,26,1,0,2680,14.4542,,C,1,No
76 | 75,1,3,"Bing, Mr. Lee",male,32,0,0,1601,56.4958,,S,0,Yes
77 | 76,0,3,"Moen, Mr. Sigurd Hansen",male,25,0,0,348123,7.65,F G73,S,0,Yes
78 | 77,0,3,"Staneff, Mr. Ivan",male,37,0,0,349208,7.8958,,S,0,Yes
79 | 78,0,3,"Moutal, Mr. Rahamin Haim",male,37,0,0,374746,8.05,,S,0,Yes
80 | 79,1,2,"Caldwell, Master. Alden Gates",male,0,0,2,248738,29.0,,S,2,No
81 | 80,1,3,"Dowdell, Miss. Elizabeth",female,30,0,0,364516,12.475,,S,0,Yes
82 | 81,0,3,"Waelens, Mr. Achille",male,22,0,0,345767,9.0,,S,0,Yes
83 | 82,1,3,"Sheerlinck, Mr. Jan Baptist",male,29,0,0,345779,9.5,,S,0,Yes
84 | 83,1,3,"McDermott, Miss. Brigdet Delia",female,20,0,0,330932,7.7875,,Q,0,Yes
85 | 84,0,1,"Carrau, Mr. Francisco M",male,28,0,0,113059,47.1,,S,0,Yes
86 | 85,1,2,"Ilett, Miss. Bertha",female,17,0,0,SO/C 14885,10.5,,S,0,Yes
87 | 86,1,3,"Backstrom, Mrs. Karl Alfred (Maria Mathilda Gustafsson)",female,33,3,0,3101278,15.85,,S,3,No
88 | 87,0,3,"Ford, Mr. William Neal",male,16,1,3,W./C. 6608,34.375,,S,4,No
89 | 88,0,3,"Slocovski, Mr. Selman Francis",male,37,0,0,SOTON/OQ 392086,8.05,,S,0,Yes
90 | 89,1,1,"Fortune, Miss. Mabel Helen",female,23,3,2,19950,263.0,C23 C25 C27,S,5,No
91 | 90,0,3,"Celotti, Mr. Francesco",male,24,0,0,343275,8.05,,S,0,Yes
92 | 91,0,3,"Christmann, Mr. Emil",male,29,0,0,343276,8.05,,S,0,Yes
93 | 92,0,3,"Andreasson, Mr. Paul Edvin",male,20,0,0,347466,7.8542,,S,0,Yes
94 | 93,0,1,"Chaffee, Mr. Herbert Fuller",male,46,1,0,W.E.P. 5734,61.175,E31,S,1,No
95 | 94,0,3,"Dean, Mr. Bertram Frank",male,26,1,2,C.A. 2315,20.575,,S,3,No
96 | 95,0,3,"Coxon, Mr. Daniel",male,59,0,0,364500,7.25,,S,0,Yes
97 | 96,0,3,"Shorney, Mr. Charles Joseph",male,17,0,0,374910,8.05,,S,0,Yes
98 | 97,0,1,"Goldschmidt, Mr. George B",male,71,0,0,PC 17754,34.6542,A5,C,0,Yes
99 | 98,1,1,"Greenfield, Mr. William Bertram",male,23,0,1,PC 17759,63.3583,D10 D12,C,1,No
100 | 99,1,2,"Doling, Mrs. John T (Ada Julia Bone)",female,34,0,1,231919,23.0,,S,1,No
101 | 100,0,2,"Kantor, Mr. Sinai",male,34,1,0,244367,26.0,,S,1,No
102 | 101,0,3,"Petranec, Miss. Matilda",female,28,0,0,349245,7.8958,,S,0,Yes
103 | 102,0,3,"Petroff, Mr. Pastcho (""Pentcho"")",male,31,0,0,349215,7.8958,,S,0,Yes
104 | 103,0,1,"White, Mr. Richard Frasar",male,21,0,1,35281,77.2875,D26,S,1,No
105 | 104,0,3,"Johansson, Mr. Gustaf Joel",male,33,0,0,7540,8.6542,,S,0,Yes
106 | 105,0,3,"Gustafsson, Mr. Anders Vilhelm",male,37,2,0,3101276,7.925,,S,2,No
107 | 106,0,3,"Mionoff, Mr. Stoytcho",male,28,0,0,349207,7.8958,,S,0,Yes
108 | 107,1,3,"Salkjelsvik, Miss. Anna Kristine",female,21,0,0,343120,7.65,,S,0,Yes
109 | 108,1,3,"Moss, Mr. Albert Johan",male,31,0,0,312991,7.775,,S,0,Yes
110 | 109,0,3,"Rekic, Mr. Tido",male,38,0,0,349249,7.8958,,S,0,Yes
111 | 110,1,3,"Moran, Miss. Bertha",female,40,1,0,371110,24.15,,Q,1,No
112 | 111,0,1,"Porter, Mr. Walter Chamberlain",male,47,0,0,110465,52.0,C110,S,0,Yes
113 | 112,0,3,"Zabour, Miss. Hileni",female,14,1,0,2665,14.4542,,C,1,No
114 | 113,0,3,"Barton, Mr. David John",male,22,0,0,324669,8.05,,S,0,Yes
115 | 114,0,3,"Jussila, Miss. Katriina",female,20,1,0,4136,9.825,,S,1,No
116 | 115,0,3,"Attalah, Miss. Malake",female,17,0,0,2627,14.4583,,C,0,Yes
117 | 116,0,3,"Pekoniemi, Mr. Edvard",male,21,0,0,STON/O 2. 3101294,7.925,,S,0,Yes
118 | 117,0,3,"Connors, Mr. Patrick",male,70,0,0,370369,7.75,,Q,0,Yes
119 | 118,0,2,"Turpin, Mr. William John Robert",male,29,1,0,11668,21.0,,S,1,No
120 | 119,0,1,"Baxter, Mr. Quigg Edmond",male,24,0,1,PC 17558,247.5208,B58 B60,C,1,No
121 | 120,0,3,"Andersson, Miss. Ellis Anna Maria",female,2,4,2,347082,31.275,,S,6,No
122 | 121,0,2,"Hickman, Mr. Stanley George",male,21,2,0,S.O.C. 14879,73.5,,S,2,No
123 | 122,0,3,"Moore, Mr. Leonard Charles",male,41,0,0,A4. 54510,8.05,,S,0,Yes
124 | 123,0,2,"Nasser, Mr. Nicholas",male,32,1,0,237736,30.0708,,C,1,No
125 | 124,1,2,"Webber, Miss. Susan",female,32,0,0,27267,13.0,E101,S,0,Yes
126 | 125,0,1,"White, Mr. Percival Wayland",male,54,0,1,35281,77.2875,D26,S,1,No
127 | 126,1,3,"Nicola-Yarred, Master. Elias",male,12,1,0,2651,11.2417,,C,1,No
128 | 127,0,3,"McMahon, Mr. Martin",male,39,0,0,370372,7.75,,Q,0,Yes
129 | 128,1,3,"Madsen, Mr. Fridtjof Arne",male,24,0,0,C 17369,7.1417,,S,0,Yes
130 | 129,1,3,"Peter, Miss. Anna",female,38,1,1,2668,22.3583,F E69,C,2,No
131 | 130,0,3,"Ekstrom, Mr. Johan",male,45,0,0,347061,6.975,,S,0,Yes
132 | 131,0,3,"Drazenoic, Mr. Jozef",male,33,0,0,349241,7.8958,,C,0,Yes
133 | 132,0,3,"Coelho, Mr. Domingos Fernandeo",male,20,0,0,SOTON/O.Q. 3101307,7.05,,S,0,Yes
134 | 133,0,3,"Robins, Mrs. Alexander A (Grace Charity Laury)",female,47,1,0,A/5. 3337,14.5,,S,1,No
135 | 134,1,2,"Weisz, Mrs. Leopold (Mathilde Francoise Pede)",female,29,1,0,228414,26.0,,S,1,No
136 | 135,0,2,"Sobey, Mr. Samuel James Hayden",male,25,0,0,C.A. 29178,13.0,,S,0,Yes
137 | 136,0,2,"Richard, Mr. Emile",male,23,0,0,SC/PARIS 2133,15.0458,,C,0,Yes
138 | 137,1,1,"Newsom, Miss. Helen Monypeny",female,19,0,2,11752,26.2833,D47,S,2,No
139 | 138,0,1,"Futrelle, Mr. Jacques Heath",male,37,1,0,113803,53.1,C123,S,1,No
140 | 139,0,3,"Osen, Mr. Olaf Elon",male,16,0,0,7534,9.2167,,S,0,Yes
141 | 140,0,1,"Giglio, Mr. Victor",male,24,0,0,PC 17593,79.2,B86,C,0,Yes
142 | 141,0,3,"Boulos, Mrs. Joseph (Sultana)",female,42,0,2,2678,15.2458,,C,2,No
143 | 142,1,3,"Nysten, Miss. Anna Sofia",female,22,0,0,347081,7.75,,S,0,Yes
144 | 143,1,3,"Hakkarainen, Mrs. Pekka Pietari (Elin Matilda Dolck)",female,24,1,0,STON/O2. 3101279,15.85,,S,1,No
145 | 144,0,3,"Burke, Mr. Jeremiah",male,19,0,0,365222,6.75,,Q,0,Yes
146 | 145,0,2,"Andrew, Mr. Edgardo Samuel",male,18,0,0,231945,11.5,,S,0,Yes
147 | 146,0,2,"Nicholls, Mr. Joseph Charles",male,19,1,1,C.A. 33112,36.75,,S,2,No
148 | 147,1,3,"Andersson, Mr. August Edvard (""Wennerstrom"")",male,27,0,0,350043,7.7958,,S,0,Yes
149 | 148,0,3,"Ford, Miss. Robina Maggie ""Ruby""",female,9,2,2,W./C. 6608,34.375,,S,4,No
150 | 149,0,2,"Navratil, Mr. Michel (""Louis M Hoffman"")",male,36,0,2,230080,26.0,F2,S,2,No
151 | 150,0,2,"Byles, Rev. Thomas Roussel Davids",male,42,0,0,244310,13.0,,S,0,Yes
152 | 151,0,2,"Bateman, Rev. Robert James",male,51,0,0,S.O.P. 1166,12.525,,S,0,Yes
153 | 152,1,1,"Pears, Mrs. Thomas (Edith Wearne)",female,22,1,0,113776,66.6,C2,S,1,No
154 | 153,0,3,"Meo, Mr. Alfonzo",male,55,0,0,A.5. 11206,8.05,,S,0,Yes
155 | 154,0,3,"van Billiard, Mr. Austin Blyler",male,40,0,2,A/5. 851,14.5,,S,2,No
156 | 155,0,3,"Olsen, Mr. Ole Martin",male,27,0,0,Fa 265302,7.3125,,S,0,Yes
157 | 156,0,1,"Williams, Mr. Charles Duane",male,51,0,1,PC 17597,61.3792,,C,1,No
158 | 157,1,3,"Gilnagh, Miss. Katherine ""Katie""",female,16,0,0,35851,7.7333,,Q,0,Yes
159 | 158,0,3,"Corn, Mr. Harry",male,30,0,0,SOTON/OQ 392090,8.05,,S,0,Yes
160 | 159,0,3,"Smiljanic, Mr. Mile",male,17,0,0,315037,8.6625,,S,0,Yes
161 | 160,0,3,"Sage, Master. Thomas Henry",male,25,8,2,CA. 2343,69.55,,S,10,No
162 | 161,0,3,"Cribb, Mr. John Hatfield",male,44,0,1,371362,16.1,,S,1,No
163 | 162,1,2,"Watt, Mrs. James (Elizabeth ""Bessie"" Inglis Milne)",female,40,0,0,C.A. 33595,15.75,,S,0,Yes
164 | 163,0,3,"Bengtsson, Mr. John Viktor",male,26,0,0,347068,7.775,,S,0,Yes
165 | 164,0,3,"Calic, Mr. Jovo",male,17,0,0,315093,8.6625,,S,0,Yes
166 | 165,0,3,"Panula, Master. Eino Viljami",male,1,4,1,3101295,39.6875,,S,5,No
167 | 166,1,3,"Goldsmith, Master. Frank John William ""Frankie""",male,9,0,2,363291,20.525,,S,2,No
168 | 167,1,1,"Chibnall, Mrs. (Edith Martha Bowerman)",female,40,0,1,113505,55.0,E33,S,1,No
169 | 168,0,3,"Skoog, Mrs. William (Anna Bernhardina Karlsson)",female,45,1,4,347088,27.9,,S,5,No
170 | 169,0,1,"Baumann, Mr. John D",male,30,0,0,PC 17318,25.925,,S,0,Yes
171 | 170,0,3,"Ling, Mr. Lee",male,28,0,0,1601,56.4958,,S,0,Yes
172 | 171,0,1,"Van der hoef, Mr. Wyckoff",male,61,0,0,111240,33.5,B19,S,0,Yes
173 | 172,0,3,"Rice, Master. Arthur",male,4,4,1,382652,29.125,,Q,5,No
174 | 173,1,3,"Johnson, Miss. Eleanor Ileen",female,1,1,1,347742,11.1333,,S,2,No
175 | 174,0,3,"Sivola, Mr. Antti Wilhelm",male,21,0,0,STON/O 2. 3101280,7.925,,S,0,Yes
176 | 175,0,1,"Smith, Mr. James Clinch",male,56,0,0,17764,30.6958,A7,C,0,Yes
177 | 176,0,3,"Klasen, Mr. Klas Albin",male,18,1,1,350404,7.8542,,S,2,No
178 | 177,0,3,"Lefebre, Master. Henry Forbes",male,28,3,1,4133,25.4667,,S,4,No
179 | 178,0,1,"Isham, Miss. Ann Elizabeth",female,50,0,0,PC 17595,28.7125,C49,C,0,Yes
180 | 179,0,2,"Hale, Mr. Reginald",male,30,0,0,250653,13.0,,S,0,Yes
181 | 180,0,3,"Leonard, Mr. Lionel",male,36,0,0,LINE,0.0,,S,0,Yes
182 | 181,0,3,"Sage, Miss. Constance Gladys",female,33,8,2,CA. 2343,69.55,,S,10,No
183 | 182,0,2,"Pernot, Mr. Rene",male,24,0,0,SC/PARIS 2131,15.05,,C,0,Yes
184 | 183,0,3,"Asplund, Master. Clarence Gustaf Hugo",male,9,4,2,347077,31.3875,,S,6,No
185 | 184,1,2,"Becker, Master. Richard F",male,1,2,1,230136,39.0,F4,S,3,No
186 | 185,1,3,"Kink-Heilmann, Miss. Luise Gretchen",female,4,0,2,315153,22.025,,S,2,No
187 | 186,0,1,"Rood, Mr. Hugh Roscoe",male,37,0,0,113767,50.0,A32,S,0,Yes
188 | 187,1,3,"O'Brien, Mrs. Thomas (Johanna ""Hannah"" Godfrey)",female,25,1,0,370365,15.5,,Q,1,No
189 | 188,1,1,"Romaine, Mr. Charles Hallace (""Mr C Rolmane"")",male,45,0,0,111428,26.55,,S,0,Yes
190 | 189,0,3,"Bourke, Mr. John",male,40,1,1,364849,15.5,,Q,2,No
191 | 190,0,3,"Turcin, Mr. Stjepan",male,36,0,0,349247,7.8958,,S,0,Yes
192 | 191,1,2,"Pinsky, Mrs. (Rosa)",female,32,0,0,234604,13.0,,S,0,Yes
193 | 192,0,2,"Carbines, Mr. William",male,19,0,0,28424,13.0,,S,0,Yes
194 | 193,1,3,"Andersen-Jensen, Miss. Carla Christine Nielsine",female,19,1,0,350046,7.8542,,S,1,No
195 | 194,1,2,"Navratil, Master. Michel M",male,3,1,1,230080,26.0,F2,S,2,No
196 | 195,1,1,"Brown, Mrs. James Joseph (Margaret Tobin)",female,44,0,0,PC 17610,27.7208,B4,C,0,Yes
197 | 196,1,1,"Lurette, Miss. Elise",female,58,0,0,PC 17569,146.5208,B80,C,0,Yes
198 | 197,0,3,"Mernagh, Mr. Robert",male,29,0,0,368703,7.75,,Q,0,Yes
199 | 198,0,3,"Olsen, Mr. Karl Siegwart Andreas",male,42,0,1,4579,8.4042,,S,1,No
200 | 199,1,3,"Madigan, Miss. Margaret ""Maggie""",female,34,0,0,370370,7.75,,Q,0,Yes
201 | 200,0,2,"Yrois, Miss. Henriette (""Mrs Harbeck"")",female,24,0,0,248747,13.0,,S,0,Yes
202 | 201,0,3,"Vande Walle, Mr. Nestor Cyriel",male,28,0,0,345770,9.5,,S,0,Yes
203 | 202,0,3,"Sage, Mr. Frederick",male,19,8,2,CA. 2343,69.55,,S,10,No
204 | 203,0,3,"Johanson, Mr. Jakob Alfred",male,34,0,0,3101264,6.4958,,S,0,Yes
205 | 204,0,3,"Youseff, Mr. Gerious",male,45,0,0,2628,7.225,,C,0,Yes
206 | 205,1,3,"Cohen, Mr. Gurshon ""Gus""",male,18,0,0,A/5 3540,8.05,,S,0,Yes
207 | 206,0,3,"Strom, Miss. Telma Matilda",female,2,0,1,347054,10.4625,G6,S,1,No
208 | 207,0,3,"Backstrom, Mr. Karl Alfred",male,32,1,0,3101278,15.85,,S,1,No
209 | 208,1,3,"Albimona, Mr. Nassef Cassem",male,26,0,0,2699,18.7875,,C,0,Yes
210 | 209,1,3,"Carr, Miss. Helen ""Ellen""",female,16,0,0,367231,7.75,,Q,0,Yes
211 | 210,1,1,"Blank, Mr. Henry",male,40,0,0,112277,31.0,A31,C,0,Yes
212 | 211,0,3,"Ali, Mr. Ahmed",male,24,0,0,SOTON/O.Q. 3101311,7.05,,S,0,Yes
213 | 212,1,2,"Cameron, Miss. Clear Annie",female,35,0,0,F.C.C. 13528,21.0,,S,0,Yes
214 | 213,0,3,"Perkin, Mr. John Henry",male,22,0,0,A/5 21174,7.25,,S,0,Yes
215 | 214,0,2,"Givard, Mr. Hans Kristensen",male,30,0,0,250646,13.0,,S,0,Yes
216 | 215,0,3,"Kiernan, Mr. Philip",male,32,1,0,367229,7.75,,Q,1,No
217 | 216,1,1,"Newell, Miss. Madeleine",female,31,1,0,35273,113.275,D36,C,1,No
218 | 217,1,3,"Honkanen, Miss. Eliina",female,27,0,0,STON/O2. 3101283,7.925,,S,0,Yes
219 | 218,0,2,"Jacobsohn, Mr. Sidney Samuel",male,42,1,0,243847,27.0,,S,1,No
220 | 219,1,1,"Bazzani, Miss. Albina",female,32,0,0,11813,76.2917,D15,C,0,Yes
221 | 220,0,2,"Harris, Mr. Walter",male,30,0,0,W/C 14208,10.5,,S,0,Yes
222 | 221,1,3,"Sunderland, Mr. Victor Francis",male,16,0,0,SOTON/OQ 392089,8.05,,S,0,Yes
223 | 222,0,2,"Bracken, Mr. James H",male,27,0,0,220367,13.0,,S,0,Yes
224 | 223,0,3,"Green, Mr. George Henry",male,51,0,0,21440,8.05,,S,0,Yes
225 | 224,0,3,"Nenkoff, Mr. Christo",male,34,0,0,349234,7.8958,,S,0,Yes
226 | 225,1,1,"Hoyt, Mr. Frederick Maxfield",male,38,1,0,19943,90.0,C93,S,1,No
227 | 226,0,3,"Berglund, Mr. Karl Ivar Sven",male,22,0,0,PP 4348,9.35,,S,0,Yes
228 | 227,1,2,"Mellors, Mr. William John",male,19,0,0,SW/PP 751,10.5,,S,0,Yes
229 | 228,0,3,"Lovell, Mr. John Hall (""Henry"")",male,20,0,0,A/5 21173,7.25,,S,0,Yes
230 | 229,0,2,"Fahlstrom, Mr. Arne Jonas",male,18,0,0,236171,13.0,,S,0,Yes
231 | 230,0,3,"Lefebre, Miss. Mathilde",female,31,3,1,4133,25.4667,,S,4,No
232 | 231,1,1,"Harris, Mrs. Henry Birkhardt (Irene Wallach)",female,35,1,0,36973,83.475,C83,S,1,No
233 | 232,0,3,"Larsson, Mr. Bengt Edvin",male,29,0,0,347067,7.775,,S,0,Yes
234 | 233,0,2,"Sjostedt, Mr. Ernst Adolf",male,59,0,0,237442,13.5,,S,0,Yes
235 | 234,1,3,"Asplund, Miss. Lillian Gertrud",female,5,4,2,347077,31.3875,,S,6,No
236 | 235,0,2,"Leyson, Mr. Robert William Norman",male,24,0,0,C.A. 29566,10.5,,S,0,Yes
237 | 236,0,3,"Harknett, Miss. Alice Phoebe",female,22,0,0,W./C. 6609,7.55,,S,0,Yes
238 | 237,0,2,"Hold, Mr. Stephen",male,44,1,0,26707,26.0,,S,1,No
239 | 238,1,2,"Collyer, Miss. Marjorie ""Lottie""",female,8,0,2,C.A. 31921,26.25,,S,2,No
240 | 239,0,2,"Pengelly, Mr. Frederick William",male,19,0,0,28665,10.5,,S,0,Yes
241 | 240,0,2,"Hunt, Mr. George Henry",male,33,0,0,SCO/W 1585,12.275,,S,0,Yes
242 | 241,0,3,"Zabour, Miss. Thamine",female,32,1,0,2665,14.4542,,C,1,No
243 | 242,1,3,"Murphy, Miss. Katherine ""Kate""",female,24,1,0,367230,15.5,,Q,1,No
244 | 243,0,2,"Coleridge, Mr. Reginald Charles",male,29,0,0,W./C. 14263,10.5,,S,0,Yes
245 | 244,0,3,"Maenpaa, Mr. Matti Alexanteri",male,22,0,0,STON/O 2. 3101275,7.125,,S,0,Yes
246 | 245,0,3,"Attalah, Mr. Sleiman",male,30,0,0,2694,7.225,,C,0,Yes
247 | 246,0,1,"Minahan, Dr. William Edward",male,44,2,0,19928,90.0,C78,Q,2,No
248 | 247,0,3,"Lindahl, Miss. Agda Thorilda Viktoria",female,25,0,0,347071,7.775,,S,0,Yes
249 | 248,1,2,"Hamalainen, Mrs. William (Anna)",female,24,0,2,250649,14.5,,S,2,No
250 | 249,1,1,"Beckwith, Mr. Richard Leonard",male,37,1,1,11751,52.5542,D35,S,2,No
251 | 250,0,2,"Carter, Rev. Ernest Courtenay",male,54,1,0,244252,26.0,,S,1,No
252 | 251,0,3,"Reed, Mr. James George",male,19,0,0,362316,7.25,,S,0,Yes
253 | 252,0,3,"Strom, Mrs. Wilhelm (Elna Matilda Persson)",female,29,1,1,347054,10.4625,G6,S,2,No
254 | 253,0,1,"Stead, Mr. William Thomas",male,62,0,0,113514,26.55,C87,S,0,Yes
255 | 254,0,3,"Lobb, Mr. William Arthur",male,30,1,0,A/5. 3336,16.1,,S,1,No
256 | 255,0,3,"Rosblom, Mrs. Viktor (Helena Wilhelmina)",female,41,0,2,370129,20.2125,,S,2,No
257 | 256,1,3,"Touma, Mrs. Darwis (Hanne Youssef Razi)",female,29,0,2,2650,15.2458,,C,2,No
258 | 257,1,1,"Thorne, Mrs. Gertrude Maybelle",female,15,0,0,PC 17585,79.2,,C,0,Yes
259 | 258,1,1,"Cherry, Miss. Gladys",female,30,0,0,110152,86.5,B77,S,0,Yes
260 | 259,1,1,"Ward, Miss. Anna",female,35,0,0,PC 17755,512.3292,,C,0,Yes
261 | 260,1,2,"Parrish, Mrs. (Lutie Davis)",female,50,0,1,230433,26.0,,S,1,No
262 | 261,0,3,"Smith, Mr. Thomas",male,39,0,0,384461,7.75,,Q,0,Yes
263 | 262,1,3,"Asplund, Master. Edvin Rojj Felix",male,3,4,2,347077,31.3875,,S,6,No
264 | 263,0,1,"Taussig, Mr. Emil",male,52,1,1,110413,79.65,E67,S,2,No
265 | 264,0,1,"Harrison, Mr. William",male,40,0,0,112059,0.0,B94,S,0,Yes
266 | 265,0,3,"Henry, Miss. Delia",female,33,0,0,382649,7.75,,Q,0,Yes
267 | 266,0,2,"Reeves, Mr. David",male,36,0,0,C.A. 17248,10.5,,S,0,Yes
268 | 267,0,3,"Panula, Mr. Ernesti Arvid",male,16,4,1,3101295,39.6875,,S,5,No
269 | 268,1,3,"Persson, Mr. Ernst Ulrik",male,25,1,0,347083,7.775,,S,1,No
270 | 269,1,1,"Graham, Mrs. William Thompson (Edith Junkins)",female,58,0,1,PC 17582,153.4625,C125,S,1,No
271 | 270,1,1,"Bissette, Miss. Amelia",female,35,0,0,PC 17760,135.6333,C99,S,0,Yes
272 | 271,0,1,"Cairns, Mr. Alexander",male,31,0,0,113798,31.0,,S,0,Yes
273 | 272,1,3,"Tornquist, Mr. William Henry",male,25,0,0,LINE,0.0,,S,0,Yes
274 | 273,1,2,"Mellinger, Mrs. (Elizabeth Anne Maidment)",female,41,0,1,250644,19.5,,S,1,No
275 | 274,0,1,"Natsch, Mr. Charles H",male,37,0,1,PC 17596,29.7,C118,C,1,No
276 | 275,1,3,"Healy, Miss. Hanora ""Nora""",female,38,0,0,370375,7.75,,Q,0,Yes
277 | 276,1,1,"Andrews, Miss. Kornelia Theodosia",female,63,1,0,13502,77.9583,D7,S,1,No
278 | 277,0,3,"Lindblom, Miss. Augusta Charlotta",female,45,0,0,347073,7.75,,S,0,Yes
279 | 278,0,2,"Parkes, Mr. Francis ""Frank""",male,37,0,0,239853,0.0,,S,0,Yes
280 | 279,0,3,"Rice, Master. Eric",male,7,4,1,382652,29.125,,Q,5,No
281 | 280,1,3,"Abbott, Mrs. Stanton (Rosa Hunt)",female,35,1,1,C.A. 2673,20.25,,S,2,No
282 | 281,0,3,"Duane, Mr. Frank",male,65,0,0,336439,7.75,,Q,0,Yes
283 | 282,0,3,"Olsson, Mr. Nils Johan Goransson",male,28,0,0,347464,7.8542,,S,0,Yes
284 | 283,0,3,"de Pelsmaeker, Mr. Alfons",male,16,0,0,345778,9.5,,S,0,Yes
285 | 284,1,3,"Dorking, Mr. Edward Arthur",male,19,0,0,A/5. 10482,8.05,,S,0,Yes
286 | 285,0,1,"Smith, Mr. Richard William",male,34,0,0,113056,26.0,A19,S,0,Yes
287 | 286,0,3,"Stankovic, Mr. Ivan",male,33,0,0,349239,8.6625,,C,0,Yes
288 | 287,1,3,"de Mulder, Mr. Theodore",male,30,0,0,345774,9.5,,S,0,Yes
289 | 288,0,3,"Naidenoff, Mr. Penko",male,22,0,0,349206,7.8958,,S,0,Yes
290 | 289,1,2,"Hosono, Mr. Masabumi",male,42,0,0,237798,13.0,,S,0,Yes
291 | 290,1,3,"Connolly, Miss. Kate",female,22,0,0,370373,7.75,,Q,0,Yes
292 | 291,1,1,"Barber, Miss. Ellen ""Nellie""",female,26,0,0,19877,78.85,,S,0,Yes
293 | 292,1,1,"Bishop, Mrs. Dickinson H (Helen Walton)",female,19,1,0,11967,91.0792,B49,C,1,No
294 | 293,0,2,"Levy, Mr. Rene Jacques",male,36,0,0,SC/Paris 2163,12.875,D,C,0,Yes
295 | 294,0,3,"Haas, Miss. Aloisia",female,24,0,0,349236,8.85,,S,0,Yes
296 | 295,0,3,"Mineff, Mr. Ivan",male,24,0,0,349233,7.8958,,S,0,Yes
297 | 296,0,1,"Lewy, Mr. Ervin G",male,34,0,0,PC 17612,27.7208,,C,0,Yes
298 | 297,0,3,"Hanna, Mr. Mansour",male,23,0,0,2693,7.2292,,C,0,Yes
299 | 298,0,1,"Allison, Miss. Helen Loraine",female,2,1,2,113781,151.55,C22 C26,S,3,No
300 | 299,1,1,"Saalfeld, Mr. Adolphe",male,34,0,0,19988,30.5,C106,S,0,Yes
301 | 300,1,1,"Baxter, Mrs. James (Helene DeLaudeniere Chaput)",female,50,0,1,PC 17558,247.5208,B58 B60,C,1,No
302 | 301,1,3,"Kelly, Miss. Anna Katherine ""Annie Kate""",female,27,0,0,9234,7.75,,Q,0,Yes
303 | 302,1,3,"McCoy, Mr. Bernard",male,19,2,0,367226,23.25,,Q,2,No
304 | 303,0,3,"Johnson, Mr. William Cahoone Jr",male,19,0,0,LINE,0.0,,S,0,Yes
305 | 304,1,2,"Keane, Miss. Nora A",female,28,0,0,226593,12.35,E101,Q,0,Yes
306 | 305,0,3,"Williams, Mr. Howard Hugh ""Harry""",male,22,0,0,A/5 2466,8.05,,S,0,Yes
307 | 306,1,1,"Allison, Master. Hudson Trevor",male,0,1,2,113781,151.55,C22 C26,S,3,No
308 | 307,1,1,"Fleming, Miss. Margaret",female,28,0,0,17421,110.8833,,C,0,Yes
309 | 308,1,1,"Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo)",female,17,1,0,PC 17758,108.9,C65,C,1,No
310 | 309,0,2,"Abelson, Mr. Samuel",male,30,1,0,P/PP 3381,24.0,,C,1,No
311 | 310,1,1,"Francatelli, Miss. Laura Mabel",female,30,0,0,PC 17485,56.9292,E36,C,0,Yes
312 | 311,1,1,"Hays, Miss. Margaret Bechstein",female,24,0,0,11767,83.1583,C54,C,0,Yes
313 | 312,1,1,"Ryerson, Miss. Emily Borie",female,18,2,2,PC 17608,262.375,B57 B59 B63 B66,C,4,No
314 | 313,0,2,"Lahtinen, Mrs. William (Anna Sylfven)",female,26,1,1,250651,26.0,,S,2,No
315 | 314,0,3,"Hendekovic, Mr. Ignjac",male,28,0,0,349243,7.8958,,S,0,Yes
316 | 315,0,2,"Hart, Mr. Benjamin",male,43,1,1,F.C.C. 13529,26.25,,S,2,No
317 | 316,1,3,"Nilsson, Miss. Helmina Josefina",female,26,0,0,347470,7.8542,,S,0,Yes
318 | 317,1,2,"Kantor, Mrs. Sinai (Miriam Sternin)",female,24,1,0,244367,26.0,,S,1,No
319 | 318,0,2,"Moraweck, Dr. Ernest",male,54,0,0,29011,14.0,,S,0,Yes
320 | 319,1,1,"Wick, Miss. Mary Natalie",female,31,0,2,36928,164.8667,C7,S,2,No
321 | 320,1,1,"Spedden, Mrs. Frederic Oakley (Margaretta Corning Stone)",female,40,1,1,16966,134.5,E34,C,2,No
322 | 321,0,3,"Dennis, Mr. Samuel",male,22,0,0,A/5 21172,7.25,,S,0,Yes
323 | 322,0,3,"Danoff, Mr. Yoto",male,27,0,0,349219,7.8958,,S,0,Yes
324 | 323,1,2,"Slayter, Miss. Hilda Mary",female,30,0,0,234818,12.35,,Q,0,Yes
325 | 324,1,2,"Caldwell, Mrs. Albert Francis (Sylvia Mae Harbaugh)",female,22,1,1,248738,29.0,,S,2,No
326 | 325,0,3,"Sage, Mr. George John Jr",male,16,8,2,CA. 2343,69.55,,S,10,No
327 | 326,1,1,"Young, Miss. Marie Grice",female,36,0,0,PC 17760,135.6333,C32,C,0,Yes
328 | 327,0,3,"Nysveen, Mr. Johan Hansen",male,61,0,0,345364,6.2375,,S,0,Yes
329 | 328,1,2,"Ball, Mrs. (Ada E Hall)",female,36,0,0,28551,13.0,D,S,0,Yes
330 | 329,1,3,"Goldsmith, Mrs. Frank John (Emily Alice Brown)",female,31,1,1,363291,20.525,,S,2,No
331 | 330,1,1,"Hippach, Miss. Jean Gertrude",female,16,0,1,111361,57.9792,B18,C,1,No
332 | 331,1,3,"McCoy, Miss. Agnes",female,29,2,0,367226,23.25,,Q,2,No
333 | 332,0,1,"Partner, Mr. Austen",male,45,0,0,113043,28.5,C124,S,0,Yes
334 | 333,0,1,"Graham, Mr. George Edward",male,38,0,1,PC 17582,153.4625,C91,S,1,No
335 | 334,0,3,"Vander Planke, Mr. Leo Edmondus",male,16,2,0,345764,18.0,,S,2,No
336 | 335,1,1,"Frauenthal, Mrs. Henry William (Clara Heinsheimer)",female,26,1,0,PC 17611,133.65,,S,1,No
337 | 336,0,3,"Denkoff, Mr. Mitto",male,42,0,0,349225,7.8958,,S,0,Yes
338 | 337,0,1,"Pears, Mr. Thomas Clinton",male,29,1,0,113776,66.6,C2,S,1,No
339 | 338,1,1,"Burns, Miss. Elizabeth Margaret",female,41,0,0,16966,134.5,E40,C,0,Yes
340 | 339,1,3,"Dahl, Mr. Karl Edwart",male,45,0,0,7598,8.05,,S,0,Yes
341 | 340,0,1,"Blackwell, Mr. Stephen Weart",male,45,0,0,113784,35.5,T,S,0,Yes
342 | 341,1,2,"Navratil, Master. Edmond Roger",male,2,1,1,230080,26.0,F2,S,2,No
343 | 342,1,1,"Fortune, Miss. Alice Elizabeth",female,24,3,2,19950,263.0,C23 C25 C27,S,5,No
344 | 343,0,2,"Collander, Mr. Erik Gustaf",male,28,0,0,248740,13.0,,S,0,Yes
345 | 344,0,2,"Sedgwick, Mr. Charles Frederick Waddington",male,25,0,0,244361,13.0,,S,0,Yes
346 | 345,0,2,"Fox, Mr. Stanley Hubert",male,36,0,0,229236,13.0,,S,0,Yes
347 | 346,1,2,"Brown, Miss. Amelia ""Mildred""",female,24,0,0,248733,13.0,F33,S,0,Yes
348 | 347,1,2,"Smith, Miss. Marion Elsie",female,40,0,0,31418,13.0,,S,0,Yes
349 | 348,1,3,"Davison, Mrs. Thomas Henry (Mary E Finck)",female,18,1,0,386525,16.1,,S,1,No
350 | 349,1,3,"Coutts, Master. William Loch ""William""",male,3,1,1,C.A. 37671,15.9,,S,2,No
351 | 350,0,3,"Dimic, Mr. Jovan",male,42,0,0,315088,8.6625,,S,0,Yes
352 | 351,0,3,"Odahl, Mr. Nils Martin",male,23,0,0,7267,9.225,,S,0,Yes
353 | 352,0,1,"Williams-Lambert, Mr. Fletcher Fellows",male,23,0,0,113510,35.0,C128,S,0,Yes
354 | 353,0,3,"Elias, Mr. Tannous",male,15,1,1,2695,7.2292,,C,2,No
355 | 354,0,3,"Arnold-Franchi, Mr. Josef",male,25,1,0,349237,17.8,,S,1,No
356 | 355,0,3,"Yousif, Mr. Wazli",male,26,0,0,2647,7.225,,C,0,Yes
357 | 356,0,3,"Vanden Steen, Mr. Leo Peter",male,28,0,0,345783,9.5,,S,0,Yes
358 | 357,1,1,"Bowerman, Miss. Elsie Edith",female,22,0,1,113505,55.0,E33,S,1,No
359 | 358,0,2,"Funk, Miss. Annie Clemmer",female,38,0,0,237671,13.0,,S,0,Yes
360 | 359,1,3,"McGovern, Miss. Mary",female,23,0,0,330931,7.8792,,Q,0,Yes
361 | 360,1,3,"Mockler, Miss. Helen Mary ""Ellie""",female,27,0,0,330980,7.8792,,Q,0,Yes
362 | 361,0,3,"Skoog, Mr. Wilhelm",male,40,1,4,347088,27.9,,S,5,No
363 | 362,0,2,"del Carlo, Mr. Sebastiano",male,29,1,0,SC/PARIS 2167,27.7208,,C,1,No
364 | 363,0,3,"Barbara, Mrs. (Catherine David)",female,45,0,1,2691,14.4542,,C,1,No
365 | 364,0,3,"Asim, Mr. Adola",male,35,0,0,SOTON/O.Q. 3101310,7.05,,S,0,Yes
366 | 365,0,3,"O'Brien, Mr. Thomas",male,34,1,0,370365,15.5,,Q,1,No
367 | 366,0,3,"Adahl, Mr. Mauritz Nils Martin",male,30,0,0,C 7076,7.25,,S,0,Yes
368 | 367,1,1,"Warren, Mrs. Frank Manley (Anna Sophia Atkinson)",female,60,1,0,110813,75.25,D37,C,1,No
369 | 368,1,3,"Moussa, Mrs. (Mantoura Boulos)",female,37,0,0,2626,7.2292,,C,0,Yes
370 | 369,1,3,"Jermyn, Miss. Annie",female,24,0,0,14313,7.75,,Q,0,Yes
371 | 370,1,1,"Aubart, Mme. Leontine Pauline",female,24,0,0,PC 17477,69.3,B35,C,0,Yes
372 | 371,1,1,"Harder, Mr. George Achilles",male,25,1,0,11765,55.4417,E50,C,1,No
373 | 372,0,3,"Wiklund, Mr. Jakob Alfred",male,18,1,0,3101267,6.4958,,S,1,No
374 | 373,0,3,"Beavan, Mr. William Thomas",male,19,0,0,323951,8.05,,S,0,Yes
375 | 374,0,1,"Ringhini, Mr. Sante",male,22,0,0,PC 17760,135.6333,,C,0,Yes
376 | 375,0,3,"Palsson, Miss. Stina Viola",female,3,3,1,349909,21.075,,S,4,No
377 | 376,1,1,"Meyer, Mrs. Edgar Joseph (Leila Saks)",female,29,1,0,PC 17604,82.1708,,C,1,No
378 | 377,1,3,"Landergren, Miss. Aurora Adelia",female,22,0,0,C 7077,7.25,,S,0,Yes
379 | 378,0,1,"Widener, Mr. Harry Elkins",male,27,0,2,113503,211.5,C82,C,2,No
380 | 379,0,3,"Betros, Mr. Tannous",male,20,0,0,2648,4.0125,,C,0,Yes
381 | 380,0,3,"Gustafsson, Mr. Karl Gideon",male,19,0,0,347069,7.775,,S,0,Yes
382 | 381,1,1,"Bidois, Miss. Rosalie",female,42,0,0,PC 17757,227.525,,C,0,Yes
383 | 382,1,3,"Nakid, Miss. Maria (""Mary"")",female,1,0,2,2653,15.7417,,C,2,No
384 | 383,0,3,"Tikkanen, Mr. Juho",male,32,0,0,STON/O 2. 3101293,7.925,,S,0,Yes
385 | 384,1,1,"Holverson, Mrs. Alexander Oskar (Mary Aline Towner)",female,35,1,0,113789,52.0,,S,1,No
386 | 385,0,3,"Plotcharsky, Mr. Vasil",male,33,0,0,349227,7.8958,,S,0,Yes
387 | 386,0,2,"Davies, Mr. Charles Henry",male,18,0,0,S.O.C. 14879,73.5,,S,0,Yes
388 | 387,0,3,"Goodwin, Master. Sidney Leonard",male,1,5,2,CA 2144,46.9,,S,7,No
389 | 388,1,2,"Buss, Miss. Kate",female,36,0,0,27849,13.0,,S,0,Yes
390 | 389,0,3,"Sadlier, Mr. Matthew",male,32,0,0,367655,7.7292,,Q,0,Yes
391 | 390,1,2,"Lehmann, Miss. Bertha",female,17,0,0,SC 1748,12.0,,C,0,Yes
392 | 391,1,1,"Carter, Mr. William Ernest",male,36,1,2,113760,120.0,B96 B98,S,3,No
393 | 392,1,3,"Jansson, Mr. Carl Olof",male,21,0,0,350034,7.7958,,S,0,Yes
394 | 393,0,3,"Gustafsson, Mr. Johan Birger",male,28,2,0,3101277,7.925,,S,2,No
395 | 394,1,1,"Newell, Miss. Marjorie",female,23,1,0,35273,113.275,D36,C,1,No
396 | 395,1,3,"Sandstrom, Mrs. Hjalmar (Agnes Charlotta Bengtsson)",female,24,0,2,PP 9549,16.7,G6,S,2,No
397 | 396,0,3,"Johansson, Mr. Erik",male,22,0,0,350052,7.7958,,S,0,Yes
398 | 397,0,3,"Olsson, Miss. Elina",female,31,0,0,350407,7.8542,,S,0,Yes
399 | 398,0,2,"McKane, Mr. Peter David",male,46,0,0,28403,26.0,,S,0,Yes
400 | 399,0,2,"Pain, Dr. Alfred",male,23,0,0,244278,10.5,,S,0,Yes
401 | 400,1,2,"Trout, Mrs. William H (Jessie L)",female,28,0,0,240929,12.65,,S,0,Yes
402 | 401,1,3,"Niskanen, Mr. Juha",male,39,0,0,STON/O 2. 3101289,7.925,,S,0,Yes
403 | 402,0,3,"Adams, Mr. John",male,26,0,0,341826,8.05,,S,0,Yes
404 | 403,0,3,"Jussila, Miss. Mari Aina",female,21,1,0,4137,9.825,,S,1,No
405 | 404,0,3,"Hakkarainen, Mr. Pekka Pietari",male,28,1,0,STON/O2. 3101279,15.85,,S,1,No
406 | 405,0,3,"Oreskovic, Miss. Marija",female,20,0,0,315096,8.6625,,S,0,Yes
407 | 406,0,2,"Gale, Mr. Shadrach",male,34,1,0,28664,21.0,,S,1,No
408 | 407,0,3,"Widegren, Mr. Carl/Charles Peter",male,51,0,0,347064,7.75,,S,0,Yes
409 | 408,1,2,"Richards, Master. William Rowe",male,3,1,1,29106,18.75,,S,2,No
410 | 409,0,3,"Birkeland, Mr. Hans Martin Monsen",male,21,0,0,312992,7.775,,S,0,Yes
411 | 410,0,3,"Lefebre, Miss. Ida",female,23,3,1,4133,25.4667,,S,4,No
412 | 411,0,3,"Sdycoff, Mr. Todor",male,17,0,0,349222,7.8958,,S,0,Yes
413 | 412,0,3,"Hart, Mr. Henry",male,38,0,0,394140,6.8583,,Q,0,Yes
414 | 413,1,1,"Minahan, Miss. Daisy E",female,33,1,0,19928,90.0,C78,Q,1,No
415 | 414,0,2,"Cunningham, Mr. Alfred Fleming",male,38,0,0,239853,0.0,,S,0,Yes
416 | 415,1,3,"Sundman, Mr. Johan Julian",male,44,0,0,STON/O 2. 3101269,7.925,,S,0,Yes
417 | 416,0,3,"Meek, Mrs. Thomas (Annie Louise Rowley)",female,20,0,0,343095,8.05,,S,0,Yes
418 | 417,1,2,"Drew, Mrs. James Vivian (Lulu Thorne Christian)",female,34,1,1,28220,32.5,,S,2,No
419 | 418,1,2,"Silven, Miss. Lyyli Karoliina",female,18,0,2,250652,13.0,,S,2,No
420 | 419,0,2,"Matthews, Mr. William John",male,30,0,0,28228,13.0,,S,0,Yes
421 | 420,0,3,"Van Impe, Miss. Catharina",female,10,0,2,345773,24.15,,S,2,No
422 | 421,0,3,"Gheorgheff, Mr. Stanio",male,39,0,0,349254,7.8958,,C,0,Yes
423 | 422,0,3,"Charters, Mr. David",male,21,0,0,A/5. 13032,7.7333,,Q,0,Yes
424 | 423,0,3,"Zimmerman, Mr. Leo",male,29,0,0,315082,7.875,,S,0,Yes
425 | 424,0,3,"Danbom, Mrs. Ernst Gilbert (Anna Sigrid Maria Brogren)",female,28,1,1,347080,14.4,,S,2,No
426 | 425,0,3,"Rosblom, Mr. Viktor Richard",male,18,1,1,370129,20.2125,,S,2,No
427 | 426,0,3,"Wiseman, Mr. Phillippe",male,25,0,0,A/4. 34244,7.25,,S,0,Yes
428 | 427,1,2,"Clarke, Mrs. Charles V (Ada Maria Winfield)",female,28,1,0,2003,26.0,,S,1,No
429 | 428,1,2,"Phillips, Miss. Kate Florence (""Mrs Kate Louise Phillips Marshall"")",female,19,0,0,250655,26.0,,S,0,Yes
430 | 429,0,3,"Flynn, Mr. James",male,33,0,0,364851,7.75,,Q,0,Yes
431 | 430,1,3,"Pickard, Mr. Berk (Berk Trembisky)",male,32,0,0,SOTON/O.Q. 392078,8.05,E10,S,0,Yes
432 | 431,1,1,"Bjornstrom-Steffansson, Mr. Mauritz Hakan",male,28,0,0,110564,26.55,C52,S,0,Yes
433 | 432,1,3,"Thorneycroft, Mrs. Percival (Florence Kate White)",female,21,1,0,376564,16.1,,S,1,No
434 | 433,1,2,"Louch, Mrs. Charles Alexander (Alice Adelaide Slow)",female,42,1,0,SC/AH 3085,26.0,,S,1,No
435 | 434,0,3,"Kallio, Mr. Nikolai Erland",male,17,0,0,STON/O 2. 3101274,7.125,,S,0,Yes
436 | 435,0,1,"Silvey, Mr. William Baird",male,50,1,0,13507,55.9,E44,S,1,No
437 | 436,1,1,"Carter, Miss. Lucile Polk",female,14,1,2,113760,120.0,B96 B98,S,3,No
438 | 437,0,3,"Ford, Miss. Doolina Margaret ""Daisy""",female,21,2,2,W./C. 6608,34.375,,S,4,No
439 | 438,1,2,"Richards, Mrs. Sidney (Emily Hocking)",female,24,2,3,29106,18.75,,S,5,No
440 | 439,0,1,"Fortune, Mr. Mark",male,64,1,4,19950,263.0,C23 C25 C27,S,5,No
441 | 440,0,2,"Kvillner, Mr. Johan Henrik Johannesson",male,31,0,0,C.A. 18723,10.5,,S,0,Yes
442 | 441,1,2,"Hart, Mrs. Benjamin (Esther Ada Bloomfield)",female,45,1,1,F.C.C. 13529,26.25,,S,2,No
443 | 442,0,3,"Hampe, Mr. Leon",male,20,0,0,345769,9.5,,S,0,Yes
444 | 443,0,3,"Petterson, Mr. Johan Emil",male,25,1,0,347076,7.775,,S,1,No
445 | 444,1,2,"Reynaldo, Ms. Encarnacion",female,28,0,0,230434,13.0,,S,0,Yes
446 | 445,1,3,"Johannesen-Bratthammer, Mr. Bernt",male,40,0,0,65306,8.1125,,S,0,Yes
447 | 446,1,1,"Dodge, Master. Washington",male,4,0,2,33638,81.8583,A34,S,2,No
448 | 447,1,2,"Mellinger, Miss. Madeleine Violet",female,13,0,1,250644,19.5,,S,1,No
449 | 448,1,1,"Seward, Mr. Frederic Kimber",male,34,0,0,113794,26.55,,S,0,Yes
450 | 449,1,3,"Baclini, Miss. Marie Catherine",female,5,2,1,2666,19.2583,,C,3,No
451 | 450,1,1,"Peuchen, Major. Arthur Godfrey",male,52,0,0,113786,30.5,C104,S,0,Yes
452 | 451,0,2,"West, Mr. Edwy Arthur",male,36,1,2,C.A. 34651,27.75,,S,3,No
453 | 452,0,3,"Hagland, Mr. Ingvald Olai Olsen",male,39,1,0,65303,19.9667,,S,1,No
454 | 453,0,1,"Foreman, Mr. Benjamin Laventall",male,30,0,0,113051,27.75,C111,C,0,Yes
455 | 454,1,1,"Goldenberg, Mr. Samuel L",male,49,1,0,17453,89.1042,C92,C,1,No
456 | 455,0,3,"Peduzzi, Mr. Joseph",male,30,0,0,A/5 2817,8.05,,S,0,Yes
457 | 456,1,3,"Jalsevac, Mr. Ivan",male,29,0,0,349240,7.8958,,C,0,Yes
458 | 457,0,1,"Millet, Mr. Francis Davis",male,65,0,0,13509,26.55,E38,S,0,Yes
459 | 458,1,1,"Kenyon, Mrs. Frederick R (Marion)",female,18,1,0,17464,51.8625,D21,S,1,No
460 | 459,1,2,"Toomey, Miss. Ellen",female,50,0,0,F.C.C. 13531,10.5,,S,0,Yes
461 | 460,0,3,"O'Connor, Mr. Maurice",male,35,0,0,371060,7.75,,Q,0,Yes
462 | 461,1,1,"Anderson, Mr. Harry",male,48,0,0,19952,26.55,E12,S,0,Yes
463 | 462,0,3,"Morley, Mr. William",male,34,0,0,364506,8.05,,S,0,Yes
464 | 463,0,1,"Gee, Mr. Arthur H",male,47,0,0,111320,38.5,E63,S,0,Yes
465 | 464,0,2,"Milling, Mr. Jacob Christian",male,48,0,0,234360,13.0,,S,0,Yes
466 | 465,0,3,"Maisner, Mr. Simon",male,28,0,0,A/S 2816,8.05,,S,0,Yes
467 | 466,0,3,"Goncalves, Mr. Manuel Estanslas",male,38,0,0,SOTON/O.Q. 3101306,7.05,,S,0,Yes
468 | 467,0,2,"Campbell, Mr. William",male,35,0,0,239853,0.0,,S,0,Yes
469 | 468,0,1,"Smart, Mr. John Montgomery",male,56,0,0,113792,26.55,,S,0,Yes
470 | 469,0,3,"Scanlan, Mr. James",male,42,0,0,36209,7.725,,Q,0,Yes
471 | 470,1,3,"Baclini, Miss. Helene Barbara",female,0,2,1,2666,19.2583,,C,3,No
472 | 471,0,3,"Keefe, Mr. Arthur",male,32,0,0,323592,7.25,,S,0,Yes
473 | 472,0,3,"Cacic, Mr. Luka",male,38,0,0,315089,8.6625,,S,0,Yes
474 | 473,1,2,"West, Mrs. Edwy Arthur (Ada Mary Worth)",female,33,1,2,C.A. 34651,27.75,,S,3,No
475 | 474,1,2,"Jerwan, Mrs. Amin S (Marie Marthe Thuillard)",female,23,0,0,SC/AH Basle 541,13.7917,D,C,0,Yes
476 | 475,0,3,"Strandberg, Miss. Ida Sofia",female,22,0,0,7553,9.8375,,S,0,Yes
477 | 476,0,1,"Clifford, Mr. George Quincy",male,24,0,0,110465,52.0,A14,S,0,Yes
478 | 477,0,2,"Renouf, Mr. Peter Henry",male,34,1,0,31027,21.0,,S,1,No
479 | 478,0,3,"Braund, Mr. Lewis Richard",male,29,1,0,3460,7.0458,,S,1,No
480 | 479,0,3,"Karlsson, Mr. Nils August",male,22,0,0,350060,7.5208,,S,0,Yes
481 | 480,1,3,"Hirvonen, Miss. Hildur E",female,2,0,1,3101298,12.2875,,S,1,No
482 | 481,0,3,"Goodwin, Master. Harold Victor",male,9,5,2,CA 2144,46.9,,S,7,No
483 | 482,0,2,"Frost, Mr. Anthony Wood ""Archie""",male,28,0,0,239854,0.0,,S,0,Yes
484 | 483,0,3,"Rouse, Mr. Richard Henry",male,50,0,0,A/5 3594,8.05,,S,0,Yes
485 | 484,1,3,"Turkula, Mrs. (Hedwig)",female,63,0,0,4134,9.5875,,S,0,Yes
486 | 485,1,1,"Bishop, Mr. Dickinson H",male,25,1,0,11967,91.0792,B49,C,1,No
487 | 486,0,3,"Lefebre, Miss. Jeannie",female,38,3,1,4133,25.4667,,S,4,No
488 | 487,1,1,"Hoyt, Mrs. Frederick Maxfield (Jane Anne Forby)",female,35,1,0,19943,90.0,C93,S,1,No
489 | 488,0,1,"Kent, Mr. Edward Austin",male,58,0,0,11771,29.7,B37,C,0,Yes
490 | 489,0,3,"Somerton, Mr. Francis William",male,30,0,0,A.5. 18509,8.05,,S,0,Yes
491 | 490,1,3,"Coutts, Master. Eden Leslie ""Neville""",male,9,1,1,C.A. 37671,15.9,,S,2,No
492 | 491,0,3,"Hagland, Mr. Konrad Mathias Reiersen",male,16,1,0,65304,19.9667,,S,1,No
493 | 492,0,3,"Windelov, Mr. Einar",male,21,0,0,SOTON/OQ 3101317,7.25,,S,0,Yes
494 | 493,0,1,"Molson, Mr. Harry Markland",male,55,0,0,113787,30.5,C30,S,0,Yes
495 | 494,0,1,"Artagaveytia, Mr. Ramon",male,71,0,0,PC 17609,49.5042,,C,0,Yes
496 | 495,0,3,"Stanley, Mr. Edward Roland",male,21,0,0,A/4 45380,8.05,,S,0,Yes
497 | 496,0,3,"Yousseff, Mr. Gerious",male,33,0,0,2627,14.4583,,C,0,Yes
498 | 497,1,1,"Eustis, Miss. Elizabeth Mussey",female,54,1,0,36947,78.2667,D20,C,1,No
499 | 498,0,3,"Shellard, Mr. Frederick William",male,37,0,0,C.A. 6212,15.1,,S,0,Yes
500 | 499,0,1,"Allison, Mrs. Hudson J C (Bessie Waldo Daniels)",female,25,1,2,113781,151.55,C22 C26,S,3,No
501 | 500,0,3,"Svensson, Mr. Olof",male,24,0,0,350035,7.7958,,S,0,Yes
502 | 501,0,3,"Calic, Mr. Petar",male,17,0,0,315086,8.6625,,S,0,Yes
503 | 502,0,3,"Canavan, Miss. Mary",female,21,0,0,364846,7.75,,Q,0,Yes
504 | 503,0,3,"O'Sullivan, Miss. Bridget Mary",female,29,0,0,330909,7.6292,,Q,0,Yes
505 | 504,0,3,"Laitinen, Miss. Kristina Sofia",female,37,0,0,4135,9.5875,,S,0,Yes
506 | 505,1,1,"Maioni, Miss. Roberta",female,16,0,0,110152,86.5,B79,S,0,Yes
507 | 506,0,1,"Penasco y Castellana, Mr. Victor de Satode",male,18,1,0,PC 17758,108.9,C65,C,1,No
508 | 507,1,2,"Quick, Mrs. Frederick Charles (Jane Richards)",female,33,0,2,26360,26.0,,S,2,No
509 | 508,1,1,"Bradley, Mr. George (""George Arthur Brayton"")",male,33,0,0,111427,26.55,,S,0,Yes
510 | 509,0,3,"Olsen, Mr. Henry Margido",male,28,0,0,C 4001,22.525,,S,0,Yes
511 | 510,1,3,"Lang, Mr. Fang",male,26,0,0,1601,56.4958,,S,0,Yes
512 | 511,1,3,"Daly, Mr. Eugene Patrick",male,29,0,0,382651,7.75,,Q,0,Yes
513 | 512,0,3,"Webber, Mr. James",male,33,0,0,SOTON/OQ 3101316,8.05,,S,0,Yes
514 | 513,1,1,"McGough, Mr. James Robert",male,36,0,0,PC 17473,26.2875,E25,S,0,Yes
515 | 514,1,1,"Rothschild, Mrs. Martin (Elizabeth L. Barrett)",female,54,1,0,PC 17603,59.4,,C,1,No
516 | 515,0,3,"Coleff, Mr. Satio",male,24,0,0,349209,7.4958,,S,0,Yes
517 | 516,0,1,"Walker, Mr. William Anderson",male,47,0,0,36967,34.0208,D46,S,0,Yes
518 | 517,1,2,"Lemore, Mrs. (Amelia Milley)",female,34,0,0,C.A. 34260,10.5,F33,S,0,Yes
519 | 518,0,3,"Ryan, Mr. Patrick",male,15,0,0,371110,24.15,,Q,0,Yes
520 | 519,1,2,"Angle, Mrs. William A (Florence ""Mary"" Agnes Hughes)",female,36,1,0,226875,26.0,,S,1,No
521 | 520,0,3,"Pavlovic, Mr. Stefo",male,32,0,0,349242,7.8958,,S,0,Yes
522 | 521,1,1,"Perreault, Miss. Anne",female,30,0,0,12749,93.5,B73,S,0,Yes
523 | 522,0,3,"Vovk, Mr. Janko",male,22,0,0,349252,7.8958,,S,0,Yes
524 | 523,0,3,"Lahoud, Mr. Sarkis",male,36,0,0,2624,7.225,,C,0,Yes
525 | 524,1,1,"Hippach, Mrs. Louis Albert (Ida Sophia Fischer)",female,44,0,1,111361,57.9792,B18,C,1,No
526 | 525,0,3,"Kassem, Mr. Fared",male,31,0,0,2700,7.2292,,C,0,Yes
527 | 526,0,3,"Farrell, Mr. James",male,40,0,0,367232,7.75,,Q,0,Yes
528 | 527,1,2,"Ridsdale, Miss. Lucy",female,50,0,0,W./C. 14258,10.5,,S,0,Yes
529 | 528,0,1,"Farthing, Mr. John",male,28,0,0,PC 17483,221.7792,C95,S,0,Yes
530 | 529,0,3,"Salonen, Mr. Johan Werner",male,39,0,0,3101296,7.925,,S,0,Yes
531 | 530,0,2,"Hocking, Mr. Richard George",male,23,2,1,29104,11.5,,S,3,No
532 | 531,1,2,"Quick, Miss. Phyllis May",female,2,1,1,26360,26.0,,S,2,No
533 | 532,0,3,"Toufik, Mr. Nakli",male,35,0,0,2641,7.2292,,C,0,Yes
534 | 533,0,3,"Elias, Mr. Joseph Jr",male,17,1,1,2690,7.2292,,C,2,No
535 | 534,1,3,"Peter, Mrs. Catherine (Catherine Rizk)",female,20,0,2,2668,22.3583,,C,2,No
536 | 535,0,3,"Cacic, Miss. Marija",female,30,0,0,315084,8.6625,,S,0,Yes
537 | 536,1,2,"Hart, Miss. Eva Miriam",female,7,0,2,F.C.C. 13529,26.25,,S,2,No
538 | 537,0,1,"Butt, Major. Archibald Willingham",male,45,0,0,113050,26.55,B38,S,0,Yes
539 | 538,1,1,"LeRoy, Miss. Bertha",female,30,0,0,PC 17761,106.425,,C,0,Yes
540 | 539,0,3,"Risien, Mr. Samuel Beard",male,34,0,0,364498,14.5,,S,0,Yes
541 | 540,1,1,"Frolicher, Miss. Hedwig Margaritha",female,22,0,2,13568,49.5,B39,C,2,No
542 | 541,1,1,"Crosby, Miss. Harriet R",female,36,0,2,WE/P 5735,71.0,B22,S,2,No
543 | 542,0,3,"Andersson, Miss. Ingeborg Constanzia",female,9,4,2,347082,31.275,,S,6,No
544 | 543,0,3,"Andersson, Miss. Sigrid Elisabeth",female,11,4,2,347082,31.275,,S,6,No
545 | 544,1,2,"Beane, Mr. Edward",male,32,1,0,2908,26.0,,S,1,No
546 | 545,0,1,"Douglas, Mr. Walter Donald",male,50,1,0,PC 17761,106.425,C86,C,1,No
547 | 546,0,1,"Nicholson, Mr. Arthur Ernest",male,64,0,0,693,26.0,,S,0,Yes
548 | 547,1,2,"Beane, Mrs. Edward (Ethel Clarke)",female,19,1,0,2908,26.0,,S,1,No
549 | 548,1,2,"Padro y Manent, Mr. Julian",male,40,0,0,SC/PARIS 2146,13.8625,,C,0,Yes
550 | 549,0,3,"Goldsmith, Mr. Frank John",male,33,1,1,363291,20.525,,S,2,No
551 | 550,1,2,"Davies, Master. John Morgan Jr",male,8,1,1,C.A. 33112,36.75,,S,2,No
552 | 551,1,1,"Thayer, Mr. John Borland Jr",male,17,0,2,17421,110.8833,C70,C,2,No
553 | 552,0,2,"Sharp, Mr. Percival James R",male,27,0,0,244358,26.0,,S,0,Yes
554 | 553,0,3,"O'Brien, Mr. Timothy",male,27,0,0,330979,7.8292,,Q,0,Yes
555 | 554,1,3,"Leeni, Mr. Fahim (""Philip Zenni"")",male,22,0,0,2620,7.225,,C,0,Yes
556 | 555,1,3,"Ohman, Miss. Velin",female,22,0,0,347085,7.775,,S,0,Yes
557 | 556,0,1,"Wright, Mr. George",male,62,0,0,113807,26.55,,S,0,Yes
558 | 557,1,1,"Duff Gordon, Lady. (Lucille Christiana Sutherland) (""Mrs Morgan"")",female,48,1,0,11755,39.6,A16,C,1,No
559 | 558,0,1,"Robbins, Mr. Victor",male,18,0,0,PC 17757,227.525,,C,0,Yes
560 | 559,1,1,"Taussig, Mrs. Emil (Tillie Mandelbaum)",female,39,1,1,110413,79.65,E67,S,2,No
561 | 560,1,3,"de Messemaeker, Mrs. Guillaume Joseph (Emma)",female,36,1,0,345572,17.4,,S,1,No
562 | 561,0,3,"Morrow, Mr. Thomas Rowan",male,33,0,0,372622,7.75,,Q,0,Yes
563 | 562,0,3,"Sivic, Mr. Husein",male,40,0,0,349251,7.8958,,S,0,Yes
564 | 563,0,2,"Norman, Mr. Robert Douglas",male,28,0,0,218629,13.5,,S,0,Yes
565 | 564,0,3,"Simmons, Mr. John",male,41,0,0,SOTON/OQ 392082,8.05,,S,0,Yes
566 | 565,0,3,"Meanwell, Miss. (Marion Ogden)",female,25,0,0,SOTON/O.Q. 392087,8.05,,S,0,Yes
567 | 566,0,3,"Davies, Mr. Alfred J",male,24,2,0,A/4 48871,24.15,,S,2,No
568 | 567,0,3,"Stoytcheff, Mr. Ilia",male,19,0,0,349205,7.8958,,S,0,Yes
569 | 568,0,3,"Palsson, Mrs. Nils (Alma Cornelia Berglund)",female,29,0,4,349909,21.075,,S,4,No
570 | 569,0,3,"Doharr, Mr. Tannous",male,35,0,0,2686,7.2292,,C,0,Yes
571 | 570,1,3,"Jonsson, Mr. Carl",male,32,0,0,350417,7.8542,,S,0,Yes
572 | 571,1,2,"Harris, Mr. George",male,62,0,0,S.W./PP 752,10.5,,S,0,Yes
573 | 572,1,1,"Appleton, Mrs. Edward Dale (Charlotte Lamson)",female,53,2,0,11769,51.4792,C101,S,2,No
574 | 573,1,1,"Flynn, Mr. John Irwin (""Irving"")",male,36,0,0,PC 17474,26.3875,E25,S,0,Yes
575 | 574,1,3,"Kelly, Miss. Mary",female,19,0,0,14312,7.75,,Q,0,Yes
576 | 575,0,3,"Rush, Mr. Alfred George John",male,16,0,0,A/4. 20589,8.05,,S,0,Yes
577 | 576,0,3,"Patchett, Mr. George",male,19,0,0,358585,14.5,,S,0,Yes
578 | 577,1,2,"Garside, Miss. Ethel",female,34,0,0,243880,13.0,,S,0,Yes
579 | 578,1,1,"Silvey, Mrs. William Baird (Alice Munger)",female,39,1,0,13507,55.9,E44,S,1,No
580 | 579,0,3,"Caram, Mrs. Joseph (Maria Elias)",female,16,1,0,2689,14.4583,,C,1,No
581 | 580,1,3,"Jussila, Mr. Eiriik",male,32,0,0,STON/O 2. 3101286,7.925,,S,0,Yes
582 | 581,1,2,"Christy, Miss. Julie Rachel",female,25,1,1,237789,30.0,,S,2,No
583 | 582,1,1,"Thayer, Mrs. John Borland (Marian Longstreth Morris)",female,39,1,1,17421,110.8833,C68,C,2,No
584 | 583,0,2,"Downton, Mr. William James",male,54,0,0,28403,26.0,,S,0,Yes
585 | 584,0,1,"Ross, Mr. John Hugo",male,36,0,0,13049,40.125,A10,C,0,Yes
586 | 585,0,3,"Paulner, Mr. Uscher",male,30,0,0,3411,8.7125,,C,0,Yes
587 | 586,1,1,"Taussig, Miss. Ruth",female,18,0,2,110413,79.65,E68,S,2,No
588 | 587,0,2,"Jarvis, Mr. John Denzil",male,47,0,0,237565,15.0,,S,0,Yes
589 | 588,1,1,"Frolicher-Stehli, Mr. Maxmillian",male,60,1,1,13567,79.2,B41,C,2,No
590 | 589,0,3,"Gilinski, Mr. Eliezer",male,22,0,0,14973,8.05,,S,0,Yes
591 | 590,0,3,"Murdlin, Mr. Joseph",male,40,0,0,A./5. 3235,8.05,,S,0,Yes
592 | 591,0,3,"Rintamaki, Mr. Matti",male,35,0,0,STON/O 2. 3101273,7.125,,S,0,Yes
593 | 592,1,1,"Stephenson, Mrs. Walter Bertram (Martha Eustis)",female,52,1,0,36947,78.2667,D20,C,1,No
594 | 593,0,3,"Elsbury, Mr. William James",male,47,0,0,A/5 3902,7.25,,S,0,Yes
595 | 594,0,3,"Bourke, Miss. Mary",female,16,0,2,364848,7.75,,Q,2,No
596 | 595,0,2,"Chapman, Mr. John Henry",male,37,1,0,SC/AH 29037,26.0,,S,1,No
597 | 596,0,3,"Van Impe, Mr. Jean Baptiste",male,36,1,1,345773,24.15,,S,2,No
598 | 597,1,2,"Leitch, Miss. Jessie Wills",female,29,0,0,248727,33.0,,S,0,Yes
599 | 598,0,3,"Johnson, Mr. Alfred",male,49,0,0,LINE,0.0,,S,0,Yes
600 | 599,0,3,"Boulos, Mr. Hanna",male,27,0,0,2664,7.225,,C,0,Yes
601 | 600,1,1,"Duff Gordon, Sir. Cosmo Edmund (""Mr Morgan"")",male,49,1,0,PC 17485,56.9292,A20,C,1,No
602 | 601,1,2,"Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy)",female,24,2,1,243847,27.0,,S,3,No
603 | 602,0,3,"Slabenoff, Mr. Petco",male,30,0,0,349214,7.8958,,S,0,Yes
604 | 603,0,1,"Harrington, Mr. Charles H",male,19,0,0,113796,42.4,,S,0,Yes
605 | 604,0,3,"Torber, Mr. Ernst William",male,44,0,0,364511,8.05,,S,0,Yes
606 | 605,1,1,"Homer, Mr. Harry (""Mr E Haven"")",male,35,0,0,111426,26.55,,C,0,Yes
607 | 606,0,3,"Lindell, Mr. Edvard Bengtsson",male,36,1,0,349910,15.55,,S,1,No
608 | 607,0,3,"Karaic, Mr. Milan",male,30,0,0,349246,7.8958,,S,0,Yes
609 | 608,1,1,"Daniel, Mr. Robert Williams",male,27,0,0,113804,30.5,,S,0,Yes
610 | 609,1,2,"Laroche, Mrs. Joseph (Juliette Marie Louise Lafargue)",female,22,1,2,SC/Paris 2123,41.5792,,C,3,No
611 | 610,1,1,"Shutes, Miss. Elizabeth W",female,40,0,0,PC 17582,153.4625,C125,S,0,Yes
612 | 611,0,3,"Andersson, Mrs. Anders Johan (Alfrida Konstantia Brogren)",female,39,1,5,347082,31.275,,S,6,No
613 | 612,0,3,"Jardin, Mr. Jose Neto",male,33,0,0,SOTON/O.Q. 3101305,7.05,,S,0,Yes
614 | 613,1,3,"Murphy, Miss. Margaret Jane",female,40,1,0,367230,15.5,,Q,1,No
615 | 614,0,3,"Horgan, Mr. John",male,42,0,0,370377,7.75,,Q,0,Yes
616 | 615,0,3,"Brocklebank, Mr. William Alfred",male,35,0,0,364512,8.05,,S,0,Yes
617 | 616,1,2,"Herman, Miss. Alice",female,24,1,2,220845,65.0,,S,3,No
618 | 617,0,3,"Danbom, Mr. Ernst Gilbert",male,34,1,1,347080,14.4,,S,2,No
619 | 618,0,3,"Lobb, Mrs. William Arthur (Cordelia K Stanlick)",female,26,1,0,A/5. 3336,16.1,,S,1,No
620 | 619,1,2,"Becker, Miss. Marion Louise",female,4,2,1,230136,39.0,F4,S,3,No
621 | 620,0,2,"Gavey, Mr. Lawrence",male,26,0,0,31028,10.5,,S,0,Yes
622 | 621,0,3,"Yasbeck, Mr. Antoni",male,27,1,0,2659,14.4542,,C,1,No
623 | 622,1,1,"Kimball, Mr. Edwin Nelson Jr",male,42,1,0,11753,52.5542,D19,S,1,No
624 | 623,1,3,"Nakid, Mr. Sahid",male,20,1,1,2653,15.7417,,C,2,No
625 | 624,0,3,"Hansen, Mr. Henry Damsgaard",male,21,0,0,350029,7.8542,,S,0,Yes
626 | 625,0,3,"Bowen, Mr. David John ""Dai""",male,21,0,0,54636,16.1,,S,0,Yes
627 | 626,0,1,"Sutton, Mr. Frederick",male,61,0,0,36963,32.3208,D50,S,0,Yes
628 | 627,0,2,"Kirkland, Rev. Charles Leonard",male,57,0,0,219533,12.35,,Q,0,Yes
629 | 628,1,1,"Longley, Miss. Gretchen Fiske",female,21,0,0,13502,77.9583,D9,S,0,Yes
630 | 629,0,3,"Bostandyeff, Mr. Guentcho",male,26,0,0,349224,7.8958,,S,0,Yes
631 | 630,0,3,"O'Connell, Mr. Patrick D",male,19,0,0,334912,7.7333,,Q,0,Yes
632 | 631,1,1,"Barkworth, Mr. Algernon Henry Wilson",male,80,0,0,27042,30.0,A23,S,0,Yes
633 | 632,0,3,"Lundahl, Mr. Johan Svensson",male,51,0,0,347743,7.0542,,S,0,Yes
634 | 633,1,1,"Stahelin-Maeglin, Dr. Max",male,32,0,0,13214,30.5,B50,C,0,Yes
635 | 634,0,1,"Parr, Mr. William Henry Marsh",male,16,0,0,112052,0.0,,S,0,Yes
636 | 635,0,3,"Skoog, Miss. Mabel",female,9,3,2,347088,27.9,,S,5,No
637 | 636,1,2,"Davis, Miss. Mary",female,28,0,0,237668,13.0,,S,0,Yes
638 | 637,0,3,"Leinonen, Mr. Antti Gustaf",male,32,0,0,STON/O 2. 3101292,7.925,,S,0,Yes
639 | 638,0,2,"Collyer, Mr. Harvey",male,31,1,1,C.A. 31921,26.25,,S,2,No
640 | 639,0,3,"Panula, Mrs. Juha (Maria Emilia Ojala)",female,41,0,5,3101295,39.6875,,S,5,No
641 | 640,0,3,"Thorneycroft, Mr. Percival",male,41,1,0,376564,16.1,,S,1,No
642 | 641,0,3,"Jensen, Mr. Hans Peder",male,20,0,0,350050,7.8542,,S,0,Yes
643 | 642,1,1,"Sagesser, Mlle. Emma",female,24,0,0,PC 17477,69.3,B35,C,0,Yes
644 | 643,0,3,"Skoog, Miss. Margit Elizabeth",female,2,3,2,347088,27.9,,S,5,No
645 | 644,1,3,"Foo, Mr. Choong",male,23,0,0,1601,56.4958,,S,0,Yes
646 | 645,1,3,"Baclini, Miss. Eugenie",female,0,2,1,2666,19.2583,,C,3,No
647 | 646,1,1,"Harper, Mr. Henry Sleeper",male,48,1,0,PC 17572,76.7292,D33,C,1,No
648 | 647,0,3,"Cor, Mr. Liudevit",male,19,0,0,349231,7.8958,,S,0,Yes
649 | 648,1,1,"Simonius-Blumer, Col. Oberst Alfons",male,56,0,0,13213,35.5,A26,C,0,Yes
650 | 649,0,3,"Willey, Mr. Edward",male,21,0,0,S.O./P.P. 751,7.55,,S,0,Yes
651 | 650,1,3,"Stanley, Miss. Amy Zillah Elsie",female,23,0,0,CA. 2314,7.55,,S,0,Yes
652 | 651,0,3,"Mitkoff, Mr. Mito",male,15,0,0,349221,7.8958,,S,0,Yes
653 | 652,1,2,"Doling, Miss. Elsie",female,18,0,1,231919,23.0,,S,1,No
654 | 653,0,3,"Kalvik, Mr. Johannes Halvorsen",male,21,0,0,8475,8.4333,,S,0,Yes
655 | 654,1,3,"O'Leary, Miss. Hanora ""Norah""",female,34,0,0,330919,7.8292,,Q,0,Yes
656 | 655,0,3,"Hegarty, Miss. Hanora ""Nora""",female,18,0,0,365226,6.75,,Q,0,Yes
657 | 656,0,2,"Hickman, Mr. Leonard Mark",male,24,2,0,S.O.C. 14879,73.5,,S,2,No
658 | 657,0,3,"Radeff, Mr. Alexander",male,23,0,0,349223,7.8958,,S,0,Yes
659 | 658,0,3,"Bourke, Mrs. John (Catherine)",female,32,1,1,364849,15.5,,Q,2,No
660 | 659,0,2,"Eitemiller, Mr. George Floyd",male,23,0,0,29751,13.0,,S,0,Yes
661 | 660,0,1,"Newell, Mr. Arthur Webster",male,58,0,2,35273,113.275,D48,C,2,No
662 | 661,1,1,"Frauenthal, Dr. Henry William",male,50,2,0,PC 17611,133.65,,S,2,No
663 | 662,0,3,"Badt, Mr. Mohamed",male,40,0,0,2623,7.225,,C,0,Yes
664 | 663,0,1,"Colley, Mr. Edward Pomeroy",male,47,0,0,5727,25.5875,E58,S,0,Yes
665 | 664,0,3,"Coleff, Mr. Peju",male,36,0,0,349210,7.4958,,S,0,Yes
666 | 665,1,3,"Lindqvist, Mr. Eino William",male,20,1,0,STON/O 2. 3101285,7.925,,S,1,No
667 | 666,0,2,"Hickman, Mr. Lewis",male,32,2,0,S.O.C. 14879,73.5,,S,2,No
668 | 667,0,2,"Butler, Mr. Reginald Fenton",male,25,0,0,234686,13.0,,S,0,Yes
669 | 668,0,3,"Rommetvedt, Mr. Knud Paust",male,21,0,0,312993,7.775,,S,0,Yes
670 | 669,0,3,"Cook, Mr. Jacob",male,43,0,0,A/5 3536,8.05,,S,0,Yes
671 | 670,1,1,"Taylor, Mrs. Elmer Zebley (Juliet Cummins Wright)",female,38,1,0,19996,52.0,C126,S,1,No
672 | 671,1,2,"Brown, Mrs. Thomas William Solomon (Elizabeth Catherine Ford)",female,40,1,1,29750,39.0,,S,2,No
673 | 672,0,1,"Davidson, Mr. Thornton",male,31,1,0,F.C. 12750,52.0,B71,S,1,No
674 | 673,0,2,"Mitchell, Mr. Henry Michael",male,70,0,0,C.A. 24580,10.5,,S,0,Yes
675 | 674,1,2,"Wilhelms, Mr. Charles",male,31,0,0,244270,13.0,,S,0,Yes
676 | 675,0,2,"Watson, Mr. Ennis Hastings",male,30,0,0,239856,0.0,,S,0,Yes
677 | 676,0,3,"Edvardsson, Mr. Gustaf Hjalmar",male,18,0,0,349912,7.775,,S,0,Yes
678 | 677,0,3,"Sawyer, Mr. Frederick Charles",male,24,0,0,342826,8.05,,S,0,Yes
679 | 678,1,3,"Turja, Miss. Anna Sofia",female,18,0,0,4138,9.8417,,S,0,Yes
680 | 679,0,3,"Goodwin, Mrs. Frederick (Augusta Tyler)",female,43,1,6,CA 2144,46.9,,S,7,No
681 | 680,1,1,"Cardeza, Mr. Thomas Drake Martinez",male,36,0,1,PC 17755,512.3292,B51 B53 B55,C,1,No
682 | 681,0,3,"Peters, Miss. Katie",female,35,0,0,330935,8.1375,,Q,0,Yes
683 | 682,1,1,"Hassab, Mr. Hammad",male,27,0,0,PC 17572,76.7292,D49,C,0,Yes
684 | 683,0,3,"Olsvigen, Mr. Thor Anderson",male,20,0,0,6563,9.225,,S,0,Yes
685 | 684,0,3,"Goodwin, Mr. Charles Edward",male,14,5,2,CA 2144,46.9,,S,7,No
686 | 685,0,2,"Brown, Mr. Thomas William Solomon",male,60,1,1,29750,39.0,,S,2,No
687 | 686,0,2,"Laroche, Mr. Joseph Philippe Lemercier",male,25,1,2,SC/Paris 2123,41.5792,,C,3,No
688 | 687,0,3,"Panula, Mr. Jaako Arnold",male,14,4,1,3101295,39.6875,,S,5,No
689 | 688,0,3,"Dakic, Mr. Branko",male,19,0,0,349228,10.1708,,S,0,Yes
690 | 689,0,3,"Fischer, Mr. Eberhard Thelander",male,18,0,0,350036,7.7958,,S,0,Yes
691 | 690,1,1,"Madill, Miss. Georgette Alexandra",female,15,0,1,24160,211.3375,B5,S,1,No
692 | 691,1,1,"Dick, Mr. Albert Adrian",male,31,1,0,17474,57.0,B20,S,1,No
693 | 692,1,3,"Karun, Miss. Manca",female,4,0,1,349256,13.4167,,C,1,No
694 | 693,1,3,"Lam, Mr. Ali",male,26,0,0,1601,56.4958,,S,0,Yes
695 | 694,0,3,"Saad, Mr. Khalil",male,25,0,0,2672,7.225,,C,0,Yes
696 | 695,0,1,"Weir, Col. John",male,60,0,0,113800,26.55,,S,0,Yes
697 | 696,0,2,"Chapman, Mr. Charles Henry",male,52,0,0,248731,13.5,,S,0,Yes
698 | 697,0,3,"Kelly, Mr. James",male,44,0,0,363592,8.05,,S,0,Yes
699 | 698,1,3,"Mullens, Miss. Katherine ""Katie""",female,33,0,0,35852,7.7333,,Q,0,Yes
700 | 699,0,1,"Thayer, Mr. John Borland",male,49,1,1,17421,110.8833,C68,C,2,No
701 | 700,0,3,"Humblen, Mr. Adolf Mathias Nicolai Olsen",male,42,0,0,348121,7.65,F G63,S,0,Yes
702 | 701,1,1,"Astor, Mrs. John Jacob (Madeleine Talmadge Force)",female,18,1,0,PC 17757,227.525,C62 C64,C,1,No
703 | 702,1,1,"Silverthorne, Mr. Spencer Victor",male,35,0,0,PC 17475,26.2875,E24,S,0,Yes
704 | 703,0,3,"Barbara, Miss. Saiide",female,18,0,1,2691,14.4542,,C,1,No
705 | 704,0,3,"Gallagher, Mr. Martin",male,25,0,0,36864,7.7417,,Q,0,Yes
706 | 705,0,3,"Hansen, Mr. Henrik Juul",male,26,1,0,350025,7.8542,,S,1,No
707 | 706,0,2,"Morley, Mr. Henry Samuel (""Mr Henry Marshall"")",male,39,0,0,250655,26.0,,S,0,Yes
708 | 707,1,2,"Kelly, Mrs. Florence ""Fannie""",female,45,0,0,223596,13.5,,S,0,Yes
709 | 708,1,1,"Calderhead, Mr. Edward Pennington",male,42,0,0,PC 17476,26.2875,E24,S,0,Yes
710 | 709,1,1,"Cleaver, Miss. Alice",female,22,0,0,113781,151.55,,S,0,Yes
711 | 710,1,3,"Moubarek, Master. Halim Gonios (""William George"")",male,31,1,1,2661,15.2458,,C,2,No
712 | 711,1,1,"Mayne, Mlle. Berthe Antonine (""Mrs de Villiers"")",female,24,0,0,PC 17482,49.5042,C90,C,0,Yes
713 | 712,0,1,"Klaber, Mr. Herman",male,28,0,0,113028,26.55,C124,S,0,Yes
714 | 713,1,1,"Taylor, Mr. Elmer Zebley",male,48,1,0,19996,52.0,C126,S,1,No
715 | 714,0,3,"Larsson, Mr. August Viktor",male,29,0,0,7545,9.4833,,S,0,Yes
716 | 715,0,2,"Greenberg, Mr. Samuel",male,52,0,0,250647,13.0,,S,0,Yes
717 | 716,0,3,"Soholt, Mr. Peter Andreas Lauritz Andersen",male,19,0,0,348124,7.65,F G73,S,0,Yes
718 | 717,1,1,"Endres, Miss. Caroline Louise",female,38,0,0,PC 17757,227.525,C45,C,0,Yes
719 | 718,1,2,"Troutt, Miss. Edwina Celia ""Winnie""",female,27,0,0,34218,10.5,E101,S,0,Yes
720 | 719,0,3,"McEvoy, Mr. Michael",male,23,0,0,36568,15.5,,Q,0,Yes
721 | 720,0,3,"Johnson, Mr. Malkolm Joackim",male,33,0,0,347062,7.775,,S,0,Yes
722 | 721,1,2,"Harper, Miss. Annie Jessie ""Nina""",female,6,0,1,248727,33.0,,S,1,No
723 | 722,0,3,"Jensen, Mr. Svend Lauritz",male,17,1,0,350048,7.0542,,S,1,No
724 | 723,0,2,"Gillespie, Mr. William Henry",male,34,0,0,12233,13.0,,S,0,Yes
725 | 724,0,2,"Hodges, Mr. Henry Price",male,50,0,0,250643,13.0,,S,0,Yes
726 | 725,1,1,"Chambers, Mr. Norman Campbell",male,27,1,0,113806,53.1,E8,S,1,No
727 | 726,0,3,"Oreskovic, Mr. Luka",male,20,0,0,315094,8.6625,,S,0,Yes
728 | 727,1,2,"Renouf, Mrs. Peter Henry (Lillian Jefferys)",female,30,3,0,31027,21.0,,S,3,No
729 | 728,1,3,"Mannion, Miss. Margareth",female,21,0,0,36866,7.7375,,Q,0,Yes
730 | 729,0,2,"Bryhl, Mr. Kurt Arnold Gottfrid",male,25,1,0,236853,26.0,,S,1,No
731 | 730,0,3,"Ilmakangas, Miss. Pieta Sofia",female,25,1,0,STON/O2. 3101271,7.925,,S,1,No
732 | 731,1,1,"Allen, Miss. Elisabeth Walton",female,29,0,0,24160,211.3375,B5,S,0,Yes
733 | 732,0,3,"Hassan, Mr. Houssein G N",male,11,0,0,2699,18.7875,,C,0,Yes
734 | 733,0,2,"Knight, Mr. Robert J",male,39,0,0,239855,0.0,,S,0,Yes
735 | 734,0,2,"Berriman, Mr. William John",male,23,0,0,28425,13.0,,S,0,Yes
736 | 735,0,2,"Troupiansky, Mr. Moses Aaron",male,23,0,0,233639,13.0,,S,0,Yes
737 | 736,0,3,"Williams, Mr. Leslie",male,28,0,0,54636,16.1,,S,0,Yes
738 | 737,0,3,"Ford, Mrs. Edward (Margaret Ann Watson)",female,48,1,3,W./C. 6608,34.375,,S,4,No
739 | 738,1,1,"Lesurer, Mr. Gustave J",male,35,0,0,PC 17755,512.3292,B101,C,0,Yes
740 | 739,0,3,"Ivanoff, Mr. Kanio",male,35,0,0,349201,7.8958,,S,0,Yes
741 | 740,0,3,"Nankoff, Mr. Minko",male,21,0,0,349218,7.8958,,S,0,Yes
742 | 741,1,1,"Hawksford, Mr. Walter James",male,33,0,0,16988,30.0,D45,S,0,Yes
743 | 742,0,1,"Cavendish, Mr. Tyrell William",male,36,1,0,19877,78.85,C46,S,1,No
744 | 743,1,1,"Ryerson, Miss. Susan Parker ""Suzette""",female,21,2,2,PC 17608,262.375,B57 B59 B63 B66,C,4,No
745 | 744,0,3,"McNamee, Mr. Neal",male,24,1,0,376566,16.1,,S,1,No
746 | 745,1,3,"Stranden, Mr. Juho",male,31,0,0,STON/O 2. 3101288,7.925,,S,0,Yes
747 | 746,0,1,"Crosby, Capt. Edward Gifford",male,70,1,1,WE/P 5735,71.0,B22,S,2,No
748 | 747,0,3,"Abbott, Mr. Rossmore Edward",male,16,1,1,C.A. 2673,20.25,,S,2,No
749 | 748,1,2,"Sinkkonen, Miss. Anna",female,30,0,0,250648,13.0,,S,0,Yes
750 | 749,0,1,"Marvin, Mr. Daniel Warner",male,19,1,0,113773,53.1,D30,S,1,No
751 | 750,0,3,"Connaghton, Mr. Michael",male,31,0,0,335097,7.75,,Q,0,Yes
752 | 751,1,2,"Wells, Miss. Joan",female,4,1,1,29103,23.0,,S,2,No
753 | 752,1,3,"Moor, Master. Meier",male,6,0,1,392096,12.475,E121,S,1,No
754 | 753,0,3,"Vande Velde, Mr. Johannes Joseph",male,33,0,0,345780,9.5,,S,0,Yes
755 | 754,0,3,"Jonkoff, Mr. Lalio",male,23,0,0,349204,7.8958,,S,0,Yes
756 | 755,1,2,"Herman, Mrs. Samuel (Jane Laver)",female,48,1,2,220845,65.0,,S,3,No
757 | 756,1,2,"Hamalainen, Master. Viljo",male,0,1,1,250649,14.5,,S,2,No
758 | 757,0,3,"Carlsson, Mr. August Sigfrid",male,28,0,0,350042,7.7958,,S,0,Yes
759 | 758,0,2,"Bailey, Mr. Percy Andrew",male,18,0,0,29108,11.5,,S,0,Yes
760 | 759,0,3,"Theobald, Mr. Thomas Leonard",male,34,0,0,363294,8.05,,S,0,Yes
761 | 760,1,1,"Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards)",female,33,0,0,110152,86.5,B77,S,0,Yes
762 | 761,0,3,"Garfirth, Mr. John",male,34,0,0,358585,14.5,,S,0,Yes
763 | 762,0,3,"Nirva, Mr. Iisakki Antino Aijo",male,41,0,0,SOTON/O2 3101272,7.125,,S,0,Yes
764 | 763,1,3,"Barah, Mr. Hanna Assi",male,20,0,0,2663,7.2292,,C,0,Yes
765 | 764,1,1,"Carter, Mrs. William Ernest (Lucile Polk)",female,36,1,2,113760,120.0,B96 B98,S,3,No
766 | 765,0,3,"Eklund, Mr. Hans Linus",male,16,0,0,347074,7.775,,S,0,Yes
767 | 766,1,1,"Hogeboom, Mrs. John C (Anna Andrews)",female,51,1,0,13502,77.9583,D11,S,1,No
768 | 767,0,1,"Brewe, Dr. Arthur Jackson",male,17,0,0,112379,39.6,,C,0,Yes
769 | 768,0,3,"Mangan, Miss. Mary",female,30,0,0,364850,7.75,,Q,0,Yes
770 | 769,0,3,"Moran, Mr. Daniel J",male,20,1,0,371110,24.15,,Q,1,No
771 | 770,0,3,"Gronnestad, Mr. Daniel Danielsen",male,32,0,0,8471,8.3625,,S,0,Yes
772 | 771,0,3,"Lievens, Mr. Rene Aime",male,24,0,0,345781,9.5,,S,0,Yes
773 | 772,0,3,"Jensen, Mr. Niels Peder",male,48,0,0,350047,7.8542,,S,0,Yes
774 | 773,0,2,"Mack, Mrs. (Mary)",female,57,0,0,S.O./P.P. 3,10.5,E77,S,0,Yes
775 | 774,0,3,"Elias, Mr. Dibo",male,19,0,0,2674,7.225,,C,0,Yes
776 | 775,1,2,"Hocking, Mrs. Elizabeth (Eliza Needs)",female,54,1,3,29105,23.0,,S,4,No
777 | 776,0,3,"Myhrman, Mr. Pehr Fabian Oliver Malkolm",male,18,0,0,347078,7.75,,S,0,Yes
778 | 777,0,3,"Tobin, Mr. Roger",male,17,0,0,383121,7.75,F38,Q,0,Yes
779 | 778,1,3,"Emanuel, Miss. Virginia Ethel",female,5,0,0,364516,12.475,,S,0,Yes
780 | 779,0,3,"Kilgannon, Mr. Thomas J",male,17,0,0,36865,7.7375,,Q,0,Yes
781 | 780,1,1,"Robert, Mrs. Edward Scott (Elisabeth Walton McMillan)",female,43,0,1,24160,211.3375,B3,S,1,No
782 | 781,1,3,"Ayoub, Miss. Banoura",female,13,0,0,2687,7.2292,,C,0,Yes
783 | 782,1,1,"Dick, Mrs. Albert Adrian (Vera Gillespie)",female,17,1,0,17474,57.0,B20,S,1,No
784 | 783,0,1,"Long, Mr. Milton Clyde",male,29,0,0,113501,30.0,D6,S,0,Yes
785 | 784,0,3,"Johnston, Mr. Andrew G",male,39,1,2,W./C. 6607,23.45,,S,3,No
786 | 785,0,3,"Ali, Mr. William",male,25,0,0,SOTON/O.Q. 3101312,7.05,,S,0,Yes
787 | 786,0,3,"Harmer, Mr. Abraham (David Lishin)",male,25,0,0,374887,7.25,,S,0,Yes
788 | 787,1,3,"Sjoblom, Miss. Anna Sofia",female,18,0,0,3101265,7.4958,,S,0,Yes
789 | 788,0,3,"Rice, Master. George Hugh",male,8,4,1,382652,29.125,,Q,5,No
790 | 789,1,3,"Dean, Master. Bertram Vere",male,1,1,2,C.A. 2315,20.575,,S,3,No
791 | 790,0,1,"Guggenheim, Mr. Benjamin",male,46,0,0,PC 17593,79.2,B82 B84,C,0,Yes
792 | 791,0,3,"Keane, Mr. Andrew ""Andy""",male,33,0,0,12460,7.75,,Q,0,Yes
793 | 792,0,2,"Gaskell, Mr. Alfred",male,16,0,0,239865,26.0,,S,0,Yes
794 | 793,0,3,"Sage, Miss. Stella Anna",female,21,8,2,CA. 2343,69.55,,S,10,No
795 | 794,0,1,"Hoyt, Mr. William Fisher",male,32,0,0,PC 17600,30.6958,,C,0,Yes
796 | 795,0,3,"Dantcheff, Mr. Ristiu",male,25,0,0,349203,7.8958,,S,0,Yes
797 | 796,0,2,"Otter, Mr. Richard",male,39,0,0,28213,13.0,,S,0,Yes
798 | 797,1,1,"Leader, Dr. Alice (Farnham)",female,49,0,0,17465,25.9292,D17,S,0,Yes
799 | 798,1,3,"Osman, Mrs. Mara",female,31,0,0,349244,8.6833,,S,0,Yes
800 | 799,0,3,"Ibrahim Shawah, Mr. Yousseff",male,30,0,0,2685,7.2292,,C,0,Yes
801 | 800,0,3,"Van Impe, Mrs. Jean Baptiste (Rosalie Paula Govaert)",female,30,1,1,345773,24.15,,S,2,No
802 | 801,0,2,"Ponesell, Mr. Martin",male,34,0,0,250647,13.0,,S,0,Yes
803 | 802,1,2,"Collyer, Mrs. Harvey (Charlotte Annie Tate)",female,31,1,1,C.A. 31921,26.25,,S,2,No
804 | 803,1,1,"Carter, Master. William Thornton II",male,11,1,2,113760,120.0,B96 B98,S,3,No
805 | 804,1,3,"Thomas, Master. Assad Alexander",male,0,0,1,2625,8.5167,,C,1,No
806 | 805,1,3,"Hedman, Mr. Oskar Arvid",male,27,0,0,347089,6.975,,S,0,Yes
807 | 806,0,3,"Johansson, Mr. Karl Johan",male,31,0,0,347063,7.775,,S,0,Yes
808 | 807,0,1,"Andrews, Mr. Thomas Jr",male,39,0,0,112050,0.0,A36,S,0,Yes
809 | 808,0,3,"Pettersson, Miss. Ellen Natalia",female,18,0,0,347087,7.775,,S,0,Yes
810 | 809,0,2,"Meyer, Mr. August",male,39,0,0,248723,13.0,,S,0,Yes
811 | 810,1,1,"Chambers, Mrs. Norman Campbell (Bertha Griggs)",female,33,1,0,113806,53.1,E8,S,1,No
812 | 811,0,3,"Alexander, Mr. William",male,26,0,0,3474,7.8875,,S,0,Yes
813 | 812,0,3,"Lester, Mr. James",male,39,0,0,A/4 48871,24.15,,S,0,Yes
814 | 813,0,2,"Slemen, Mr. Richard James",male,35,0,0,28206,10.5,,S,0,Yes
815 | 814,0,3,"Andersson, Miss. Ebba Iris Alfrida",female,6,4,2,347082,31.275,,S,6,No
816 | 815,0,3,"Tomlin, Mr. Ernest Portage",male,30,0,0,364499,8.05,,S,0,Yes
817 | 816,0,1,"Fry, Mr. Richard",male,40,0,0,112058,0.0,B102,S,0,Yes
818 | 817,0,3,"Heininen, Miss. Wendla Maria",female,23,0,0,STON/O2. 3101290,7.925,,S,0,Yes
819 | 818,0,2,"Mallet, Mr. Albert",male,31,1,1,S.C./PARIS 2079,37.0042,,C,2,No
820 | 819,0,3,"Holm, Mr. John Fredrik Alexander",male,43,0,0,C 7075,6.45,,S,0,Yes
821 | 820,0,3,"Skoog, Master. Karl Thorsten",male,10,3,2,347088,27.9,,S,5,No
822 | 821,1,1,"Hays, Mrs. Charles Melville (Clara Jennings Gregg)",female,52,1,1,12749,93.5,B69,S,2,No
823 | 822,1,3,"Lulic, Mr. Nikola",male,27,0,0,315098,8.6625,,S,0,Yes
824 | 823,0,1,"Reuchlin, Jonkheer. John George",male,38,0,0,19972,0.0,,S,0,Yes
825 | 824,1,3,"Moor, Mrs. (Beila)",female,27,0,1,392096,12.475,E121,S,1,No
826 | 825,0,3,"Panula, Master. Urho Abraham",male,2,4,1,3101295,39.6875,,S,5,No
827 | 826,0,3,"Flynn, Mr. John",male,16,0,0,368323,6.95,,Q,0,Yes
828 | 827,0,3,"Lam, Mr. Len",male,37,0,0,1601,56.4958,,S,0,Yes
829 | 828,1,2,"Mallet, Master. Andre",male,1,0,2,S.C./PARIS 2079,37.0042,,C,2,No
830 | 829,1,3,"McCormack, Mr. Thomas Joseph",male,27,0,0,367228,7.75,,Q,0,Yes
831 | 830,1,1,"Stone, Mrs. George Nelson (Martha Evelyn)",female,62,0,0,113572,80.0,B28,,0,Yes
832 | 831,1,3,"Yasbeck, Mrs. Antoni (Selini Alexander)",female,15,1,0,2659,14.4542,,C,1,No
833 | 832,1,2,"Richards, Master. George Sibley",male,0,1,1,29106,18.75,,S,2,No
834 | 833,0,3,"Saad, Mr. Amin",male,38,0,0,2671,7.2292,,C,0,Yes
835 | 834,0,3,"Augustsson, Mr. Albert",male,23,0,0,347468,7.8542,,S,0,Yes
836 | 835,0,3,"Allum, Mr. Owen George",male,18,0,0,2223,8.3,,S,0,Yes
837 | 836,1,1,"Compton, Miss. Sara Rebecca",female,39,1,1,PC 17756,83.1583,E49,C,2,No
838 | 837,0,3,"Pasic, Mr. Jakob",male,21,0,0,315097,8.6625,,S,0,Yes
839 | 838,0,3,"Sirota, Mr. Maurice",male,41,0,0,392092,8.05,,S,0,Yes
840 | 839,1,3,"Chip, Mr. Chang",male,32,0,0,1601,56.4958,,S,0,Yes
841 | 840,1,1,"Marechal, Mr. Pierre",male,28,0,0,11774,29.7,C47,C,0,Yes
842 | 841,0,3,"Alhomaki, Mr. Ilmari Rudolf",male,20,0,0,SOTON/O2 3101287,7.925,,S,0,Yes
843 | 842,0,2,"Mudd, Mr. Thomas Charles",male,16,0,0,S.O./P.P. 3,10.5,,S,0,Yes
844 | 843,1,1,"Serepeca, Miss. Augusta",female,30,0,0,113798,31.0,,C,0,Yes
845 | 844,0,3,"Lemberopolous, Mr. Peter L",male,34,0,0,2683,6.4375,,C,0,Yes
846 | 845,0,3,"Culumovic, Mr. Jeso",male,17,0,0,315090,8.6625,,S,0,Yes
847 | 846,0,3,"Abbing, Mr. Anthony",male,42,0,0,C.A. 5547,7.55,,S,0,Yes
848 | 847,0,3,"Sage, Mr. Douglas Bullen",male,33,8,2,CA. 2343,69.55,,S,10,No
849 | 848,0,3,"Markoff, Mr. Marin",male,35,0,0,349213,7.8958,,C,0,Yes
850 | 849,0,2,"Harper, Rev. John",male,28,0,1,248727,33.0,,S,1,No
851 | 850,1,1,"Goldenberg, Mrs. Samuel L (Edwiga Grabowska)",female,16,1,0,17453,89.1042,C92,C,1,No
852 | 851,0,3,"Andersson, Master. Sigvard Harald Elias",male,4,4,2,347082,31.275,,S,6,No
853 | 852,0,3,"Svensson, Mr. Johan",male,74,0,0,347060,7.775,,S,0,Yes
854 | 853,0,3,"Boulos, Miss. Nourelain",female,9,1,1,2678,15.2458,,C,2,No
855 | 854,1,1,"Lines, Miss. Mary Conover",female,16,0,1,PC 17592,39.4,D28,S,1,No
856 | 855,0,2,"Carter, Mrs. Ernest Courtenay (Lilian Hughes)",female,44,1,0,244252,26.0,,S,1,No
857 | 856,1,3,"Aks, Mrs. Sam (Leah Rosen)",female,18,0,1,392091,9.35,,S,1,No
858 | 857,1,1,"Wick, Mrs. George Dennick (Mary Hitchcock)",female,45,1,1,36928,164.8667,,S,2,No
859 | 858,1,1,"Daly, Mr. Peter Denis ",male,51,0,0,113055,26.55,E17,S,0,Yes
860 | 859,1,3,"Baclini, Mrs. Solomon (Latifa Qurban)",female,24,0,3,2666,19.2583,,C,3,No
861 | 860,0,3,"Razi, Mr. Raihed",male,19,0,0,2629,7.2292,,C,0,Yes
862 | 861,0,3,"Hansen, Mr. Claus Peter",male,41,2,0,350026,14.1083,,S,2,No
863 | 862,0,2,"Giles, Mr. Frederick Edward",male,21,1,0,28134,11.5,,S,1,No
864 | 863,1,1,"Swift, Mrs. Frederick Joel (Margaret Welles Barron)",female,48,0,0,17466,25.9292,D17,S,0,Yes
865 | 864,0,3,"Sage, Miss. Dorothy Edith ""Dolly""",female,28,8,2,CA. 2343,69.55,,S,10,No
866 | 865,0,2,"Gill, Mr. John William",male,24,0,0,233866,13.0,,S,0,Yes
867 | 866,1,2,"Bystrom, Mrs. (Karolina)",female,42,0,0,236852,13.0,,S,0,Yes
868 | 867,1,2,"Duran y More, Miss. Asuncion",female,27,1,0,SC/PARIS 2149,13.8583,,C,1,No
869 | 868,0,1,"Roebling, Mr. Washington Augustus II",male,31,0,0,PC 17590,50.4958,A24,S,0,Yes
870 | 869,0,3,"van Melkebeke, Mr. Philemon",male,40,0,0,345777,9.5,,S,0,Yes
871 | 870,1,3,"Johnson, Master. Harold Theodor",male,4,1,1,347742,11.1333,,S,2,No
872 | 871,0,3,"Balkic, Mr. Cerin",male,26,0,0,349248,7.8958,,S,0,Yes
873 | 872,1,1,"Beckwith, Mrs. Richard Leonard (Sallie Monypeny)",female,47,1,1,11751,52.5542,D35,S,2,No
874 | 873,0,1,"Carlsson, Mr. Frans Olof",male,33,0,0,695,5.0,B51 B53 B55,S,0,Yes
875 | 874,0,3,"Vander Cruyssen, Mr. Victor",male,47,0,0,345765,9.0,,S,0,Yes
876 | 875,1,2,"Abelson, Mrs. Samuel (Hannah Wizosky)",female,28,1,0,P/PP 3381,24.0,,C,1,No
877 | 876,1,3,"Najib, Miss. Adele Kiamie ""Jane""",female,15,0,0,2667,7.225,,C,0,Yes
878 | 877,0,3,"Gustafsson, Mr. Alfred Ossian",male,20,0,0,7534,9.8458,,S,0,Yes
879 | 878,0,3,"Petroff, Mr. Nedelio",male,19,0,0,349212,7.8958,,S,0,Yes
880 | 879,0,3,"Laleff, Mr. Kristo",male,16,0,0,349217,7.8958,,S,0,Yes
881 | 880,1,1,"Potter, Mrs. Thomas Jr (Lily Alexenia Wilson)",female,56,0,1,11767,83.1583,C50,C,1,No
882 | 881,1,2,"Shelley, Mrs. William (Imanita Parrish Hall)",female,25,0,1,230433,26.0,,S,1,No
883 | 882,0,3,"Markun, Mr. Johann",male,33,0,0,349257,7.8958,,S,0,Yes
884 | 883,0,3,"Dahlberg, Miss. Gerda Ulrika",female,22,0,0,7552,10.5167,,S,0,Yes
885 | 884,0,2,"Banfield, Mr. Frederick James",male,28,0,0,C.A./SOTON 34068,10.5,,S,0,Yes
886 | 885,0,3,"Sutehall, Mr. Henry Jr",male,25,0,0,SOTON/OQ 392076,7.05,,S,0,Yes
887 | 886,0,3,"Rice, Mrs. William (Margaret Norton)",female,39,0,5,382652,29.125,,Q,5,No
888 | 887,0,2,"Montvila, Rev. Juozas",male,27,0,0,211536,13.0,,S,0,Yes
889 | 888,1,1,"Graham, Miss. Margaret Edith",female,19,0,0,112053,30.0,B42,S,0,Yes
890 | 889,0,3,"Johnston, Miss. Catherine Helen ""Carrie""",female,36,1,2,W./C. 6607,23.45,,S,3,No
891 | 890,1,1,"Behr, Mr. Karl Howell",male,26,0,0,111369,30.0,C148,C,0,Yes
892 | 891,0,3,"Dooley, Mr. Patrick",male,32,0,0,370376,7.75,,Q,0,Yes
893 |
--------------------------------------------------------------------------------
/test/titanic_test.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked,relatives,travelled_alone
2 | 1,0,3,"Braund, Mr. Owen Harris",male,22,1,0,A/5 21171,7.25,,S,1,No
3 | 2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38,1,0,PC 17599,71.2833,C85,C,1,No
4 | 3,1,3,"Heikkinen, Miss. Laina",female,26,0,0,STON/O2. 3101282,7.925,,S,0,Yes
5 | 4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35,1,0,113803,53.1,C123,S,1,No
6 | 5,0,3,"Allen, Mr. William Henry",male,35,0,0,373450,8.05,,S,0,Yes
7 | 6,0,3,"Moran, Mr. James",male,18,0,0,330877,8.4583,,Q,0,Yes
8 | 7,0,1,"McCarthy, Mr. Timothy J",male,54,0,0,17463,51.8625,E46,S,0,Yes
9 | 8,0,3,"Palsson, Master. Gosta Leonard",male,2,3,1,349909,21.075,,S,4,No
10 | 9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27,0,2,347742,11.1333,,S,2,No
11 | 10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14,1,0,237736,30.0708,,C,1,No
12 | 11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4,1,1,PP 9549,16.7,G6,S,2,No
13 | 12,1,1,"Bonnell, Miss. Elizabeth",female,58,0,0,113783,26.55,C103,S,0,Yes
14 | 13,0,3,"Saundercock, Mr. William Henry",male,20,0,0,A/5. 2151,8.05,,S,0,Yes
15 | 14,0,3,"Andersson, Mr. Anders Johan",male,39,1,5,347082,31.275,,S,6,No
16 | 15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14,0,0,350406,7.8542,,S,0,Yes
17 | 16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55,0,0,248706,16,,S,0,Yes
18 | 17,0,3,"Rice, Master. Eugene",male,2,4,1,382652,29.125,,Q,5,No
19 | 18,1,2,"Williams, Mr. Charles Eugene",male,27,0,0,244373,13,,S,0,Yes
20 | 19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31,1,0,345763,18,,S,1,No
21 |
--------------------------------------------------------------------------------
/test/wine.py:
--------------------------------------------------------------------------------
"""Train an XBNETClassifier on the scikit-learn wine dataset.

Loads the wine data (178 samples, 13 features, 3 classes), performs an
80/20 train/test split, builds a 2-layer XBNET model, and trains it for
300 epochs with Adam and cross-entropy loss via ``run_XBNET``.
"""
import torch
import numpy as np
from sklearn.model_selection import train_test_split
from XBNet.training_utils import training,predict
from XBNet.models import XBNETClassifier
from XBNet.run import run_XBNET
import pandas as pd

from sklearn.datasets import load_wine

data = load_wine()
x = data.data    # feature matrix, shape (178, 13)
y = data.target  # integer class labels in {0, 1, 2}

# FIX: the original passed random_state=True, a boolean where an integer
# seed is expected; True silently coerces to 1. Use the explicit
# equivalent seed so the split is unchanged but the intent is clear.
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.20, random_state=1
)

model = XBNETClassifier(x_train, y_train, num_layers=2)

criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)

# Returns the trained model plus per-epoch train/validation accuracy and loss.
m, acc, lo, val_ac, val_lo = run_XBNET(
    x_train, x_test, y_train, y_test,
    model, criterion, optimizer,
    epochs=300, batch_size=32,
)
18 |
--------------------------------------------------------------------------------