├── .gitignore
├── README.md
├── boston-housing
│   ├── boston_housing.html
│   ├── boston_housing.ipynb
│   └── housing.csv
├── customer-segments
│   ├── README.md
│   ├── customer_segments.html
│   ├── customer_segments.ipynb
│   ├── customers.csv
│   ├── renders.py
│   └── renders.pyc
├── smartcab
│   ├── README.md
│   ├── SmartCabReport.md
│   ├── SmartCabReport.pdf
│   └── smartcab
│       ├── __init__.py
│       ├── agent_original.py
│       ├── agent_smart.py
│       ├── environment.py
│       ├── environment.pyc
│       ├── images
│       │   ├── car-black.png
│       │   ├── car-blue.png
│       │   ├── car-cyan.png
│       │   ├── car-green.png
│       │   ├── car-magenta.png
│       │   ├── car-orange.png
│       │   ├── car-red.png
│       │   ├── car-white.png
│       │   └── car-yellow.png
│       ├── planner.py
│       ├── planner.pyc
│       ├── simulator.py
│       └── simulator.pyc
├── student-intervention
│   ├── student-data.csv
│   ├── student_intervention.html
│   └── student_intervention.ipynb
└── titanic-survival
    ├── Titanic_Survival_Exploration.html
    ├── Titanic_Survival_Exploration.ipynb
    ├── test.csv
    └── train.csv
/.gitignore:
--------------------------------------------------------------------------------
1 | **/.DS_Store
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Machine Learning Projects
2 |
3 | Udacity Machine Learning Nanodegree projects and examples.
4 |
5 | - Titanic Survivors - Basic ML Concepts
6 | - Boston Home Prices - Model Validation
7 | - Student Intervention - Supervised Learning
8 | - Customer Segments - Unsupervised Learning
9 | - SmartCab - Reinforcement Learning
10 |
--------------------------------------------------------------------------------
/boston-housing/housing.csv:
--------------------------------------------------------------------------------
1 | RM,LSTAT,PTRATIO,MDEV
2 | 6.575,4.98,15.3,504000.0
3 | 6.421,9.14,17.8,453600.0
4 | 7.185,4.03,17.8,728700.0
5 | 6.998,2.94,18.7,701400.0
6 | 7.147,5.33,18.7,760200.0
7 | 6.43,5.21,18.7,602700.0
8 | 6.012,12.43,15.2,480900.0
9 | 6.172,19.15,15.2,569100.0
10 | 5.631,29.93,15.2,346500.0
11 | 6.004,17.1,15.2,396900.0
12 | 6.377,20.45,15.2,315000.0
13 | 6.009,13.27,15.2,396900.0
14 | 5.889,15.71,15.2,455700.0
15 | 5.949,8.26,21.0,428400.0
16 | 6.096,10.26,21.0,382200.0
17 | 5.834,8.47,21.0,417900.0
18 | 5.935,6.58,21.0,485100.0
19 | 5.99,14.67,21.0,367500.0
20 | 5.456,11.69,21.0,424200.0
21 | 5.727,11.28,21.0,382200.0
22 | 5.57,21.02,21.0,285600.0
23 | 5.965,13.83,21.0,411600.0
24 | 6.142,18.72,21.0,319200.0
25 | 5.813,19.88,21.0,304500.0
26 | 5.924,16.3,21.0,327600.0
27 | 5.599,16.51,21.0,291900.0
28 | 5.813,14.81,21.0,348600.0
29 | 6.047,17.28,21.0,310800.0
30 | 6.495,12.8,21.0,386400.0
31 | 6.674,11.98,21.0,441000.0
32 | 5.713,22.6,21.0,266700.0
33 | 6.072,13.04,21.0,304500.0
34 | 5.95,27.71,21.0,277200.0
35 | 5.701,18.35,21.0,275100.0
36 | 6.096,20.34,21.0,283500.0
37 | 5.933,9.68,19.2,396900.0
38 | 5.841,11.41,19.2,420000.0
39 | 5.85,8.77,19.2,441000.0
40 | 5.966,10.13,19.2,518700.0
41 | 6.595,4.32,18.3,646800.0
42 | 7.024,1.98,18.3,732900.0
43 | 6.77,4.84,17.9,558600.0
44 | 6.169,5.81,17.9,531300.0
45 | 6.211,7.44,17.9,518700.0
46 | 6.069,9.55,17.9,445200.0
47 | 5.682,10.21,17.9,405300.0
48 | 5.786,14.15,17.9,420000.0
49 | 6.03,18.8,17.9,348600.0
50 | 5.399,30.81,17.9,302400.0
51 | 5.602,16.2,17.9,407400.0
52 | 5.963,13.45,16.8,413700.0
53 | 6.115,9.43,16.8,430500.0
54 | 6.511,5.28,16.8,525000.0
55 | 5.998,8.43,16.8,491400.0
56 | 5.888,14.8,21.1,396900.0
57 | 7.249,4.81,17.9,743400.0
58 | 6.383,5.77,17.3,518700.0
59 | 6.816,3.95,15.1,663600.0
60 | 6.145,6.86,19.7,489300.0
61 | 5.927,9.22,19.7,411600.0
62 | 5.741,13.15,19.7,392700.0
63 | 5.966,14.44,19.7,336000.0
64 | 6.456,6.73,19.7,466200.0
65 | 6.762,9.5,19.7,525000.0
66 | 7.104,8.05,18.6,693000.0
67 | 6.29,4.67,16.1,493500.0
68 | 5.787,10.24,16.1,407400.0
69 | 5.878,8.1,18.9,462000.0
70 | 5.594,13.09,18.9,365400.0
71 | 5.885,8.79,18.9,438900.0
72 | 6.417,6.72,19.2,508200.0
73 | 5.961,9.88,19.2,455700.0
74 | 6.065,5.52,19.2,478800.0
75 | 6.245,7.54,19.2,491400.0
76 | 6.273,6.78,18.7,506100.0
77 | 6.286,8.94,18.7,449400.0
78 | 6.279,11.97,18.7,420000.0
79 | 6.14,10.27,18.7,436800.0
80 | 6.232,12.34,18.7,445200.0
81 | 5.874,9.1,18.7,426300.0
82 | 6.727,5.29,19.0,588000.0
83 | 6.619,7.22,19.0,501900.0
84 | 6.302,6.72,19.0,520800.0
85 | 6.167,7.51,19.0,480900.0
86 | 6.389,9.62,18.5,501900.0
87 | 6.63,6.53,18.5,558600.0
88 | 6.015,12.86,18.5,472500.0
89 | 6.121,8.44,18.5,466200.0
90 | 7.007,5.5,17.8,495600.0
91 | 7.079,5.7,17.8,602700.0
92 | 6.417,8.81,17.8,474600.0
93 | 6.405,8.2,17.8,462000.0
94 | 6.442,8.16,18.2,480900.0
95 | 6.211,6.21,18.2,525000.0
96 | 6.249,10.59,18.2,432600.0
97 | 6.625,6.65,18.0,596400.0
98 | 6.163,11.34,18.0,449400.0
99 | 8.069,4.21,18.0,812700.0
100 | 7.82,3.57,18.0,919800.0
101 | 7.416,6.19,18.0,697200.0
102 | 6.727,9.42,20.9,577500.0
103 | 6.781,7.67,20.9,556500.0
104 | 6.405,10.63,20.9,390600.0
105 | 6.137,13.44,20.9,405300.0
106 | 6.167,12.33,20.9,422100.0
107 | 5.851,16.47,20.9,409500.0
108 | 5.836,18.66,20.9,409500.0
109 | 6.127,14.09,20.9,428400.0
110 | 6.474,12.27,20.9,415800.0
111 | 6.229,15.55,20.9,407400.0
112 | 6.195,13.0,20.9,455700.0
113 | 6.715,10.16,17.8,478800.0
114 | 5.913,16.21,17.8,394800.0
115 | 6.092,17.09,17.8,392700.0
116 | 6.254,10.45,17.8,388500.0
117 | 5.928,15.76,17.8,384300.0
118 | 6.176,12.04,17.8,445200.0
119 | 6.021,10.3,17.8,403200.0
120 | 5.872,15.37,17.8,428400.0
121 | 5.731,13.61,17.8,405300.0
122 | 5.87,14.37,19.1,462000.0
123 | 6.004,14.27,19.1,426300.0
124 | 5.961,17.93,19.1,430500.0
125 | 5.856,25.41,19.1,363300.0
126 | 5.879,17.58,19.1,394800.0
127 | 5.986,14.81,19.1,449400.0
128 | 5.613,27.26,19.1,329700.0
129 | 5.693,17.19,21.2,340200.0
130 | 6.431,15.39,21.2,378000.0
131 | 5.637,18.34,21.2,300300.0
132 | 6.458,12.6,21.2,403200.0
133 | 6.326,12.26,21.2,411600.0
134 | 6.372,11.12,21.2,483000.0
135 | 5.822,15.03,21.2,386400.0
136 | 5.757,17.31,21.2,327600.0
137 | 6.335,16.96,21.2,380100.0
138 | 5.942,16.9,21.2,365400.0
139 | 6.454,14.59,21.2,359100.0
140 | 5.857,21.32,21.2,279300.0
141 | 6.151,18.46,21.2,373800.0
142 | 6.174,24.16,21.2,294000.0
143 | 5.019,34.41,21.2,302400.0
144 | 5.403,26.82,14.7,281400.0
145 | 5.468,26.42,14.7,327600.0
146 | 4.903,29.29,14.7,247800.0
147 | 6.13,27.8,14.7,289800.0
148 | 5.628,16.65,14.7,327600.0
149 | 4.926,29.53,14.7,306600.0
150 | 5.186,28.32,14.7,373800.0
151 | 5.597,21.45,14.7,323400.0
152 | 6.122,14.1,14.7,451500.0
153 | 5.404,13.28,14.7,411600.0
154 | 5.012,12.12,14.7,321300.0
155 | 5.709,15.79,14.7,407400.0
156 | 6.129,15.12,14.7,357000.0
157 | 6.152,15.02,14.7,327600.0
158 | 5.272,16.14,14.7,275100.0
159 | 6.943,4.59,14.7,867300.0
160 | 6.066,6.43,14.7,510300.0
161 | 6.51,7.39,14.7,489300.0
162 | 6.25,5.5,14.7,567000.0
163 | 5.854,11.64,14.7,476700.0
164 | 6.101,9.81,14.7,525000.0
165 | 5.877,12.14,14.7,499800.0
166 | 6.319,11.1,14.7,499800.0
167 | 6.402,11.32,14.7,468300.0
168 | 5.875,14.43,14.7,365400.0
169 | 5.88,12.03,14.7,401100.0
170 | 5.572,14.69,16.6,485100.0
171 | 6.416,9.04,16.6,495600.0
172 | 5.859,9.64,16.6,474600.0
173 | 6.546,5.33,16.6,617400.0
174 | 6.02,10.11,16.6,487200.0
175 | 6.315,6.29,16.6,516600.0
176 | 6.86,6.92,16.6,627900.0
177 | 6.98,5.04,17.8,781200.0
178 | 7.765,7.56,17.8,835800.0
179 | 6.144,9.45,17.8,760200.0
180 | 7.155,4.82,17.8,795900.0
181 | 6.563,5.68,17.8,682500.0
182 | 5.604,13.98,17.8,554400.0
183 | 6.153,13.15,17.8,621600.0
184 | 6.782,6.68,15.2,672000.0
185 | 6.556,4.56,15.2,625800.0
186 | 7.185,5.39,15.2,732900.0
187 | 6.951,5.1,15.2,777000.0
188 | 6.739,4.69,15.2,640500.0
189 | 7.178,2.87,15.2,764400.0
190 | 6.8,5.03,15.6,653100.0
191 | 6.604,4.38,15.6,611100.0
192 | 7.287,4.08,12.6,699300.0
193 | 7.107,8.61,12.6,636300.0
194 | 7.274,6.62,12.6,726600.0
195 | 6.975,4.56,17.0,732900.0
196 | 7.135,4.45,17.0,690900.0
197 | 6.162,7.43,14.7,506100.0
198 | 7.61,3.11,14.7,888300.0
199 | 7.853,3.81,14.7,1018500.0
200 | 5.891,10.87,18.6,474600.0
201 | 6.326,10.97,18.6,512400.0
202 | 5.783,18.06,18.6,472500.0
203 | 6.064,14.66,18.6,512400.0
204 | 5.344,23.09,18.6,420000.0
205 | 5.96,17.27,18.6,455700.0
206 | 5.404,23.98,18.6,405300.0
207 | 5.807,16.03,18.6,470400.0
208 | 6.375,9.38,18.6,590100.0
209 | 5.412,29.55,18.6,497700.0
210 | 6.182,9.47,18.6,525000.0
211 | 5.888,13.51,16.4,489300.0
212 | 6.642,9.69,16.4,602700.0
213 | 5.951,17.92,16.4,451500.0
214 | 6.373,10.5,16.4,483000.0
215 | 6.951,9.71,17.4,560700.0
216 | 6.164,21.46,17.4,455700.0
217 | 6.879,9.93,17.4,577500.0
218 | 6.618,7.6,17.4,632100.0
219 | 8.266,4.14,17.4,940800.0
220 | 8.04,3.13,17.4,789600.0
221 | 7.163,6.36,17.4,663600.0
222 | 7.686,3.92,17.4,980700.0
223 | 6.552,3.76,17.4,661500.0
224 | 5.981,11.65,17.4,510300.0
225 | 7.412,5.25,17.4,665700.0
226 | 8.337,2.47,17.4,875700.0
227 | 8.247,3.95,17.4,1014300.0
228 | 6.726,8.05,17.4,609000.0
229 | 6.086,10.88,17.4,504000.0
230 | 6.631,9.54,17.4,527100.0
231 | 7.358,4.73,17.4,661500.0
232 | 6.481,6.36,16.6,497700.0
233 | 6.606,7.37,16.6,489300.0
234 | 6.897,11.38,16.6,462000.0
235 | 6.095,12.4,16.6,422100.0
236 | 6.358,11.22,16.6,466200.0
237 | 6.393,5.19,16.6,497700.0
238 | 5.593,12.5,19.1,369600.0
239 | 5.605,18.46,19.1,388500.0
240 | 6.108,9.16,19.1,510300.0
241 | 6.226,10.15,19.1,430500.0
242 | 6.433,9.52,19.1,514500.0
243 | 6.718,6.56,19.1,550200.0
244 | 6.487,5.9,19.1,512400.0
245 | 6.438,3.59,19.1,520800.0
246 | 6.957,3.53,19.1,621600.0
247 | 8.259,3.54,19.1,898800.0
248 | 6.108,6.57,16.4,459900.0
249 | 5.876,9.25,16.4,438900.0
250 | 7.454,3.11,15.9,924000.0
251 | 7.333,7.79,13.0,756000.0
252 | 6.842,6.9,13.0,632100.0
253 | 7.203,9.59,13.0,709800.0
254 | 7.52,7.26,13.0,905100.0
255 | 8.398,5.91,13.0,1024800.0
256 | 7.327,11.25,13.0,651000.0
257 | 7.206,8.1,13.0,766500.0
258 | 5.56,10.45,13.0,478800.0
259 | 7.014,14.79,13.0,644700.0
260 | 7.47,3.16,13.0,913500.0
261 | 5.92,13.65,18.6,434700.0
262 | 5.856,13.0,18.6,443100.0
263 | 6.24,6.59,18.6,529200.0
264 | 6.538,7.73,18.6,512400.0
265 | 7.691,6.58,18.6,739200.0
266 | 6.758,3.53,17.6,680400.0
267 | 6.854,2.98,17.6,672000.0
268 | 7.267,6.05,17.6,697200.0
269 | 6.826,4.16,17.6,695100.0
270 | 6.482,7.19,17.6,611100.0
271 | 6.812,4.85,14.9,737100.0
272 | 7.82,3.76,14.9,953400.0
273 | 6.968,4.59,14.9,743400.0
274 | 7.645,3.01,14.9,966000.0
275 | 7.088,7.85,15.3,676200.0
276 | 6.453,8.23,15.3,462000.0
277 | 6.23,12.93,18.2,422100.0
278 | 6.209,7.14,16.6,487200.0
279 | 6.315,7.6,16.6,468300.0
280 | 6.565,9.51,16.6,520800.0
281 | 6.861,3.33,19.2,598500.0
282 | 7.148,3.56,19.2,783300.0
283 | 6.63,4.7,19.2,585900.0
284 | 6.127,8.58,16.0,501900.0
285 | 6.009,10.4,16.0,455700.0
286 | 6.678,6.27,16.0,600600.0
287 | 6.549,7.39,16.0,569100.0
288 | 5.79,15.84,16.0,426300.0
289 | 6.345,4.97,14.8,472500.0
290 | 7.041,4.74,14.8,609000.0
291 | 6.871,6.07,14.8,520800.0
292 | 6.59,9.5,16.1,462000.0
293 | 6.495,8.67,16.1,554400.0
294 | 6.982,4.86,16.1,695100.0
295 | 7.236,6.93,18.4,758100.0
296 | 6.616,8.93,18.4,596400.0
297 | 7.42,6.47,18.4,701400.0
298 | 6.849,7.53,18.4,592200.0
299 | 6.635,4.54,18.4,478800.0
300 | 5.972,9.97,18.4,426300.0
301 | 4.973,12.64,18.4,338100.0
302 | 6.122,5.98,18.4,464100.0
303 | 6.023,11.72,18.4,407400.0
304 | 6.266,7.9,18.4,453600.0
305 | 6.567,9.28,18.4,499800.0
306 | 5.705,11.5,18.4,340200.0
307 | 5.914,18.33,18.4,373800.0
308 | 5.782,15.94,18.4,415800.0
309 | 6.382,10.36,18.4,485100.0
310 | 6.113,12.73,18.4,441000.0
311 | 6.426,7.2,19.6,499800.0
312 | 6.376,6.87,19.6,485100.0
313 | 6.041,7.7,19.6,428400.0
314 | 5.708,11.74,19.6,388500.0
315 | 6.415,6.12,19.6,525000.0
316 | 6.431,5.08,19.6,516600.0
317 | 6.312,6.15,19.6,483000.0
318 | 6.083,12.79,19.6,466200.0
319 | 5.868,9.97,16.9,405300.0
320 | 6.333,7.34,16.9,474600.0
321 | 6.144,9.09,16.9,415800.0
322 | 5.706,12.43,16.9,359100.0
323 | 6.031,7.83,16.9,407400.0
324 | 6.316,5.68,20.2,466200.0
325 | 6.31,6.75,20.2,434700.0
326 | 6.037,8.01,20.2,443100.0
327 | 5.869,9.8,20.2,409500.0
328 | 5.895,10.56,20.2,388500.0
329 | 6.059,8.51,20.2,432600.0
330 | 5.985,9.74,20.2,399000.0
331 | 5.968,9.29,20.2,392700.0
332 | 7.241,5.49,15.5,686700.0
333 | 6.54,8.65,15.9,346500.0
334 | 6.696,7.18,17.6,501900.0
335 | 6.874,4.61,17.6,655200.0
336 | 6.014,10.53,18.8,367500.0
337 | 5.898,12.67,18.8,361200.0
338 | 6.516,6.36,17.9,485100.0
339 | 6.635,5.99,17.0,514500.0
340 | 6.939,5.89,19.7,558600.0
341 | 6.49,5.98,19.7,480900.0
342 | 6.579,5.49,18.3,506100.0
343 | 5.884,7.79,18.3,390600.0
344 | 6.728,4.5,17.0,632100.0
345 | 5.663,8.05,22.0,382200.0
346 | 5.936,5.57,22.0,432600.0
347 | 6.212,17.6,20.2,373800.0
348 | 6.395,13.27,20.2,455700.0
349 | 6.127,11.48,20.2,476700.0
350 | 6.112,12.67,20.2,474600.0
351 | 6.398,7.79,20.2,525000.0
352 | 6.251,14.19,20.2,417900.0
353 | 5.362,10.19,20.2,436800.0
354 | 5.803,14.64,20.2,352800.0
355 | 3.561,7.12,20.2,577500.0
356 | 4.963,14.0,20.2,459900.0
357 | 3.863,13.33,20.2,485100.0
358 | 4.906,34.77,20.2,289800.0
359 | 4.138,37.97,20.2,289800.0
360 | 7.313,13.44,20.2,315000.0
361 | 6.649,23.24,20.2,291900.0
362 | 6.794,21.24,20.2,279300.0
363 | 6.38,23.69,20.2,275100.0
364 | 6.223,21.78,20.2,214200.0
365 | 6.968,17.21,20.2,218400.0
366 | 6.545,21.08,20.2,228900.0
367 | 5.536,23.6,20.2,237300.0
368 | 5.52,24.56,20.2,258300.0
369 | 4.368,30.63,20.2,184800.0
370 | 5.277,30.81,20.2,151200.0
371 | 4.652,28.28,20.2,220500.0
372 | 5.0,31.99,20.2,155400.0
373 | 4.88,30.62,20.2,214200.0
374 | 5.39,20.85,20.2,241500.0
375 | 5.713,17.11,20.2,317100.0
376 | 6.051,18.76,20.2,487200.0
377 | 5.036,25.68,20.2,203700.0
378 | 6.193,15.17,20.2,289800.0
379 | 5.887,16.35,20.2,266700.0
380 | 6.471,17.12,20.2,275100.0
381 | 6.405,19.37,20.2,262500.0
382 | 5.747,19.92,20.2,178500.0
383 | 5.453,30.59,20.2,105000.0
384 | 5.852,29.97,20.2,132300.0
385 | 5.987,26.77,20.2,117600.0
386 | 6.343,20.32,20.2,151200.0
387 | 6.404,20.31,20.2,254100.0
388 | 5.349,19.77,20.2,174300.0
389 | 5.531,27.38,20.2,178500.0
390 | 5.683,22.98,20.2,105000.0
391 | 4.138,23.34,20.2,249900.0
392 | 5.608,12.13,20.2,585900.0
393 | 5.617,26.4,20.2,361200.0
394 | 6.852,19.78,20.2,577500.0
395 | 5.757,10.11,20.2,315000.0
396 | 6.657,21.22,20.2,361200.0
397 | 4.628,34.37,20.2,375900.0
398 | 5.155,20.08,20.2,342300.0
399 | 4.519,36.98,20.2,147000.0
400 | 6.434,29.05,20.2,151200.0
401 | 6.782,25.79,20.2,157500.0
402 | 5.304,26.64,20.2,218400.0
403 | 5.957,20.62,20.2,184800.0
404 | 6.824,22.74,20.2,176400.0
405 | 6.411,15.02,20.2,350700.0
406 | 6.006,15.7,20.2,298200.0
407 | 5.648,14.1,20.2,436800.0
408 | 6.103,23.29,20.2,281400.0
409 | 5.565,17.16,20.2,245700.0
410 | 5.896,24.39,20.2,174300.0
411 | 5.837,15.69,20.2,214200.0
412 | 6.202,14.52,20.2,228900.0
413 | 6.193,21.52,20.2,231000.0
414 | 6.38,24.08,20.2,199500.0
415 | 6.348,17.64,20.2,304500.0
416 | 6.833,19.69,20.2,296100.0
417 | 6.425,12.03,20.2,338100.0
418 | 6.436,16.22,20.2,300300.0
419 | 6.208,15.17,20.2,245700.0
420 | 6.629,23.27,20.2,281400.0
421 | 6.461,18.05,20.2,201600.0
422 | 6.152,26.45,20.2,182700.0
423 | 5.935,34.02,20.2,176400.0
424 | 5.627,22.88,20.2,268800.0
425 | 5.818,22.11,20.2,220500.0
426 | 6.406,19.52,20.2,359100.0
427 | 6.219,16.59,20.2,386400.0
428 | 6.485,18.85,20.2,323400.0
429 | 5.854,23.79,20.2,226800.0
430 | 6.459,23.98,20.2,247800.0
431 | 6.341,17.79,20.2,312900.0
432 | 6.251,16.44,20.2,264600.0
433 | 6.185,18.13,20.2,296100.0
434 | 6.417,19.31,20.2,273000.0
435 | 6.749,17.44,20.2,281400.0
436 | 6.655,17.73,20.2,319200.0
437 | 6.297,17.27,20.2,338100.0
438 | 7.393,16.74,20.2,373800.0
439 | 6.728,18.71,20.2,312900.0
440 | 6.525,18.13,20.2,296100.0
441 | 5.976,19.01,20.2,266700.0
442 | 5.936,16.94,20.2,283500.0
443 | 6.301,16.23,20.2,312900.0
444 | 6.081,14.7,20.2,420000.0
445 | 6.701,16.42,20.2,344400.0
446 | 6.376,14.65,20.2,371700.0
447 | 6.317,13.99,20.2,409500.0
448 | 6.513,10.29,20.2,424200.0
449 | 6.209,13.22,20.2,449400.0
450 | 5.759,14.13,20.2,417900.0
451 | 5.952,17.15,20.2,399000.0
452 | 6.003,21.32,20.2,401100.0
453 | 5.926,18.13,20.2,401100.0
454 | 5.713,14.76,20.2,422100.0
455 | 6.167,16.29,20.2,417900.0
456 | 6.229,12.87,20.2,411600.0
457 | 6.437,14.36,20.2,487200.0
458 | 6.98,11.66,20.2,625800.0
459 | 5.427,18.14,20.2,289800.0
460 | 6.162,24.1,20.2,279300.0
461 | 6.484,18.68,20.2,350700.0
462 | 5.304,24.91,20.2,252000.0
463 | 6.185,18.03,20.2,306600.0
464 | 6.229,13.11,20.2,449400.0
465 | 6.242,10.74,20.2,483000.0
466 | 6.75,7.74,20.2,497700.0
467 | 7.061,7.01,20.2,525000.0
468 | 5.762,10.42,20.2,457800.0
469 | 5.871,13.34,20.2,432600.0
470 | 6.312,10.58,20.2,445200.0
471 | 6.114,14.98,20.2,401100.0
472 | 5.905,11.45,20.2,432600.0
473 | 5.454,18.06,20.1,319200.0
474 | 5.414,23.97,20.1,147000.0
475 | 5.093,29.68,20.1,170100.0
476 | 5.983,18.07,20.1,285600.0
477 | 5.983,13.35,20.1,422100.0
478 | 5.707,12.01,19.2,457800.0
479 | 5.926,13.59,19.2,514500.0
480 | 5.67,17.6,19.2,485100.0
481 | 5.39,21.14,19.2,413700.0
482 | 5.794,14.1,19.2,384300.0
483 | 6.019,12.92,19.2,445200.0
484 | 5.569,15.1,19.2,367500.0
485 | 6.027,14.33,19.2,352800.0
486 | 6.593,9.67,21.0,470400.0
487 | 6.12,9.08,21.0,432600.0
488 | 6.976,5.64,21.0,501900.0
489 | 6.794,6.48,21.0,462000.0
490 | 6.03,7.88,21.0,249900.0
491 |
--------------------------------------------------------------------------------
/customer-segments/README.md:
--------------------------------------------------------------------------------
1 | # Project 3: Unsupervised Learning
2 | ## Creating Customer Segments
3 |
4 | ### Install
5 |
6 | This project requires **Python 2.7** and the following Python libraries installed:
7 |
8 | - [NumPy](http://www.numpy.org/)
9 | - [Pandas](http://pandas.pydata.org)
10 | - [matplotlib](http://matplotlib.org/)
11 | - [scikit-learn](http://scikit-learn.org/stable/)
12 |
13 | You will also need to have software installed to run and execute an [iPython Notebook](http://ipython.org/notebook.html)
14 |
15 | Udacity recommends our students install [Anaconda](https://www.continuum.io/downloads), a pre-packaged Python distribution that contains all of the necessary libraries and software for this project.
16 |
17 | ### Code
18 |
19 | Template code is provided in the `customer_segments.ipynb` notebook file. Additional supporting code can be found in `renders.py`. While some code has already been implemented to get you started, you will need to implement additional functionality when requested to successfully complete the project.
20 |
21 | ### Run
22 |
23 | In a terminal or command window, navigate to the top-level project directory `creating_customer_segments/` (that contains this README) and run one of the following commands:
24 |
25 | ```ipython notebook customer_segments.ipynb```
26 | ```jupyter notebook customer_segments.ipynb```
27 |
28 | This will open the iPython Notebook software and project file in your browser.
29 |
30 | ## Data
31 |
32 | The dataset used in this project is included as `customers.csv`. You can find more information on this dataset on the [UCI Machine Learning Repository](https://archive.ics.uci.edu/ml/datasets/Wholesale+customers) page.
--------------------------------------------------------------------------------
/customer-segments/customers.csv:
--------------------------------------------------------------------------------
1 | Channel,Region,Fresh,Milk,Grocery,Frozen,Detergents_Paper,Delicatessen
2 | 2,3,12669,9656,7561,214,2674,1338
3 | 2,3,7057,9810,9568,1762,3293,1776
4 | 2,3,6353,8808,7684,2405,3516,7844
5 | 1,3,13265,1196,4221,6404,507,1788
6 | 2,3,22615,5410,7198,3915,1777,5185
7 | 2,3,9413,8259,5126,666,1795,1451
8 | 2,3,12126,3199,6975,480,3140,545
9 | 2,3,7579,4956,9426,1669,3321,2566
10 | 1,3,5963,3648,6192,425,1716,750
11 | 2,3,6006,11093,18881,1159,7425,2098
12 | 2,3,3366,5403,12974,4400,5977,1744
13 | 2,3,13146,1124,4523,1420,549,497
14 | 2,3,31714,12319,11757,287,3881,2931
15 | 2,3,21217,6208,14982,3095,6707,602
16 | 2,3,24653,9465,12091,294,5058,2168
17 | 1,3,10253,1114,3821,397,964,412
18 | 2,3,1020,8816,12121,134,4508,1080
19 | 1,3,5876,6157,2933,839,370,4478
20 | 2,3,18601,6327,10099,2205,2767,3181
21 | 1,3,7780,2495,9464,669,2518,501
22 | 2,3,17546,4519,4602,1066,2259,2124
23 | 1,3,5567,871,2010,3383,375,569
24 | 1,3,31276,1917,4469,9408,2381,4334
25 | 2,3,26373,36423,22019,5154,4337,16523
26 | 2,3,22647,9776,13792,2915,4482,5778
27 | 2,3,16165,4230,7595,201,4003,57
28 | 1,3,9898,961,2861,3151,242,833
29 | 1,3,14276,803,3045,485,100,518
30 | 2,3,4113,20484,25957,1158,8604,5206
31 | 1,3,43088,2100,2609,1200,1107,823
32 | 1,3,18815,3610,11107,1148,2134,2963
33 | 1,3,2612,4339,3133,2088,820,985
34 | 1,3,21632,1318,2886,266,918,405
35 | 1,3,29729,4786,7326,6130,361,1083
36 | 1,3,1502,1979,2262,425,483,395
37 | 2,3,688,5491,11091,833,4239,436
38 | 1,3,29955,4362,5428,1729,862,4626
39 | 2,3,15168,10556,12477,1920,6506,714
40 | 2,3,4591,15729,16709,33,6956,433
41 | 1,3,56159,555,902,10002,212,2916
42 | 1,3,24025,4332,4757,9510,1145,5864
43 | 1,3,19176,3065,5956,2033,2575,2802
44 | 2,3,10850,7555,14961,188,6899,46
45 | 2,3,630,11095,23998,787,9529,72
46 | 2,3,9670,7027,10471,541,4618,65
47 | 2,3,5181,22044,21531,1740,7353,4985
48 | 2,3,3103,14069,21955,1668,6792,1452
49 | 2,3,44466,54259,55571,7782,24171,6465
50 | 2,3,11519,6152,10868,584,5121,1476
51 | 2,3,4967,21412,28921,1798,13583,1163
52 | 1,3,6269,1095,1980,3860,609,2162
53 | 1,3,3347,4051,6996,239,1538,301
54 | 2,3,40721,3916,5876,532,2587,1278
55 | 2,3,491,10473,11532,744,5611,224
56 | 1,3,27329,1449,1947,2436,204,1333
57 | 1,3,5264,3683,5005,1057,2024,1130
58 | 2,3,4098,29892,26866,2616,17740,1340
59 | 2,3,5417,9933,10487,38,7572,1282
60 | 1,3,13779,1970,1648,596,227,436
61 | 1,3,6137,5360,8040,129,3084,1603
62 | 2,3,8590,3045,7854,96,4095,225
63 | 2,3,35942,38369,59598,3254,26701,2017
64 | 2,3,7823,6245,6544,4154,4074,964
65 | 2,3,9396,11601,15775,2896,7677,1295
66 | 1,3,4760,1227,3250,3724,1247,1145
67 | 2,3,85,20959,45828,36,24231,1423
68 | 1,3,9,1534,7417,175,3468,27
69 | 2,3,19913,6759,13462,1256,5141,834
70 | 1,3,2446,7260,3993,5870,788,3095
71 | 1,3,8352,2820,1293,779,656,144
72 | 1,3,16705,2037,3202,10643,116,1365
73 | 1,3,18291,1266,21042,5373,4173,14472
74 | 1,3,4420,5139,2661,8872,1321,181
75 | 2,3,19899,5332,8713,8132,764,648
76 | 2,3,8190,6343,9794,1285,1901,1780
77 | 1,3,20398,1137,3,4407,3,975
78 | 1,3,717,3587,6532,7530,529,894
79 | 2,3,12205,12697,28540,869,12034,1009
80 | 1,3,10766,1175,2067,2096,301,167
81 | 1,3,1640,3259,3655,868,1202,1653
82 | 1,3,7005,829,3009,430,610,529
83 | 2,3,219,9540,14403,283,7818,156
84 | 2,3,10362,9232,11009,737,3537,2342
85 | 1,3,20874,1563,1783,2320,550,772
86 | 2,3,11867,3327,4814,1178,3837,120
87 | 2,3,16117,46197,92780,1026,40827,2944
88 | 2,3,22925,73498,32114,987,20070,903
89 | 1,3,43265,5025,8117,6312,1579,14351
90 | 1,3,7864,542,4042,9735,165,46
91 | 1,3,24904,3836,5330,3443,454,3178
92 | 1,3,11405,596,1638,3347,69,360
93 | 1,3,12754,2762,2530,8693,627,1117
94 | 2,3,9198,27472,32034,3232,18906,5130
95 | 1,3,11314,3090,2062,35009,71,2698
96 | 2,3,5626,12220,11323,206,5038,244
97 | 1,3,3,2920,6252,440,223,709
98 | 2,3,23,2616,8118,145,3874,217
99 | 1,3,403,254,610,774,54,63
100 | 1,3,503,112,778,895,56,132
101 | 1,3,9658,2182,1909,5639,215,323
102 | 2,3,11594,7779,12144,3252,8035,3029
103 | 2,3,1420,10810,16267,1593,6766,1838
104 | 2,3,2932,6459,7677,2561,4573,1386
105 | 1,3,56082,3504,8906,18028,1480,2498
106 | 1,3,14100,2132,3445,1336,1491,548
107 | 1,3,15587,1014,3970,910,139,1378
108 | 2,3,1454,6337,10704,133,6830,1831
109 | 2,3,8797,10646,14886,2471,8969,1438
110 | 2,3,1531,8397,6981,247,2505,1236
111 | 2,3,1406,16729,28986,673,836,3
112 | 1,3,11818,1648,1694,2276,169,1647
113 | 2,3,12579,11114,17569,805,6457,1519
114 | 1,3,19046,2770,2469,8853,483,2708
115 | 1,3,14438,2295,1733,3220,585,1561
116 | 1,3,18044,1080,2000,2555,118,1266
117 | 1,3,11134,793,2988,2715,276,610
118 | 1,3,11173,2521,3355,1517,310,222
119 | 1,3,6990,3880,5380,1647,319,1160
120 | 1,3,20049,1891,2362,5343,411,933
121 | 1,3,8258,2344,2147,3896,266,635
122 | 1,3,17160,1200,3412,2417,174,1136
123 | 1,3,4020,3234,1498,2395,264,255
124 | 1,3,12212,201,245,1991,25,860
125 | 2,3,11170,10769,8814,2194,1976,143
126 | 1,3,36050,1642,2961,4787,500,1621
127 | 1,3,76237,3473,7102,16538,778,918
128 | 1,3,19219,1840,1658,8195,349,483
129 | 2,3,21465,7243,10685,880,2386,2749
130 | 1,3,140,8847,3823,142,1062,3
131 | 1,3,42312,926,1510,1718,410,1819
132 | 1,3,7149,2428,699,6316,395,911
133 | 1,3,2101,589,314,346,70,310
134 | 1,3,14903,2032,2479,576,955,328
135 | 1,3,9434,1042,1235,436,256,396
136 | 1,3,7388,1882,2174,720,47,537
137 | 1,3,6300,1289,2591,1170,199,326
138 | 1,3,4625,8579,7030,4575,2447,1542
139 | 1,3,3087,8080,8282,661,721,36
140 | 1,3,13537,4257,5034,155,249,3271
141 | 1,3,5387,4979,3343,825,637,929
142 | 1,3,17623,4280,7305,2279,960,2616
143 | 1,3,30379,13252,5189,321,51,1450
144 | 1,3,37036,7152,8253,2995,20,3
145 | 1,3,10405,1596,1096,8425,399,318
146 | 1,3,18827,3677,1988,118,516,201
147 | 2,3,22039,8384,34792,42,12591,4430
148 | 1,3,7769,1936,2177,926,73,520
149 | 1,3,9203,3373,2707,1286,1082,526
150 | 1,3,5924,584,542,4052,283,434
151 | 1,3,31812,1433,1651,800,113,1440
152 | 1,3,16225,1825,1765,853,170,1067
153 | 1,3,1289,3328,2022,531,255,1774
154 | 1,3,18840,1371,3135,3001,352,184
155 | 1,3,3463,9250,2368,779,302,1627
156 | 1,3,622,55,137,75,7,8
157 | 2,3,1989,10690,19460,233,11577,2153
158 | 2,3,3830,5291,14855,317,6694,3182
159 | 1,3,17773,1366,2474,3378,811,418
160 | 2,3,2861,6570,9618,930,4004,1682
161 | 2,3,355,7704,14682,398,8077,303
162 | 2,3,1725,3651,12822,824,4424,2157
163 | 1,3,12434,540,283,1092,3,2233
164 | 1,3,15177,2024,3810,2665,232,610
165 | 2,3,5531,15726,26870,2367,13726,446
166 | 2,3,5224,7603,8584,2540,3674,238
167 | 2,3,15615,12653,19858,4425,7108,2379
168 | 2,3,4822,6721,9170,993,4973,3637
169 | 1,3,2926,3195,3268,405,1680,693
170 | 1,3,5809,735,803,1393,79,429
171 | 1,3,5414,717,2155,2399,69,750
172 | 2,3,260,8675,13430,1116,7015,323
173 | 2,3,200,25862,19816,651,8773,6250
174 | 1,3,955,5479,6536,333,2840,707
175 | 2,3,514,7677,19805,937,9836,716
176 | 1,3,286,1208,5241,2515,153,1442
177 | 2,3,2343,7845,11874,52,4196,1697
178 | 1,3,45640,6958,6536,7368,1532,230
179 | 1,3,12759,7330,4533,1752,20,2631
180 | 1,3,11002,7075,4945,1152,120,395
181 | 1,3,3157,4888,2500,4477,273,2165
182 | 1,3,12356,6036,8887,402,1382,2794
183 | 1,3,112151,29627,18148,16745,4948,8550
184 | 1,3,694,8533,10518,443,6907,156
185 | 1,3,36847,43950,20170,36534,239,47943
186 | 1,3,327,918,4710,74,334,11
187 | 1,3,8170,6448,1139,2181,58,247
188 | 1,3,3009,521,854,3470,949,727
189 | 1,3,2438,8002,9819,6269,3459,3
190 | 2,3,8040,7639,11687,2758,6839,404
191 | 2,3,834,11577,11522,275,4027,1856
192 | 1,3,16936,6250,1981,7332,118,64
193 | 1,3,13624,295,1381,890,43,84
194 | 1,3,5509,1461,2251,547,187,409
195 | 2,3,180,3485,20292,959,5618,666
196 | 1,3,7107,1012,2974,806,355,1142
197 | 1,3,17023,5139,5230,7888,330,1755
198 | 1,1,30624,7209,4897,18711,763,2876
199 | 2,1,2427,7097,10391,1127,4314,1468
200 | 1,1,11686,2154,6824,3527,592,697
201 | 1,1,9670,2280,2112,520,402,347
202 | 2,1,3067,13240,23127,3941,9959,731
203 | 2,1,4484,14399,24708,3549,14235,1681
204 | 1,1,25203,11487,9490,5065,284,6854
205 | 1,1,583,685,2216,469,954,18
206 | 1,1,1956,891,5226,1383,5,1328
207 | 2,1,1107,11711,23596,955,9265,710
208 | 1,1,6373,780,950,878,288,285
209 | 2,1,2541,4737,6089,2946,5316,120
210 | 1,1,1537,3748,5838,1859,3381,806
211 | 2,1,5550,12729,16767,864,12420,797
212 | 1,1,18567,1895,1393,1801,244,2100
213 | 2,1,12119,28326,39694,4736,19410,2870
214 | 1,1,7291,1012,2062,1291,240,1775
215 | 1,1,3317,6602,6861,1329,3961,1215
216 | 2,1,2362,6551,11364,913,5957,791
217 | 1,1,2806,10765,15538,1374,5828,2388
218 | 2,1,2532,16599,36486,179,13308,674
219 | 1,1,18044,1475,2046,2532,130,1158
220 | 2,1,18,7504,15205,1285,4797,6372
221 | 1,1,4155,367,1390,2306,86,130
222 | 1,1,14755,899,1382,1765,56,749
223 | 1,1,5396,7503,10646,91,4167,239
224 | 1,1,5041,1115,2856,7496,256,375
225 | 2,1,2790,2527,5265,5612,788,1360
226 | 1,1,7274,659,1499,784,70,659
227 | 1,1,12680,3243,4157,660,761,786
228 | 2,1,20782,5921,9212,1759,2568,1553
229 | 1,1,4042,2204,1563,2286,263,689
230 | 1,1,1869,577,572,950,4762,203
231 | 1,1,8656,2746,2501,6845,694,980
232 | 2,1,11072,5989,5615,8321,955,2137
233 | 1,1,2344,10678,3828,1439,1566,490
234 | 1,1,25962,1780,3838,638,284,834
235 | 1,1,964,4984,3316,937,409,7
236 | 1,1,15603,2703,3833,4260,325,2563
237 | 1,1,1838,6380,2824,1218,1216,295
238 | 1,1,8635,820,3047,2312,415,225
239 | 1,1,18692,3838,593,4634,28,1215
240 | 1,1,7363,475,585,1112,72,216
241 | 1,1,47493,2567,3779,5243,828,2253
242 | 1,1,22096,3575,7041,11422,343,2564
243 | 1,1,24929,1801,2475,2216,412,1047
244 | 1,1,18226,659,2914,3752,586,578
245 | 1,1,11210,3576,5119,561,1682,2398
246 | 1,1,6202,7775,10817,1183,3143,1970
247 | 2,1,3062,6154,13916,230,8933,2784
248 | 1,1,8885,2428,1777,1777,430,610
249 | 1,1,13569,346,489,2077,44,659
250 | 1,1,15671,5279,2406,559,562,572
251 | 1,1,8040,3795,2070,6340,918,291
252 | 1,1,3191,1993,1799,1730,234,710
253 | 2,1,6134,23133,33586,6746,18594,5121
254 | 1,1,6623,1860,4740,7683,205,1693
255 | 1,1,29526,7961,16966,432,363,1391
256 | 1,1,10379,17972,4748,4686,1547,3265
257 | 1,1,31614,489,1495,3242,111,615
258 | 1,1,11092,5008,5249,453,392,373
259 | 1,1,8475,1931,1883,5004,3593,987
260 | 1,1,56083,4563,2124,6422,730,3321
261 | 1,1,53205,4959,7336,3012,967,818
262 | 1,1,9193,4885,2157,327,780,548
263 | 1,1,7858,1110,1094,6818,49,287
264 | 1,1,23257,1372,1677,982,429,655
265 | 1,1,2153,1115,6684,4324,2894,411
266 | 2,1,1073,9679,15445,61,5980,1265
267 | 1,1,5909,23527,13699,10155,830,3636
268 | 2,1,572,9763,22182,2221,4882,2563
269 | 1,1,20893,1222,2576,3975,737,3628
270 | 2,1,11908,8053,19847,1069,6374,698
271 | 1,1,15218,258,1138,2516,333,204
272 | 1,1,4720,1032,975,5500,197,56
273 | 1,1,2083,5007,1563,1120,147,1550
274 | 1,1,514,8323,6869,529,93,1040
275 | 1,3,36817,3045,1493,4802,210,1824
276 | 1,3,894,1703,1841,744,759,1153
277 | 1,3,680,1610,223,862,96,379
278 | 1,3,27901,3749,6964,4479,603,2503
279 | 1,3,9061,829,683,16919,621,139
280 | 1,3,11693,2317,2543,5845,274,1409
281 | 2,3,17360,6200,9694,1293,3620,1721
282 | 1,3,3366,2884,2431,977,167,1104
283 | 2,3,12238,7108,6235,1093,2328,2079
284 | 1,3,49063,3965,4252,5970,1041,1404
285 | 1,3,25767,3613,2013,10303,314,1384
286 | 1,3,68951,4411,12609,8692,751,2406
287 | 1,3,40254,640,3600,1042,436,18
288 | 1,3,7149,2247,1242,1619,1226,128
289 | 1,3,15354,2102,2828,8366,386,1027
290 | 1,3,16260,594,1296,848,445,258
291 | 1,3,42786,286,471,1388,32,22
292 | 1,3,2708,2160,2642,502,965,1522
293 | 1,3,6022,3354,3261,2507,212,686
294 | 1,3,2838,3086,4329,3838,825,1060
295 | 2,2,3996,11103,12469,902,5952,741
296 | 1,2,21273,2013,6550,909,811,1854
297 | 2,2,7588,1897,5234,417,2208,254
298 | 1,2,19087,1304,3643,3045,710,898
299 | 2,2,8090,3199,6986,1455,3712,531
300 | 2,2,6758,4560,9965,934,4538,1037
301 | 1,2,444,879,2060,264,290,259
302 | 2,2,16448,6243,6360,824,2662,2005
303 | 2,2,5283,13316,20399,1809,8752,172
304 | 2,2,2886,5302,9785,364,6236,555
305 | 2,2,2599,3688,13829,492,10069,59
306 | 2,2,161,7460,24773,617,11783,2410
307 | 2,2,243,12939,8852,799,3909,211
308 | 2,2,6468,12867,21570,1840,7558,1543
309 | 1,2,17327,2374,2842,1149,351,925
310 | 1,2,6987,1020,3007,416,257,656
311 | 2,2,918,20655,13567,1465,6846,806
312 | 1,2,7034,1492,2405,12569,299,1117
313 | 1,2,29635,2335,8280,3046,371,117
314 | 2,2,2137,3737,19172,1274,17120,142
315 | 1,2,9784,925,2405,4447,183,297
316 | 1,2,10617,1795,7647,1483,857,1233
317 | 2,2,1479,14982,11924,662,3891,3508
318 | 1,2,7127,1375,2201,2679,83,1059
319 | 1,2,1182,3088,6114,978,821,1637
320 | 1,2,11800,2713,3558,2121,706,51
321 | 2,2,9759,25071,17645,1128,12408,1625
322 | 1,2,1774,3696,2280,514,275,834
323 | 1,2,9155,1897,5167,2714,228,1113
324 | 1,2,15881,713,3315,3703,1470,229
325 | 1,2,13360,944,11593,915,1679,573
326 | 1,2,25977,3587,2464,2369,140,1092
327 | 1,2,32717,16784,13626,60869,1272,5609
328 | 1,2,4414,1610,1431,3498,387,834
329 | 1,2,542,899,1664,414,88,522
330 | 1,2,16933,2209,3389,7849,210,1534
331 | 1,2,5113,1486,4583,5127,492,739
332 | 1,2,9790,1786,5109,3570,182,1043
333 | 2,2,11223,14881,26839,1234,9606,1102
334 | 1,2,22321,3216,1447,2208,178,2602
335 | 2,2,8565,4980,67298,131,38102,1215
336 | 2,2,16823,928,2743,11559,332,3486
337 | 2,2,27082,6817,10790,1365,4111,2139
338 | 1,2,13970,1511,1330,650,146,778
339 | 1,2,9351,1347,2611,8170,442,868
340 | 1,2,3,333,7021,15601,15,550
341 | 1,2,2617,1188,5332,9584,573,1942
342 | 2,3,381,4025,9670,388,7271,1371
343 | 2,3,2320,5763,11238,767,5162,2158
344 | 1,3,255,5758,5923,349,4595,1328
345 | 2,3,1689,6964,26316,1456,15469,37
346 | 1,3,3043,1172,1763,2234,217,379
347 | 1,3,1198,2602,8335,402,3843,303
348 | 2,3,2771,6939,15541,2693,6600,1115
349 | 2,3,27380,7184,12311,2809,4621,1022
350 | 1,3,3428,2380,2028,1341,1184,665
351 | 2,3,5981,14641,20521,2005,12218,445
352 | 1,3,3521,1099,1997,1796,173,995
353 | 2,3,1210,10044,22294,1741,12638,3137
354 | 1,3,608,1106,1533,830,90,195
355 | 2,3,117,6264,21203,228,8682,1111
356 | 1,3,14039,7393,2548,6386,1333,2341
357 | 1,3,190,727,2012,245,184,127
358 | 1,3,22686,134,218,3157,9,548
359 | 2,3,37,1275,22272,137,6747,110
360 | 1,3,759,18664,1660,6114,536,4100
361 | 1,3,796,5878,2109,340,232,776
362 | 1,3,19746,2872,2006,2601,468,503
363 | 1,3,4734,607,864,1206,159,405
364 | 1,3,2121,1601,2453,560,179,712
365 | 1,3,4627,997,4438,191,1335,314
366 | 1,3,2615,873,1524,1103,514,468
367 | 2,3,4692,6128,8025,1619,4515,3105
368 | 1,3,9561,2217,1664,1173,222,447
369 | 1,3,3477,894,534,1457,252,342
370 | 1,3,22335,1196,2406,2046,101,558
371 | 1,3,6211,337,683,1089,41,296
372 | 2,3,39679,3944,4955,1364,523,2235
373 | 1,3,20105,1887,1939,8164,716,790
374 | 1,3,3884,3801,1641,876,397,4829
375 | 2,3,15076,6257,7398,1504,1916,3113
376 | 1,3,6338,2256,1668,1492,311,686
377 | 1,3,5841,1450,1162,597,476,70
378 | 2,3,3136,8630,13586,5641,4666,1426
379 | 1,3,38793,3154,2648,1034,96,1242
380 | 1,3,3225,3294,1902,282,68,1114
381 | 2,3,4048,5164,10391,130,813,179
382 | 1,3,28257,944,2146,3881,600,270
383 | 1,3,17770,4591,1617,9927,246,532
384 | 1,3,34454,7435,8469,2540,1711,2893
385 | 1,3,1821,1364,3450,4006,397,361
386 | 1,3,10683,21858,15400,3635,282,5120
387 | 1,3,11635,922,1614,2583,192,1068
388 | 1,3,1206,3620,2857,1945,353,967
389 | 1,3,20918,1916,1573,1960,231,961
390 | 1,3,9785,848,1172,1677,200,406
391 | 1,3,9385,1530,1422,3019,227,684
392 | 1,3,3352,1181,1328,5502,311,1000
393 | 1,3,2647,2761,2313,907,95,1827
394 | 1,3,518,4180,3600,659,122,654
395 | 1,3,23632,6730,3842,8620,385,819
396 | 1,3,12377,865,3204,1398,149,452
397 | 1,3,9602,1316,1263,2921,841,290
398 | 2,3,4515,11991,9345,2644,3378,2213
399 | 1,3,11535,1666,1428,6838,64,743
400 | 1,3,11442,1032,582,5390,74,247
401 | 1,3,9612,577,935,1601,469,375
402 | 1,3,4446,906,1238,3576,153,1014
403 | 1,3,27167,2801,2128,13223,92,1902
404 | 1,3,26539,4753,5091,220,10,340
405 | 1,3,25606,11006,4604,127,632,288
406 | 1,3,18073,4613,3444,4324,914,715
407 | 1,3,6884,1046,1167,2069,593,378
408 | 1,3,25066,5010,5026,9806,1092,960
409 | 2,3,7362,12844,18683,2854,7883,553
410 | 2,3,8257,3880,6407,1646,2730,344
411 | 1,3,8708,3634,6100,2349,2123,5137
412 | 1,3,6633,2096,4563,1389,1860,1892
413 | 1,3,2126,3289,3281,1535,235,4365
414 | 1,3,97,3605,12400,98,2970,62
415 | 1,3,4983,4859,6633,17866,912,2435
416 | 1,3,5969,1990,3417,5679,1135,290
417 | 2,3,7842,6046,8552,1691,3540,1874
418 | 2,3,4389,10940,10908,848,6728,993
419 | 1,3,5065,5499,11055,364,3485,1063
420 | 2,3,660,8494,18622,133,6740,776
421 | 1,3,8861,3783,2223,633,1580,1521
422 | 1,3,4456,5266,13227,25,6818,1393
423 | 2,3,17063,4847,9053,1031,3415,1784
424 | 1,3,26400,1377,4172,830,948,1218
425 | 2,3,17565,3686,4657,1059,1803,668
426 | 2,3,16980,2884,12232,874,3213,249
427 | 1,3,11243,2408,2593,15348,108,1886
428 | 1,3,13134,9347,14316,3141,5079,1894
429 | 1,3,31012,16687,5429,15082,439,1163
430 | 1,3,3047,5970,4910,2198,850,317
431 | 1,3,8607,1750,3580,47,84,2501
432 | 1,3,3097,4230,16483,575,241,2080
433 | 1,3,8533,5506,5160,13486,1377,1498
434 | 1,3,21117,1162,4754,269,1328,395
435 | 1,3,1982,3218,1493,1541,356,1449
436 | 1,3,16731,3922,7994,688,2371,838
437 | 1,3,29703,12051,16027,13135,182,2204
438 | 1,3,39228,1431,764,4510,93,2346
439 | 2,3,14531,15488,30243,437,14841,1867
440 | 1,3,10290,1981,2232,1038,168,2125
441 | 1,3,2787,1698,2510,65,477,52
442 |
--------------------------------------------------------------------------------
/customer-segments/renders.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import matplotlib.cm as cm
3 | import pandas as pd
4 | import numpy as np
5 | from sklearn.decomposition import pca
6 |
7 | def pca_results(good_data, pca):
8 | '''
9 | Create a DataFrame of the PCA results
10 | Includes dimension feature weights and explained variance
11 | Visualizes the PCA results
12 | '''
13 |
14 | # Dimension indexing
15 |     dimensions = ['Dimension {}'.format(i) for i in range(1,len(pca.components_)+1)]
16 |
17 | # PCA components
18 | components = pd.DataFrame(np.round(pca.components_, 4), columns = good_data.keys())
19 | components.index = dimensions
20 |
21 | # PCA explained variance
22 | ratios = pca.explained_variance_ratio_.reshape(len(pca.components_), 1)
23 | variance_ratios = pd.DataFrame(np.round(ratios, 4), columns = ['Explained Variance'])
24 | variance_ratios.index = dimensions
25 |
26 | # Create a bar plot visualization
27 | fig, ax = plt.subplots(figsize = (14,8))
28 |
29 | # Plot the feature weights as a function of the components
30 | components.plot(ax = ax, kind = 'bar');
31 | ax.set_ylabel("Feature Weights")
32 | ax.set_xticklabels(dimensions, rotation=0)
33 |
34 |
35 | # Display the explained variance ratios
36 | for i, ev in enumerate(pca.explained_variance_ratio_):
37 | ax.text(i-0.40, ax.get_ylim()[1] + 0.05, "Explained Variance\n %.4f"%(ev))
38 |
39 | # Return a concatenated DataFrame
40 | return pd.concat([variance_ratios, components], axis = 1)
41 |
42 | def cluster_results(reduced_data, preds, centers, pca_samples):
43 | '''
44 | Visualizes the PCA-reduced cluster data in two dimensions
45 | Adds cues for cluster centers and student-selected sample data
46 | '''
47 |
48 | predictions = pd.DataFrame(preds, columns = ['Cluster'])
49 | plot_data = pd.concat([predictions, reduced_data], axis = 1)
50 |
51 | # Generate the cluster plot
52 | fig, ax = plt.subplots(figsize = (14,8))
53 |
54 | # Color map
55 | cmap = cm.get_cmap('gist_rainbow')
56 |
57 | # Color the points based on assigned cluster
58 | for i, cluster in plot_data.groupby('Cluster'):
59 | cluster.plot(ax = ax, kind = 'scatter', x = 'Dimension 1', y = 'Dimension 2', \
60 | color = cmap((i)*1.0/(len(centers)-1)), label = 'Cluster %i'%(i), s=30);
61 |
62 | # Plot centers with indicators
63 | for i, c in enumerate(centers):
64 | ax.scatter(x = c[0], y = c[1], color = 'white', edgecolors = 'black', \
65 | alpha = 1, linewidth = 2, marker = 'o', s=200);
66 | ax.scatter(x = c[0], y = c[1], marker='$%d$'%(i), alpha = 1, s=100);
67 |
68 | # Plot transformed sample points
69 | ax.scatter(x = pca_samples[:,0], y = pca_samples[:,1], \
70 | s = 150, linewidth = 4, color = 'black', marker = 'x');
71 |
72 | # Set plot title
73 | ax.set_title("Cluster Learning on PCA-Reduced Data - Centroids Marked by Number\nTransformed Sample Data Marked by Black Cross");
74 |
75 |
76 | def channel_results(reduced_data, outliers, pca_samples):
77 | '''
78 | Visualizes the PCA-reduced cluster data in two dimensions using the full dataset
79 | Data is labeled by "Channel" and cues added for student-selected sample data
80 | '''
81 |
82 | # Check that the dataset is loadable
83 | try:
84 | full_data = pd.read_csv("customers.csv")
85 | except:
86 | print "Dataset could not be loaded. Is the file missing?"
87 | return False
88 |
89 | # Create the Channel DataFrame
90 | channel = pd.DataFrame(full_data['Channel'], columns = ['Channel'])
91 | channel = channel.drop(channel.index[outliers]).reset_index(drop = True)
92 | labeled = pd.concat([reduced_data, channel], axis = 1)
93 |
94 | # Generate the cluster plot
95 | fig, ax = plt.subplots(figsize = (14,8))
96 |
97 | # Color map
98 | cmap = cm.get_cmap('gist_rainbow')
99 |
100 | # Color the points based on assigned Channel
101 | labels = ['Hotel/Restaurant/Cafe', 'Retailer']
102 | grouped = labeled.groupby('Channel')
103 | for i, channel in grouped:
104 | channel.plot(ax = ax, kind = 'scatter', x = 'Dimension 1', y = 'Dimension 2', \
105 | color = cmap((i-1)*1.0/2), label = labels[i-1], s=30);
106 |
107 | # Plot transformed sample points
108 | for i, sample in enumerate(pca_samples):
109 | ax.scatter(x = sample[0], y = sample[1], \
110 | s = 200, linewidth = 3, color = 'black', marker = 'o', facecolors = 'none');
111 | ax.scatter(x = sample[0]+0.25, y = sample[1]+0.3, marker='$%d$'%(i), alpha = 1, s=125);
112 |
113 | # Set plot title
114 | ax.set_title("PCA-Reduced Data Labeled by 'Channel'\nTransformed Sample Data Circled");
--------------------------------------------------------------------------------
/customer-segments/renders.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/customer-segments/renders.pyc
--------------------------------------------------------------------------------
/smartcab/README.md:
--------------------------------------------------------------------------------
1 | # Project 4: Reinforcement Learning
2 | ## Train a Smartcab How to Drive
3 |
4 | ### Install
5 |
6 | This project requires **Python 2.7** with the [pygame](https://www.pygame.org/wiki/GettingStarted) library installed.
7 |
8 |
9 | ### Code
10 |
11 | Template code is provided in the `smartcab/agent.py` Python file. Additional supporting Python code can be found in `smartcab/environment.py`, `smartcab/planner.py`, and `smartcab/simulator.py`. Supporting images for the graphical user interface can be found in the `images` folder. While some code has already been implemented to get you started, you will need to implement additional functionality for the `LearningAgent` class in `agent.py` when requested to successfully complete the project.
12 |
13 | ### Run
14 |
15 | In a terminal or command window, navigate to the top-level project directory `smartcab/` (that contains this README) and run one of the following commands:
16 |
17 | ```python smartcab/agent.py```
18 | ```python -m smartcab.agent```
19 |
20 | This will run the `agent.py` file and execute your agent code.
21 |
--------------------------------------------------------------------------------
/smartcab/SmartCabReport.md:
--------------------------------------------------------------------------------
1 | # SmartCab Reinforcement Learning Report
2 |
3 | ### QUESTION: Observe what you see with the agent's behavior as it takes random actions. Does the smart cab eventually make it to the destination? Are there any other interesting observations to note?
4 |
5 | When taking turns randomly, the car does not reliably reach its destination. Sometimes it gets lucky, but it usually runs out of turns before successfully reaching the destination. After watching the visual simulation in Pygame, it appears rare for the car to encounter traffic from the left or right. These inputs are unlikely to be useful when implementing the learning algorithm.
6 |
7 | After running 100 trials with random actions (enforce_deadline=True), only 18 (18%) reached the destination.
8 |
9 | ### QUESTION: What states have you identified that are appropriate for modeling the smart cab and environment? Why do you believe each of these states to be appropriate for this problem?
10 |
11 | I set the state to a tuple whose elements represent Light, Waypoint, Oncoming traffic, and Left traffic. In total, this provides 96 possible combinations for the state. I chose this representation because it maximizes information while minimizing complexity. It also accounts for all reasonable traffic laws and collision avoidance in the environment.
12 |
13 | I could have added right traffic to the tuple to bring it to 384 possible combinations. However, in the United States traffic approaching from the right would not cause an issue for the driver, so this has been left out. I should also note that Deadline was left out because it would increase the number of states beyond a reasonable number. Adding a deadline or right traffic would cause the model to suffer from the Curse of Dimensionality.
14 |
15 | `example_state = (light, waypoint, oncoming, left)`
16 | `>>> ('red', 'left', None, None)`
17 |
18 | - Light: Necessary for training the cab to obey traffic rules.
19 | - Waypoint: Necessary for providing a general sense of direction. I imagine this input like a passenger in the back seat giving left/right/forward directions to the driver. Without this data, the driver would still be randomly searching for the endpoint.
20 | - Oncoming Traffic: Necessary for avoiding traffic accidents. Of the three traffic inputs, oncoming appears to contain the most information.
21 | - Left Traffic: Necessary for obeying right-of-way rules. Traffic approaching from the left can have the right of way in this scenario.
22 |
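A quick way to sanity-check the 96-combination figure (a minimal sketch; the value sets below are assumptions based on the inputs described in this report, mirroring how `agent_smart.py` enumerates states with `itertools.product`):

```python
import itertools

lights = ['red', 'green']                     # 2 values
waypoints = ['forward', 'left', 'right']      # 3 values
traffic = [None, 'forward', 'left', 'right']  # 4 values, used for both oncoming and left

states = list(itertools.product(lights, waypoints, traffic, traffic))
print(len(states))  # 96; adding right traffic as a fifth element would give 96 * 4 = 384
```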
23 |
24 | ### OPTIONAL: How many states in total exist for the smart cab in this environment? Does this number seem reasonable given that the goal of Q-Learning is to learn and make informed decisions about each state? Why or why not?
25 |
26 | The total number of states can be determined by multiplying the count of possible values for each input.
27 |
28 | inputs = light(2), waypoint(3), oncoming(4), left(4), right(4)
29 |
30 | With the inputs above, possible state combinations are 2x3x4x4x4 = 384.
31 |
32 | If you add another state for each number of turns left in deadline(50), possible states go up to 19,200.
33 |
34 | With only 100 trials to learn from, 19,200 states is not reasonable. The driver would not enter many of the possible states during the course of the trials. Most of this information should be consolidated into a simplified structure.
35 |
36 |
37 | ### QUESTION: What changes do you notice in the agent's behavior when compared to the basic driving agent when random actions were always taken? Why is this behavior occurring?
38 |
39 | Updating the Q table with the current reward value creates a much more effective driver. The algorithm does not consider the impact of future moves, but it still has a much higher success rate compared to complete randomness. Visually, the smartcab appears to be more patient and deliberate with its actions. It drives in straight lines and is not afraid to wait at traffic lights. After 100 trials, the driver failed to reach the destination only 10 times for a 90% success rate.
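
One way to read this intermediate step (a sketch of the idea, not the exact code used): the agent overwrites the Q value for the state/action pair with the reward it just received, then greedily picks the action with the highest stored value.

```python
import random

# Hypothetical helpers illustrating the reward-only step described above;
# the full learning agent appears later in agent_smart.py.
def naive_update(q_table, state, action, reward):
    # Overwrite the stored value with the immediate reward (no look-ahead)
    q_table.setdefault(state, {})[action] = reward

def greedy_action(q_table, state, actions):
    # Pick the action with the highest stored value; unseen actions default to 0
    values = dict((a, q_table.get(state, {}).get(a, 0.0)) for a in actions)
    best = max(values.values())
    return random.choice([a for a, v in values.items() if v == best])
```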
40 |
41 | ### QUESTION: Report the different values for the parameters tuned in your basic implementation of Q-Learning. For which set of parameters does the agent perform best? How well does the final driving agent perform?
42 |
43 | - α (alpha) is the learning rate. A higher alpha value will make the algorithm more sensitive to changes in rewards.
44 | - γ (gamma) is the discount rate on future rewards. A higher gamma will make the algorithm more sensitive to rewards from future actions.
45 | - ε (epsilon) is the exploration rate. A higher epsilon will cause the algorithm to make more random choices.
46 | - λ (lambda/decay) is the decay constant to reduce epsilon for (K) number of trials. A higher lambda will cause epsilon to decay faster, reducing randomness earlier in the trial run.
47 |
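For reference, these parameters combine in the update implemented by `learn()` and `reset()` in `agent_smart.py`:

```
Q(s, a) <- Q(s, a) + alpha * [ reward + gamma * max_a' Q(s', a') - Q(s, a) ]

epsilon <- epsilon * (1 - lambda)    # applied once per trial, in reset()
```
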
48 | I ran experiments with the tuning parameters listed below. Each experiment ran with 100 trials.
49 |
50 | **α=0.3, γ=0.3, ε=1.0, λ=0.01** (Random/Explorer Learner)
51 |
52 | The parameters above are designed to create a random explorer. At the beginning, the model is almost completely random, with epsilon gradually decaying by 1% each trial. This policy resulted in 46 failed runs, or a success rate of 54%. This is an improvement, but I suspect the slow decay rate of epsilon is causing the algorithm to behave with too much randomness.
53 |
54 | **α=0.5, γ=0.5, ε=1.0, λ=0.1** (Balanced Learner)
55 |
56 | For the second trial run, the epsilon decay rate was increased to 10% per trial. This combination eliminates much of the randomness in the previous example. For instance, the model goes from 100% random at trial 0, to roughly 31% random by trial 10, and 0.4% by trial 50. In terms of learning, it puts an equal weight on the current action reward and the future action reward.
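
A quick check of this schedule (a small sketch; it assumes one decay per `reset()` as in `agent_smart.py`, and the exact percentages depend on whether the first decay is counted before or after trial 1):

```python
epsilon, decay = 1.0, 0.1
for trial in range(1, 101):
    epsilon *= (1 - decay)  # one decay per reset()
    if trial in (10, 50):
        print("trial %d: epsilon = %.4f" % (trial, epsilon))
# trial 10: epsilon = 0.3487, trial 50: epsilon = 0.0052
```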
57 |
58 | The failed runs were reduced to only 16, for a total success rate of 84%. The balanced learner puts the same amount of weight on the reward in the current move as it does for the future move.
59 |
60 | **α=0.8, γ=0.2, ε=0.8, λ=0.1** (Short-Term/Greedy Learner) WINNER
61 |
62 | The lowered epsilon value makes this a greedy combination that does not make exploratory moves as often. The alpha was increased to put a heavier weight on new information, while the gamma was decreased to make future rewards less important to the driver.
63 |
64 | Short-term learning worked well, with only 4 failed runs, or a 96% success rate. Comparatively, this combination leads to a policy with more negative Q values, discouraging bad moves. Overall, the Short-Term/Greedy Learner appears to be the optimal policy for the smartcab problem.
65 |
66 | **α=0.2, γ=0.8, ε=0.8, λ=0.1** (Long-Term/Greedy Learner)
67 |
68 | To test the relationship between alpha and gamma, I left the epsilon values unchanged for this trial run. The learning rate was decreased to make it harder to change established values, while gamma was increased to put a higher weight on future rewards. This model should be more farsighted and not as sensitive to immediate rewards.
69 |
70 | This policy leads to larger positive Q values, or encouragement for successful moves. It failed 13 runs, for an 87% success rate. By the end of the 100 trials, this policy was still receiving frequent negative rewards because it failed to recognize issues or opportunities in the current state.
71 |
72 | ### QUESTION: Does your agent get close to finding an optimal policy, i.e. reach the destination in the minimum possible time, and not incur any penalties? How would you describe an optimal policy for this problem?
73 |
74 | The optimal policy turned out to be the **Short-Term/Greedy** strategy.
75 |
76 | Towards the end of the 100 trials, the agent is far less likely to fail, and the total reward received was always greater than 0. Most failed runs occur within the first 20 attempts. In fact, it is common for the driver to avoid all negative rewards during many of the later trials, which is optimal behavior.
77 |
78 | The agent learns very quickly with the input states provided, which leads me to believe that a short-term policy is optimal. It only takes about 5 to 10 turns for the Q table to be filled with effective knowledge about the most common situations. There do not appear to be any intricate patterns between the states that would justify a more complex model. In other words, the learning rate (α) should be high, while the gamma (γ) and epsilon (ε) should be low. This combination forces the algorithm to weigh its decisions mostly on the current move; only when the next move carries a significant reward will it influence the chosen action.
79 |
80 | The sample of results below shows the difference between the rewards received on trials 1, 10, 50, and 100. The mean reward gradually increases with the number of trials, and negative rewards become less common.
81 |
82 | **Trial 1 Rewards**
83 |
84 | - Mean: -0.125
85 | - Min: -1.0
86 | - Max: 2.0
87 |
88 | **Trial 10 Rewards**
89 |
90 | - Mean: 0.71
91 | - Min: -1.0
92 | - Max: 2.0
93 |
94 | **Trial 50 Rewards**
95 |
96 | - Mean: 1.43
97 | - Min: 0.0
98 | - Max: 2.0
99 |
100 | **Trial 100 Rewards**
101 |
102 | - Mean: 1.64
103 | - Min: 0.0
104 | - Max: 2.0
105 |
106 |
--------------------------------------------------------------------------------
/smartcab/SmartCabReport.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/SmartCabReport.pdf
--------------------------------------------------------------------------------
/smartcab/smartcab/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/__init__.py
--------------------------------------------------------------------------------
/smartcab/smartcab/agent_original.py:
--------------------------------------------------------------------------------
1 | import random
2 | from environment import Agent, Environment
3 | from planner import RoutePlanner
4 | from simulator import Simulator
5 |
6 | class LearningAgent(Agent):
7 | """An agent that learns to drive in the smartcab world."""
8 |
9 | def __init__(self, env):
10 | super(LearningAgent, self).__init__(env) # sets self.env = env, state = None, next_waypoint = None, and a default color
11 | self.color = 'red' # override color
12 | self.planner = RoutePlanner(self.env, self) # simple route planner to get next_waypoint
13 | # TODO: Initialize any additional variables here
14 |
15 | def reset(self, destination=None):
16 | self.planner.route_to(destination)
17 | # TODO: Prepare for a new trip; reset any variables here, if required
18 |
19 | def update(self, t):
20 | # Gather inputs
21 | self.next_waypoint = self.planner.next_waypoint() # from route planner, also displayed by simulator
22 | inputs = self.env.sense(self)
23 | deadline = self.env.get_deadline(self)
24 |
25 | # TODO: Update state
26 |
27 | # TODO: Select action according to your policy
28 | action = None
29 |
30 | # Execute action and get reward
31 | reward = self.env.act(self, action)
32 |
33 | # TODO: Learn policy based on state, action, reward
34 |
35 | print "LearningAgent.update(): deadline = {}, inputs = {}, action = {}, reward = {}".format(deadline, inputs, action, reward) # [debug]
36 |
37 |
38 | def run():
39 | """Run the agent for a finite number of trials."""
40 |
41 | # Set up environment and agent
42 | e = Environment() # create environment (also adds some dummy traffic)
43 | a = e.create_agent(LearningAgent) # create agent
44 | e.set_primary_agent(a, enforce_deadline=True) # specify agent to track
45 | # NOTE: You can set enforce_deadline=False while debugging to allow longer trials
46 |
47 | # Now simulate it
48 | sim = Simulator(e, update_delay=0.5, display=True) # create simulator (uses pygame when display=True, if available)
49 | # NOTE: To speed up simulation, reduce update_delay and/or set display=False
50 |
51 | sim.run(n_trials=100) # run for a specified number of trials
52 | # NOTE: To quit midway, press Esc or close pygame window, or hit Ctrl+C on the command-line
53 |
54 |
55 | if __name__ == '__main__':
56 | run()
57 |
--------------------------------------------------------------------------------
/smartcab/smartcab/agent_smart.py:
--------------------------------------------------------------------------------
1 | import random
2 | import pandas as pd
3 | import numpy as np
4 | import itertools
5 | from environment import Agent, Environment
6 | from planner import RoutePlanner
7 | from simulator import Simulator
8 |
9 | class LearningAgent(Agent):
10 | """An agent that learns to drive in the smartcab world."""
11 |
12 | def __init__(self, env):
13 | super(LearningAgent, self).__init__(env) # sets self.env = env, state = None, next_waypoint = None, and a default color
14 | self.color = 'red' # override color
15 | self.planner = RoutePlanner(self.env, self) # simple route planner to get next_waypoint
16 |
17 | self.actions = Environment.valid_actions # All possible actions
18 |         self.states = self.list_valid_states()         # All possible states
19 | self.q_table = self.init_q_table() # Q Table with zeros
20 |
21 | # Tuning Parameters
22 | self.alpha = 0.8 # learning rate 1=sensitive, 0=insensitive
23 | self.gamma = 0.2 # discount rate 1=long_term, 0=short_term
24 | self.epsilon = 0.8 # exploration rate 1=random, 0=specific
25 | self.decay = 0.1 # decay rate of epsilon 1=faster, 0=slower
26 |
27 | # Data logged for debugging/analysis
28 | self.n_trials = 0
29 | self.review = pd.DataFrame(columns=['trial', 'deadline', 'reward'])
30 |
31 | def reset(self, destination=None):
32 | self.planner.route_to(destination)
33 | self.n_trials += 1
34 | self.epsilon = self.epsilon * (1 - self.decay)
35 | self.debug() # debug helper
36 |
37 | def update(self, t):
38 | # Gather inputs
39 | self.next_waypoint = self.planner.next_waypoint() # from route planner, also displayed by simulator
40 | inputs = self.env.sense(self)
41 | deadline = self.env.get_deadline(self)
42 |
43 | # Update state
44 |         self.state = (inputs['light'], self.next_waypoint, inputs['oncoming'], inputs['left'])
45 |
46 | # Select action according to your policy
47 | action = self.choose_action(self.state)
48 |
49 | # Execute action and get reward
50 | reward = self.env.act(self, action)
51 |
52 | # Learn policy based on state, action, reward
53 | self.learn(self.state, action, reward)
54 |
55 |         # Collect per-turn data for debugging/analysis
56 | review_df = pd.DataFrame([[self.n_trials, deadline, reward]], columns=['trial', 'deadline', 'reward'])
57 | self.review = self.review.append(review_df)
58 |
59 | print "LearningAgent.update(): deadline = {}, inputs = {}, action = {}, reward = {}".format(deadline, inputs, action, reward) # [debug]
60 |
61 |     def list_valid_actions(self): # builds the list of state tuples used to index the Q table
62 | # Create a list of all possible state combinations
63 | light_states = ['red', 'green']
64 | waypoint_states = self.actions
65 | oncoming_states = Environment.valid_inputs['oncoming']
66 |         left_states = Environment.valid_inputs['left']
67 | arr = [light_states, waypoint_states, oncoming_states, left_states]
68 | possible_combos = list(itertools.product(*arr))
69 | return possible_combos
70 |
71 |
72 | def init_q_table(self):
73 |         # Create a pandas DataFrame for the Q table and initialize it with zeros
74 | shape = (len(self.states), len(self.actions))
75 | zeros = np.zeros(shape, dtype=float)
76 | index = pd.MultiIndex.from_tuples(self.states, names=['light', 'waypoint', 'oncoming', 'left'])
77 | q_table = pd.DataFrame(zeros, index=index, columns=self.actions)
78 | return q_table
79 |
80 | def get_q_values(self, state):
81 | # Helper method to return a series of values from the Q Table
82 | light, waypoint = state[0], state[1]
83 | oncoming = state[2] or np.nan # convert None to NaN
84 | left = state[3] or np.nan
85 | series = self.q_table.loc[(light, waypoint, oncoming, left)]
86 | return series
87 |
88 | def random_action(self):
89 | # Helper method to perform a random action
90 | return random.choice(self.actions)
91 |
92 | def choose_action(self, state):
93 |         # Look up the Q values for the given state, then act epsilon-greedily:
94 |         # explore with probability epsilon, otherwise exploit the best known action
95 | q_values = self.get_q_values(state)
96 | q_max = q_values.values.max()
97 |
98 |         if self.epsilon > random.random():
99 | # Choose a random action if epsilon is greater than random value
100 | action = self.random_action()
101 | else:
102 | # Select best action (use random choice to break ties)
103 | best_actions = q_values[q_values == q_max]
104 | action = random.choice(best_actions.index)
105 | return action
106 |
107 | def learn(self, state, action, reward):
108 | # Query the current q value for state/action pair
109 | q_values = self.get_q_values(state)
110 | q_current = q_values[action]
111 |
112 | # Get the max Q value in the next state
113 | inputs = self.env.sense(self)
114 | next_state = (inputs['light'], self.planner.next_waypoint(), inputs['oncoming'], inputs['left'])
115 | q_values_next = self.get_q_values(next_state)
116 | q_max_next = q_values_next.values.max()
117 |
118 |         # Q-learning update: Q(s,a) <- Q(s,a) + alpha * (reward + gamma * max_a' Q(s',a') - Q(s,a))
119 |         q = q_current + self.alpha * (reward + (self.gamma * q_max_next) - q_current)
120 | q_values.loc[action] = q.round(5)
121 |
122 | def debug(self):
123 |         print "Epsilon ---> {}".format(self.epsilon)
124 |         print self.q_table
125 |         reward = self.review[self.review['trial'] == self.n_trials - 1].reward # stats for the trial that just ended
126 |         print "Mean --> {}".format(reward.mean())
127 |         print "Min --> {}".format(reward.min())
128 |         print "Max --> {}".format(reward.max())
129 |
130 | def run():
131 | """Run the agent for a finite number of trials."""
132 |
133 | # Set up environment and agent
134 | e = Environment() # create environment (also adds some dummy traffic)
135 | a = e.create_agent(LearningAgent) # create agent
136 | e.set_primary_agent(a, enforce_deadline=True) # specify agent to track
137 | # NOTE: You can set enforce_deadline=False while debugging to allow longer trials
138 |
139 | # Now simulate it
140 | sim = Simulator(e, update_delay=0.01, display=False) # create simulator (uses pygame when display=True, if available)
141 | # NOTE: To speed up simulation, reduce update_delay and/or set display=False
142 |
143 | sim.run(n_trials=100) # run for a specified number of trials
144 | # NOTE: To quit midway, press Esc or close pygame window, or hit Ctrl+C on the command-line
145 |
146 |
147 | if __name__ == '__main__':
148 | run()
149 |
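150 | # ---------------------------------------------------------------------------
151 | # Editor's note: the pandas-based Q table above can obscure the algorithm on a
152 | # first read. The commented-out block below is a minimal, illustrative sketch of
153 | # the same epsilon-greedy Q-learning loop using only a plain dict; the names
154 | # q, ACTIONS, choose and learn are hypothetical and are not used by this agent.
155 | #
156 | # import random
157 | # from collections import defaultdict
158 | #
159 | # q = defaultdict(float)                      # (state, action) -> Q value, default 0.0
160 | # ACTIONS = [None, 'forward', 'left', 'right']
161 | #
162 | # def choose(state, epsilon):
163 | #     if random.random() < epsilon:           # explore with probability epsilon
164 | #         return random.choice(ACTIONS)
165 | #     return max(ACTIONS, key=lambda a: q[(state, a)])  # otherwise exploit (first max wins ties)
166 | #
167 | # def learn(state, action, reward, next_state, alpha=0.8, gamma=0.2):
168 | #     best_next = max(q[(next_state, a)] for a in ACTIONS)
169 | #     q[(state, action)] += alpha * (reward + gamma * best_next - q[(state, action)])
170 | # ---------------------------------------------------------------------------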
--------------------------------------------------------------------------------
/smartcab/smartcab/environment.py:
--------------------------------------------------------------------------------
1 | import time
2 | import random
3 | from collections import OrderedDict
4 |
5 | from simulator import Simulator
6 |
7 | class TrafficLight(object):
8 | """A traffic light that switches periodically."""
9 |
10 | valid_states = [True, False] # True = NS open, False = EW open
11 |
12 | def __init__(self, state=None, period=None):
13 | self.state = state if state is not None else random.choice(self.valid_states)
14 | self.period = period if period is not None else random.choice([3, 4, 5])
15 | self.last_updated = 0
16 |
17 | def reset(self):
18 | self.last_updated = 0
19 |
20 | def update(self, t):
21 | if t - self.last_updated >= self.period:
22 | self.state = not self.state # assuming state is boolean
23 | self.last_updated = t
24 |
25 |
26 | class Environment(object):
27 | """Environment within which all agents operate."""
28 |
29 | valid_actions = [None, 'forward', 'left', 'right']
30 | valid_inputs = {'light': TrafficLight.valid_states, 'oncoming': valid_actions, 'left': valid_actions, 'right': valid_actions}
31 | valid_headings = [(1, 0), (0, -1), (-1, 0), (0, 1)] # ENWS
32 | hard_time_limit = -100 # even if enforce_deadline is False, end trial when deadline reaches this value (to avoid deadlocks)
33 |
34 | def __init__(self):
35 | self.done = False
36 | self.t = 0
37 | self.agent_states = OrderedDict()
38 | self.status_text = ""
39 |
40 | # Road network
41 | self.grid_size = (8, 6) # (cols, rows)
42 | self.bounds = (1, 1, self.grid_size[0], self.grid_size[1])
43 | self.block_size = 100
44 | self.intersections = OrderedDict()
45 | self.roads = []
46 | for x in xrange(self.bounds[0], self.bounds[2] + 1):
47 | for y in xrange(self.bounds[1], self.bounds[3] + 1):
48 | self.intersections[(x, y)] = TrafficLight() # a traffic light at each intersection
49 |
50 | for a in self.intersections:
51 | for b in self.intersections:
52 | if a == b:
53 | continue
54 | if (abs(a[0] - b[0]) + abs(a[1] - b[1])) == 1: # L1 distance = 1
55 | self.roads.append((a, b))
56 |
57 | # Dummy agents
58 | self.num_dummies = 3 # no. of dummy agents
59 | for i in xrange(self.num_dummies):
60 | self.create_agent(DummyAgent)
61 |
62 | # Primary agent
63 | self.primary_agent = None # to be set explicitly
64 | self.enforce_deadline = False
65 |
66 | def create_agent(self, agent_class, *args, **kwargs):
67 | agent = agent_class(self, *args, **kwargs)
68 | self.agent_states[agent] = {'location': random.choice(self.intersections.keys()), 'heading': (0, 1)}
69 | return agent
70 |
71 | def set_primary_agent(self, agent, enforce_deadline=False):
72 | self.primary_agent = agent
73 | self.enforce_deadline = enforce_deadline
74 |
75 | def reset(self):
76 | self.done = False
77 | self.t = 0
78 |
79 | # Reset traffic lights
80 | for traffic_light in self.intersections.itervalues():
81 | traffic_light.reset()
82 |
83 | # Pick a start and a destination
84 | start = random.choice(self.intersections.keys())
85 | destination = random.choice(self.intersections.keys())
86 |
87 | # Ensure starting location and destination are not too close
88 | while self.compute_dist(start, destination) < 4:
89 | start = random.choice(self.intersections.keys())
90 | destination = random.choice(self.intersections.keys())
91 |
92 | start_heading = random.choice(self.valid_headings)
93 | deadline = self.compute_dist(start, destination) * 5
94 | print "Environment.reset(): Trial set up with start = {}, destination = {}, deadline = {}".format(start, destination, deadline)
95 |
96 | # Initialize agent(s)
97 | for agent in self.agent_states.iterkeys():
98 | self.agent_states[agent] = {
99 | 'location': start if agent is self.primary_agent else random.choice(self.intersections.keys()),
100 | 'heading': start_heading if agent is self.primary_agent else random.choice(self.valid_headings),
101 | 'destination': destination if agent is self.primary_agent else None,
102 | 'deadline': deadline if agent is self.primary_agent else None}
103 | agent.reset(destination=(destination if agent is self.primary_agent else None))
104 |
105 | def step(self):
106 | #print "Environment.step(): t = {}".format(self.t) # [debug]
107 |
108 | # Update traffic lights
109 | for intersection, traffic_light in self.intersections.iteritems():
110 | traffic_light.update(self.t)
111 |
112 | # Update agents
113 | for agent in self.agent_states.iterkeys():
114 | agent.update(self.t)
115 |
116 | self.t += 1
117 | if self.primary_agent is not None:
118 | agent_deadline = self.agent_states[self.primary_agent]['deadline']
119 | if agent_deadline <= self.hard_time_limit:
120 | self.done = True
121 | print "Environment.step(): Primary agent hit hard time limit ({})! Trial aborted.".format(self.hard_time_limit)
122 | elif self.enforce_deadline and agent_deadline <= 0:
123 | self.done = True
124 | print "Environment.step(): Primary agent ran out of time! Trial aborted."
125 | self.agent_states[self.primary_agent]['deadline'] = agent_deadline - 1
126 |
127 | def sense(self, agent):
128 | assert agent in self.agent_states, "Unknown agent!"
129 |
130 | state = self.agent_states[agent]
131 | location = state['location']
132 | heading = state['heading']
133 | light = 'green' if (self.intersections[location].state and heading[1] != 0) or ((not self.intersections[location].state) and heading[0] != 0) else 'red'
134 |
135 | # Populate oncoming, left, right
136 | oncoming = None
137 | left = None
138 | right = None
139 | for other_agent, other_state in self.agent_states.iteritems():
140 | if agent == other_agent or location != other_state['location'] or (heading[0] == other_state['heading'][0] and heading[1] == other_state['heading'][1]):
141 | continue
142 | other_heading = other_agent.get_next_waypoint()
143 | if (heading[0] * other_state['heading'][0] + heading[1] * other_state['heading'][1]) == -1:
144 | if oncoming != 'left': # we don't want to override oncoming == 'left'
145 | oncoming = other_heading
146 | elif (heading[1] == other_state['heading'][0] and -heading[0] == other_state['heading'][1]):
147 |                 if right != 'forward' and right != 'left': # we don't want to override right == 'forward' or 'left'
148 | right = other_heading
149 | else:
150 | if left != 'forward': # we don't want to override left == 'forward'
151 | left = other_heading
152 |
153 | return {'light': light, 'oncoming': oncoming, 'left': left, 'right': right} # TODO: make this a namedtuple
154 |
155 | def get_deadline(self, agent):
156 | return self.agent_states[agent]['deadline'] if agent is self.primary_agent else None
157 |
158 | def act(self, agent, action):
159 | assert agent in self.agent_states, "Unknown agent!"
160 | assert action in self.valid_actions, "Invalid action!"
161 |
162 | state = self.agent_states[agent]
163 | location = state['location']
164 | heading = state['heading']
165 | light = 'green' if (self.intersections[location].state and heading[1] != 0) or ((not self.intersections[location].state) and heading[0] != 0) else 'red'
166 | sense = self.sense(agent)
167 |
168 | # Move agent if within bounds and obeys traffic rules
169 | reward = 0 # reward/penalty
170 | move_okay = True
171 | if action == 'forward':
172 | if light != 'green':
173 | move_okay = False
174 | elif action == 'left':
175 |             if light == 'green' and (sense['oncoming'] is None or sense['oncoming'] == 'left'):
176 | heading = (heading[1], -heading[0])
177 | else:
178 | move_okay = False
179 | elif action == 'right':
180 |             if light == 'green' or sense['left'] != 'forward': # right on red only if traffic from the left is not going straight through
181 | heading = (-heading[1], heading[0])
182 | else:
183 | move_okay = False
184 |
185 | if move_okay:
186 | # Valid move (could be null)
187 | if action is not None:
188 | # Valid non-null move
189 | location = ((location[0] + heading[0] - self.bounds[0]) % (self.bounds[2] - self.bounds[0] + 1) + self.bounds[0],
190 | (location[1] + heading[1] - self.bounds[1]) % (self.bounds[3] - self.bounds[1] + 1) + self.bounds[1]) # wrap-around
191 | #if self.bounds[0] <= location[0] <= self.bounds[2] and self.bounds[1] <= location[1] <= self.bounds[3]: # bounded
192 | state['location'] = location
193 | state['heading'] = heading
194 | reward = 2.0 if action == agent.get_next_waypoint() else -0.5 # valid, but is it correct? (as per waypoint)
195 | else:
196 | # Valid null move
197 | reward = 0.0
198 | else:
199 | # Invalid move
200 | reward = -1.0
201 |
202 | if agent is self.primary_agent:
203 | if state['location'] == state['destination']:
204 | if state['deadline'] >= 0:
205 | reward += 10 # bonus
206 | self.done = True
207 | print "Environment.act(): Primary agent has reached destination!" # [debug]
208 | self.status_text = "state: {}\naction: {}\nreward: {}".format(agent.get_state(), action, reward)
209 | #print "Environment.act() [POST]: location: {}, heading: {}, action: {}, reward: {}".format(location, heading, action, reward) # [debug]
210 |
211 | return reward
212 |
213 | def compute_dist(self, a, b):
214 | """L1 distance between two points."""
215 | return abs(b[0] - a[0]) + abs(b[1] - a[1])
216 |
217 |
218 | class Agent(object):
219 | """Base class for all agents."""
220 |
221 | def __init__(self, env):
222 | self.env = env
223 | self.state = None
224 | self.next_waypoint = None
225 | self.color = 'cyan'
226 |
227 | def reset(self, destination=None):
228 | pass
229 |
230 | def update(self, t):
231 | pass
232 |
233 | def get_state(self):
234 | return self.state
235 |
236 | def get_next_waypoint(self):
237 | return self.next_waypoint
238 |
239 |
240 | class DummyAgent(Agent):
241 | color_choices = ['blue', 'cyan', 'magenta', 'orange']
242 |
243 | def __init__(self, env):
244 | super(DummyAgent, self).__init__(env) # sets self.env = env, state = None, next_waypoint = None, and a default color
245 | self.next_waypoint = random.choice(Environment.valid_actions[1:])
246 | self.color = random.choice(self.color_choices)
247 |
248 | def update(self, t):
249 | inputs = self.env.sense(self)
250 |
251 | action_okay = True
252 | if self.next_waypoint == 'right':
253 | if inputs['light'] == 'red' and inputs['left'] == 'forward':
254 | action_okay = False
255 | elif self.next_waypoint == 'forward':
256 | if inputs['light'] == 'red':
257 | action_okay = False
258 | elif self.next_waypoint == 'left':
259 | if inputs['light'] == 'red' or (inputs['oncoming'] == 'forward' or inputs['oncoming'] == 'right'):
260 | action_okay = False
261 |
262 | action = None
263 | if action_okay:
264 | action = self.next_waypoint
265 | self.next_waypoint = random.choice(Environment.valid_actions[1:])
266 | reward = self.env.act(self, action)
267 | #print "DummyAgent.update(): t = {}, inputs = {}, action = {}, reward = {}".format(t, inputs, action, reward) # [debug]
268 | #print "DummyAgent.update(): next_waypoint = {}".format(self.next_waypoint) # [debug]
269 |
--------------------------------------------------------------------------------
/smartcab/smartcab/environment.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/environment.pyc
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-black.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-black.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-blue.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-blue.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-cyan.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-cyan.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-green.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-green.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-magenta.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-magenta.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-orange.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-orange.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-red.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-red.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-white.png
--------------------------------------------------------------------------------
/smartcab/smartcab/images/car-yellow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/images/car-yellow.png
--------------------------------------------------------------------------------
/smartcab/smartcab/planner.py:
--------------------------------------------------------------------------------
1 | import random
2 |
3 | class RoutePlanner(object):
4 | """Silly route planner that is meant for a perpendicular grid network."""
5 |
6 | def __init__(self, env, agent):
7 | self.env = env
8 | self.agent = agent
9 | self.destination = None
10 |
11 | def route_to(self, destination=None):
12 | self.destination = destination if destination is not None else random.choice(self.env.intersections.keys())
13 |         print "RoutePlanner.route_to(): destination = {}".format(self.destination) # [debug]
14 |
15 | def next_waypoint(self):
16 | location = self.env.agent_states[self.agent]['location']
17 | heading = self.env.agent_states[self.agent]['heading']
18 | delta = (self.destination[0] - location[0], self.destination[1] - location[1])
19 | if delta[0] == 0 and delta[1] == 0:
20 | return None
21 | elif delta[0] != 0: # EW difference
22 | if delta[0] * heading[0] > 0: # facing correct EW direction
23 | return 'forward'
24 | elif delta[0] * heading[0] < 0: # facing opposite EW direction
25 | return 'right' # long U-turn
26 | elif delta[0] * heading[1] > 0:
27 | return 'left'
28 | else:
29 | return 'right'
30 | elif delta[1] != 0: # NS difference (turn logic is slightly different)
31 | if delta[1] * heading[1] > 0: # facing correct NS direction
32 | return 'forward'
33 | elif delta[1] * heading[1] < 0: # facing opposite NS direction
34 | return 'right' # long U-turn
35 | elif delta[1] * heading[0] > 0:
36 | return 'right'
37 | else:
38 | return 'left'
39 |
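40 | # ---------------------------------------------------------------------------
41 | # Editor's note: two worked examples of the waypoint logic above, illustrative
42 | # only (the grid's y axis grows southward, so north is heading (0, -1)):
43 | #
44 | #   location (2, 3), destination (5, 3), heading (1, 0) (facing east)
45 | #     delta = (3, 0); delta[0] * heading[0] = 3 > 0                  -> 'forward'
46 | #   location (2, 3), destination (2, 1), heading (1, 0) (facing east)
47 | #     delta = (0, -2); delta[1] * heading[1] = 0 (not facing north-south),
48 | #     delta[1] * heading[0] = -2, which is not > 0                   -> 'left'
49 | # ---------------------------------------------------------------------------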
--------------------------------------------------------------------------------
/smartcab/smartcab/planner.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/planner.pyc
--------------------------------------------------------------------------------
/smartcab/smartcab/simulator.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import random
4 | import importlib
5 |
6 | class Simulator(object):
7 | """Simulates agents in a dynamic smartcab environment.
8 |
9 | Uses PyGame to display GUI, if available.
10 | """
11 |
12 | colors = {
13 | 'black' : ( 0, 0, 0),
14 | 'white' : (255, 255, 255),
15 | 'red' : (255, 0, 0),
16 | 'green' : ( 0, 255, 0),
17 | 'blue' : ( 0, 0, 255),
18 | 'cyan' : ( 0, 200, 200),
19 | 'magenta' : (200, 0, 200),
20 | 'yellow' : (255, 255, 0),
21 | 'orange' : (255, 128, 0)
22 | }
23 |
24 | def __init__(self, env, size=None, update_delay=1.0, display=True):
25 | self.env = env
26 | self.size = size if size is not None else ((self.env.grid_size[0] + 1) * self.env.block_size, (self.env.grid_size[1] + 1) * self.env.block_size)
27 | self.width, self.height = self.size
28 |
29 | self.bg_color = self.colors['white']
30 | self.road_width = 5
31 | self.road_color = self.colors['black']
32 |
33 | self.quit = False
34 | self.start_time = None
35 | self.current_time = 0.0
36 | self.last_updated = 0.0
37 | self.update_delay = update_delay
38 |
39 | self.display = display
40 | if self.display:
41 | try:
42 | self.pygame = importlib.import_module('pygame')
43 | self.pygame.init()
44 | self.screen = self.pygame.display.set_mode(self.size)
45 |
46 | self.frame_delay = max(1, int(self.update_delay * 1000)) # delay between GUI frames in ms (min: 1)
47 | self.agent_sprite_size = (32, 32)
48 | self.agent_circle_radius = 10 # radius of circle, when using simple representation
49 | for agent in self.env.agent_states:
50 | agent._sprite = self.pygame.transform.smoothscale(self.pygame.image.load(os.path.join("images", "car-{}.png".format(agent.color))), self.agent_sprite_size)
51 | agent._sprite_size = (agent._sprite.get_width(), agent._sprite.get_height())
52 |
53 | self.font = self.pygame.font.Font(None, 28)
54 | self.paused = False
55 | except ImportError as e:
56 | self.display = False
57 | print "Simulator.__init__(): Unable to import pygame; display disabled.\n{}: {}".format(e.__class__.__name__, e)
58 | except Exception as e:
59 | self.display = False
60 | print "Simulator.__init__(): Error initializing GUI objects; display disabled.\n{}: {}".format(e.__class__.__name__, e)
61 |
62 | def run(self, n_trials=1):
63 | self.quit = False
64 | for trial in xrange(n_trials):
65 | print "Simulator.run(): Trial {}".format(trial) # [debug]
66 | self.env.reset()
67 | self.current_time = 0.0
68 | self.last_updated = 0.0
69 | self.start_time = time.time()
70 | while True:
71 | try:
72 | # Update current time
73 | self.current_time = time.time() - self.start_time
74 | #print "Simulator.run(): current_time = {:.3f}".format(self.current_time)
75 |
76 | # Handle GUI events
77 | if self.display:
78 | for event in self.pygame.event.get():
79 | if event.type == self.pygame.QUIT:
80 | self.quit = True
81 | elif event.type == self.pygame.KEYDOWN:
82 | if event.key == 27: # Esc
83 | self.quit = True
84 | elif event.unicode == u' ':
85 | self.paused = True
86 |
87 | if self.paused:
88 | self.pause()
89 |
90 | # Update environment
91 | if self.current_time - self.last_updated >= self.update_delay:
92 | self.env.step()
93 | self.last_updated = self.current_time
94 |
95 | # Render GUI and sleep
96 | if self.display:
97 | self.render()
98 | self.pygame.time.wait(self.frame_delay)
99 | except KeyboardInterrupt:
100 | self.quit = True
101 | finally:
102 | if self.quit or self.env.done:
103 | break
104 |
105 | if self.quit:
106 | break
107 |
108 | def render(self):
109 | # Clear screen
110 | self.screen.fill(self.bg_color)
111 |
112 | # Draw elements
113 | # * Static elements
114 | for road in self.env.roads:
115 | self.pygame.draw.line(self.screen, self.road_color, (road[0][0] * self.env.block_size, road[0][1] * self.env.block_size), (road[1][0] * self.env.block_size, road[1][1] * self.env.block_size), self.road_width)
116 |
117 | for intersection, traffic_light in self.env.intersections.iteritems():
118 | self.pygame.draw.circle(self.screen, self.road_color, (intersection[0] * self.env.block_size, intersection[1] * self.env.block_size), 10)
119 | if traffic_light.state: # North-South is open
120 | self.pygame.draw.line(self.screen, self.colors['green'],
121 | (intersection[0] * self.env.block_size, intersection[1] * self.env.block_size - 15),
122 | (intersection[0] * self.env.block_size, intersection[1] * self.env.block_size + 15), self.road_width)
123 | else: # East-West is open
124 | self.pygame.draw.line(self.screen, self.colors['green'],
125 | (intersection[0] * self.env.block_size - 15, intersection[1] * self.env.block_size),
126 | (intersection[0] * self.env.block_size + 15, intersection[1] * self.env.block_size), self.road_width)
127 |
128 | # * Dynamic elements
129 | for agent, state in self.env.agent_states.iteritems():
130 | # Compute precise agent location here (back from the intersection some)
131 | agent_offset = (2 * state['heading'][0] * self.agent_circle_radius, 2 * state['heading'][1] * self.agent_circle_radius)
132 | agent_pos = (state['location'][0] * self.env.block_size - agent_offset[0], state['location'][1] * self.env.block_size - agent_offset[1])
133 | agent_color = self.colors[agent.color]
134 | if hasattr(agent, '_sprite') and agent._sprite is not None:
135 | # Draw agent sprite (image), properly rotated
136 | rotated_sprite = agent._sprite if state['heading'] == (1, 0) else self.pygame.transform.rotate(agent._sprite, 180 if state['heading'][0] == -1 else state['heading'][1] * -90)
137 | self.screen.blit(rotated_sprite,
138 | self.pygame.rect.Rect(agent_pos[0] - agent._sprite_size[0] / 2, agent_pos[1] - agent._sprite_size[1] / 2,
139 | agent._sprite_size[0], agent._sprite_size[1]))
140 | else:
141 | # Draw simple agent (circle with a short line segment poking out to indicate heading)
142 | self.pygame.draw.circle(self.screen, agent_color, agent_pos, self.agent_circle_radius)
143 |                 self.pygame.draw.line(self.screen, agent_color, agent_pos, (state['location'][0] * self.env.block_size, state['location'][1] * self.env.block_size), self.road_width)
144 | if agent.get_next_waypoint() is not None:
145 | self.screen.blit(self.font.render(agent.get_next_waypoint(), True, agent_color, self.bg_color), (agent_pos[0] + 10, agent_pos[1] + 10))
146 | if state['destination'] is not None:
147 | self.pygame.draw.circle(self.screen, agent_color, (state['destination'][0] * self.env.block_size, state['destination'][1] * self.env.block_size), 6)
148 | self.pygame.draw.circle(self.screen, agent_color, (state['destination'][0] * self.env.block_size, state['destination'][1] * self.env.block_size), 15, 2)
149 |
150 | # * Overlays
151 | text_y = 10
152 | for text in self.env.status_text.split('\n'):
153 | self.screen.blit(self.font.render(text, True, self.colors['red'], self.bg_color), (100, text_y))
154 | text_y += 20
155 |
156 | # Flip buffers
157 | self.pygame.display.flip()
158 |
159 | def pause(self):
160 | abs_pause_time = time.time()
161 | pause_text = "[PAUSED] Press any key to continue..."
162 | self.screen.blit(self.font.render(pause_text, True, self.colors['cyan'], self.bg_color), (100, self.height - 40))
163 | self.pygame.display.flip()
164 | print pause_text # [debug]
165 | while self.paused:
166 | for event in self.pygame.event.get():
167 | if event.type == self.pygame.KEYDOWN:
168 | self.paused = False
169 | self.pygame.time.wait(self.frame_delay)
170 | self.screen.blit(self.font.render(pause_text, True, self.bg_color, self.bg_color), (100, self.height - 40))
171 | self.start_time += (time.time() - abs_pause_time)
172 |
--------------------------------------------------------------------------------
/smartcab/smartcab/simulator.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codediodeio/machine-learning-nanodegree/be2c56bcf2b6346a3528fc1058ed10c299572164/smartcab/smartcab/simulator.pyc
--------------------------------------------------------------------------------
/student-intervention/student-data.csv:
--------------------------------------------------------------------------------
1 | school,sex,age,address,famsize,Pstatus,Medu,Fedu,Mjob,Fjob,reason,guardian,traveltime,studytime,failures,schoolsup,famsup,paid,activities,nursery,higher,internet,romantic,famrel,freetime,goout,Dalc,Walc,health,absences,passed
2 | GP,F,18,U,GT3,A,4,4,at_home,teacher,course,mother,2,2,0,yes,no,no,no,yes,yes,no,no,4,3,4,1,1,3,6,no
3 | GP,F,17,U,GT3,T,1,1,at_home,other,course,father,1,2,0,no,yes,no,no,no,yes,yes,no,5,3,3,1,1,3,4,no
4 | GP,F,15,U,LE3,T,1,1,at_home,other,other,mother,1,2,3,yes,no,yes,no,yes,yes,yes,no,4,3,2,2,3,3,10,yes
5 | GP,F,15,U,GT3,T,4,2,health,services,home,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,yes,3,2,2,1,1,5,2,yes
6 | GP,F,16,U,GT3,T,3,3,other,other,home,father,1,2,0,no,yes,yes,no,yes,yes,no,no,4,3,2,1,2,5,4,yes
7 | GP,M,16,U,LE3,T,4,3,services,other,reputation,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,5,4,2,1,2,5,10,yes
8 | GP,M,16,U,LE3,T,2,2,other,other,home,mother,1,2,0,no,no,no,no,yes,yes,yes,no,4,4,4,1,1,3,0,yes
9 | GP,F,17,U,GT3,A,4,4,other,teacher,home,mother,2,2,0,yes,yes,no,no,yes,yes,no,no,4,1,4,1,1,1,6,no
10 | GP,M,15,U,LE3,A,3,2,services,other,home,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,2,2,1,1,1,0,yes
11 | GP,M,15,U,GT3,T,3,4,other,other,home,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,5,5,1,1,1,5,0,yes
12 | GP,F,15,U,GT3,T,4,4,teacher,health,reputation,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,3,3,3,1,2,2,0,no
13 | GP,F,15,U,GT3,T,2,1,services,other,reputation,father,3,3,0,no,yes,no,yes,yes,yes,yes,no,5,2,2,1,1,4,4,yes
14 | GP,M,15,U,LE3,T,4,4,health,services,course,father,1,1,0,no,yes,yes,yes,yes,yes,yes,no,4,3,3,1,3,5,2,yes
15 | GP,M,15,U,GT3,T,4,3,teacher,other,course,mother,2,2,0,no,yes,yes,no,yes,yes,yes,no,5,4,3,1,2,3,2,yes
16 | GP,M,15,U,GT3,A,2,2,other,other,home,other,1,3,0,no,yes,no,no,yes,yes,yes,yes,4,5,2,1,1,3,0,yes
17 | GP,F,16,U,GT3,T,4,4,health,other,home,mother,1,1,0,no,yes,no,no,yes,yes,yes,no,4,4,4,1,2,2,4,yes
18 | GP,F,16,U,GT3,T,4,4,services,services,reputation,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,no,3,2,3,1,2,2,6,yes
19 | GP,F,16,U,GT3,T,3,3,other,other,reputation,mother,3,2,0,yes,yes,no,yes,yes,yes,no,no,5,3,2,1,1,4,4,yes
20 | GP,M,17,U,GT3,T,3,2,services,services,course,mother,1,1,3,no,yes,no,yes,yes,yes,yes,no,5,5,5,2,4,5,16,no
21 | GP,M,16,U,LE3,T,4,3,health,other,home,father,1,1,0,no,no,yes,yes,yes,yes,yes,no,3,1,3,1,3,5,4,yes
22 | GP,M,15,U,GT3,T,4,3,teacher,other,reputation,mother,1,2,0,no,no,no,no,yes,yes,yes,no,4,4,1,1,1,1,0,yes
23 | GP,M,15,U,GT3,T,4,4,health,health,other,father,1,1,0,no,yes,yes,no,yes,yes,yes,no,5,4,2,1,1,5,0,yes
24 | GP,M,16,U,LE3,T,4,2,teacher,other,course,mother,1,2,0,no,no,no,yes,yes,yes,yes,no,4,5,1,1,3,5,2,yes
25 | GP,M,16,U,LE3,T,2,2,other,other,reputation,mother,2,2,0,no,yes,no,yes,yes,yes,yes,no,5,4,4,2,4,5,0,yes
26 | GP,F,15,R,GT3,T,2,4,services,health,course,mother,1,3,0,yes,yes,yes,yes,yes,yes,yes,no,4,3,2,1,1,5,2,no
27 | GP,F,16,U,GT3,T,2,2,services,services,home,mother,1,1,2,no,yes,yes,no,no,yes,yes,no,1,2,2,1,3,5,14,no
28 | GP,M,15,U,GT3,T,2,2,other,other,home,mother,1,1,0,no,yes,yes,no,yes,yes,yes,no,4,2,2,1,2,5,2,yes
29 | GP,M,15,U,GT3,T,4,2,health,services,other,mother,1,1,0,no,no,yes,no,yes,yes,yes,no,2,2,4,2,4,1,4,yes
30 | GP,M,16,U,LE3,A,3,4,services,other,home,mother,1,2,0,yes,yes,no,yes,yes,yes,yes,no,5,3,3,1,1,5,4,yes
31 | GP,M,16,U,GT3,T,4,4,teacher,teacher,home,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,yes,4,4,5,5,5,5,16,yes
32 | GP,M,15,U,GT3,T,4,4,health,services,home,mother,1,2,0,no,yes,yes,no,no,yes,yes,no,5,4,2,3,4,5,0,yes
33 | GP,M,15,U,GT3,T,4,4,services,services,reputation,mother,2,2,0,no,yes,no,yes,yes,yes,yes,no,4,3,1,1,1,5,0,yes
34 | GP,M,15,R,GT3,T,4,3,teacher,at_home,course,mother,1,2,0,no,yes,no,yes,yes,yes,yes,yes,4,5,2,1,1,5,0,yes
35 | GP,M,15,U,LE3,T,3,3,other,other,course,mother,1,2,0,no,no,no,yes,no,yes,yes,no,5,3,2,1,1,2,0,yes
36 | GP,M,16,U,GT3,T,3,2,other,other,home,mother,1,1,0,no,yes,yes,no,no,yes,yes,no,5,4,3,1,1,5,0,yes
37 | GP,F,15,U,GT3,T,2,3,other,other,other,father,2,1,0,no,yes,no,yes,yes,yes,no,no,3,5,1,1,1,5,0,no
38 | GP,M,15,U,LE3,T,4,3,teacher,services,home,mother,1,3,0,no,yes,no,yes,yes,yes,yes,no,5,4,3,1,1,4,2,yes
39 | GP,M,16,R,GT3,A,4,4,other,teacher,reputation,mother,2,3,0,no,yes,no,yes,yes,yes,yes,yes,2,4,3,1,1,5,7,yes
40 | GP,F,15,R,GT3,T,3,4,services,health,course,mother,1,3,0,yes,yes,yes,yes,yes,yes,yes,no,4,3,2,1,1,5,2,yes
41 | GP,F,15,R,GT3,T,2,2,at_home,other,reputation,mother,1,1,0,yes,yes,yes,yes,yes,yes,no,no,4,3,1,1,1,2,8,yes
42 | GP,F,16,U,LE3,T,2,2,other,other,home,mother,2,2,1,no,yes,no,yes,no,yes,yes,yes,3,3,3,1,2,3,25,yes
43 | GP,M,15,U,LE3,T,4,4,teacher,other,home,other,1,1,0,no,yes,no,no,no,yes,yes,yes,5,4,3,2,4,5,8,yes
44 | GP,M,15,U,GT3,T,4,4,services,teacher,course,father,1,2,0,no,yes,no,yes,yes,yes,yes,no,4,3,3,1,1,5,2,yes
45 | GP,M,15,U,GT3,T,2,2,services,services,course,father,1,1,0,yes,yes,no,no,yes,yes,yes,no,5,4,1,1,1,1,0,yes
46 | GP,F,16,U,LE3,T,2,2,other,at_home,course,father,2,2,1,yes,no,no,yes,yes,yes,yes,no,4,3,3,2,2,5,14,no
47 | GP,F,15,U,LE3,A,4,3,other,other,course,mother,1,2,0,yes,yes,yes,yes,yes,yes,yes,yes,5,2,2,1,1,5,8,no
48 | GP,F,16,U,LE3,A,3,3,other,services,home,mother,1,2,0,no,yes,no,no,yes,yes,yes,no,2,3,5,1,4,3,12,yes
49 | GP,M,16,U,GT3,T,4,3,health,services,reputation,mother,1,4,0,no,no,no,yes,yes,yes,yes,no,4,2,2,1,1,2,4,yes
50 | GP,M,15,U,GT3,T,4,2,teacher,other,home,mother,1,2,0,no,yes,yes,no,yes,yes,no,no,4,3,3,2,2,5,2,yes
51 | GP,F,15,U,GT3,T,4,4,services,teacher,other,father,1,2,1,yes,yes,no,yes,no,yes,yes,no,4,4,4,1,1,3,2,no
52 | GP,F,16,U,LE3,T,2,2,services,services,course,mother,3,2,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,2,3,4,2,yes
53 | GP,F,15,U,LE3,T,4,2,health,other,other,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,1,1,5,2,yes
54 | GP,M,15,U,LE3,A,4,2,health,health,other,father,2,1,1,no,no,no,no,yes,yes,no,no,5,5,5,3,4,5,6,yes
55 | GP,F,15,U,GT3,T,4,4,services,services,course,mother,1,1,0,yes,yes,yes,no,yes,yes,yes,no,3,3,4,2,3,5,0,yes
56 | GP,F,15,U,LE3,A,3,3,other,other,other,mother,1,1,0,no,no,yes,no,yes,yes,yes,no,5,3,4,4,4,1,6,yes
57 | GP,F,16,U,GT3,A,2,1,other,other,other,mother,1,2,0,no,no,yes,yes,yes,yes,yes,yes,5,3,4,1,1,2,8,yes
58 | GP,F,15,U,GT3,A,4,3,services,services,reputation,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,4,3,2,1,1,1,0,yes
59 | GP,M,15,U,GT3,T,4,4,teacher,health,reputation,mother,1,2,0,no,yes,no,yes,yes,yes,no,no,3,2,2,1,1,5,4,yes
60 | GP,M,15,U,LE3,T,1,2,other,at_home,home,father,1,2,0,yes,yes,no,yes,yes,yes,yes,no,4,3,2,1,1,5,2,no
61 | GP,F,16,U,GT3,T,4,2,services,other,course,mother,1,2,0,no,yes,no,no,yes,yes,yes,no,4,2,3,1,1,5,2,yes
62 | GP,F,16,R,GT3,T,4,4,health,teacher,other,mother,1,2,0,no,yes,no,yes,yes,yes,no,no,2,4,4,2,3,4,6,yes
63 | GP,F,16,U,GT3,T,1,1,services,services,course,father,4,1,0,yes,yes,no,yes,no,yes,yes,yes,5,5,5,5,5,5,6,yes
64 | GP,F,16,U,LE3,T,1,2,other,services,reputation,father,1,2,0,yes,no,no,yes,yes,yes,yes,no,4,4,3,1,1,1,4,no
65 | GP,F,16,U,GT3,T,4,3,teacher,health,home,mother,1,3,0,yes,yes,yes,yes,yes,yes,yes,no,3,4,4,2,4,4,2,no
66 | GP,F,15,U,LE3,T,4,3,services,services,reputation,father,1,2,0,yes,no,no,yes,yes,yes,yes,yes,4,4,4,2,4,2,0,yes
67 | GP,F,16,U,LE3,T,4,3,teacher,services,course,mother,3,2,0,no,yes,no,yes,yes,yes,yes,no,5,4,3,1,2,1,2,yes
68 | GP,M,15,U,GT3,A,4,4,other,services,reputation,mother,1,4,0,no,yes,no,yes,no,yes,yes,yes,1,3,3,5,5,3,4,yes
69 | GP,F,16,U,GT3,T,3,1,services,other,course,mother,1,4,0,yes,yes,yes,no,yes,yes,yes,no,4,3,3,1,2,5,4,no
70 | GP,F,15,R,LE3,T,2,2,health,services,reputation,mother,2,2,0,yes,yes,yes,no,yes,yes,yes,no,4,1,3,1,3,4,2,no
71 | GP,F,15,R,LE3,T,3,1,other,other,reputation,father,2,4,0,no,yes,no,no,no,yes,yes,no,4,4,2,2,3,3,12,yes
72 | GP,M,16,U,GT3,T,3,1,other,other,reputation,father,2,4,0,no,yes,yes,no,yes,yes,yes,no,4,3,2,1,1,5,0,yes
73 | GP,M,15,U,GT3,T,4,2,other,other,course,mother,1,4,0,no,no,no,no,yes,yes,yes,no,3,3,3,1,1,3,0,yes
74 | GP,F,15,R,GT3,T,1,1,other,other,reputation,mother,1,2,2,yes,yes,no,no,no,yes,yes,yes,3,3,4,2,4,5,2,no
75 | GP,M,16,U,GT3,T,3,1,other,other,reputation,mother,1,1,0,no,no,no,yes,yes,yes,no,no,5,3,2,2,2,5,2,yes
76 | GP,F,16,U,GT3,T,3,3,other,services,home,mother,1,2,0,yes,yes,yes,yes,yes,yes,yes,no,4,3,3,2,4,5,54,yes
77 | GP,M,15,U,GT3,T,4,3,teacher,other,home,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,4,3,3,2,3,5,6,yes
78 | GP,M,15,U,GT3,T,4,0,teacher,other,course,mother,2,4,0,no,no,no,yes,yes,yes,yes,no,3,4,3,1,1,1,8,yes
79 | GP,F,16,U,GT3,T,2,2,other,other,reputation,mother,1,4,0,no,no,yes,no,yes,yes,yes,yes,5,2,3,1,3,3,0,yes
80 | GP,M,17,U,GT3,T,2,1,other,other,home,mother,2,1,3,yes,yes,no,yes,yes,no,yes,no,4,5,1,1,1,3,2,yes
81 | GP,F,16,U,GT3,T,3,4,at_home,other,course,mother,1,2,0,no,yes,no,no,yes,yes,yes,no,2,4,3,1,2,3,12,no
82 | GP,M,15,U,GT3,T,2,3,other,services,course,father,1,1,0,yes,yes,yes,yes,no,yes,yes,yes,3,2,2,1,3,3,2,yes
83 | GP,M,15,U,GT3,T,2,3,other,other,home,mother,1,3,0,yes,no,yes,no,no,yes,yes,no,5,3,2,1,2,5,4,yes
84 | GP,F,15,U,LE3,T,3,2,services,other,reputation,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,4,4,1,1,5,10,no
85 | GP,M,15,U,LE3,T,2,2,services,services,home,mother,2,2,0,no,no,yes,yes,yes,yes,yes,no,5,3,3,1,3,4,4,yes
86 | GP,F,15,U,GT3,T,1,1,other,other,home,father,1,2,0,no,yes,no,yes,no,yes,yes,no,4,3,2,2,3,4,2,yes
87 | GP,F,15,U,GT3,T,4,4,services,services,reputation,father,2,2,2,no,no,yes,no,yes,yes,yes,yes,4,4,4,2,3,5,6,no
88 | GP,F,16,U,LE3,T,2,2,at_home,other,course,mother,1,2,0,no,yes,no,no,yes,yes,no,no,4,3,4,1,2,2,4,no
89 | GP,F,15,U,GT3,T,4,2,other,other,reputation,mother,1,3,0,no,yes,no,yes,yes,yes,yes,no,5,3,3,1,3,1,4,yes
90 | GP,M,16,U,GT3,T,2,2,services,other,reputation,father,2,2,1,no,no,yes,yes,no,yes,yes,no,4,4,2,1,1,3,12,yes
91 | GP,M,16,U,LE3,A,4,4,teacher,health,reputation,mother,1,2,0,no,yes,no,no,yes,yes,no,no,4,1,3,3,5,5,18,no
92 | GP,F,16,U,GT3,T,3,3,other,other,home,mother,1,3,0,no,yes,yes,no,yes,yes,yes,yes,4,3,3,1,3,4,0,no
93 | GP,F,15,U,GT3,T,4,3,services,other,reputation,mother,1,1,0,no,no,yes,yes,yes,yes,yes,no,4,5,5,1,3,1,4,yes
94 | GP,F,16,U,LE3,T,3,1,other,other,home,father,1,2,0,yes,yes,no,no,yes,yes,no,no,3,3,3,2,3,2,4,no
95 | GP,F,16,U,GT3,T,4,2,teacher,services,home,mother,2,2,0,no,yes,yes,yes,yes,yes,yes,no,5,3,3,1,1,1,0,yes
96 | GP,M,15,U,LE3,T,2,2,services,health,reputation,mother,1,4,0,no,yes,no,yes,yes,yes,yes,no,4,3,4,1,1,4,6,yes
97 | GP,F,15,R,GT3,T,1,1,at_home,other,home,mother,2,4,1,yes,yes,yes,yes,yes,yes,yes,no,3,1,2,1,1,1,2,yes
98 | GP,M,16,R,GT3,T,4,3,services,other,reputation,mother,2,1,0,yes,yes,no,yes,no,yes,yes,no,3,3,3,1,1,4,2,yes
99 | GP,F,16,U,GT3,T,2,1,other,other,course,mother,1,2,0,no,yes,yes,no,yes,yes,no,yes,4,3,5,1,1,5,2,yes
100 | GP,F,16,U,GT3,T,4,4,other,other,reputation,mother,1,1,0,no,no,no,yes,no,yes,yes,no,5,3,4,1,2,1,6,yes
101 | GP,F,16,U,GT3,T,4,3,other,at_home,course,mother,1,3,0,yes,yes,yes,no,yes,yes,yes,no,5,3,5,1,1,3,0,no
102 | GP,M,16,U,GT3,T,4,4,services,services,other,mother,1,1,0,yes,yes,yes,yes,yes,yes,yes,no,4,5,5,5,5,4,14,no
103 | GP,M,16,U,GT3,T,4,4,services,teacher,other,father,1,3,0,no,yes,no,yes,yes,yes,yes,yes,4,4,3,1,1,4,0,yes
104 | GP,M,15,U,GT3,T,4,4,services,other,course,mother,1,1,0,no,yes,no,yes,no,yes,yes,no,5,3,3,1,1,5,4,yes
105 | GP,F,15,U,GT3,T,3,2,services,other,home,mother,2,2,0,yes,yes,yes,no,yes,yes,yes,no,4,3,5,1,1,2,26,no
106 | GP,M,15,U,GT3,A,3,4,services,other,course,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,5,4,4,1,1,1,0,yes
107 | GP,F,15,U,GT3,A,3,3,other,health,reputation,father,1,4,0,yes,no,no,no,yes,yes,no,no,4,3,3,1,1,4,10,yes
108 | GP,F,15,U,GT3,T,2,2,other,other,course,mother,1,4,0,yes,yes,yes,no,yes,yes,yes,no,5,1,2,1,1,3,8,no
109 | GP,M,16,U,GT3,T,3,3,services,other,home,father,1,3,0,no,yes,no,yes,yes,yes,yes,no,5,3,3,1,1,5,2,yes
110 | GP,M,15,R,GT3,T,4,4,other,other,home,father,4,4,0,no,yes,yes,yes,yes,yes,yes,yes,1,3,5,3,5,1,6,yes
111 | GP,F,16,U,LE3,T,4,4,health,health,other,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,yes,5,4,5,1,1,4,4,yes
112 | GP,M,15,U,LE3,A,4,4,teacher,teacher,course,mother,1,1,0,no,no,no,yes,yes,yes,yes,no,5,5,3,1,1,4,6,yes
113 | GP,F,16,R,GT3,T,3,3,services,other,reputation,father,1,3,1,yes,yes,no,yes,yes,yes,yes,no,4,1,2,1,1,2,0,yes
114 | GP,F,16,U,GT3,T,2,2,at_home,other,home,mother,1,2,1,yes,no,no,yes,yes,yes,yes,no,3,1,2,1,1,5,6,yes
115 | GP,M,15,U,LE3,T,4,2,teacher,other,course,mother,1,1,0,no,no,no,no,yes,yes,yes,no,3,5,2,1,1,3,10,yes
116 | GP,M,15,R,GT3,T,2,1,health,services,reputation,mother,1,2,0,no,no,no,yes,yes,yes,yes,yes,5,4,2,1,1,5,8,no
117 | GP,M,16,U,GT3,T,4,4,teacher,teacher,course,father,1,2,0,no,yes,no,yes,yes,yes,yes,no,5,4,4,1,2,5,2,yes
118 | GP,M,15,U,GT3,T,4,4,other,teacher,reputation,father,2,2,0,no,yes,no,yes,yes,yes,no,no,4,4,3,1,1,2,2,yes
119 | GP,M,16,U,GT3,T,3,3,other,services,home,father,2,1,0,no,no,no,yes,yes,yes,yes,no,5,4,2,1,1,5,0,yes
120 | GP,M,17,R,GT3,T,1,3,other,other,course,father,3,2,1,no,yes,no,yes,yes,yes,yes,no,5,2,4,1,4,5,20,no
121 | GP,M,15,U,GT3,T,3,4,other,other,reputation,father,1,1,0,no,no,no,no,yes,yes,yes,no,3,4,3,1,2,4,6,yes
122 | GP,F,15,U,GT3,T,1,2,at_home,services,course,mother,1,2,0,no,no,no,no,no,yes,yes,no,3,2,3,1,2,1,2,yes
123 | GP,M,15,U,GT3,T,2,2,services,services,home,father,1,4,0,no,yes,yes,yes,yes,yes,yes,no,5,5,4,1,2,5,6,yes
124 | GP,F,16,U,LE3,T,2,4,other,health,course,father,2,2,0,no,yes,yes,yes,yes,yes,yes,yes,4,2,2,1,2,5,2,yes
125 | GP,M,16,U,GT3,T,4,4,health,other,course,mother,1,1,0,no,yes,no,yes,yes,yes,yes,no,3,4,4,1,4,5,18,yes
126 | GP,F,16,U,GT3,T,2,2,other,other,home,mother,1,2,0,no,no,yes,no,yes,yes,yes,yes,5,4,4,1,1,5,0,no
127 | GP,M,15,U,GT3,T,3,4,services,services,home,father,1,1,0,yes,no,no,no,yes,yes,yes,no,5,5,5,3,2,5,0,yes
128 | GP,F,15,U,LE3,A,3,4,other,other,home,mother,1,2,0,yes,no,no,yes,yes,yes,yes,yes,5,3,2,1,1,1,0,yes
129 | GP,F,19,U,GT3,T,0,1,at_home,other,course,other,1,2,3,no,yes,no,no,no,no,no,no,3,4,2,1,1,5,2,no
130 | GP,M,18,R,GT3,T,2,2,services,other,reputation,mother,1,1,2,no,yes,no,yes,yes,yes,yes,no,3,3,3,1,2,4,0,no
131 | GP,M,16,R,GT3,T,4,4,teacher,teacher,course,mother,1,1,0,no,no,yes,yes,yes,yes,yes,no,3,5,5,2,5,4,8,yes
132 | GP,F,15,R,GT3,T,3,4,services,teacher,course,father,2,3,2,no,yes,no,no,yes,yes,yes,yes,4,2,2,2,2,5,0,no
133 | GP,F,15,U,GT3,T,1,1,at_home,other,course,mother,3,1,0,no,yes,no,yes,no,yes,yes,yes,4,3,3,1,2,4,0,no
134 | GP,F,17,U,LE3,T,2,2,other,other,course,father,1,1,0,no,yes,no,no,yes,yes,yes,yes,3,4,4,1,3,5,12,yes
135 | GP,F,16,U,GT3,A,3,4,services,other,course,father,1,1,0,no,no,no,no,yes,yes,yes,no,3,2,1,1,4,5,16,yes
136 | GP,M,15,R,GT3,T,3,4,at_home,teacher,course,mother,4,2,0,no,yes,no,no,yes,yes,no,yes,5,3,3,1,1,5,0,no
137 | GP,F,15,U,GT3,T,4,4,services,at_home,course,mother,1,3,0,no,yes,no,yes,yes,yes,yes,yes,4,3,3,1,1,5,0,no
138 | GP,M,17,R,GT3,T,3,4,at_home,other,course,mother,3,2,0,no,no,no,no,yes,yes,no,no,5,4,5,2,4,5,0,no
139 | GP,F,16,U,GT3,A,3,3,other,other,course,other,2,1,2,no,yes,no,yes,no,yes,yes,yes,4,3,2,1,1,5,0,no
140 | GP,M,16,U,LE3,T,1,1,services,other,course,mother,1,2,1,no,no,no,no,yes,yes,no,yes,4,4,4,1,3,5,0,yes
141 | GP,F,15,U,GT3,T,4,4,teacher,teacher,course,mother,2,1,0,no,no,no,yes,yes,yes,yes,no,4,3,2,1,1,5,0,yes
142 | GP,M,15,U,GT3,T,4,3,teacher,services,course,father,2,4,0,yes,yes,no,no,yes,yes,yes,no,2,2,2,1,1,3,0,no
143 | GP,M,16,U,LE3,T,2,2,services,services,reputation,father,2,1,2,no,yes,no,yes,yes,yes,yes,no,2,3,3,2,2,2,8,no
144 | GP,F,15,U,GT3,T,4,4,teacher,services,course,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,no,4,2,2,1,1,5,2,yes
145 | GP,F,16,U,LE3,T,1,1,at_home,at_home,course,mother,1,1,0,no,no,no,no,yes,yes,yes,no,3,4,4,3,3,1,2,yes
146 | GP,M,17,U,GT3,T,2,1,other,other,home,mother,1,1,3,no,yes,no,no,yes,yes,yes,no,5,4,5,1,2,5,0,no
147 | GP,F,15,U,GT3,T,1,1,other,services,course,father,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,4,2,1,2,5,0,yes
148 | GP,F,15,U,GT3,T,3,2,health,services,home,father,1,2,3,no,yes,no,no,yes,yes,yes,no,3,3,2,1,1,3,0,no
149 | GP,F,15,U,GT3,T,1,2,at_home,other,course,mother,1,2,0,no,yes,yes,no,no,yes,yes,no,4,3,2,1,1,5,2,yes
150 | GP,M,16,U,GT3,T,4,4,teacher,teacher,course,mother,1,1,0,no,yes,no,no,yes,no,yes,yes,3,3,2,2,1,5,0,no
151 | GP,M,15,U,LE3,A,2,1,services,other,course,mother,4,1,3,no,no,no,no,yes,yes,yes,no,4,5,5,2,5,5,0,yes
152 | GP,M,18,U,LE3,T,1,1,other,other,course,mother,1,1,3,no,no,no,no,yes,no,yes,yes,2,3,5,2,5,4,0,no
153 | GP,M,16,U,LE3,T,2,1,at_home,other,course,mother,1,1,1,no,no,no,yes,yes,yes,no,yes,4,4,4,3,5,5,6,yes
154 | GP,F,15,R,GT3,T,3,3,services,services,reputation,other,2,3,2,no,yes,yes,yes,yes,yes,yes,yes,4,2,1,2,3,3,8,yes
155 | GP,M,19,U,GT3,T,3,2,services,at_home,home,mother,1,1,3,no,yes,no,no,yes,no,yes,yes,4,5,4,1,1,4,0,no
156 | GP,F,17,U,GT3,T,4,4,other,teacher,course,mother,1,1,0,yes,yes,no,no,yes,yes,no,yes,4,2,1,1,1,4,0,yes
157 | GP,M,15,R,GT3,T,2,3,at_home,services,course,mother,1,2,0,yes,no,yes,yes,yes,yes,no,no,4,4,4,1,1,1,2,no
158 | GP,M,17,R,LE3,T,1,2,other,other,reputation,mother,1,1,0,no,no,no,no,yes,yes,no,no,2,2,2,3,3,5,8,yes
159 | GP,F,18,R,GT3,T,1,1,at_home,other,course,mother,3,1,3,no,yes,no,yes,no,yes,no,no,5,2,5,1,5,4,6,yes
160 | GP,M,16,R,GT3,T,2,2,at_home,other,course,mother,3,1,0,no,no,no,no,no,yes,no,no,4,2,2,1,2,3,2,yes
161 | GP,M,16,U,GT3,T,3,3,other,services,course,father,1,2,1,no,yes,yes,no,yes,yes,yes,yes,4,5,5,4,4,5,4,yes
162 | GP,M,17,R,LE3,T,2,1,at_home,other,course,mother,2,1,2,no,no,no,yes,yes,no,yes,yes,3,3,2,2,2,5,0,no
163 | GP,M,15,R,GT3,T,3,2,other,other,course,mother,2,2,2,yes,yes,no,no,yes,yes,yes,yes,4,4,4,1,4,3,6,no
164 | GP,M,16,U,LE3,T,1,2,other,other,course,mother,2,1,1,no,no,no,yes,yes,yes,no,no,4,4,4,2,4,5,0,no
165 | GP,M,17,U,GT3,T,1,3,at_home,services,course,father,1,1,0,no,no,no,no,yes,no,yes,no,5,3,3,1,4,2,2,yes
166 | GP,M,17,R,LE3,T,1,1,other,services,course,mother,4,2,3,no,no,no,yes,yes,no,no,yes,5,3,5,1,5,5,0,no
167 | GP,M,16,U,GT3,T,3,2,services,services,course,mother,2,1,1,no,yes,no,yes,no,no,no,no,4,5,2,1,1,2,16,yes
168 | GP,M,16,U,GT3,T,2,2,other,other,course,father,1,2,0,no,no,no,no,yes,no,yes,no,4,3,5,2,4,4,4,yes
169 | GP,F,16,U,GT3,T,4,2,health,services,home,father,1,2,0,no,no,yes,no,yes,yes,yes,yes,4,2,3,1,1,3,0,yes
170 | GP,F,16,U,GT3,T,2,2,other,other,home,mother,1,2,0,no,yes,yes,no,no,yes,yes,no,5,1,5,1,1,4,0,no
171 | GP,F,16,U,GT3,T,4,4,health,health,reputation,mother,1,2,0,no,yes,yes,no,yes,yes,yes,yes,4,4,2,1,1,3,0,yes
172 | GP,M,16,U,GT3,T,3,4,other,other,course,father,3,1,2,no,yes,no,yes,no,yes,yes,no,3,4,5,2,4,2,0,no
173 | GP,M,16,U,GT3,T,1,0,other,other,reputation,mother,2,2,0,no,yes,yes,yes,yes,yes,yes,yes,4,3,2,1,1,3,2,yes
174 | GP,M,17,U,LE3,T,4,4,teacher,other,reputation,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,4,4,4,1,3,5,0,yes
175 | GP,F,16,U,GT3,T,1,3,at_home,services,home,mother,1,2,3,no,no,no,yes,no,yes,yes,yes,4,3,5,1,1,3,0,no
176 | GP,F,16,U,LE3,T,3,3,other,other,reputation,mother,2,2,0,no,yes,yes,yes,yes,yes,yes,no,4,4,5,1,1,4,4,no
177 | GP,M,17,U,LE3,T,4,3,teacher,other,course,mother,2,2,0,no,no,yes,yes,yes,yes,yes,no,4,4,4,4,4,4,4,no
178 | GP,F,16,U,GT3,T,2,2,services,other,reputation,mother,2,2,0,no,no,yes,yes,no,yes,yes,no,3,4,4,1,4,5,2,yes
179 | GP,M,17,U,GT3,T,3,3,other,other,reputation,father,1,2,0,no,no,no,yes,no,yes,yes,no,4,3,4,1,4,4,4,no
180 | GP,M,16,R,GT3,T,4,2,teacher,services,other,mother,1,1,0,no,yes,no,yes,yes,yes,yes,yes,4,3,3,3,4,3,10,no
181 | GP,M,17,U,GT3,T,4,3,other,other,course,mother,1,2,0,no,yes,no,yes,yes,yes,yes,yes,5,2,3,1,1,2,4,yes
182 | GP,M,16,U,GT3,T,4,3,teacher,other,home,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,3,4,3,2,3,3,10,no
183 | GP,M,16,U,GT3,T,3,3,services,other,home,mother,1,2,0,no,no,yes,yes,yes,yes,yes,yes,4,2,3,1,2,3,2,yes
184 | GP,F,17,U,GT3,T,2,4,services,services,reputation,father,1,2,0,no,yes,no,yes,yes,yes,no,no,5,4,2,2,3,5,0,yes
185 | GP,F,17,U,LE3,T,3,3,other,other,reputation,mother,1,2,0,no,yes,no,yes,yes,yes,yes,yes,5,3,3,2,3,1,56,no
186 | GP,F,16,U,GT3,T,3,2,other,other,reputation,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,1,2,2,1,2,1,14,yes
187 | GP,M,17,U,GT3,T,3,3,services,services,other,mother,1,2,0,no,yes,no,yes,yes,yes,yes,yes,4,3,4,2,3,4,12,yes
188 | GP,M,16,U,GT3,T,1,2,services,services,other,mother,1,1,0,no,yes,yes,yes,yes,yes,yes,yes,3,3,3,1,2,3,2,yes
189 | GP,M,16,U,LE3,T,2,1,other,other,course,mother,1,2,0,no,no,yes,yes,yes,yes,yes,yes,4,2,3,1,2,5,0,yes
190 | GP,F,17,U,GT3,A,3,3,health,other,reputation,mother,1,2,0,no,yes,no,no,no,yes,yes,yes,3,3,3,1,3,3,6,no
191 | GP,M,17,R,GT3,T,1,2,at_home,other,home,mother,1,2,0,no,no,no,no,yes,yes,no,no,3,1,3,1,5,3,4,yes
192 | GP,F,16,U,GT3,T,2,3,services,services,course,mother,1,2,0,no,no,no,no,yes,yes,yes,no,4,3,3,1,1,2,10,yes
193 | GP,F,17,U,GT3,T,1,1,at_home,services,course,mother,1,2,0,no,no,no,yes,yes,yes,yes,no,5,3,3,1,1,3,0,no
194 | GP,M,17,U,GT3,T,1,2,at_home,services,other,other,2,2,0,no,no,yes,yes,no,yes,yes,no,4,4,4,4,5,5,12,no
195 | GP,M,16,R,GT3,T,3,3,services,services,reputation,mother,1,1,0,no,yes,no,yes,yes,yes,yes,no,4,3,2,3,4,5,8,yes
196 | GP,M,16,U,GT3,T,2,3,other,other,home,father,2,1,0,no,no,no,no,yes,yes,yes,no,5,3,3,1,1,3,0,yes
197 | GP,F,17,U,LE3,T,2,4,services,services,course,father,1,2,0,no,no,no,yes,yes,yes,yes,yes,4,3,2,1,1,5,0,yes
198 | GP,M,17,U,GT3,T,4,4,services,teacher,home,mother,1,1,0,no,no,no,no,yes,yes,yes,no,5,2,3,1,2,5,4,yes
199 | GP,M,16,R,LE3,T,3,3,teacher,other,home,father,3,1,0,no,yes,yes,yes,yes,yes,yes,no,3,3,4,3,5,3,8,yes
200 | GP,F,17,U,GT3,T,4,4,services,teacher,home,mother,2,1,1,no,yes,no,no,yes,yes,yes,no,4,2,4,2,3,2,24,yes
201 | GP,F,16,U,LE3,T,4,4,teacher,teacher,reputation,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,5,2,1,2,3,0,yes
202 | GP,F,16,U,GT3,T,4,3,health,other,home,mother,1,2,0,no,yes,no,yes,yes,yes,yes,no,4,3,5,1,5,2,2,yes
203 | GP,F,16,U,GT3,T,2,3,other,other,reputation,mother,1,2,0,yes,yes,yes,yes,yes,yes,no,no,4,4,3,1,3,4,6,yes
204 | GP,F,17,U,GT3,T,1,1,other,other,course,mother,1,2,0,no,yes,yes,no,no,yes,no,no,4,4,4,1,3,1,4,yes
205 | GP,F,17,R,GT3,T,2,2,other,other,reputation,mother,1,1,0,no,yes,no,no,yes,yes,yes,no,5,3,2,1,2,3,18,no
206 | GP,F,16,R,GT3,T,2,2,services,services,reputation,mother,2,4,0,no,yes,yes,yes,no,yes,yes,no,5,3,5,1,1,5,6,yes
207 | GP,F,17,U,GT3,T,3,4,at_home,services,home,mother,1,3,1,no,yes,yes,no,yes,yes,yes,yes,4,4,3,3,4,5,28,no
208 | GP,F,16,U,GT3,A,3,1,services,other,course,mother,1,2,3,no,yes,yes,no,yes,yes,yes,no,2,3,3,2,2,4,5,no
209 | GP,F,16,U,GT3,T,4,3,teacher,other,other,mother,1,2,0,no,no,yes,yes,yes,yes,yes,yes,1,3,2,1,1,1,10,yes
210 | GP,F,16,U,GT3,T,1,1,at_home,other,home,mother,2,1,0,no,yes,yes,no,yes,yes,no,no,4,3,2,1,4,5,6,yes
211 | GP,F,17,R,GT3,T,4,3,teacher,other,reputation,mother,2,3,0,no,yes,yes,yes,yes,yes,yes,yes,4,4,2,1,1,4,6,no
212 | GP,F,19,U,GT3,T,3,3,other,other,reputation,other,1,4,0,no,yes,yes,yes,yes,yes,yes,no,4,3,3,1,2,3,10,no
213 | GP,M,17,U,LE3,T,4,4,services,other,home,mother,1,2,0,no,yes,yes,no,yes,yes,yes,yes,5,3,5,4,5,3,13,yes
214 | GP,F,16,U,GT3,A,2,2,other,other,reputation,mother,1,2,0,yes,yes,yes,no,yes,yes,yes,no,3,3,4,1,1,4,0,yes
215 | GP,M,18,U,GT3,T,2,2,services,other,home,mother,1,2,1,no,yes,yes,yes,yes,yes,yes,no,4,4,4,2,4,5,15,no
216 | GP,F,17,R,LE3,T,4,4,services,other,other,mother,1,1,0,no,yes,yes,no,yes,yes,no,no,5,2,1,1,2,3,12,yes
217 | GP,F,17,U,LE3,T,3,2,other,other,reputation,mother,2,2,0,no,no,yes,no,yes,yes,yes,no,4,4,4,1,3,1,2,yes
218 | GP,F,17,U,GT3,T,4,3,other,other,reputation,mother,1,2,2,no,no,yes,no,yes,yes,yes,yes,3,4,5,2,4,1,22,no
219 | GP,M,18,U,LE3,T,3,3,services,health,home,father,1,2,1,no,yes,yes,no,yes,yes,yes,no,3,2,4,2,4,4,13,no
220 | GP,F,17,U,GT3,T,2,3,at_home,other,home,father,2,1,0,no,yes,yes,no,yes,yes,no,no,3,3,3,1,4,3,3,no
221 | GP,F,17,U,GT3,T,2,2,at_home,at_home,course,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,no,4,3,3,1,1,4,4,yes
222 | GP,F,17,R,GT3,T,2,1,at_home,services,reputation,mother,2,2,0,no,yes,no,yes,yes,yes,yes,no,4,2,5,1,2,5,2,no
223 | GP,F,17,U,GT3,T,1,1,at_home,other,reputation,mother,1,3,1,no,yes,no,yes,yes,yes,no,yes,4,3,4,1,1,5,0,no
224 | GP,F,16,U,GT3,T,2,3,services,teacher,other,mother,1,2,0,yes,no,no,no,yes,yes,yes,no,2,3,1,1,1,3,2,yes
225 | GP,M,18,U,GT3,T,2,2,other,other,home,mother,2,2,0,no,yes,yes,no,yes,yes,yes,no,3,3,3,5,5,4,0,yes
226 | GP,F,16,U,GT3,T,4,4,teacher,services,home,mother,1,3,0,no,yes,no,yes,no,yes,yes,no,5,3,2,1,1,5,0,yes
227 | GP,F,18,R,GT3,T,3,1,other,other,reputation,mother,1,2,1,no,no,no,yes,yes,yes,yes,yes,5,3,3,1,1,4,16,no
228 | GP,F,17,U,GT3,T,3,2,other,other,course,mother,1,2,0,no,no,no,yes,no,yes,yes,no,5,3,4,1,3,3,10,yes
229 | GP,M,17,U,LE3,T,2,3,services,services,reputation,father,1,2,0,no,yes,yes,no,no,yes,yes,no,5,3,3,1,3,3,2,yes
230 | GP,M,18,U,LE3,T,2,1,at_home,other,course,mother,4,2,0,yes,yes,yes,yes,yes,yes,yes,yes,4,3,2,4,5,3,14,no
231 | GP,F,17,U,GT3,A,2,1,other,other,course,mother,2,3,0,no,no,no,yes,yes,yes,yes,yes,3,2,3,1,2,3,10,yes
232 | GP,F,17,U,LE3,T,4,3,health,other,reputation,father,1,2,0,no,no,no,yes,yes,yes,yes,yes,3,2,3,1,2,3,14,yes
233 | GP,M,17,R,GT3,T,2,2,other,other,course,father,2,2,0,no,yes,yes,yes,yes,yes,yes,no,4,5,2,1,1,1,4,yes
234 | GP,M,17,U,GT3,T,4,4,teacher,teacher,reputation,mother,1,2,0,yes,yes,no,yes,yes,yes,yes,yes,4,5,5,1,3,2,14,no
235 | GP,M,16,U,GT3,T,4,4,health,other,reputation,father,1,2,0,no,yes,yes,yes,yes,yes,yes,no,4,2,4,2,4,1,2,yes
236 | GP,M,16,U,LE3,T,1,1,other,other,home,mother,2,2,0,no,yes,yes,no,yes,yes,yes,no,3,4,2,1,1,5,18,no
237 | GP,M,16,U,GT3,T,3,2,at_home,other,reputation,mother,2,3,0,no,no,no,yes,yes,yes,yes,yes,5,3,3,1,3,2,10,yes
238 | GP,M,17,U,LE3,T,2,2,other,other,home,father,1,2,0,no,no,yes,yes,no,yes,yes,yes,4,4,2,5,5,4,4,yes
239 | GP,F,16,U,GT3,T,2,1,other,other,home,mother,1,1,0,no,no,no,no,yes,yes,yes,yes,4,5,2,1,1,5,20,yes
240 | GP,F,17,R,GT3,T,2,1,at_home,services,course,mother,3,2,0,no,no,no,yes,yes,yes,no,no,2,1,1,1,1,3,2,yes
241 | GP,M,18,U,GT3,T,2,2,other,services,reputation,father,1,2,1,no,no,no,no,yes,no,yes,no,5,5,4,3,5,2,0,no
242 | GP,M,17,U,LE3,T,4,3,health,other,course,mother,2,2,0,no,no,no,yes,yes,yes,yes,yes,2,5,5,1,4,5,14,yes
243 | GP,M,17,R,LE3,A,4,4,teacher,other,course,mother,2,2,0,no,yes,yes,no,yes,yes,yes,no,3,3,3,2,3,4,2,yes
244 | GP,M,16,U,LE3,T,4,3,teacher,other,course,mother,1,1,0,no,no,no,yes,no,yes,yes,no,5,4,5,1,1,3,0,no
245 | GP,M,16,U,GT3,T,4,4,services,services,course,mother,1,1,0,no,no,no,yes,yes,yes,yes,no,5,3,2,1,2,5,0,yes
246 | GP,F,18,U,GT3,T,2,1,other,other,course,other,2,3,0,no,yes,yes,no,no,yes,yes,yes,4,4,4,1,1,3,0,no
247 | GP,M,16,U,GT3,T,2,1,other,other,course,mother,3,1,0,no,no,no,no,yes,yes,yes,no,4,3,3,1,1,4,6,yes
248 | GP,M,17,U,GT3,T,2,3,other,other,course,father,2,1,0,no,no,no,no,yes,yes,yes,no,5,2,2,1,1,2,4,yes
249 | GP,M,22,U,GT3,T,3,1,services,services,other,mother,1,1,3,no,no,no,no,no,no,yes,yes,5,4,5,5,5,1,16,no
250 | GP,M,18,R,LE3,T,3,3,other,services,course,mother,1,2,1,no,yes,no,no,yes,yes,yes,yes,4,3,3,1,3,5,8,no
251 | GP,M,16,U,GT3,T,0,2,other,other,other,mother,1,1,0,no,no,yes,no,no,yes,yes,no,4,3,2,2,4,5,0,yes
252 | GP,M,18,U,GT3,T,3,2,services,other,course,mother,2,1,1,no,no,no,no,yes,no,yes,no,4,4,5,2,4,5,0,no
253 | GP,M,16,U,GT3,T,3,3,at_home,other,reputation,other,3,2,0,yes,yes,no,no,no,yes,yes,no,5,3,3,1,3,2,6,yes
254 | GP,M,18,U,GT3,T,2,1,services,services,other,mother,1,1,1,no,no,no,no,no,no,yes,no,3,2,5,2,5,5,4,no
255 | GP,M,16,R,GT3,T,2,1,other,other,course,mother,2,1,0,no,no,no,yes,no,yes,no,no,3,3,2,1,3,3,0,no
256 | GP,M,17,R,GT3,T,2,1,other,other,course,mother,1,1,0,no,no,no,no,no,yes,yes,no,4,4,2,2,4,5,0,yes
257 | GP,M,17,U,LE3,T,1,1,health,other,course,mother,2,1,1,no,yes,no,yes,yes,yes,yes,no,4,4,4,1,2,5,2,no
258 | GP,F,17,U,LE3,T,4,2,teacher,services,reputation,mother,1,4,0,no,yes,yes,yes,yes,yes,yes,no,4,2,3,1,1,4,6,yes
259 | GP,M,19,U,LE3,A,4,3,services,at_home,reputation,mother,1,2,0,no,yes,no,no,yes,yes,yes,no,4,3,1,1,1,1,12,yes
260 | GP,M,18,U,GT3,T,2,1,other,other,home,mother,1,2,0,no,no,no,yes,yes,yes,yes,no,5,2,4,1,2,4,8,yes
261 | GP,F,17,U,LE3,T,2,2,services,services,course,father,1,4,0,no,no,yes,yes,yes,yes,yes,yes,3,4,1,1,1,2,0,no
262 | GP,F,18,U,GT3,T,4,3,services,other,home,father,1,2,0,no,yes,yes,no,yes,yes,yes,yes,3,1,2,1,3,2,21,yes
263 | GP,M,18,U,GT3,T,4,3,teacher,other,course,mother,1,2,0,no,yes,yes,no,no,yes,yes,no,4,3,2,1,1,3,2,no
264 | GP,M,18,R,GT3,T,3,2,other,other,course,mother,1,3,0,no,no,no,yes,no,yes,no,no,5,3,2,1,1,3,1,yes
265 | GP,F,17,U,GT3,T,3,3,other,other,home,mother,1,3,0,no,no,no,yes,no,yes,no,no,3,2,3,1,1,4,4,no
266 | GP,F,18,U,GT3,T,2,2,at_home,services,home,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,yes,4,3,3,1,1,3,0,no
267 | GP,M,18,R,LE3,A,3,4,other,other,reputation,mother,2,2,0,no,yes,yes,yes,yes,yes,yes,no,4,2,5,3,4,1,13,yes
268 | GP,M,17,U,GT3,T,3,1,services,other,other,mother,1,2,0,no,no,yes,yes,yes,yes,yes,yes,5,4,4,3,4,5,2,yes
269 | GP,F,18,R,GT3,T,4,4,teacher,other,reputation,mother,2,2,0,no,no,yes,yes,yes,yes,yes,no,4,3,4,2,2,4,8,yes
270 | GP,M,18,U,GT3,T,4,2,health,other,reputation,father,1,2,0,no,yes,yes,yes,yes,yes,yes,yes,5,4,5,1,3,5,10,yes
271 | GP,F,18,R,GT3,T,2,1,other,other,reputation,mother,2,2,0,no,yes,no,no,yes,no,yes,yes,4,3,5,1,2,3,0,no
272 | GP,F,19,U,GT3,T,3,3,other,services,home,other,1,2,2,no,yes,yes,yes,yes,yes,yes,no,4,3,5,3,3,5,15,no
273 | GP,F,18,U,GT3,T,2,3,other,services,reputation,father,1,4,0,no,yes,yes,yes,yes,yes,yes,yes,4,5,5,1,3,2,4,yes
274 | GP,F,18,U,LE3,T,1,1,other,other,home,mother,2,2,0,no,yes,yes,no,no,yes,no,no,4,4,3,1,1,3,2,yes
275 | GP,M,17,R,GT3,T,1,2,at_home,at_home,home,mother,1,2,0,no,yes,yes,yes,no,yes,no,yes,3,5,2,2,2,1,2,yes
276 | GP,F,17,U,GT3,T,2,4,at_home,health,reputation,mother,2,2,0,no,yes,yes,no,yes,yes,yes,yes,4,3,3,1,1,1,2,yes
277 | GP,F,17,U,LE3,T,2,2,services,other,course,mother,2,2,0,yes,yes,yes,no,yes,yes,yes,yes,4,4,4,2,3,5,6,yes
278 | GP,F,18,R,GT3,A,3,2,other,services,home,mother,2,2,0,no,no,no,no,no,no,yes,yes,4,1,1,1,1,5,75,no
279 | GP,M,18,U,GT3,T,4,4,teacher,services,home,mother,2,1,0,no,no,yes,yes,yes,yes,yes,no,3,2,4,1,4,3,22,no
280 | GP,F,18,U,GT3,T,4,4,health,health,reputation,father,1,2,1,yes,yes,no,yes,yes,yes,yes,yes,2,4,4,1,1,4,15,no
281 | GP,M,18,U,LE3,T,4,3,teacher,services,course,mother,2,1,0,no,no,yes,yes,yes,yes,yes,no,4,2,3,1,2,1,8,yes
282 | GP,M,17,U,LE3,A,4,1,services,other,home,mother,2,1,0,no,no,yes,yes,yes,yes,yes,yes,4,5,4,2,4,5,30,no
283 | GP,M,17,U,LE3,A,3,2,teacher,services,home,mother,1,1,1,no,no,no,no,yes,yes,yes,no,4,4,4,3,4,3,19,yes
284 | GP,F,18,R,LE3,T,1,1,at_home,other,reputation,mother,2,4,0,no,yes,yes,yes,yes,yes,no,no,5,2,2,1,1,3,1,yes
285 | GP,F,18,U,GT3,T,1,1,other,other,home,mother,2,2,0,yes,no,no,yes,yes,yes,yes,no,5,4,4,1,1,4,4,yes
286 | GP,F,17,U,GT3,T,2,2,other,other,course,mother,1,2,0,no,yes,no,no,no,yes,yes,no,5,4,5,1,2,5,4,yes
287 | GP,M,17,U,GT3,T,1,1,other,other,reputation,father,1,2,0,no,no,yes,no,no,yes,yes,no,4,3,3,1,2,4,2,yes
288 | GP,F,18,U,GT3,T,2,2,at_home,at_home,other,mother,1,3,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,1,2,2,5,yes
289 | GP,F,17,U,GT3,T,1,1,services,teacher,reputation,mother,1,3,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,1,1,3,6,yes
290 | GP,M,18,U,GT3,T,2,1,services,services,reputation,mother,1,3,0,no,no,yes,yes,yes,yes,yes,no,4,2,4,1,3,2,6,yes
291 | GP,M,18,U,LE3,A,4,4,teacher,teacher,reputation,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,no,5,4,3,1,1,2,9,yes
292 | GP,M,18,U,GT3,T,4,2,teacher,other,home,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,yes,4,3,2,1,4,5,11,yes
293 | GP,F,17,U,GT3,T,4,3,health,services,reputation,mother,1,3,0,no,yes,yes,no,yes,yes,yes,no,4,2,2,1,2,3,0,yes
294 | GP,F,18,U,LE3,T,2,1,services,at_home,reputation,mother,1,2,1,no,no,no,no,yes,yes,yes,yes,5,4,3,1,1,5,12,yes
295 | GP,F,17,R,LE3,T,3,1,services,other,reputation,mother,2,4,0,no,yes,yes,no,yes,yes,no,no,3,1,2,1,1,3,6,yes
296 | GP,M,18,R,LE3,T,3,2,services,other,reputation,mother,2,3,0,no,yes,yes,yes,yes,yes,yes,no,5,4,2,1,1,4,8,yes
297 | GP,M,17,U,GT3,T,3,3,health,other,home,mother,1,1,0,no,yes,yes,no,yes,yes,yes,no,4,4,3,1,3,5,4,yes
298 | GP,F,19,U,GT3,T,4,4,health,other,reputation,other,2,2,0,no,yes,yes,yes,yes,yes,yes,no,2,3,4,2,3,2,0,no
299 | GP,F,18,U,LE3,T,4,3,other,other,home,other,2,2,0,no,yes,yes,no,yes,yes,yes,yes,4,4,5,1,2,2,10,no
300 | GP,F,18,U,GT3,T,4,3,other,other,reputation,father,1,4,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,1,1,3,0,yes
301 | GP,M,18,U,LE3,T,4,4,teacher,teacher,home,mother,1,1,0,no,yes,yes,no,yes,yes,yes,yes,1,4,2,2,2,1,5,yes
302 | GP,F,18,U,LE3,A,4,4,health,other,home,mother,1,2,0,no,yes,no,no,yes,yes,yes,yes,4,2,4,1,1,4,14,yes
303 | GP,M,17,U,LE3,T,4,4,other,teacher,home,father,2,1,0,no,no,yes,no,yes,yes,yes,no,4,1,1,2,2,5,0,yes
304 | GP,F,17,U,GT3,T,4,2,other,other,reputation,mother,2,3,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,1,1,3,0,yes
305 | GP,F,17,U,GT3,T,3,2,health,health,reputation,father,1,4,0,no,yes,yes,yes,no,yes,yes,no,5,2,2,1,2,5,0,yes
306 | GP,M,19,U,GT3,T,3,3,other,other,home,other,1,2,1,no,yes,no,yes,yes,yes,yes,yes,4,4,4,1,1,3,20,yes
307 | GP,F,18,U,GT3,T,2,4,services,at_home,reputation,other,1,2,1,no,yes,yes,yes,yes,yes,yes,no,4,4,3,1,1,3,8,yes
308 | GP,M,20,U,GT3,A,3,2,services,other,course,other,1,1,0,no,no,no,yes,yes,yes,no,no,5,5,3,1,1,5,0,yes
309 | GP,M,19,U,GT3,T,4,4,teacher,services,reputation,other,2,1,1,no,yes,yes,no,yes,yes,yes,yes,4,3,4,1,1,4,38,no
310 | GP,M,19,R,GT3,T,3,3,other,services,reputation,father,1,2,1,no,no,no,yes,yes,yes,no,yes,4,5,3,1,2,5,0,yes
311 | GP,F,19,U,LE3,T,1,1,at_home,other,reputation,other,1,2,1,yes,yes,no,yes,no,yes,yes,no,4,4,3,1,3,3,18,yes
312 | GP,F,19,U,LE3,T,1,2,services,services,home,other,1,2,1,no,no,no,yes,no,yes,no,yes,4,2,4,2,2,3,0,no
313 | GP,F,19,U,GT3,T,2,1,at_home,other,other,other,3,2,0,no,yes,no,no,yes,no,yes,yes,3,4,1,1,1,2,20,yes
314 | GP,M,19,U,GT3,T,1,2,other,services,course,other,1,2,1,no,no,no,no,no,yes,yes,no,4,5,2,2,2,4,3,yes
315 | GP,F,19,U,LE3,T,3,2,services,other,reputation,other,2,2,1,no,yes,yes,no,no,yes,yes,yes,4,2,2,1,2,1,22,yes
316 | GP,F,19,U,GT3,T,1,1,at_home,health,home,other,1,3,2,no,no,no,no,no,yes,yes,yes,4,1,2,1,1,3,14,yes
317 | GP,F,19,R,GT3,T,2,3,other,other,reputation,other,1,3,1,no,no,no,no,yes,yes,yes,yes,4,1,2,1,1,3,40,yes
318 | GP,F,18,U,GT3,T,2,1,services,other,course,mother,2,2,0,no,yes,yes,yes,yes,yes,yes,no,5,3,3,1,2,1,0,no
319 | GP,F,18,U,GT3,T,4,3,other,other,course,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,yes,4,3,4,1,1,5,9,no
320 | GP,F,17,R,GT3,T,3,4,at_home,services,course,father,1,3,0,no,yes,yes,yes,no,yes,yes,no,4,3,4,2,5,5,0,yes
321 | GP,F,18,U,GT3,T,4,4,teacher,other,course,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,4,4,3,3,5,2,yes
322 | GP,F,17,U,GT3,A,4,3,services,services,course,mother,1,2,0,no,yes,yes,no,yes,yes,yes,yes,5,2,2,1,2,5,23,yes
323 | GP,F,17,U,GT3,T,2,2,other,other,course,mother,1,2,0,no,yes,no,no,yes,yes,no,yes,4,2,2,1,1,3,12,no
324 | GP,F,17,R,LE3,T,2,2,services,services,course,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,no,3,3,2,2,2,3,3,yes
325 | GP,F,17,U,GT3,T,3,1,services,services,course,father,1,3,0,no,yes,no,no,no,yes,yes,no,3,4,3,2,3,5,1,yes
326 | GP,F,17,U,LE3,T,0,2,at_home,at_home,home,father,2,3,0,no,no,no,no,yes,yes,yes,no,3,3,3,2,3,2,0,yes
327 | GP,M,18,U,GT3,T,4,4,other,other,course,mother,1,3,0,no,no,no,yes,yes,yes,yes,no,4,3,3,2,2,3,3,yes
328 | GP,M,17,U,GT3,T,3,3,other,services,reputation,mother,1,1,0,no,no,no,yes,no,yes,yes,no,4,3,5,3,5,5,3,yes
329 | GP,M,17,R,GT3,T,2,2,services,other,course,mother,4,1,0,no,yes,no,no,yes,yes,yes,no,4,4,5,5,5,4,8,yes
330 | GP,F,17,U,GT3,T,4,4,teacher,services,course,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,no,5,4,4,1,3,4,7,no
331 | GP,F,17,U,GT3,T,4,4,teacher,teacher,course,mother,2,3,0,no,yes,yes,no,no,yes,yes,yes,4,3,3,1,2,4,4,yes
332 | GP,M,18,U,LE3,T,2,2,other,other,course,mother,1,4,0,no,yes,no,yes,yes,yes,yes,no,4,5,5,2,4,5,2,no
333 | GP,F,17,R,GT3,T,2,4,at_home,other,course,father,1,3,0,no,yes,no,no,yes,yes,yes,yes,4,4,3,1,1,5,7,yes
334 | GP,F,18,U,GT3,T,3,3,services,services,home,mother,1,2,0,no,no,no,yes,yes,yes,yes,no,5,3,4,1,1,4,0,no
335 | GP,F,18,U,LE3,T,2,2,other,other,home,other,1,2,0,no,no,no,yes,no,yes,yes,yes,4,3,3,1,1,2,0,no
336 | GP,F,18,R,GT3,T,2,2,at_home,other,course,mother,2,4,0,no,no,no,yes,yes,yes,no,no,4,4,4,1,1,4,0,no
337 | GP,F,17,U,GT3,T,3,4,services,other,course,mother,1,3,0,no,no,no,no,yes,yes,yes,no,4,4,5,1,3,5,16,yes
338 | GP,F,19,R,GT3,A,3,1,services,at_home,home,other,1,3,1,no,no,yes,no,yes,yes,no,no,5,4,3,1,2,5,12,yes
339 | GP,F,17,U,GT3,T,3,2,other,other,home,mother,1,2,0,no,yes,yes,no,yes,yes,yes,yes,4,3,2,2,3,2,0,no
340 | GP,F,18,U,LE3,T,3,3,services,services,home,mother,1,4,0,no,yes,no,no,yes,yes,yes,no,5,3,3,1,1,1,7,yes
341 | GP,F,17,R,GT3,A,3,2,other,other,home,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,3,3,2,3,2,4,yes
342 | GP,F,19,U,GT3,T,2,1,services,services,home,other,1,3,1,no,no,yes,yes,yes,yes,yes,yes,4,3,4,1,3,3,4,yes
343 | GP,M,18,U,GT3,T,4,4,teacher,services,home,father,1,2,1,no,yes,no,yes,yes,yes,yes,no,4,3,3,2,2,2,0,no
344 | GP,M,18,U,LE3,T,3,4,services,other,home,mother,1,2,0,no,no,no,yes,yes,yes,yes,yes,4,3,3,1,3,5,11,yes
345 | GP,F,17,U,GT3,A,2,2,at_home,at_home,home,father,1,2,1,no,yes,no,no,yes,yes,yes,yes,3,3,1,1,2,4,0,no
346 | GP,F,18,U,GT3,T,2,3,at_home,other,course,mother,1,3,0,no,yes,no,no,yes,yes,yes,no,4,3,3,1,2,3,4,yes
347 | GP,F,18,U,GT3,T,3,2,other,services,other,mother,1,3,0,no,no,no,no,yes,yes,yes,yes,5,4,3,2,3,1,7,yes
348 | GP,M,18,R,GT3,T,4,3,teacher,services,course,mother,1,3,0,no,no,no,no,yes,yes,yes,yes,5,3,2,1,2,4,9,yes
349 | GP,M,18,U,GT3,T,4,3,teacher,other,course,mother,1,3,0,no,yes,yes,no,yes,yes,yes,yes,5,4,5,2,3,5,0,no
350 | GP,F,17,U,GT3,T,4,3,health,other,reputation,mother,1,3,0,no,yes,yes,yes,yes,yes,yes,yes,4,4,3,1,3,4,0,yes
351 | MS,M,18,R,GT3,T,3,2,other,other,course,mother,2,1,1,no,yes,no,no,no,yes,yes,no,2,5,5,5,5,5,10,yes
352 | MS,M,19,R,GT3,T,1,1,other,services,home,other,3,2,3,no,no,no,no,yes,yes,yes,no,5,4,4,3,3,2,8,no
353 | MS,M,17,U,GT3,T,3,3,health,other,course,mother,2,2,0,no,yes,yes,no,yes,yes,yes,no,4,5,4,2,3,3,2,yes
354 | MS,M,18,U,LE3,T,1,3,at_home,services,course,mother,1,1,1,no,no,no,no,yes,no,yes,yes,4,3,3,2,3,3,7,no
355 | MS,M,19,R,GT3,T,1,1,other,other,home,other,3,1,1,no,yes,no,no,yes,yes,yes,no,4,4,4,3,3,5,4,no
356 | MS,M,17,R,GT3,T,4,3,services,other,home,mother,2,2,0,no,yes,yes,yes,no,yes,yes,yes,4,5,5,1,3,2,4,yes
357 | MS,F,18,U,GT3,T,3,3,services,services,course,father,1,2,0,no,yes,no,no,yes,yes,no,yes,5,3,4,1,1,5,0,no
358 | MS,F,17,R,GT3,T,4,4,teacher,services,other,father,2,2,0,no,yes,yes,yes,yes,yes,yes,no,4,3,3,1,2,5,4,yes
359 | MS,F,17,U,LE3,A,3,2,services,other,reputation,mother,2,2,0,no,no,no,no,yes,yes,no,yes,1,2,3,1,2,5,2,yes
360 | MS,M,18,U,LE3,T,1,1,other,services,home,father,2,1,0,no,no,no,no,no,yes,yes,yes,3,3,2,1,2,3,4,yes
361 | MS,F,18,U,LE3,T,1,1,at_home,services,course,father,2,3,0,no,no,no,no,yes,yes,yes,no,5,3,2,1,1,4,0,yes
362 | MS,F,18,R,LE3,A,1,4,at_home,other,course,mother,3,2,0,no,no,no,no,yes,yes,no,yes,4,3,4,1,4,5,0,yes
363 | MS,M,18,R,LE3,T,1,1,at_home,other,other,mother,2,2,1,no,no,no,yes,no,no,no,no,4,4,3,2,3,5,2,yes
364 | MS,F,18,U,GT3,T,3,3,services,services,other,mother,2,2,0,no,yes,no,no,yes,yes,yes,yes,4,3,2,1,3,3,0,yes
365 | MS,F,17,U,LE3,T,4,4,at_home,at_home,course,mother,1,2,0,no,yes,yes,yes,yes,yes,yes,yes,2,3,4,1,1,1,0,yes
366 | MS,F,17,R,GT3,T,1,2,other,services,course,father,2,2,0,no,no,no,no,no,yes,no,no,3,2,2,1,2,3,0,yes
367 | MS,M,18,R,GT3,T,1,3,at_home,other,course,mother,2,2,0,no,yes,yes,no,yes,yes,no,no,3,3,4,2,4,3,4,yes
368 | MS,M,18,U,LE3,T,4,4,teacher,services,other,mother,2,3,0,no,no,yes,no,yes,yes,yes,yes,4,2,2,2,2,5,0,yes
369 | MS,F,17,R,GT3,T,1,1,other,services,reputation,mother,3,1,1,no,yes,yes,no,yes,yes,yes,yes,5,2,1,1,2,1,0,no
370 | MS,F,18,U,GT3,T,2,3,at_home,services,course,father,2,1,0,no,yes,yes,no,yes,yes,yes,yes,5,2,3,1,2,4,0,yes
371 | MS,F,18,R,GT3,T,4,4,other,teacher,other,father,3,2,0,no,yes,yes,no,no,yes,yes,yes,3,2,2,4,2,5,10,yes
372 | MS,F,19,U,LE3,T,3,2,services,services,home,other,2,2,2,no,no,no,yes,yes,yes,no,yes,3,2,2,1,1,3,4,no
373 | MS,M,18,R,LE3,T,1,2,at_home,services,other,father,3,1,0,no,yes,yes,yes,yes,no,yes,yes,4,3,3,2,3,3,3,yes
374 | MS,F,17,U,GT3,T,2,2,other,at_home,home,mother,1,3,0,no,no,no,yes,yes,yes,no,yes,3,4,3,1,1,3,8,yes
375 | MS,F,17,R,GT3,T,1,2,other,other,course,mother,1,1,0,no,no,no,yes,yes,yes,yes,no,3,5,5,1,3,1,14,no
376 | MS,F,18,R,LE3,T,4,4,other,other,reputation,mother,2,3,0,no,no,no,no,yes,yes,yes,no,5,4,4,1,1,1,0,yes
377 | MS,F,18,R,GT3,T,1,1,other,other,home,mother,4,3,0,no,no,no,no,yes,yes,yes,no,4,3,2,1,2,4,2,yes
378 | MS,F,20,U,GT3,T,4,2,health,other,course,other,2,3,2,no,yes,yes,no,no,yes,yes,yes,5,4,3,1,1,3,4,yes
379 | MS,F,18,R,LE3,T,4,4,teacher,services,course,mother,1,2,0,no,no,yes,yes,yes,yes,yes,no,5,4,3,3,4,2,4,yes
380 | MS,F,18,U,GT3,T,3,3,other,other,home,mother,1,2,0,no,no,yes,no,yes,yes,yes,yes,4,1,3,1,2,1,0,yes
381 | MS,F,17,R,GT3,T,3,1,at_home,other,reputation,mother,1,2,0,no,yes,yes,yes,no,yes,yes,no,4,5,4,2,3,1,17,yes
382 | MS,M,18,U,GT3,T,4,4,teacher,teacher,home,father,1,2,0,no,no,yes,yes,no,yes,yes,no,3,2,4,1,4,2,4,yes
383 | MS,M,18,R,GT3,T,2,1,other,other,other,mother,2,1,0,no,no,no,yes,no,yes,yes,yes,4,4,3,1,3,5,5,no
384 | MS,M,17,U,GT3,T,2,3,other,services,home,father,2,2,0,no,no,no,yes,yes,yes,yes,no,4,4,3,1,1,3,2,yes
385 | MS,M,19,R,GT3,T,1,1,other,services,other,mother,2,1,1,no,no,no,no,yes,yes,no,no,4,3,2,1,3,5,0,no
386 | MS,M,18,R,GT3,T,4,2,other,other,home,father,2,1,1,no,no,yes,no,yes,yes,no,no,5,4,3,4,3,3,14,no
387 | MS,F,18,R,GT3,T,2,2,at_home,other,other,mother,2,3,0,no,no,yes,no,yes,yes,no,no,5,3,3,1,3,4,2,yes
388 | MS,F,18,R,GT3,T,4,4,teacher,at_home,reputation,mother,3,1,0,no,yes,yes,yes,yes,yes,yes,yes,4,4,3,2,2,5,7,no
389 | MS,F,19,R,GT3,T,2,3,services,other,course,mother,1,3,1,no,no,no,yes,no,yes,yes,no,5,4,2,1,2,5,0,no
390 | MS,F,18,U,LE3,T,3,1,teacher,services,course,mother,1,2,0,no,yes,yes,no,yes,yes,yes,no,4,3,4,1,1,1,0,no
391 | MS,F,18,U,GT3,T,1,1,other,other,course,mother,2,2,1,no,no,no,yes,yes,yes,no,no,1,1,1,1,1,5,0,no
392 | MS,M,20,U,LE3,A,2,2,services,services,course,other,1,2,2,no,yes,yes,no,yes,yes,no,no,5,5,4,4,5,4,11,no
393 | MS,M,17,U,LE3,T,3,1,services,services,course,mother,2,1,0,no,no,no,no,no,yes,yes,no,2,4,5,3,4,2,3,yes
394 | MS,M,21,R,GT3,T,1,1,other,other,course,other,1,1,3,no,no,no,no,no,yes,no,no,5,5,3,3,3,3,3,no
395 | MS,M,18,R,LE3,T,3,2,services,other,course,mother,3,1,0,no,no,no,no,no,yes,yes,no,4,4,1,3,4,5,0,yes
396 | MS,M,19,U,LE3,T,1,1,other,at_home,course,father,1,1,0,no,no,no,no,yes,yes,yes,no,3,2,3,3,3,5,5,no
397 |
--------------------------------------------------------------------------------
/student-intervention/student_intervention.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Machine Learning Engineer Nanodegree\n",
8 | "## Supervised Learning\n",
9 | "## Project 2: Building a Student Intervention System"
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "Welcome to the second project of the Machine Learning Engineer Nanodegree! In this notebook, some template code has already been provided for you, and it will be your job to implement the additional functionality necessary to successfully complete this project. Sections that begin with **'Implementation'** in the header indicate that the following block of code will require additional functionality which you must provide. Instructions will be provided for each section and the specifics of the implementation are marked in the code block with a `'TODO'` statement. Please be sure to read the instructions carefully!\n",
17 | "\n",
18 | "In addition to implementing code, there will be questions that you must answer which relate to the project and your implementation. Each section where you will answer a question is preceded by a **'Question X'** header. Carefully read each question and provide thorough answers in the following text boxes that begin with **'Answer:'**. Your project submission will be evaluated based on your answers to each of the questions and the implementation you provide. \n",
19 | "\n",
**Note:**">
20 |     ">**Note:** Code and Markdown cells can be executed using the **Shift + Enter** keyboard shortcut. In addition, Markdown cells can typically be edited by double-clicking the cell to enter edit mode."
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "### Question 1 - Classification vs. Regression\n",
28 | "*Your goal for this project is to identify students who might need early intervention before they fail to graduate. Which type of supervised learning problem is this, classification or regression? Why?*"
29 | ]
30 | },
31 | {
32 | "cell_type": "markdown",
33 | "metadata": {},
34 | "source": [
35 | "**Answer: ** Classification. This problem involves categorizing data into a discrete boolean value. The model will predict a True/False value based on their need for early intervention. This differs from regression, which is used to predict continuous values. "
36 | ]
37 | },
38 | {
39 | "cell_type": "markdown",
40 | "metadata": {},
41 | "source": [
42 | "## Exploring the Data\n",
43 | "Run the code cell below to load necessary Python libraries and load the student data. Note that the last column from this dataset, `'passed'`, will be our target label (whether the student graduated or didn't graduate). All other columns are features about each student."
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 1,
49 | "metadata": {
50 | "collapsed": false
51 | },
52 | "outputs": [
53 | {
54 | "name": "stdout",
55 | "output_type": "stream",
56 | "text": [
57 | "Student data read successfully!\n"
58 | ]
59 | }
60 | ],
61 | "source": [
62 | "# Import libraries\n",
63 | "import numpy as np\n",
64 | "import pandas as pd\n",
65 | "from time import time\n",
66 | "from sklearn.metrics import f1_score\n",
67 | "\n",
68 | "# Get the data\n",
69 | "student_data = pd.read_csv(\"student-data.csv\")\n",
70 | "print \"Student data read successfully!\""
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "### Implementation: Data Exploration\n",
78 | "Let's begin by investigating the dataset to determine how many students we have information on, and learn about the graduation rate among these students. In the code cell below, you will need to compute the following:\n",
79 | "- The total number of students, `n_students`.\n",
80 | "- The total number of features for each student, `n_features`.\n",
81 | "- The number of those students who passed, `n_passed`.\n",
82 | "- The number of those students who failed, `n_failed`.\n",
83 | "- The graduation rate of the class, `grad_rate`, in percent (%).\n"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": 54,
89 | "metadata": {
90 | "collapsed": false
91 | },
92 | "outputs": [
93 | {
94 | "name": "stdout",
95 | "output_type": "stream",
96 | "text": [
97 | "Total number of students: 395\n",
98 | "Number of features: 30\n",
99 | "Number of students who passed: 265\n",
100 | "Number of students who failed: 130\n",
101 | "Graduation rate of the class: 67.09%\n"
102 | ]
103 | }
104 | ],
105 | "source": [
106 | "# TODO: Calculate number of students\n",
107 | "n_students = student_data.shape[0]\n",
108 | "\n",
109 | "# TODO: Calculate number of features\n",
110 | "n_features = student_data.shape[1]-1\n",
111 | "\n",
112 | "# TODO: Calculate passing students\n",
113 | "n_passed = student_data[student_data['passed'] == 'yes'].shape[0]\n",
114 | "\n",
115 | "# TODO: Calculate failing students\n",
116 | "n_failed = student_data[student_data['passed'] == 'no'].shape[0]\n",
117 | "\n",
118 | "# TODO: Calculate graduation rate\n",
119 | "grad_rate = 100 * (n_passed / float(n_students))\n",
120 | "\n",
121 | "# Print the results\n",
122 | "print \"Total number of students: {}\".format(n_students)\n",
123 | "print \"Number of features: {}\".format(n_features)\n",
124 | "print \"Number of students who passed: {}\".format(n_passed)\n",
125 | "print \"Number of students who failed: {}\".format(n_failed)\n",
126 | "print \"Graduation rate of the class: {:.2f}%\".format(grad_rate)"
127 | ]
128 | },
129 | {
130 | "cell_type": "markdown",
131 | "metadata": {},
132 | "source": [
133 | "## Preparing the Data\n",
134 | "In this section, we will prepare the data for modeling, training and testing.\n",
135 | "\n",
136 | "### Identify feature and target columns\n",
137 | "It is often the case that the data you obtain contains non-numeric features. This can be a problem, as most machine learning algorithms expect numeric data to perform computations with.\n",
138 | "\n",
139 | "Run the code cell below to separate the student data into feature and target columns to see if any features are non-numeric."
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": 3,
145 | "metadata": {
146 | "collapsed": false
147 | },
148 | "outputs": [
149 | {
150 | "name": "stdout",
151 | "output_type": "stream",
152 | "text": [
153 | "Feature columns:\n",
154 | "['school', 'sex', 'age', 'address', 'famsize', 'Pstatus', 'Medu', 'Fedu', 'Mjob', 'Fjob', 'reason', 'guardian', 'traveltime', 'studytime', 'failures', 'schoolsup', 'famsup', 'paid', 'activities', 'nursery', 'higher', 'internet', 'romantic', 'famrel', 'freetime', 'goout', 'Dalc', 'Walc', 'health', 'absences']\n",
155 | "\n",
156 | "Target column: passed\n",
157 | "\n",
158 | "Feature values:\n",
159 | " school sex age address famsize Pstatus Medu Fedu Mjob Fjob \\\n",
160 | "0 GP F 18 U GT3 A 4 4 at_home teacher \n",
161 | "1 GP F 17 U GT3 T 1 1 at_home other \n",
162 | "2 GP F 15 U LE3 T 1 1 at_home other \n",
163 | "3 GP F 15 U GT3 T 4 2 health services \n",
164 | "4 GP F 16 U GT3 T 3 3 other other \n",
165 | "\n",
166 | " ... higher internet romantic famrel freetime goout Dalc Walc health \\\n",
167 | "0 ... yes no no 4 3 4 1 1 3 \n",
168 | "1 ... yes yes no 5 3 3 1 1 3 \n",
169 | "2 ... yes yes no 4 3 2 2 3 3 \n",
170 | "3 ... yes yes yes 3 2 2 1 1 5 \n",
171 | "4 ... yes no no 4 3 2 1 2 5 \n",
172 | "\n",
173 | " absences \n",
174 | "0 6 \n",
175 | "1 4 \n",
176 | "2 10 \n",
177 | "3 2 \n",
178 | "4 4 \n",
179 | "\n",
180 | "[5 rows x 30 columns]\n"
181 | ]
182 | }
183 | ],
184 | "source": [
185 | "# Extract feature columns\n",
186 | "feature_cols = list(student_data.columns[:-1])\n",
187 | "\n",
188 | "# Extract target column 'passed'\n",
189 | "target_col = student_data.columns[-1] \n",
190 | "\n",
191 | "# Show the list of columns\n",
192 | "print \"Feature columns:\\n{}\".format(feature_cols)\n",
193 | "print \"\\nTarget column: {}\".format(target_col)\n",
194 | "\n",
195 | "# Separate the data into feature data and target data (X_all and y_all, respectively)\n",
196 | "X_all = student_data[feature_cols]\n",
197 | "y_all = student_data[target_col]\n",
198 | "\n",
199 | "# Show the feature information by printing the first five rows\n",
200 | "print \"\\nFeature values:\"\n",
201 | "print X_all.head()"
202 | ]
203 | },
204 | {
205 | "cell_type": "markdown",
206 | "metadata": {},
207 | "source": [
208 | "### Preprocess Feature Columns\n",
209 | "\n",
210 | "As you can see, there are several non-numeric columns that need to be converted! Many of them are simply `yes`/`no`, e.g. `internet`. These can be reasonably converted into `1`/`0` (binary) values.\n",
211 | "\n",
212 | "Other columns, like `Mjob` and `Fjob`, have more than two values, and are known as _categorical variables_. The recommended way to handle such a column is to create as many columns as possible values (e.g. `Fjob_teacher`, `Fjob_other`, `Fjob_services`, etc.), and assign a `1` to one of them and `0` to all others.\n",
213 | "\n",
214 | "These generated columns are sometimes called _dummy variables_, and we will use the [`pandas.get_dummies()`](http://pandas.pydata.org/pandas-docs/stable/generated/pandas.get_dummies.html?highlight=get_dummies#pandas.get_dummies) function to perform this transformation. Run the code cell below to perform the preprocessing routine discussed in this section."
215 | ]
216 | },
217 | {
218 | "cell_type": "code",
219 | "execution_count": 4,
220 | "metadata": {
221 | "collapsed": false
222 | },
223 | "outputs": [
224 | {
225 | "name": "stdout",
226 | "output_type": "stream",
227 | "text": [
228 | "Processed feature columns (48 total features):\n",
229 | "['school_GP', 'school_MS', 'sex_F', 'sex_M', 'age', 'address_R', 'address_U', 'famsize_GT3', 'famsize_LE3', 'Pstatus_A', 'Pstatus_T', 'Medu', 'Fedu', 'Mjob_at_home', 'Mjob_health', 'Mjob_other', 'Mjob_services', 'Mjob_teacher', 'Fjob_at_home', 'Fjob_health', 'Fjob_other', 'Fjob_services', 'Fjob_teacher', 'reason_course', 'reason_home', 'reason_other', 'reason_reputation', 'guardian_father', 'guardian_mother', 'guardian_other', 'traveltime', 'studytime', 'failures', 'schoolsup', 'famsup', 'paid', 'activities', 'nursery', 'higher', 'internet', 'romantic', 'famrel', 'freetime', 'goout', 'Dalc', 'Walc', 'health', 'absences']\n"
230 | ]
231 | }
232 | ],
233 | "source": [
234 | "def preprocess_features(X):\n",
235 | " ''' Preprocesses the student data and converts non-numeric binary variables into\n",
236 | " binary (0/1) variables. Converts categorical variables into dummy variables. '''\n",
237 | " \n",
238 | " # Initialize new output DataFrame\n",
239 | " output = pd.DataFrame(index = X.index)\n",
240 | "\n",
241 | " # Investigate each feature column for the data\n",
242 | " for col, col_data in X.iteritems():\n",
243 | " \n",
244 | " # If data type is non-numeric, replace all yes/no values with 1/0\n",
245 | " if col_data.dtype == object:\n",
246 | " col_data = col_data.replace(['yes', 'no'], [1, 0])\n",
247 | "\n",
248 | " # If data type is categorical, convert to dummy variables\n",
249 | " if col_data.dtype == object:\n",
250 | " # Example: 'school' => 'school_GP' and 'school_MS'\n",
251 | " col_data = pd.get_dummies(col_data, prefix = col) \n",
252 | " \n",
253 | " # Collect the revised columns\n",
254 | " output = output.join(col_data)\n",
255 | " \n",
256 | " return output\n",
257 | "\n",
258 | "X_all = preprocess_features(X_all)\n",
259 | "print \"Processed feature columns ({} total features):\\n{}\".format(len(X_all.columns), list(X_all.columns))"
260 | ]
261 | },
262 | {
263 | "cell_type": "markdown",
264 | "metadata": {},
265 | "source": [
266 | "### Implementation: Training and Testing Data Split\n",
267 | "So far, we have converted all _categorical_ features into numeric values. For the next step, we split the data (both features and corresponding labels) into training and test sets. In the following code cell below, you will need to implement the following:\n",
268 | "- Randomly shuffle and split the data (`X_all`, `y_all`) into training and testing subsets.\n",
269 | " - Use 300 training points (approximately 75%) and 95 testing points (approximately 25%).\n",
270 | " - Set a `random_state` for the function(s) you use, if provided.\n",
271 | " - Store the results in `X_train`, `X_test`, `y_train`, and `y_test`."
272 | ]
273 | },
274 | {
275 | "cell_type": "code",
276 | "execution_count": 53,
277 | "metadata": {
278 | "collapsed": false
279 | },
280 | "outputs": [
281 | {
282 | "name": "stdout",
283 | "output_type": "stream",
284 | "text": [
285 | "Training set has 300 samples.\n",
286 | "Testing set has 95 samples.\n"
287 | ]
288 | }
289 | ],
290 | "source": [
291 | "# TODO: Import any additional functionality you may need here\n",
292 | "from sklearn.cross_validation import train_test_split\n",
293 | "\n",
294 | "# TODO: Set the number of training points\n",
295 | "num_train = 300\n",
296 | "\n",
297 | "# Set the number of testing points\n",
298 | "num_test = X_all.shape[0] - num_train\n",
299 | "\n",
300 | "\n",
301 | "# TODO: Shuffle and split the dataset into the number of training and testing points above\n",
302 | "\n",
303 | "X_train, X_test, y_train, y_test = train_test_split(X_all, y_all, test_size=num_test, random_state=23)\n",
304 | "\n",
305 | "# Show the results of the split\n",
306 | "print \"Training set has {} samples.\".format(X_train.shape[0])\n",
307 | "print \"Testing set has {} samples.\".format(X_test.shape[0])"
308 | ]
309 | },
310 | {
311 | "cell_type": "markdown",
312 | "metadata": {},
313 | "source": [
314 | "## Training and Evaluating Models\n",
315 | "In this section, you will choose 3 supervised learning models that are appropriate for this problem and available in `scikit-learn`. You will first discuss the reasoning behind choosing these three models by considering what you know about the data and each model's strengths and weaknesses. You will then fit the model to varying sizes of training data (100 data points, 200 data points, and 300 data points) and measure the F1 score. You will need to produce three tables (one for each model) that shows the training set size, training time, prediction time, F1 score on the training set, and F1 score on the testing set."
316 | ]
317 | },
318 | {
319 | "cell_type": "markdown",
320 | "metadata": {},
321 | "source": [
322 | "### Question 2 - Model Application\n",
323 | "*List three supervised learning models that are appropriate for this problem. What are the general applications of each model? What are their strengths and weaknesses? Given what you know about the data, why did you choose these models to be applied?*"
324 | ]
325 | },
326 | {
327 | "cell_type": "markdown",
328 | "metadata": {},
329 | "source": [
330 | "**Answer: ** \n",
331 | "\n",
332 | "**Decision Tree** \n",
333 | "\n",
334 | "General Applications: The decision tree algorithm creates a branched network of logical tests. This allows data to flow through a pathway of probabilistic decisions based on its features, resulting in a prediction. Decision trees are versatile and can be used to predict user behavior in web applications, choose relevant web advertisements based on cookie data, or in medical diagnostics to predict appropriate treatments. \n",
335 | "\n",
336 | "Strengths: \n",
337 | " - Data does not need to be linearly separable. \n",
338 | " - Acquires data in a human readable pattern. \n",
339 | " - Can gain insight from both small and large datasets. \n",
340 | "\n",
341 | "Weaknesses: \n",
342 | "- Tendency to overfit the data. \n",
343 | "- More branches may lead to a higher error due from variance. \n",
344 | "\n",
345 | "Why?: The decision tree was chosen for this application because the dataset contains labeled features that may contain outliners and non-linear separation. Decision trees are ideal for \n",
346 | "\n",
347 | "**Naive Bayes** \n",
348 | "\n",
349 | "General Applications: The Naive Bayes algorithm is based on conditional probability. It calculates the probability of an outcome, given that some other outcome has already been observed. It is commonly applied to text reading programs, such as email spam filters and content classification tools for social media feeds. \n",
350 | "\n",
351 | "Strengths:\n",
352 | "- Works especially well in high bias/low variance environments\n",
353 | "- Effective on small datasets\n",
354 | "- Fast and efficient to run compared to other algorithms\n",
355 | "\n",
356 | "Weaknesses: \n",
357 | "- Completely oblivious to dependent variable relationships in the data\n",
358 | "- Less effective as the data becomes larger and more complex \n",
359 | "\n",
360 | "Why?: The training set in this example is relatively small and many of the features are likely to be independent. If the features are in fact mostly dependent, a Naive Bayes model will be very effective. \n",
361 | "\n",
362 | "**Support Vector Machines (SVM)**\n",
363 | "\n",
364 | "General Applications: The SVM algorithm works by assigning data into a binary category, resulting in the optimal hyperplane for classifying data. The hyperplane divides the maximum margin between points in either classification. Text classification is a common SVM application, especially when dealing the a large number of features. SVMs are also used in speech recognition and facial expression classification in images. \n",
365 | "\n",
366 | "Strengths: \n",
367 | "- Different kernel methods can fine tune the algorithm to find a clear decision boundary. \n",
368 | "- Can separate non-linear boundaries. \n",
369 | "- Works well in high dimensional spaces.\n",
370 | "\n",
371 | "Weaknesses: \n",
372 | "- Tendency for the algorithm to be difficult to tune - bad kernel selection can cause overfitting. \n",
373 | "- Large computational requirements. \n",
374 | "\n",
375 | "\n",
376 | "Why?: The SVM algorithm is suitable for this application because the data contains many features. Using the kernel trick to make these features linearly separable will be effective for classifying passing/failing students. "
377 | ]
378 | },
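379 |   {
380 |    "cell_type": "markdown",
381 |    "metadata": {},
382 |    "source": [
383 |     "As a quick illustration of the conditional-probability idea behind Naive Bayes (a general sketch of the formula, not tied to any particular feature in this dataset): the classifier assumes the features $x_1, \\dots, x_n$ are independent given the label, and scores each label as\n",
384 |     "\n",
385 |     "$$P(\\text{passed} = \\text{yes} \\mid x_1, \\dots, x_n) \\;\\propto\\; P(\\text{passed} = \\text{yes}) \\prod_{i=1}^{n} P(x_i \\mid \\text{passed} = \\text{yes}),$$\n",
386 |     "\n",
387 |     "and predicts whichever label receives the higher score. Multiplying individual per-feature likelihoods is what makes the model fast and effective on small datasets, and it is also why the model ignores any interactions between the features."
388 |    ]
389 |   },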
379 | {
380 | "cell_type": "markdown",
381 | "metadata": {},
382 | "source": [
383 | "### Setup\n",
384 | "Run the code cell below to initialize three helper functions which you can use for training and testing the three supervised learning models you've chosen above. The functions are as follows:\n",
385 | "- `train_classifier` - takes as input a classifier and training data and fits the classifier to the data.\n",
386 | "- `predict_labels` - takes as input a fit classifier, features, and a target labeling and makes predictions using the F1 score.\n",
387 | "- `train_predict` - takes as input a classifier, and the training and testing data, and performs `train_clasifier` and `predict_labels`.\n",
388 | " - This function will report the F1 score for both the training and testing data separately."
389 | ]
390 | },
391 | {
392 | "cell_type": "code",
393 | "execution_count": 26,
394 | "metadata": {
395 | "collapsed": false
396 | },
397 | "outputs": [],
398 | "source": [
399 | "def train_classifier(clf, X_train, y_train):\n",
400 | " ''' Fits a classifier to the training data. '''\n",
401 | " \n",
402 | " # Start the clock, train the classifier, then stop the clock\n",
403 | " start = time()\n",
404 | " clf.fit(X_train, y_train)\n",
405 | " end = time()\n",
406 | " \n",
407 | " # Print the results\n",
408 | " print \"Trained model in {:.4f} seconds\".format(end - start)\n",
409 | "\n",
410 | " \n",
411 | "def predict_labels(clf, features, target):\n",
412 | " ''' Makes predictions using a fit classifier based on F1 score. '''\n",
413 | " \n",
414 | " # Start the clock, make predictions, then stop the clock\n",
415 | " start = time()\n",
416 | " y_pred = clf.predict(features)\n",
417 | " end = time()\n",
418 | " \n",
419 | " # Print and return results\n",
420 | " print \"Made predictions in {:.4f} seconds.\".format(end - start)\n",
421 | " return f1_score(target.values, y_pred, pos_label='yes')\n",
422 | "\n",
423 | "\n",
424 | "def train_predict(clf, X_train, y_train, X_test, y_test):\n",
425 | " ''' Train and predict using a classifer based on F1 score. '''\n",
426 | " \n",
427 | " # Indicate the classifier and the training set size\n",
428 | " print \"Training a {} using a training set size of {}. . .\".format(clf.__class__.__name__, len(X_train))\n",
429 | " \n",
430 | " # Train the classifier\n",
431 | " train_classifier(clf, X_train, y_train)\n",
432 | " \n",
433 | " # Print the results of prediction for both training and testing\n",
434 | " print \"F1 score for training set: {:.4f}.\".format(predict_labels(clf, X_train, y_train))\n",
435 | " print \"F1 score for test set: {:.4f}.\".format(predict_labels(clf, X_test, y_test))"
436 | ]
437 | },
438 | {
439 | "cell_type": "markdown",
440 | "metadata": {},
441 | "source": [
442 | "### Implementation: Model Performance Metrics\n",
443 | "With the predefined functions above, you will now import the three supervised learning models of your choice and run the `train_predict` function for each one. Remember that you will need to train and predict on each classifier for three different training set sizes: 100, 200, and 300. Hence, you should expect to have 9 different outputs below — 3 for each model using the varying training set sizes. In the following code cell, you will need to implement the following:\n",
444 | "- Import the three supervised learning models you've discussed in the previous section.\n",
445 | "- Initialize the three models and store them in `clf_A`, `clf_B`, and `clf_C`.\n",
446 | " - Use a `random_state` for each model you use, if provided.\n",
447 | " - **Note:** Use the default settings for each model — you will tune one specific model in a later section.\n",
448 | "- Create the different training set sizes to be used to train each model.\n",
449 | " - *Do not reshuffle and resplit the data! The new training points should be drawn from `X_train` and `y_train`.*\n",
450 | "- Fit each model with each training set size and make predictions on the test set (9 in total). \n",
451 | "**Note:** Three tables are provided after the following code cell which can be used to store your results."
452 | ]
453 | },
454 | {
455 | "cell_type": "code",
456 | "execution_count": 56,
457 | "metadata": {
458 | "collapsed": false
459 | },
460 | "outputs": [
461 | {
462 | "name": "stdout",
463 | "output_type": "stream",
464 | "text": [
465 | "Training a DecisionTreeClassifier using a training set size of 100. . .\n",
466 | "Trained model in 0.0015 seconds\n",
467 | "Made predictions in 0.0002 seconds.\n",
468 | "F1 score for training set: 1.0000.\n",
469 | "Made predictions in 0.0004 seconds.\n",
470 | "F1 score for test set: 0.7704.\n",
471 | "\n",
472 | "\n",
473 | "Training a DecisionTreeClassifier using a training set size of 200. . .\n",
474 | "Trained model in 0.0015 seconds\n",
475 | "Made predictions in 0.0002 seconds.\n",
476 | "F1 score for training set: 1.0000.\n",
477 | "Made predictions in 0.0002 seconds.\n",
478 | "F1 score for test set: 0.7164.\n",
479 | "\n",
480 | "\n",
481 | "Training a DecisionTreeClassifier using a training set size of 300. . .\n",
482 | "Trained model in 0.0019 seconds\n",
483 | "Made predictions in 0.0003 seconds.\n",
484 | "F1 score for training set: 1.0000.\n",
485 | "Made predictions in 0.0002 seconds.\n",
486 | "F1 score for test set: 0.7259.\n",
487 | "\n",
488 | "\n",
489 | "Training a MultinomialNB using a training set size of 100. . .\n",
490 | "Trained model in 0.0010 seconds\n",
491 | "Made predictions in 0.0003 seconds.\n",
492 | "F1 score for training set: 0.7812.\n",
493 | "Made predictions in 0.0003 seconds.\n",
494 | "F1 score for test set: 0.7324.\n",
495 | "\n",
496 | "\n",
497 | "Training a MultinomialNB using a training set size of 200. . .\n",
498 | "Trained model in 0.0019 seconds\n",
499 | "Made predictions in 0.0004 seconds.\n",
500 | "F1 score for training set: 0.7653.\n",
501 | "Made predictions in 0.0003 seconds.\n",
502 | "F1 score for test set: 0.7639.\n",
503 | "\n",
504 | "\n",
505 | "Training a MultinomialNB using a training set size of 300. . .\n",
506 | "Trained model in 0.0013 seconds\n",
507 | "Made predictions in 0.0003 seconds.\n",
508 | "F1 score for training set: 0.7775.\n",
509 | "Made predictions in 0.0002 seconds.\n",
510 | "F1 score for test set: 0.7500.\n",
511 | "\n",
512 | "\n",
513 | "Training a SVC using a training set size of 100. . .\n",
514 | "Trained model in 0.0015 seconds\n",
515 | "Made predictions in 0.0009 seconds.\n",
516 | "F1 score for training set: 0.8551.\n",
517 | "Made predictions in 0.0015 seconds.\n",
518 | "F1 score for test set: 0.8079.\n",
519 | "\n",
520 | "\n",
521 | "Training a SVC using a training set size of 200. . .\n",
522 | "Trained model in 0.0039 seconds\n",
523 | "Made predictions in 0.0035 seconds.\n",
524 | "F1 score for training set: 0.8649.\n",
525 | "Made predictions in 0.0017 seconds.\n",
526 | "F1 score for test set: 0.8079.\n",
527 | "\n",
528 | "\n",
529 | "Training a SVC using a training set size of 300. . .\n",
530 | "Trained model in 0.0090 seconds\n",
531 | "Made predictions in 0.0053 seconds.\n",
532 | "F1 score for training set: 0.8724.\n",
533 | "Made predictions in 0.0019 seconds.\n",
534 | "F1 score for test set: 0.8054.\n",
535 | "\n",
536 | "\n"
537 | ]
538 | }
539 | ],
540 | "source": [
541 | "# TODO: Import the three supervised learning models from sklearn\n",
542 | "from sklearn.tree import DecisionTreeClassifier\n",
543 | "from sklearn.naive_bayes import MultinomialNB\n",
544 | "from sklearn.svm import SVC\n",
545 | "\n",
546 | "# TODO: Initialize the three models\n",
547 | "clf_A = DecisionTreeClassifier()\n",
548 | "clf_B = MultinomialNB()\n",
549 | "clf_C = SVC()\n",
550 | "\n",
551 | "# TODO: Set up the training set sizes\n",
552 | "X_train_100 = X_train[:100]\n",
553 | "y_train_100 = y_train[:100]\n",
554 | "\n",
555 | "X_train_200 = X_train[:200]\n",
556 | "y_train_200 = y_train[:200]\n",
557 | "\n",
558 | "X_train_300 = X_train\n",
559 | "y_train_300 = y_train\n",
560 | "\n",
561 | "# TODO: Execute the 'train_predict' function for each classifier and each training set size\n",
562 | "# train_predict(clf_C, X_train_100, y_train_100, X_test, y_test)\n",
563 | "\n",
564 | "for clf in [clf_A, clf_B, clf_C]:\n",
565 | " for size in [100, 200, 300]:\n",
566 | " train_predict(clf, X_train[:size], y_train[:size], X_test, y_test)\n",
567 | " print '\\n'"
568 | ]
569 | },
570 | {
571 | "cell_type": "markdown",
572 | "metadata": {},
573 | "source": [
574 | "### Tabular Results\n",
575 | "Edit the cell below to see how a table can be designed in [Markdown](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet#tables). You can record your results from above in the tables provided."
576 | ]
577 | },
578 | {
579 | "cell_type": "markdown",
580 | "metadata": {},
581 | "source": [
582 | "** Classifer 1 - Decision Tree ** \n",
583 | "\n",
584 | "| Training Set Size | Training Time | Prediction Time (test) | F1 Score (train) | F1 Score (test) |\n",
585 | "| :---------------: | :---------------------: | :--------------------: | :--------------: | :-------------: |\n",
586 | "| 100 | 0.0015 | 0.0002 | 1.0000 | 0.7704 |\n",
587 | "| 200 | 0.0015 | 0.0002 | 1.0000 | 0.7164 |\n",
588 | "| 300 | 0.0017 | 0.0004 | 1.0000 | 0.7259 |\n",
589 | "\n",
590 | "** Classifer 2 - Multinomial Naive Bayes** \n",
591 | "\n",
592 | "| Training Set Size | Training Time | Prediction Time (test) | F1 Score (train) | F1 Score (test) |\n",
593 | "| :---------------: | :---------------------: | :--------------------: | :--------------: | :-------------: |\n",
594 | "| 100 | 0.0010 | 0.0003 | 0.7812 | 0.7324 |\n",
595 | "| 200 | 0.0019 | 0.0003 | 0.7653 | 0.7639 | \n",
596 | "| 300 | 0.0013 | 0.0003 | 0.7775 | 0.7500 |\n",
597 | "\n",
598 | "** Classifer 3 - SVC** \n",
599 | "\n",
600 | "| Training Set Size | Training Time | Prediction Time (test) | F1 Score (train) | F1 Score (test) |\n",
601 | "| :---------------: | :---------------------: | :--------------------: | :--------------: | :-------------: |\n",
602 | "| 100 | 0.0015 | 0.0009 | 0.8551 | 0.8079 |\n",
603 | "| 200 | 0.0039 | 0.0035 | 0.8649 | 0.8079 |\n",
604 | "| 300 | 0.0090 | 0.0023 | 0.8724 | 0.8054 |"
605 | ]
606 | },
607 | {
608 | "cell_type": "markdown",
609 | "metadata": {},
610 | "source": [
611 | "## Choosing the Best Model\n",
612 | "In this final section, you will choose from the three supervised learning models the *best* model to use on the student data. You will then perform a grid search optimization for the model over the entire training set (`X_train` and `y_train`) by tuning at least one parameter to improve upon the untuned model's F1 score. "
613 | ]
614 | },
615 | {
616 | "cell_type": "markdown",
617 | "metadata": {},
618 | "source": [
619 | "### Question 3 - Chosing the Best Model\n",
620 | "*Based on the experiments you performed earlier, in one to two paragraphs, explain to the board of supervisors what single model you chose as the best model. Which model is generally the most appropriate based on the available data, limited resources, cost, and performance?*"
621 | ]
622 | },
623 | {
624 | "cell_type": "markdown",
625 | "metadata": {},
626 | "source": [
627 | "**Answer: ** Based on the experiments performed above, it appears the Support Vector Machine (SVM) algorithm is most appropiate in this scenario. As the training data increases in size, it creates a more generalized model with an increasing F1 score on testing data. It has the slowest computational performance, but this cost is acceptable given its superior ability to generalize the data. "
628 | ]
629 | },
630 | {
631 | "cell_type": "markdown",
632 | "metadata": {},
633 | "source": [
634 | "### Question 4 - Model in Layman's Terms\n",
635 | "*In one to two paragraphs, explain to the board of directors in layman's terms how the final model chosen is supposed to work. For example if you've chosen to use a decision tree or a support vector machine, how does the model go about making a prediction?*"
636 | ]
637 | },
638 | {
639 | "cell_type": "markdown",
640 | "metadata": {},
641 | "source": [
642 | "**Answer: ** Support Vector Machines work by focusing on data that is most difficult classify, such as two points near a decision boundary. If these two points can be classified accurately, intuition would tell us that the same formula can be applied to the remainder of the data. In other words, a SVM attempts to make sense of data that is hard to tell apart, then uses this logic to make predictions among the entire dataset. \n",
643 | "\n",
644 | "The prediction is made by with a technique known as the kernel trick, which transforms the shape of the data to become more linear. It starts by identifying the two closest points, known as support vectors, then taking the difference between them. The distance between each data point is maximized, while the optimal separation line will run directly through these two points. This line maximizes the margin between each point. A larger maximum margin will be more robust to classification errors. As more data is trained, it will create a boundary that separates passing students from failing students. \n",
645 | "\n"
646 | ]
647 | },
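648 |   {
649 |    "cell_type": "markdown",
650 |    "metadata": {},
651 |    "source": [
652 |     "The short sketch below is purely illustrative and uses a small made-up two-dimensional dataset (`X_toy`, `y_toy`) rather than the student data. It demonstrates the idea described above: after fitting, only a handful of the training points end up as support vectors, and those few points alone define the decision boundary, for both a linear and an `rbf` kernel."
653 |    ]
654 |   },
655 |   {
656 |    "cell_type": "code",
657 |    "execution_count": null,
658 |    "metadata": {
659 |     "collapsed": false
660 |    },
661 |    "outputs": [],
662 |    "source": [
663 |     "# Illustrative sketch on hypothetical toy data (not the student data):\n",
664 |     "# the fitted SVM keeps only the hardest-to-classify points, its support\n",
665 |     "# vectors, and these alone determine the decision boundary.\n",
666 |     "import numpy as np\n",
667 |     "from sklearn.svm import SVC\n",
668 |     "\n",
669 |     "rng = np.random.RandomState(0)\n",
670 |     "X_toy = np.vstack([rng.randn(20, 2) - 1, rng.randn(20, 2) + 1])  # two overlapping clusters\n",
671 |     "y_toy = np.array([0] * 20 + [1] * 20)\n",
672 |     "\n",
673 |     "for kernel in ['linear', 'rbf']:\n",
674 |     "    toy_clf = SVC(kernel=kernel).fit(X_toy, y_toy)\n",
675 |     "    print \"{} kernel: {} of {} points are support vectors\".format(\n",
676 |     "        kernel, len(toy_clf.support_vectors_), len(X_toy))"
677 |    ]
678 |   },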
648 | {
649 | "cell_type": "markdown",
650 | "metadata": {},
651 | "source": [
652 | "### Implementation: Model Tuning\n",
653 | "Fine tune the chosen model. Use grid search (`GridSearchCV`) with at least one important parameter tuned with at least 3 different values. You will need to use the entire training set for this. In the code cell below, you will need to implement the following:\n",
654 | "- Import [`sklearn.grid_search.gridSearchCV`](http://scikit-learn.org/stable/modules/generated/sklearn.grid_search.GridSearchCV.html) and [`sklearn.metrics.make_scorer`](http://scikit-learn.org/stable/modules/generated/sklearn.metrics.make_scorer.html).\n",
655 | "- Create a dictionary of parameters you wish to tune for the chosen model.\n",
656 | " - Example: `parameters = {'parameter' : [list of values]}`.\n",
657 | "- Initialize the classifier you've chosen and store it in `clf`.\n",
658 | "- Create the F1 scoring function using `make_scorer` and store it in `f1_scorer`.\n",
659 | " - Set the `pos_label` parameter to the correct value!\n",
660 | "- Perform grid search on the classifier `clf` using `f1_scorer` as the scoring method, and store it in `grid_obj`.\n",
661 | "- Fit the grid search object to the training data (`X_train`, `y_train`), and store it in `grid_obj`."
662 | ]
663 | },
664 | {
665 | "cell_type": "code",
666 | "execution_count": 61,
667 | "metadata": {
668 | "collapsed": false
669 | },
670 | "outputs": [
671 | {
672 | "name": "stdout",
673 | "output_type": "stream",
674 | "text": [
675 | "Made predictions in 0.0059 seconds.\n",
676 | "Tuned model has a training F1 score of 0.8188.\n",
677 | "Made predictions in 0.0098 seconds.\n",
678 | "Tuned model has a testing F1 score of 0.8608.\n"
679 | ]
680 | }
681 | ],
682 | "source": [
683 | "# TODO: Import 'GridSearchCV' and 'make_scorer'\n",
684 | "from sklearn.metrics import make_scorer \n",
685 | "from sklearn.grid_search import GridSearchCV\n",
686 | "\n",
687 | "# TODO: Create the parameters list you wish to tune\n",
688 | "parameters = {'kernel':('rbf', 'linear'), 'C': (0.5, 1.0, 2.0, 5.0, 10.0), 'gamma' : ( 0.001, 1, 10) }\n",
689 | "\n",
690 | "\n",
691 | "# TODO: Initialize the classifier\n",
692 | "clf = SVC()\n",
693 | "\n",
694 | "# TODO: Make an f1 scoring function using 'make_scorer' \n",
695 | "f1_scorer = make_scorer(f1_score, pos_label='yes')\n",
696 | "\n",
697 | "# TODO: Perform grid search on the classifier using the f1_scorer as the scoring method\n",
698 | "grid_obj = GridSearchCV(clf, parameters, scoring=f1_scorer)\n",
699 | "\n",
700 | "# TODO: Fit the grid search object to the training data and find the optimal parameters\n",
701 | "grid_obj = grid_obj.fit(X_train, y_train)\n",
702 | "\n",
703 | "# Get the estimator\n",
704 | "clf = grid_obj.best_estimator_\n",
705 | "\n",
706 | "# Report the final F1 score for training and testing after parameter tuning\n",
707 | "print \"Tuned model has a training F1 score of {:.4f}.\".format(predict_labels(clf, X_train, y_train))\n",
708 | "print \"Tuned model has a testing F1 score of {:.4f}.\".format(predict_labels(clf, X_test, y_test))"
709 | ]
710 | },
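711 |   {
712 |    "cell_type": "markdown",
713 |    "metadata": {},
714 |    "source": [
715 |     "As an optional check (not required for the submission), the fitted grid search object can report which parameter combination it selected and its mean cross-validated F1 score. The sketch below assumes the cell above has already been run, so that `grid_obj` exists."
716 |    ]
717 |   },
718 |   {
719 |    "cell_type": "code",
720 |    "execution_count": null,
721 |    "metadata": {
722 |     "collapsed": false
723 |    },
724 |    "outputs": [],
725 |    "source": [
726 |     "# Optional inspection of the grid search results (assumes 'grid_obj' from the cell above)\n",
727 |     "print \"Best parameters found: {}\".format(grid_obj.best_params_)\n",
728 |     "print \"Best cross-validated F1 score: {:.4f}\".format(grid_obj.best_score_)"
729 |    ]
730 |   },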
711 | {
712 | "cell_type": "markdown",
713 | "metadata": {},
714 | "source": [
715 | "### Question 5 - Final F1 Score\n",
716 | "*What is the final model's F1 score for training and testing? How does that score compare to the untuned model?*"
717 | ]
718 | },
719 | {
720 | "cell_type": "markdown",
721 | "metadata": {},
722 | "source": [
723 | "**Answer: ** The final training/testing F1 score for the model is and 0.8188 and 0.8608. The original scores were 0.8724 and 0.8054. \n",
724 | "\n",
725 | "Decreasing the F1 score on the training data increases bias and decreases variance. This leads to a more generalized algorithm that is better at predicting unseen data. The F1 score for the testing data improved to 0.8608, and improvement of 4.2%. "
726 | ]
727 | },
728 | {
729 | "cell_type": "markdown",
730 | "metadata": {},
731 | "source": [
732 | "> **Note**: Once you have completed all of the code implementations and successfully answered each question above, you may finalize your work by exporting the iPython Notebook as an HTML document. You can do this by using the menu above and navigating to \n",
733 | "**File -> Download as -> HTML (.html)**. Include the finished document along with this notebook as your submission."
734 | ]
735 | }
736 | ],
737 | "metadata": {
738 | "kernelspec": {
739 | "display_name": "Python 2",
740 | "language": "python",
741 | "name": "python2"
742 | },
743 | "language_info": {
744 | "codemirror_mode": {
745 | "name": "ipython",
746 | "version": 2
747 | },
748 | "file_extension": ".py",
749 | "mimetype": "text/x-python",
750 | "name": "python",
751 | "nbconvert_exporter": "python",
752 | "pygments_lexer": "ipython2",
753 | "version": "2.7"
754 | }
755 | },
756 | "nbformat": 4,
757 | "nbformat_minor": 0
758 | }
759 |
--------------------------------------------------------------------------------
/titanic-survival/test.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
2 | 892,3,"Kelly, Mr. James",male,34.5,0,0,330911,7.8292,,Q
3 | 893,3,"Wilkes, Mrs. James (Ellen Needs)",female,47,1,0,363272,7,,S
4 | 894,2,"Myles, Mr. Thomas Francis",male,62,0,0,240276,9.6875,,Q
5 | 895,3,"Wirz, Mr. Albert",male,27,0,0,315154,8.6625,,S
6 | 896,3,"Hirvonen, Mrs. Alexander (Helga E Lindqvist)",female,22,1,1,3101298,12.2875,,S
7 | 897,3,"Svensson, Mr. Johan Cervin",male,14,0,0,7538,9.225,,S
8 | 898,3,"Connolly, Miss. Kate",female,30,0,0,330972,7.6292,,Q
9 | 899,2,"Caldwell, Mr. Albert Francis",male,26,1,1,248738,29,,S
10 | 900,3,"Abrahim, Mrs. Joseph (Sophie Halaut Easu)",female,18,0,0,2657,7.2292,,C
11 | 901,3,"Davies, Mr. John Samuel",male,21,2,0,A/4 48871,24.15,,S
12 | 902,3,"Ilieff, Mr. Ylio",male,,0,0,349220,7.8958,,S
13 | 903,1,"Jones, Mr. Charles Cresson",male,46,0,0,694,26,,S
14 | 904,1,"Snyder, Mrs. John Pillsbury (Nelle Stevenson)",female,23,1,0,21228,82.2667,B45,S
15 | 905,2,"Howard, Mr. Benjamin",male,63,1,0,24065,26,,S
16 | 906,1,"Chaffee, Mrs. Herbert Fuller (Carrie Constance Toogood)",female,47,1,0,W.E.P. 5734,61.175,E31,S
17 | 907,2,"del Carlo, Mrs. Sebastiano (Argenia Genovesi)",female,24,1,0,SC/PARIS 2167,27.7208,,C
18 | 908,2,"Keane, Mr. Daniel",male,35,0,0,233734,12.35,,Q
19 | 909,3,"Assaf, Mr. Gerios",male,21,0,0,2692,7.225,,C
20 | 910,3,"Ilmakangas, Miss. Ida Livija",female,27,1,0,STON/O2. 3101270,7.925,,S
21 | 911,3,"Assaf Khalil, Mrs. Mariana (Miriam"")""",female,45,0,0,2696,7.225,,C
22 | 912,1,"Rothschild, Mr. Martin",male,55,1,0,PC 17603,59.4,,C
23 | 913,3,"Olsen, Master. Artur Karl",male,9,0,1,C 17368,3.1708,,S
24 | 914,1,"Flegenheim, Mrs. Alfred (Antoinette)",female,,0,0,PC 17598,31.6833,,S
25 | 915,1,"Williams, Mr. Richard Norris II",male,21,0,1,PC 17597,61.3792,,C
26 | 916,1,"Ryerson, Mrs. Arthur Larned (Emily Maria Borie)",female,48,1,3,PC 17608,262.375,B57 B59 B63 B66,C
27 | 917,3,"Robins, Mr. Alexander A",male,50,1,0,A/5. 3337,14.5,,S
28 | 918,1,"Ostby, Miss. Helene Ragnhild",female,22,0,1,113509,61.9792,B36,C
29 | 919,3,"Daher, Mr. Shedid",male,22.5,0,0,2698,7.225,,C
30 | 920,1,"Brady, Mr. John Bertram",male,41,0,0,113054,30.5,A21,S
31 | 921,3,"Samaan, Mr. Elias",male,,2,0,2662,21.6792,,C
32 | 922,2,"Louch, Mr. Charles Alexander",male,50,1,0,SC/AH 3085,26,,S
33 | 923,2,"Jefferys, Mr. Clifford Thomas",male,24,2,0,C.A. 31029,31.5,,S
34 | 924,3,"Dean, Mrs. Bertram (Eva Georgetta Light)",female,33,1,2,C.A. 2315,20.575,,S
35 | 925,3,"Johnston, Mrs. Andrew G (Elizabeth Lily"" Watson)""",female,,1,2,W./C. 6607,23.45,,S
36 | 926,1,"Mock, Mr. Philipp Edmund",male,30,1,0,13236,57.75,C78,C
37 | 927,3,"Katavelas, Mr. Vassilios (Catavelas Vassilios"")""",male,18.5,0,0,2682,7.2292,,C
38 | 928,3,"Roth, Miss. Sarah A",female,,0,0,342712,8.05,,S
39 | 929,3,"Cacic, Miss. Manda",female,21,0,0,315087,8.6625,,S
40 | 930,3,"Sap, Mr. Julius",male,25,0,0,345768,9.5,,S
41 | 931,3,"Hee, Mr. Ling",male,,0,0,1601,56.4958,,S
42 | 932,3,"Karun, Mr. Franz",male,39,0,1,349256,13.4167,,C
43 | 933,1,"Franklin, Mr. Thomas Parham",male,,0,0,113778,26.55,D34,S
44 | 934,3,"Goldsmith, Mr. Nathan",male,41,0,0,SOTON/O.Q. 3101263,7.85,,S
45 | 935,2,"Corbett, Mrs. Walter H (Irene Colvin)",female,30,0,0,237249,13,,S
46 | 936,1,"Kimball, Mrs. Edwin Nelson Jr (Gertrude Parsons)",female,45,1,0,11753,52.5542,D19,S
47 | 937,3,"Peltomaki, Mr. Nikolai Johannes",male,25,0,0,STON/O 2. 3101291,7.925,,S
48 | 938,1,"Chevre, Mr. Paul Romaine",male,45,0,0,PC 17594,29.7,A9,C
49 | 939,3,"Shaughnessy, Mr. Patrick",male,,0,0,370374,7.75,,Q
50 | 940,1,"Bucknell, Mrs. William Robert (Emma Eliza Ward)",female,60,0,0,11813,76.2917,D15,C
51 | 941,3,"Coutts, Mrs. William (Winnie Minnie"" Treanor)""",female,36,0,2,C.A. 37671,15.9,,S
52 | 942,1,"Smith, Mr. Lucien Philip",male,24,1,0,13695,60,C31,S
53 | 943,2,"Pulbaum, Mr. Franz",male,27,0,0,SC/PARIS 2168,15.0333,,C
54 | 944,2,"Hocking, Miss. Ellen Nellie""""",female,20,2,1,29105,23,,S
55 | 945,1,"Fortune, Miss. Ethel Flora",female,28,3,2,19950,263,C23 C25 C27,S
56 | 946,2,"Mangiavacchi, Mr. Serafino Emilio",male,,0,0,SC/A.3 2861,15.5792,,C
57 | 947,3,"Rice, Master. Albert",male,10,4,1,382652,29.125,,Q
58 | 948,3,"Cor, Mr. Bartol",male,35,0,0,349230,7.8958,,S
59 | 949,3,"Abelseth, Mr. Olaus Jorgensen",male,25,0,0,348122,7.65,F G63,S
60 | 950,3,"Davison, Mr. Thomas Henry",male,,1,0,386525,16.1,,S
61 | 951,1,"Chaudanson, Miss. Victorine",female,36,0,0,PC 17608,262.375,B61,C
62 | 952,3,"Dika, Mr. Mirko",male,17,0,0,349232,7.8958,,S
63 | 953,2,"McCrae, Mr. Arthur Gordon",male,32,0,0,237216,13.5,,S
64 | 954,3,"Bjorklund, Mr. Ernst Herbert",male,18,0,0,347090,7.75,,S
65 | 955,3,"Bradley, Miss. Bridget Delia",female,22,0,0,334914,7.725,,Q
66 | 956,1,"Ryerson, Master. John Borie",male,13,2,2,PC 17608,262.375,B57 B59 B63 B66,C
67 | 957,2,"Corey, Mrs. Percy C (Mary Phyllis Elizabeth Miller)",female,,0,0,F.C.C. 13534,21,,S
68 | 958,3,"Burns, Miss. Mary Delia",female,18,0,0,330963,7.8792,,Q
69 | 959,1,"Moore, Mr. Clarence Bloomfield",male,47,0,0,113796,42.4,,S
70 | 960,1,"Tucker, Mr. Gilbert Milligan Jr",male,31,0,0,2543,28.5375,C53,C
71 | 961,1,"Fortune, Mrs. Mark (Mary McDougald)",female,60,1,4,19950,263,C23 C25 C27,S
72 | 962,3,"Mulvihill, Miss. Bertha E",female,24,0,0,382653,7.75,,Q
73 | 963,3,"Minkoff, Mr. Lazar",male,21,0,0,349211,7.8958,,S
74 | 964,3,"Nieminen, Miss. Manta Josefina",female,29,0,0,3101297,7.925,,S
75 | 965,1,"Ovies y Rodriguez, Mr. Servando",male,28.5,0,0,PC 17562,27.7208,D43,C
76 | 966,1,"Geiger, Miss. Amalie",female,35,0,0,113503,211.5,C130,C
77 | 967,1,"Keeping, Mr. Edwin",male,32.5,0,0,113503,211.5,C132,C
78 | 968,3,"Miles, Mr. Frank",male,,0,0,359306,8.05,,S
79 | 969,1,"Cornell, Mrs. Robert Clifford (Malvina Helen Lamson)",female,55,2,0,11770,25.7,C101,S
80 | 970,2,"Aldworth, Mr. Charles Augustus",male,30,0,0,248744,13,,S
81 | 971,3,"Doyle, Miss. Elizabeth",female,24,0,0,368702,7.75,,Q
82 | 972,3,"Boulos, Master. Akar",male,6,1,1,2678,15.2458,,C
83 | 973,1,"Straus, Mr. Isidor",male,67,1,0,PC 17483,221.7792,C55 C57,S
84 | 974,1,"Case, Mr. Howard Brown",male,49,0,0,19924,26,,S
85 | 975,3,"Demetri, Mr. Marinko",male,,0,0,349238,7.8958,,S
86 | 976,2,"Lamb, Mr. John Joseph",male,,0,0,240261,10.7083,,Q
87 | 977,3,"Khalil, Mr. Betros",male,,1,0,2660,14.4542,,C
88 | 978,3,"Barry, Miss. Julia",female,27,0,0,330844,7.8792,,Q
89 | 979,3,"Badman, Miss. Emily Louisa",female,18,0,0,A/4 31416,8.05,,S
90 | 980,3,"O'Donoghue, Ms. Bridget",female,,0,0,364856,7.75,,Q
91 | 981,2,"Wells, Master. Ralph Lester",male,2,1,1,29103,23,,S
92 | 982,3,"Dyker, Mrs. Adolf Fredrik (Anna Elisabeth Judith Andersson)",female,22,1,0,347072,13.9,,S
93 | 983,3,"Pedersen, Mr. Olaf",male,,0,0,345498,7.775,,S
94 | 984,1,"Davidson, Mrs. Thornton (Orian Hays)",female,27,1,2,F.C. 12750,52,B71,S
95 | 985,3,"Guest, Mr. Robert",male,,0,0,376563,8.05,,S
96 | 986,1,"Birnbaum, Mr. Jakob",male,25,0,0,13905,26,,C
97 | 987,3,"Tenglin, Mr. Gunnar Isidor",male,25,0,0,350033,7.7958,,S
98 | 988,1,"Cavendish, Mrs. Tyrell William (Julia Florence Siegel)",female,76,1,0,19877,78.85,C46,S
99 | 989,3,"Makinen, Mr. Kalle Edvard",male,29,0,0,STON/O 2. 3101268,7.925,,S
100 | 990,3,"Braf, Miss. Elin Ester Maria",female,20,0,0,347471,7.8542,,S
101 | 991,3,"Nancarrow, Mr. William Henry",male,33,0,0,A./5. 3338,8.05,,S
102 | 992,1,"Stengel, Mrs. Charles Emil Henry (Annie May Morris)",female,43,1,0,11778,55.4417,C116,C
103 | 993,2,"Weisz, Mr. Leopold",male,27,1,0,228414,26,,S
104 | 994,3,"Foley, Mr. William",male,,0,0,365235,7.75,,Q
105 | 995,3,"Johansson Palmquist, Mr. Oskar Leander",male,26,0,0,347070,7.775,,S
106 | 996,3,"Thomas, Mrs. Alexander (Thamine Thelma"")""",female,16,1,1,2625,8.5167,,C
107 | 997,3,"Holthen, Mr. Johan Martin",male,28,0,0,C 4001,22.525,,S
108 | 998,3,"Buckley, Mr. Daniel",male,21,0,0,330920,7.8208,,Q
109 | 999,3,"Ryan, Mr. Edward",male,,0,0,383162,7.75,,Q
110 | 1000,3,"Willer, Mr. Aaron (Abi Weller"")""",male,,0,0,3410,8.7125,,S
111 | 1001,2,"Swane, Mr. George",male,18.5,0,0,248734,13,F,S
112 | 1002,2,"Stanton, Mr. Samuel Ward",male,41,0,0,237734,15.0458,,C
113 | 1003,3,"Shine, Miss. Ellen Natalia",female,,0,0,330968,7.7792,,Q
114 | 1004,1,"Evans, Miss. Edith Corse",female,36,0,0,PC 17531,31.6792,A29,C
115 | 1005,3,"Buckley, Miss. Katherine",female,18.5,0,0,329944,7.2833,,Q
116 | 1006,1,"Straus, Mrs. Isidor (Rosalie Ida Blun)",female,63,1,0,PC 17483,221.7792,C55 C57,S
117 | 1007,3,"Chronopoulos, Mr. Demetrios",male,18,1,0,2680,14.4542,,C
118 | 1008,3,"Thomas, Mr. John",male,,0,0,2681,6.4375,,C
119 | 1009,3,"Sandstrom, Miss. Beatrice Irene",female,1,1,1,PP 9549,16.7,G6,S
120 | 1010,1,"Beattie, Mr. Thomson",male,36,0,0,13050,75.2417,C6,C
121 | 1011,2,"Chapman, Mrs. John Henry (Sara Elizabeth Lawry)",female,29,1,0,SC/AH 29037,26,,S
122 | 1012,2,"Watt, Miss. Bertha J",female,12,0,0,C.A. 33595,15.75,,S
123 | 1013,3,"Kiernan, Mr. John",male,,1,0,367227,7.75,,Q
124 | 1014,1,"Schabert, Mrs. Paul (Emma Mock)",female,35,1,0,13236,57.75,C28,C
125 | 1015,3,"Carver, Mr. Alfred John",male,28,0,0,392095,7.25,,S
126 | 1016,3,"Kennedy, Mr. John",male,,0,0,368783,7.75,,Q
127 | 1017,3,"Cribb, Miss. Laura Alice",female,17,0,1,371362,16.1,,S
128 | 1018,3,"Brobeck, Mr. Karl Rudolf",male,22,0,0,350045,7.7958,,S
129 | 1019,3,"McCoy, Miss. Alicia",female,,2,0,367226,23.25,,Q
130 | 1020,2,"Bowenur, Mr. Solomon",male,42,0,0,211535,13,,S
131 | 1021,3,"Petersen, Mr. Marius",male,24,0,0,342441,8.05,,S
132 | 1022,3,"Spinner, Mr. Henry John",male,32,0,0,STON/OQ. 369943,8.05,,S
133 | 1023,1,"Gracie, Col. Archibald IV",male,53,0,0,113780,28.5,C51,C
134 | 1024,3,"Lefebre, Mrs. Frank (Frances)",female,,0,4,4133,25.4667,,S
135 | 1025,3,"Thomas, Mr. Charles P",male,,1,0,2621,6.4375,,C
136 | 1026,3,"Dintcheff, Mr. Valtcho",male,43,0,0,349226,7.8958,,S
137 | 1027,3,"Carlsson, Mr. Carl Robert",male,24,0,0,350409,7.8542,,S
138 | 1028,3,"Zakarian, Mr. Mapriededer",male,26.5,0,0,2656,7.225,,C
139 | 1029,2,"Schmidt, Mr. August",male,26,0,0,248659,13,,S
140 | 1030,3,"Drapkin, Miss. Jennie",female,23,0,0,SOTON/OQ 392083,8.05,,S
141 | 1031,3,"Goodwin, Mr. Charles Frederick",male,40,1,6,CA 2144,46.9,,S
142 | 1032,3,"Goodwin, Miss. Jessie Allis",female,10,5,2,CA 2144,46.9,,S
143 | 1033,1,"Daniels, Miss. Sarah",female,33,0,0,113781,151.55,,S
144 | 1034,1,"Ryerson, Mr. Arthur Larned",male,61,1,3,PC 17608,262.375,B57 B59 B63 B66,C
145 | 1035,2,"Beauchamp, Mr. Henry James",male,28,0,0,244358,26,,S
146 | 1036,1,"Lindeberg-Lind, Mr. Erik Gustaf (Mr Edward Lingrey"")""",male,42,0,0,17475,26.55,,S
147 | 1037,3,"Vander Planke, Mr. Julius",male,31,3,0,345763,18,,S
148 | 1038,1,"Hilliard, Mr. Herbert Henry",male,,0,0,17463,51.8625,E46,S
149 | 1039,3,"Davies, Mr. Evan",male,22,0,0,SC/A4 23568,8.05,,S
150 | 1040,1,"Crafton, Mr. John Bertram",male,,0,0,113791,26.55,,S
151 | 1041,2,"Lahtinen, Rev. William",male,30,1,1,250651,26,,S
152 | 1042,1,"Earnshaw, Mrs. Boulton (Olive Potter)",female,23,0,1,11767,83.1583,C54,C
153 | 1043,3,"Matinoff, Mr. Nicola",male,,0,0,349255,7.8958,,C
154 | 1044,3,"Storey, Mr. Thomas",male,60.5,0,0,3701,,,S
155 | 1045,3,"Klasen, Mrs. (Hulda Kristina Eugenia Lofqvist)",female,36,0,2,350405,12.1833,,S
156 | 1046,3,"Asplund, Master. Filip Oscar",male,13,4,2,347077,31.3875,,S
157 | 1047,3,"Duquemin, Mr. Joseph",male,24,0,0,S.O./P.P. 752,7.55,,S
158 | 1048,1,"Bird, Miss. Ellen",female,29,0,0,PC 17483,221.7792,C97,S
159 | 1049,3,"Lundin, Miss. Olga Elida",female,23,0,0,347469,7.8542,,S
160 | 1050,1,"Borebank, Mr. John James",male,42,0,0,110489,26.55,D22,S
161 | 1051,3,"Peacock, Mrs. Benjamin (Edith Nile)",female,26,0,2,SOTON/O.Q. 3101315,13.775,,S
162 | 1052,3,"Smyth, Miss. Julia",female,,0,0,335432,7.7333,,Q
163 | 1053,3,"Touma, Master. Georges Youssef",male,7,1,1,2650,15.2458,,C
164 | 1054,2,"Wright, Miss. Marion",female,26,0,0,220844,13.5,,S
165 | 1055,3,"Pearce, Mr. Ernest",male,,0,0,343271,7,,S
166 | 1056,2,"Peruschitz, Rev. Joseph Maria",male,41,0,0,237393,13,,S
167 | 1057,3,"Kink-Heilmann, Mrs. Anton (Luise Heilmann)",female,26,1,1,315153,22.025,,S
168 | 1058,1,"Brandeis, Mr. Emil",male,48,0,0,PC 17591,50.4958,B10,C
169 | 1059,3,"Ford, Mr. Edward Watson",male,18,2,2,W./C. 6608,34.375,,S
170 | 1060,1,"Cassebeer, Mrs. Henry Arthur Jr (Eleanor Genevieve Fosdick)",female,,0,0,17770,27.7208,,C
171 | 1061,3,"Hellstrom, Miss. Hilda Maria",female,22,0,0,7548,8.9625,,S
172 | 1062,3,"Lithman, Mr. Simon",male,,0,0,S.O./P.P. 251,7.55,,S
173 | 1063,3,"Zakarian, Mr. Ortin",male,27,0,0,2670,7.225,,C
174 | 1064,3,"Dyker, Mr. Adolf Fredrik",male,23,1,0,347072,13.9,,S
175 | 1065,3,"Torfa, Mr. Assad",male,,0,0,2673,7.2292,,C
176 | 1066,3,"Asplund, Mr. Carl Oscar Vilhelm Gustafsson",male,40,1,5,347077,31.3875,,S
177 | 1067,2,"Brown, Miss. Edith Eileen",female,15,0,2,29750,39,,S
178 | 1068,2,"Sincock, Miss. Maude",female,20,0,0,C.A. 33112,36.75,,S
179 | 1069,1,"Stengel, Mr. Charles Emil Henry",male,54,1,0,11778,55.4417,C116,C
180 | 1070,2,"Becker, Mrs. Allen Oliver (Nellie E Baumgardner)",female,36,0,3,230136,39,F4,S
181 | 1071,1,"Compton, Mrs. Alexander Taylor (Mary Eliza Ingersoll)",female,64,0,2,PC 17756,83.1583,E45,C
182 | 1072,2,"McCrie, Mr. James Matthew",male,30,0,0,233478,13,,S
183 | 1073,1,"Compton, Mr. Alexander Taylor Jr",male,37,1,1,PC 17756,83.1583,E52,C
184 | 1074,1,"Marvin, Mrs. Daniel Warner (Mary Graham Carmichael Farquarson)",female,18,1,0,113773,53.1,D30,S
185 | 1075,3,"Lane, Mr. Patrick",male,,0,0,7935,7.75,,Q
186 | 1076,1,"Douglas, Mrs. Frederick Charles (Mary Helene Baxter)",female,27,1,1,PC 17558,247.5208,B58 B60,C
187 | 1077,2,"Maybery, Mr. Frank Hubert",male,40,0,0,239059,16,,S
188 | 1078,2,"Phillips, Miss. Alice Frances Louisa",female,21,0,1,S.O./P.P. 2,21,,S
189 | 1079,3,"Davies, Mr. Joseph",male,17,2,0,A/4 48873,8.05,,S
190 | 1080,3,"Sage, Miss. Ada",female,,8,2,CA. 2343,69.55,,S
191 | 1081,2,"Veal, Mr. James",male,40,0,0,28221,13,,S
192 | 1082,2,"Angle, Mr. William A",male,34,1,0,226875,26,,S
193 | 1083,1,"Salomon, Mr. Abraham L",male,,0,0,111163,26,,S
194 | 1084,3,"van Billiard, Master. Walter John",male,11.5,1,1,A/5. 851,14.5,,S
195 | 1085,2,"Lingane, Mr. John",male,61,0,0,235509,12.35,,Q
196 | 1086,2,"Drew, Master. Marshall Brines",male,8,0,2,28220,32.5,,S
197 | 1087,3,"Karlsson, Mr. Julius Konrad Eugen",male,33,0,0,347465,7.8542,,S
198 | 1088,1,"Spedden, Master. Robert Douglas",male,6,0,2,16966,134.5,E34,C
199 | 1089,3,"Nilsson, Miss. Berta Olivia",female,18,0,0,347066,7.775,,S
200 | 1090,2,"Baimbrigge, Mr. Charles Robert",male,23,0,0,C.A. 31030,10.5,,S
201 | 1091,3,"Rasmussen, Mrs. (Lena Jacobsen Solvang)",female,,0,0,65305,8.1125,,S
202 | 1092,3,"Murphy, Miss. Nora",female,,0,0,36568,15.5,,Q
203 | 1093,3,"Danbom, Master. Gilbert Sigvard Emanuel",male,0.33,0,2,347080,14.4,,S
204 | 1094,1,"Astor, Col. John Jacob",male,47,1,0,PC 17757,227.525,C62 C64,C
205 | 1095,2,"Quick, Miss. Winifred Vera",female,8,1,1,26360,26,,S
206 | 1096,2,"Andrew, Mr. Frank Thomas",male,25,0,0,C.A. 34050,10.5,,S
207 | 1097,1,"Omont, Mr. Alfred Fernand",male,,0,0,F.C. 12998,25.7417,,C
208 | 1098,3,"McGowan, Miss. Katherine",female,35,0,0,9232,7.75,,Q
209 | 1099,2,"Collett, Mr. Sidney C Stuart",male,24,0,0,28034,10.5,,S
210 | 1100,1,"Rosenbaum, Miss. Edith Louise",female,33,0,0,PC 17613,27.7208,A11,C
211 | 1101,3,"Delalic, Mr. Redjo",male,25,0,0,349250,7.8958,,S
212 | 1102,3,"Andersen, Mr. Albert Karvin",male,32,0,0,C 4001,22.525,,S
213 | 1103,3,"Finoli, Mr. Luigi",male,,0,0,SOTON/O.Q. 3101308,7.05,,S
214 | 1104,2,"Deacon, Mr. Percy William",male,17,0,0,S.O.C. 14879,73.5,,S
215 | 1105,2,"Howard, Mrs. Benjamin (Ellen Truelove Arman)",female,60,1,0,24065,26,,S
216 | 1106,3,"Andersson, Miss. Ida Augusta Margareta",female,38,4,2,347091,7.775,,S
217 | 1107,1,"Head, Mr. Christopher",male,42,0,0,113038,42.5,B11,S
218 | 1108,3,"Mahon, Miss. Bridget Delia",female,,0,0,330924,7.8792,,Q
219 | 1109,1,"Wick, Mr. George Dennick",male,57,1,1,36928,164.8667,,S
220 | 1110,1,"Widener, Mrs. George Dunton (Eleanor Elkins)",female,50,1,1,113503,211.5,C80,C
221 | 1111,3,"Thomson, Mr. Alexander Morrison",male,,0,0,32302,8.05,,S
222 | 1112,2,"Duran y More, Miss. Florentina",female,30,1,0,SC/PARIS 2148,13.8583,,C
223 | 1113,3,"Reynolds, Mr. Harold J",male,21,0,0,342684,8.05,,S
224 | 1114,2,"Cook, Mrs. (Selena Rogers)",female,22,0,0,W./C. 14266,10.5,F33,S
225 | 1115,3,"Karlsson, Mr. Einar Gervasius",male,21,0,0,350053,7.7958,,S
226 | 1116,1,"Candee, Mrs. Edward (Helen Churchill Hungerford)",female,53,0,0,PC 17606,27.4458,,C
227 | 1117,3,"Moubarek, Mrs. George (Omine Amenia"" Alexander)""",female,,0,2,2661,15.2458,,C
228 | 1118,3,"Asplund, Mr. Johan Charles",male,23,0,0,350054,7.7958,,S
229 | 1119,3,"McNeill, Miss. Bridget",female,,0,0,370368,7.75,,Q
230 | 1120,3,"Everett, Mr. Thomas James",male,40.5,0,0,C.A. 6212,15.1,,S
231 | 1121,2,"Hocking, Mr. Samuel James Metcalfe",male,36,0,0,242963,13,,S
232 | 1122,2,"Sweet, Mr. George Frederick",male,14,0,0,220845,65,,S
233 | 1123,1,"Willard, Miss. Constance",female,21,0,0,113795,26.55,,S
234 | 1124,3,"Wiklund, Mr. Karl Johan",male,21,1,0,3101266,6.4958,,S
235 | 1125,3,"Linehan, Mr. Michael",male,,0,0,330971,7.8792,,Q
236 | 1126,1,"Cumings, Mr. John Bradley",male,39,1,0,PC 17599,71.2833,C85,C
237 | 1127,3,"Vendel, Mr. Olof Edvin",male,20,0,0,350416,7.8542,,S
238 | 1128,1,"Warren, Mr. Frank Manley",male,64,1,0,110813,75.25,D37,C
239 | 1129,3,"Baccos, Mr. Raffull",male,20,0,0,2679,7.225,,C
240 | 1130,2,"Hiltunen, Miss. Marta",female,18,1,1,250650,13,,S
241 | 1131,1,"Douglas, Mrs. Walter Donald (Mahala Dutton)",female,48,1,0,PC 17761,106.425,C86,C
242 | 1132,1,"Lindstrom, Mrs. Carl Johan (Sigrid Posse)",female,55,0,0,112377,27.7208,,C
243 | 1133,2,"Christy, Mrs. (Alice Frances)",female,45,0,2,237789,30,,S
244 | 1134,1,"Spedden, Mr. Frederic Oakley",male,45,1,1,16966,134.5,E34,C
245 | 1135,3,"Hyman, Mr. Abraham",male,,0,0,3470,7.8875,,S
246 | 1136,3,"Johnston, Master. William Arthur Willie""""",male,,1,2,W./C. 6607,23.45,,S
247 | 1137,1,"Kenyon, Mr. Frederick R",male,41,1,0,17464,51.8625,D21,S
248 | 1138,2,"Karnes, Mrs. J Frank (Claire Bennett)",female,22,0,0,F.C.C. 13534,21,,S
249 | 1139,2,"Drew, Mr. James Vivian",male,42,1,1,28220,32.5,,S
250 | 1140,2,"Hold, Mrs. Stephen (Annie Margaret Hill)",female,29,1,0,26707,26,,S
251 | 1141,3,"Khalil, Mrs. Betros (Zahie Maria"" Elias)""",female,,1,0,2660,14.4542,,C
252 | 1142,2,"West, Miss. Barbara J",female,0.92,1,2,C.A. 34651,27.75,,S
253 | 1143,3,"Abrahamsson, Mr. Abraham August Johannes",male,20,0,0,SOTON/O2 3101284,7.925,,S
254 | 1144,1,"Clark, Mr. Walter Miller",male,27,1,0,13508,136.7792,C89,C
255 | 1145,3,"Salander, Mr. Karl Johan",male,24,0,0,7266,9.325,,S
256 | 1146,3,"Wenzel, Mr. Linhart",male,32.5,0,0,345775,9.5,,S
257 | 1147,3,"MacKay, Mr. George William",male,,0,0,C.A. 42795,7.55,,S
258 | 1148,3,"Mahon, Mr. John",male,,0,0,AQ/4 3130,7.75,,Q
259 | 1149,3,"Niklasson, Mr. Samuel",male,28,0,0,363611,8.05,,S
260 | 1150,2,"Bentham, Miss. Lilian W",female,19,0,0,28404,13,,S
261 | 1151,3,"Midtsjo, Mr. Karl Albert",male,21,0,0,345501,7.775,,S
262 | 1152,3,"de Messemaeker, Mr. Guillaume Joseph",male,36.5,1,0,345572,17.4,,S
263 | 1153,3,"Nilsson, Mr. August Ferdinand",male,21,0,0,350410,7.8542,,S
264 | 1154,2,"Wells, Mrs. Arthur Henry (Addie"" Dart Trevaskis)""",female,29,0,2,29103,23,,S
265 | 1155,3,"Klasen, Miss. Gertrud Emilia",female,1,1,1,350405,12.1833,,S
266 | 1156,2,"Portaluppi, Mr. Emilio Ilario Giuseppe",male,30,0,0,C.A. 34644,12.7375,,C
267 | 1157,3,"Lyntakoff, Mr. Stanko",male,,0,0,349235,7.8958,,S
268 | 1158,1,"Chisholm, Mr. Roderick Robert Crispin",male,,0,0,112051,0,,S
269 | 1159,3,"Warren, Mr. Charles William",male,,0,0,C.A. 49867,7.55,,S
270 | 1160,3,"Howard, Miss. May Elizabeth",female,,0,0,A. 2. 39186,8.05,,S
271 | 1161,3,"Pokrnic, Mr. Mate",male,17,0,0,315095,8.6625,,S
272 | 1162,1,"McCaffry, Mr. Thomas Francis",male,46,0,0,13050,75.2417,C6,C
273 | 1163,3,"Fox, Mr. Patrick",male,,0,0,368573,7.75,,Q
274 | 1164,1,"Clark, Mrs. Walter Miller (Virginia McDowell)",female,26,1,0,13508,136.7792,C89,C
275 | 1165,3,"Lennon, Miss. Mary",female,,1,0,370371,15.5,,Q
276 | 1166,3,"Saade, Mr. Jean Nassr",male,,0,0,2676,7.225,,C
277 | 1167,2,"Bryhl, Miss. Dagmar Jenny Ingeborg ",female,20,1,0,236853,26,,S
278 | 1168,2,"Parker, Mr. Clifford Richard",male,28,0,0,SC 14888,10.5,,S
279 | 1169,2,"Faunthorpe, Mr. Harry",male,40,1,0,2926,26,,S
280 | 1170,2,"Ware, Mr. John James",male,30,1,0,CA 31352,21,,S
281 | 1171,2,"Oxenham, Mr. Percy Thomas",male,22,0,0,W./C. 14260,10.5,,S
282 | 1172,3,"Oreskovic, Miss. Jelka",female,23,0,0,315085,8.6625,,S
283 | 1173,3,"Peacock, Master. Alfred Edward",male,0.75,1,1,SOTON/O.Q. 3101315,13.775,,S
284 | 1174,3,"Fleming, Miss. Honora",female,,0,0,364859,7.75,,Q
285 | 1175,3,"Touma, Miss. Maria Youssef",female,9,1,1,2650,15.2458,,C
286 | 1176,3,"Rosblom, Miss. Salli Helena",female,2,1,1,370129,20.2125,,S
287 | 1177,3,"Dennis, Mr. William",male,36,0,0,A/5 21175,7.25,,S
288 | 1178,3,"Franklin, Mr. Charles (Charles Fardon)",male,,0,0,SOTON/O.Q. 3101314,7.25,,S
289 | 1179,1,"Snyder, Mr. John Pillsbury",male,24,1,0,21228,82.2667,B45,S
290 | 1180,3,"Mardirosian, Mr. Sarkis",male,,0,0,2655,7.2292,F E46,C
291 | 1181,3,"Ford, Mr. Arthur",male,,0,0,A/5 1478,8.05,,S
292 | 1182,1,"Rheims, Mr. George Alexander Lucien",male,,0,0,PC 17607,39.6,,S
293 | 1183,3,"Daly, Miss. Margaret Marcella Maggie""""",female,30,0,0,382650,6.95,,Q
294 | 1184,3,"Nasr, Mr. Mustafa",male,,0,0,2652,7.2292,,C
295 | 1185,1,"Dodge, Dr. Washington",male,53,1,1,33638,81.8583,A34,S
296 | 1186,3,"Wittevrongel, Mr. Camille",male,36,0,0,345771,9.5,,S
297 | 1187,3,"Angheloff, Mr. Minko",male,26,0,0,349202,7.8958,,S
298 | 1188,2,"Laroche, Miss. Louise",female,1,1,2,SC/Paris 2123,41.5792,,C
299 | 1189,3,"Samaan, Mr. Hanna",male,,2,0,2662,21.6792,,C
300 | 1190,1,"Loring, Mr. Joseph Holland",male,30,0,0,113801,45.5,,S
301 | 1191,3,"Johansson, Mr. Nils",male,29,0,0,347467,7.8542,,S
302 | 1192,3,"Olsson, Mr. Oscar Wilhelm",male,32,0,0,347079,7.775,,S
303 | 1193,2,"Malachard, Mr. Noel",male,,0,0,237735,15.0458,D,C
304 | 1194,2,"Phillips, Mr. Escott Robert",male,43,0,1,S.O./P.P. 2,21,,S
305 | 1195,3,"Pokrnic, Mr. Tome",male,24,0,0,315092,8.6625,,S
306 | 1196,3,"McCarthy, Miss. Catherine Katie""""",female,,0,0,383123,7.75,,Q
307 | 1197,1,"Crosby, Mrs. Edward Gifford (Catherine Elizabeth Halstead)",female,64,1,1,112901,26.55,B26,S
308 | 1198,1,"Allison, Mr. Hudson Joshua Creighton",male,30,1,2,113781,151.55,C22 C26,S
309 | 1199,3,"Aks, Master. Philip Frank",male,0.83,0,1,392091,9.35,,S
310 | 1200,1,"Hays, Mr. Charles Melville",male,55,1,1,12749,93.5,B69,S
311 | 1201,3,"Hansen, Mrs. Claus Peter (Jennie L Howard)",female,45,1,0,350026,14.1083,,S
312 | 1202,3,"Cacic, Mr. Jego Grga",male,18,0,0,315091,8.6625,,S
313 | 1203,3,"Vartanian, Mr. David",male,22,0,0,2658,7.225,,C
314 | 1204,3,"Sadowitz, Mr. Harry",male,,0,0,LP 1588,7.575,,S
315 | 1205,3,"Carr, Miss. Jeannie",female,37,0,0,368364,7.75,,Q
316 | 1206,1,"White, Mrs. John Stuart (Ella Holmes)",female,55,0,0,PC 17760,135.6333,C32,C
317 | 1207,3,"Hagardon, Miss. Kate",female,17,0,0,AQ/3. 30631,7.7333,,Q
318 | 1208,1,"Spencer, Mr. William Augustus",male,57,1,0,PC 17569,146.5208,B78,C
319 | 1209,2,"Rogers, Mr. Reginald Harry",male,19,0,0,28004,10.5,,S
320 | 1210,3,"Jonsson, Mr. Nils Hilding",male,27,0,0,350408,7.8542,,S
321 | 1211,2,"Jefferys, Mr. Ernest Wilfred",male,22,2,0,C.A. 31029,31.5,,S
322 | 1212,3,"Andersson, Mr. Johan Samuel",male,26,0,0,347075,7.775,,S
323 | 1213,3,"Krekorian, Mr. Neshan",male,25,0,0,2654,7.2292,F E57,C
324 | 1214,2,"Nesson, Mr. Israel",male,26,0,0,244368,13,F2,S
325 | 1215,1,"Rowe, Mr. Alfred G",male,33,0,0,113790,26.55,,S
326 | 1216,1,"Kreuchen, Miss. Emilie",female,39,0,0,24160,211.3375,,S
327 | 1217,3,"Assam, Mr. Ali",male,23,0,0,SOTON/O.Q. 3101309,7.05,,S
328 | 1218,2,"Becker, Miss. Ruth Elizabeth",female,12,2,1,230136,39,F4,S
329 | 1219,1,"Rosenshine, Mr. George (Mr George Thorne"")""",male,46,0,0,PC 17585,79.2,,C
330 | 1220,2,"Clarke, Mr. Charles Valentine",male,29,1,0,2003,26,,S
331 | 1221,2,"Enander, Mr. Ingvar",male,21,0,0,236854,13,,S
332 | 1222,2,"Davies, Mrs. John Morgan (Elizabeth Agnes Mary White) ",female,48,0,2,C.A. 33112,36.75,,S
333 | 1223,1,"Dulles, Mr. William Crothers",male,39,0,0,PC 17580,29.7,A18,C
334 | 1224,3,"Thomas, Mr. Tannous",male,,0,0,2684,7.225,,C
335 | 1225,3,"Nakid, Mrs. Said (Waika Mary"" Mowad)""",female,19,1,1,2653,15.7417,,C
336 | 1226,3,"Cor, Mr. Ivan",male,27,0,0,349229,7.8958,,S
337 | 1227,1,"Maguire, Mr. John Edward",male,30,0,0,110469,26,C106,S
338 | 1228,2,"de Brito, Mr. Jose Joaquim",male,32,0,0,244360,13,,S
339 | 1229,3,"Elias, Mr. Joseph",male,39,0,2,2675,7.2292,,C
340 | 1230,2,"Denbury, Mr. Herbert",male,25,0,0,C.A. 31029,31.5,,S
341 | 1231,3,"Betros, Master. Seman",male,,0,0,2622,7.2292,,C
342 | 1232,2,"Fillbrook, Mr. Joseph Charles",male,18,0,0,C.A. 15185,10.5,,S
343 | 1233,3,"Lundstrom, Mr. Thure Edvin",male,32,0,0,350403,7.5792,,S
344 | 1234,3,"Sage, Mr. John George",male,,1,9,CA. 2343,69.55,,S
345 | 1235,1,"Cardeza, Mrs. James Warburton Martinez (Charlotte Wardle Drake)",female,58,0,1,PC 17755,512.3292,B51 B53 B55,C
346 | 1236,3,"van Billiard, Master. James William",male,,1,1,A/5. 851,14.5,,S
347 | 1237,3,"Abelseth, Miss. Karen Marie",female,16,0,0,348125,7.65,,S
348 | 1238,2,"Botsford, Mr. William Hull",male,26,0,0,237670,13,,S
349 | 1239,3,"Whabee, Mrs. George Joseph (Shawneene Abi-Saab)",female,38,0,0,2688,7.2292,,C
350 | 1240,2,"Giles, Mr. Ralph",male,24,0,0,248726,13.5,,S
351 | 1241,2,"Walcroft, Miss. Nellie",female,31,0,0,F.C.C. 13528,21,,S
352 | 1242,1,"Greenfield, Mrs. Leo David (Blanche Strouse)",female,45,0,1,PC 17759,63.3583,D10 D12,C
353 | 1243,2,"Stokes, Mr. Philip Joseph",male,25,0,0,F.C.C. 13540,10.5,,S
354 | 1244,2,"Dibden, Mr. William",male,18,0,0,S.O.C. 14879,73.5,,S
355 | 1245,2,"Herman, Mr. Samuel",male,49,1,2,220845,65,,S
356 | 1246,3,"Dean, Miss. Elizabeth Gladys Millvina""""",female,0.17,1,2,C.A. 2315,20.575,,S
357 | 1247,1,"Julian, Mr. Henry Forbes",male,50,0,0,113044,26,E60,S
358 | 1248,1,"Brown, Mrs. John Murray (Caroline Lane Lamson)",female,59,2,0,11769,51.4792,C101,S
359 | 1249,3,"Lockyer, Mr. Edward",male,,0,0,1222,7.8792,,S
360 | 1250,3,"O'Keefe, Mr. Patrick",male,,0,0,368402,7.75,,Q
361 | 1251,3,"Lindell, Mrs. Edvard Bengtsson (Elin Gerda Persson)",female,30,1,0,349910,15.55,,S
362 | 1252,3,"Sage, Master. William Henry",male,14.5,8,2,CA. 2343,69.55,,S
363 | 1253,2,"Mallet, Mrs. Albert (Antoinette Magnin)",female,24,1,1,S.C./PARIS 2079,37.0042,,C
364 | 1254,2,"Ware, Mrs. John James (Florence Louise Long)",female,31,0,0,CA 31352,21,,S
365 | 1255,3,"Strilic, Mr. Ivan",male,27,0,0,315083,8.6625,,S
366 | 1256,1,"Harder, Mrs. George Achilles (Dorothy Annan)",female,25,1,0,11765,55.4417,E50,C
367 | 1257,3,"Sage, Mrs. John (Annie Bullen)",female,,1,9,CA. 2343,69.55,,S
368 | 1258,3,"Caram, Mr. Joseph",male,,1,0,2689,14.4583,,C
369 | 1259,3,"Riihivouri, Miss. Susanna Juhantytar Sanni""""",female,22,0,0,3101295,39.6875,,S
370 | 1260,1,"Gibson, Mrs. Leonard (Pauline C Boeson)",female,45,0,1,112378,59.4,,C
371 | 1261,2,"Pallas y Castello, Mr. Emilio",male,29,0,0,SC/PARIS 2147,13.8583,,C
372 | 1262,2,"Giles, Mr. Edgar",male,21,1,0,28133,11.5,,S
373 | 1263,1,"Wilson, Miss. Helen Alice",female,31,0,0,16966,134.5,E39 E41,C
374 | 1264,1,"Ismay, Mr. Joseph Bruce",male,49,0,0,112058,0,B52 B54 B56,S
375 | 1265,2,"Harbeck, Mr. William H",male,44,0,0,248746,13,,S
376 | 1266,1,"Dodge, Mrs. Washington (Ruth Vidaver)",female,54,1,1,33638,81.8583,A34,S
377 | 1267,1,"Bowen, Miss. Grace Scott",female,45,0,0,PC 17608,262.375,,C
378 | 1268,3,"Kink, Miss. Maria",female,22,2,0,315152,8.6625,,S
379 | 1269,2,"Cotterill, Mr. Henry Harry""""",male,21,0,0,29107,11.5,,S
380 | 1270,1,"Hipkins, Mr. William Edward",male,55,0,0,680,50,C39,S
381 | 1271,3,"Asplund, Master. Carl Edgar",male,5,4,2,347077,31.3875,,S
382 | 1272,3,"O'Connor, Mr. Patrick",male,,0,0,366713,7.75,,Q
383 | 1273,3,"Foley, Mr. Joseph",male,26,0,0,330910,7.8792,,Q
384 | 1274,3,"Risien, Mrs. Samuel (Emma)",female,,0,0,364498,14.5,,S
385 | 1275,3,"McNamee, Mrs. Neal (Eileen O'Leary)",female,19,1,0,376566,16.1,,S
386 | 1276,2,"Wheeler, Mr. Edwin Frederick""""",male,,0,0,SC/PARIS 2159,12.875,,S
387 | 1277,2,"Herman, Miss. Kate",female,24,1,2,220845,65,,S
388 | 1278,3,"Aronsson, Mr. Ernst Axel Algot",male,24,0,0,349911,7.775,,S
389 | 1279,2,"Ashby, Mr. John",male,57,0,0,244346,13,,S
390 | 1280,3,"Canavan, Mr. Patrick",male,21,0,0,364858,7.75,,Q
391 | 1281,3,"Palsson, Master. Paul Folke",male,6,3,1,349909,21.075,,S
392 | 1282,1,"Payne, Mr. Vivian Ponsonby",male,23,0,0,12749,93.5,B24,S
393 | 1283,1,"Lines, Mrs. Ernest H (Elizabeth Lindsey James)",female,51,0,1,PC 17592,39.4,D28,S
394 | 1284,3,"Abbott, Master. Eugene Joseph",male,13,0,2,C.A. 2673,20.25,,S
395 | 1285,2,"Gilbert, Mr. William",male,47,0,0,C.A. 30769,10.5,,S
396 | 1286,3,"Kink-Heilmann, Mr. Anton",male,29,3,1,315153,22.025,,S
397 | 1287,1,"Smith, Mrs. Lucien Philip (Mary Eloise Hughes)",female,18,1,0,13695,60,C31,S
398 | 1288,3,"Colbert, Mr. Patrick",male,24,0,0,371109,7.25,,Q
399 | 1289,1,"Frolicher-Stehli, Mrs. Maxmillian (Margaretha Emerentia Stehli)",female,48,1,1,13567,79.2,B41,C
400 | 1290,3,"Larsson-Rondberg, Mr. Edvard A",male,22,0,0,347065,7.775,,S
401 | 1291,3,"Conlon, Mr. Thomas Henry",male,31,0,0,21332,7.7333,,Q
402 | 1292,1,"Bonnell, Miss. Caroline",female,30,0,0,36928,164.8667,C7,S
403 | 1293,2,"Gale, Mr. Harry",male,38,1,0,28664,21,,S
404 | 1294,1,"Gibson, Miss. Dorothy Winifred",female,22,0,1,112378,59.4,,C
405 | 1295,1,"Carrau, Mr. Jose Pedro",male,17,0,0,113059,47.1,,S
406 | 1296,1,"Frauenthal, Mr. Isaac Gerald",male,43,1,0,17765,27.7208,D40,C
407 | 1297,2,"Nourney, Mr. Alfred (Baron von Drachstedt"")""",male,20,0,0,SC/PARIS 2166,13.8625,D38,C
408 | 1298,2,"Ware, Mr. William Jeffery",male,23,1,0,28666,10.5,,S
409 | 1299,1,"Widener, Mr. George Dunton",male,50,1,1,113503,211.5,C80,C
410 | 1300,3,"Riordan, Miss. Johanna Hannah""""",female,,0,0,334915,7.7208,,Q
411 | 1301,3,"Peacock, Miss. Treasteall",female,3,1,1,SOTON/O.Q. 3101315,13.775,,S
412 | 1302,3,"Naughton, Miss. Hannah",female,,0,0,365237,7.75,,Q
413 | 1303,1,"Minahan, Mrs. William Edward (Lillian E Thorpe)",female,37,1,0,19928,90,C78,Q
414 | 1304,3,"Henriksson, Miss. Jenny Lovisa",female,28,0,0,347086,7.775,,S
415 | 1305,3,"Spector, Mr. Woolf",male,,0,0,A.5. 3236,8.05,,S
416 | 1306,1,"Oliva y Ocana, Dona. Fermina",female,39,0,0,PC 17758,108.9,C105,C
417 | 1307,3,"Saether, Mr. Simon Sivertsen",male,38.5,0,0,SOTON/O.Q. 3101262,7.25,,S
418 | 1308,3,"Ware, Mr. Frederick",male,,0,0,359309,8.05,,S
419 | 1309,3,"Peter, Master. Michael J",male,,1,1,2668,22.3583,,C
420 |
--------------------------------------------------------------------------------