├── DecisionTree
│   ├── Original_Data
│   │   └── car.data
│   ├── car_integer_exceptY.csv
│   └── decisionTree.py
├── Golf_Predictions.ipynb
├── LinearRegression.py
├── Logistic_Regression.ipynb
├── Logistic_Regression.py
├── MNIST_ANN.ipynb
├── NBA_Basketball_Exploration
│   ├── Basketball_Data_Exploration.ipynb
│   ├── basketball_data_exploration.py
│   └── nba_2013.csv
├── Neural_Networks
│   ├── Neural_Networks.ipynb
│   └── housepricedata.csv
├── Predict_Boston_Housing_Price.ipynb
├── README.md
├── Replace_Strings_With_Numbers
│   ├── car.csv
│   └── replace_strings_with_numbers.py
├── SVM_Stock
│   ├── FB_30_days.csv
│   ├── SVM.ipynb
│   └── svm.py
├── breast_cancer_detection
│   ├── Breast_Cancer_Detection.ipynb
│   ├── breast_cancer_detection.py
│   └── data.csv
├── concatenate_file.py
├── mnist_ann.py
├── remove_empty_row.py
├── scrape.py
├── sentiment.py
├── stock.ipynb
└── stock.py
/DecisionTree/car_integer_exceptY.csv:
--------------------------------------------------------------------------------
1 | ,buying,maint,doors,persons,lug_boot,safety,values
2 | 0,4,4,2,2,1,1,unacc
3 | 1,4,4,2,2,1,2,unacc
4 | 2,4,4,2,2,1,3,unacc
5 | 3,4,4,2,2,2,1,unacc
6 | 4,4,4,2,2,2,2,unacc
7 | 5,4,4,2,2,2,3,unacc
8 | 6,4,4,2,2,3,1,unacc
9 | 7,4,4,2,2,3,2,unacc
10 | 8,4,4,2,2,3,3,unacc
11 | 9,4,4,2,4,1,1,unacc
12 | 10,4,4,2,4,1,2,unacc
13 | 11,4,4,2,4,1,3,unacc
14 | 12,4,4,2,4,2,1,unacc
15 | 13,4,4,2,4,2,2,unacc
16 | 14,4,4,2,4,2,3,unacc
17 | 15,4,4,2,4,3,1,unacc
18 | 16,4,4,2,4,3,2,unacc
19 | 17,4,4,2,4,3,3,unacc
20 | 18,4,4,2,6,1,1,unacc
21 | 19,4,4,2,6,1,2,unacc
22 | 20,4,4,2,6,1,3,unacc
23 | 21,4,4,2,6,2,1,unacc
24 | 22,4,4,2,6,2,2,unacc
25 | 23,4,4,2,6,2,3,unacc
26 | 24,4,4,2,6,3,1,unacc
27 | 25,4,4,2,6,3,2,unacc
28 | 26,4,4,2,6,3,3,unacc
29 | 27,4,4,3,2,1,1,unacc
30 | 28,4,4,3,2,1,2,unacc
31 | 29,4,4,3,2,1,3,unacc
32 | 30,4,4,3,2,2,1,unacc
33 | 31,4,4,3,2,2,2,unacc
34 | 32,4,4,3,2,2,3,unacc
35 | 33,4,4,3,2,3,1,unacc
36 | 34,4,4,3,2,3,2,unacc
37 | 35,4,4,3,2,3,3,unacc
38 | 36,4,4,3,4,1,1,unacc
39 | 37,4,4,3,4,1,2,unacc
40 | 38,4,4,3,4,1,3,unacc
41 | 39,4,4,3,4,2,1,unacc
42 | 40,4,4,3,4,2,2,unacc
43 | 41,4,4,3,4,2,3,unacc
44 | 42,4,4,3,4,3,1,unacc
45 | 43,4,4,3,4,3,2,unacc
46 | 44,4,4,3,4,3,3,unacc
47 | 45,4,4,3,6,1,1,unacc
48 | 46,4,4,3,6,1,2,unacc
49 | 47,4,4,3,6,1,3,unacc
50 | 48,4,4,3,6,2,1,unacc
51 | 49,4,4,3,6,2,2,unacc
52 | 50,4,4,3,6,2,3,unacc
53 | 51,4,4,3,6,3,1,unacc
54 | 52,4,4,3,6,3,2,unacc
55 | 53,4,4,3,6,3,3,unacc
56 | 54,4,4,4,2,1,1,unacc
57 | 55,4,4,4,2,1,2,unacc
58 | 56,4,4,4,2,1,3,unacc
59 | 57,4,4,4,2,2,1,unacc
60 | 58,4,4,4,2,2,2,unacc
61 | 59,4,4,4,2,2,3,unacc
62 | 60,4,4,4,2,3,1,unacc
63 | 61,4,4,4,2,3,2,unacc
64 | 62,4,4,4,2,3,3,unacc
65 | 63,4,4,4,4,1,1,unacc
66 | 64,4,4,4,4,1,2,unacc
67 | 65,4,4,4,4,1,3,unacc
68 | 66,4,4,4,4,2,1,unacc
69 | 67,4,4,4,4,2,2,unacc
70 | 68,4,4,4,4,2,3,unacc
71 | 69,4,4,4,4,3,1,unacc
72 | 70,4,4,4,4,3,2,unacc
73 | 71,4,4,4,4,3,3,unacc
74 | 72,4,4,4,6,1,1,unacc
75 | 73,4,4,4,6,1,2,unacc
76 | 74,4,4,4,6,1,3,unacc
77 | 75,4,4,4,6,2,1,unacc
78 | 76,4,4,4,6,2,2,unacc
79 | 77,4,4,4,6,2,3,unacc
80 | 78,4,4,4,6,3,1,unacc
81 | 79,4,4,4,6,3,2,unacc
82 | 80,4,4,4,6,3,3,unacc
83 | 81,4,4,5,2,1,1,unacc
84 | 82,4,4,5,2,1,2,unacc
85 | 83,4,4,5,2,1,3,unacc
86 | 84,4,4,5,2,2,1,unacc
87 | 85,4,4,5,2,2,2,unacc
88 | 86,4,4,5,2,2,3,unacc
89 | 87,4,4,5,2,3,1,unacc
90 | 88,4,4,5,2,3,2,unacc
91 | 89,4,4,5,2,3,3,unacc
92 | 90,4,4,5,4,1,1,unacc
93 | 91,4,4,5,4,1,2,unacc
94 | 92,4,4,5,4,1,3,unacc
95 | 93,4,4,5,4,2,1,unacc
96 | 94,4,4,5,4,2,2,unacc
97 | 95,4,4,5,4,2,3,unacc
98 | 96,4,4,5,4,3,1,unacc
99 | 97,4,4,5,4,3,2,unacc
100 | 98,4,4,5,4,3,3,unacc
101 | 99,4,4,5,6,1,1,unacc
102 | 100,4,4,5,6,1,2,unacc
103 | 101,4,4,5,6,1,3,unacc
104 | 102,4,4,5,6,2,1,unacc
105 | 103,4,4,5,6,2,2,unacc
106 | 104,4,4,5,6,2,3,unacc
107 | 105,4,4,5,6,3,1,unacc
108 | 106,4,4,5,6,3,2,unacc
109 | 107,4,4,5,6,3,3,unacc
110 | 108,4,3,2,2,1,1,unacc
111 | 109,4,3,2,2,1,2,unacc
112 | 110,4,3,2,2,1,3,unacc
113 | 111,4,3,2,2,2,1,unacc
114 | 112,4,3,2,2,2,2,unacc
115 | 113,4,3,2,2,2,3,unacc
116 | 114,4,3,2,2,3,1,unacc
117 | 115,4,3,2,2,3,2,unacc
118 | 116,4,3,2,2,3,3,unacc
119 | 117,4,3,2,4,1,1,unacc
120 | 118,4,3,2,4,1,2,unacc
121 | 119,4,3,2,4,1,3,unacc
122 | 120,4,3,2,4,2,1,unacc
123 | 121,4,3,2,4,2,2,unacc
124 | 122,4,3,2,4,2,3,unacc
125 | 123,4,3,2,4,3,1,unacc
126 | 124,4,3,2,4,3,2,unacc
127 | 125,4,3,2,4,3,3,unacc
128 | 126,4,3,2,6,1,1,unacc
129 | 127,4,3,2,6,1,2,unacc
130 | 128,4,3,2,6,1,3,unacc
131 | 129,4,3,2,6,2,1,unacc
132 | 130,4,3,2,6,2,2,unacc
133 | 131,4,3,2,6,2,3,unacc
134 | 132,4,3,2,6,3,1,unacc
135 | 133,4,3,2,6,3,2,unacc
136 | 134,4,3,2,6,3,3,unacc
137 | 135,4,3,3,2,1,1,unacc
138 | 136,4,3,3,2,1,2,unacc
139 | 137,4,3,3,2,1,3,unacc
140 | 138,4,3,3,2,2,1,unacc
141 | 139,4,3,3,2,2,2,unacc
142 | 140,4,3,3,2,2,3,unacc
143 | 141,4,3,3,2,3,1,unacc
144 | 142,4,3,3,2,3,2,unacc
145 | 143,4,3,3,2,3,3,unacc
146 | 144,4,3,3,4,1,1,unacc
147 | 145,4,3,3,4,1,2,unacc
148 | 146,4,3,3,4,1,3,unacc
149 | 147,4,3,3,4,2,1,unacc
150 | 148,4,3,3,4,2,2,unacc
151 | 149,4,3,3,4,2,3,unacc
152 | 150,4,3,3,4,3,1,unacc
153 | 151,4,3,3,4,3,2,unacc
154 | 152,4,3,3,4,3,3,unacc
155 | 153,4,3,3,6,1,1,unacc
156 | 154,4,3,3,6,1,2,unacc
157 | 155,4,3,3,6,1,3,unacc
158 | 156,4,3,3,6,2,1,unacc
159 | 157,4,3,3,6,2,2,unacc
160 | 158,4,3,3,6,2,3,unacc
161 | 159,4,3,3,6,3,1,unacc
162 | 160,4,3,3,6,3,2,unacc
163 | 161,4,3,3,6,3,3,unacc
164 | 162,4,3,4,2,1,1,unacc
165 | 163,4,3,4,2,1,2,unacc
166 | 164,4,3,4,2,1,3,unacc
167 | 165,4,3,4,2,2,1,unacc
168 | 166,4,3,4,2,2,2,unacc
169 | 167,4,3,4,2,2,3,unacc
170 | 168,4,3,4,2,3,1,unacc
171 | 169,4,3,4,2,3,2,unacc
172 | 170,4,3,4,2,3,3,unacc
173 | 171,4,3,4,4,1,1,unacc
174 | 172,4,3,4,4,1,2,unacc
175 | 173,4,3,4,4,1,3,unacc
176 | 174,4,3,4,4,2,1,unacc
177 | 175,4,3,4,4,2,2,unacc
178 | 176,4,3,4,4,2,3,unacc
179 | 177,4,3,4,4,3,1,unacc
180 | 178,4,3,4,4,3,2,unacc
181 | 179,4,3,4,4,3,3,unacc
182 | 180,4,3,4,6,1,1,unacc
183 | 181,4,3,4,6,1,2,unacc
184 | 182,4,3,4,6,1,3,unacc
185 | 183,4,3,4,6,2,1,unacc
186 | 184,4,3,4,6,2,2,unacc
187 | 185,4,3,4,6,2,3,unacc
188 | 186,4,3,4,6,3,1,unacc
189 | 187,4,3,4,6,3,2,unacc
190 | 188,4,3,4,6,3,3,unacc
191 | 189,4,3,5,2,1,1,unacc
192 | 190,4,3,5,2,1,2,unacc
193 | 191,4,3,5,2,1,3,unacc
194 | 192,4,3,5,2,2,1,unacc
195 | 193,4,3,5,2,2,2,unacc
196 | 194,4,3,5,2,2,3,unacc
197 | 195,4,3,5,2,3,1,unacc
198 | 196,4,3,5,2,3,2,unacc
199 | 197,4,3,5,2,3,3,unacc
200 | 198,4,3,5,4,1,1,unacc
201 | 199,4,3,5,4,1,2,unacc
202 | 200,4,3,5,4,1,3,unacc
203 | 201,4,3,5,4,2,1,unacc
204 | 202,4,3,5,4,2,2,unacc
205 | 203,4,3,5,4,2,3,unacc
206 | 204,4,3,5,4,3,1,unacc
207 | 205,4,3,5,4,3,2,unacc
208 | 206,4,3,5,4,3,3,unacc
209 | 207,4,3,5,6,1,1,unacc
210 | 208,4,3,5,6,1,2,unacc
211 | 209,4,3,5,6,1,3,unacc
212 | 210,4,3,5,6,2,1,unacc
213 | 211,4,3,5,6,2,2,unacc
214 | 212,4,3,5,6,2,3,unacc
215 | 213,4,3,5,6,3,1,unacc
216 | 214,4,3,5,6,3,2,unacc
217 | 215,4,3,5,6,3,3,unacc
218 | 216,4,2,2,2,1,1,unacc
219 | 217,4,2,2,2,1,2,unacc
220 | 218,4,2,2,2,1,3,unacc
221 | 219,4,2,2,2,2,1,unacc
222 | 220,4,2,2,2,2,2,unacc
223 | 221,4,2,2,2,2,3,unacc
224 | 222,4,2,2,2,3,1,unacc
225 | 223,4,2,2,2,3,2,unacc
226 | 224,4,2,2,2,3,3,unacc
227 | 225,4,2,2,4,1,1,unacc
228 | 226,4,2,2,4,1,2,unacc
229 | 227,4,2,2,4,1,3,acc
230 | 228,4,2,2,4,2,1,unacc
231 | 229,4,2,2,4,2,2,unacc
232 | 230,4,2,2,4,2,3,acc
233 | 231,4,2,2,4,3,1,unacc
234 | 232,4,2,2,4,3,2,acc
235 | 233,4,2,2,4,3,3,acc
236 | 234,4,2,2,6,1,1,unacc
237 | 235,4,2,2,6,1,2,unacc
238 | 236,4,2,2,6,1,3,unacc
239 | 237,4,2,2,6,2,1,unacc
240 | 238,4,2,2,6,2,2,unacc
241 | 239,4,2,2,6,2,3,acc
242 | 240,4,2,2,6,3,1,unacc
243 | 241,4,2,2,6,3,2,acc
244 | 242,4,2,2,6,3,3,acc
245 | 243,4,2,3,2,1,1,unacc
246 | 244,4,2,3,2,1,2,unacc
247 | 245,4,2,3,2,1,3,unacc
248 | 246,4,2,3,2,2,1,unacc
249 | 247,4,2,3,2,2,2,unacc
250 | 248,4,2,3,2,2,3,unacc
251 | 249,4,2,3,2,3,1,unacc
252 | 250,4,2,3,2,3,2,unacc
253 | 251,4,2,3,2,3,3,unacc
254 | 252,4,2,3,4,1,1,unacc
255 | 253,4,2,3,4,1,2,unacc
256 | 254,4,2,3,4,1,3,acc
257 | 255,4,2,3,4,2,1,unacc
258 | 256,4,2,3,4,2,2,unacc
259 | 257,4,2,3,4,2,3,acc
260 | 258,4,2,3,4,3,1,unacc
261 | 259,4,2,3,4,3,2,acc
262 | 260,4,2,3,4,3,3,acc
263 | 261,4,2,3,6,1,1,unacc
264 | 262,4,2,3,6,1,2,unacc
265 | 263,4,2,3,6,1,3,acc
266 | 264,4,2,3,6,2,1,unacc
267 | 265,4,2,3,6,2,2,acc
268 | 266,4,2,3,6,2,3,acc
269 | 267,4,2,3,6,3,1,unacc
270 | 268,4,2,3,6,3,2,acc
271 | 269,4,2,3,6,3,3,acc
272 | 270,4,2,4,2,1,1,unacc
273 | 271,4,2,4,2,1,2,unacc
274 | 272,4,2,4,2,1,3,unacc
275 | 273,4,2,4,2,2,1,unacc
276 | 274,4,2,4,2,2,2,unacc
277 | 275,4,2,4,2,2,3,unacc
278 | 276,4,2,4,2,3,1,unacc
279 | 277,4,2,4,2,3,2,unacc
280 | 278,4,2,4,2,3,3,unacc
281 | 279,4,2,4,4,1,1,unacc
282 | 280,4,2,4,4,1,2,unacc
283 | 281,4,2,4,4,1,3,acc
284 | 282,4,2,4,4,2,1,unacc
285 | 283,4,2,4,4,2,2,acc
286 | 284,4,2,4,4,2,3,acc
287 | 285,4,2,4,4,3,1,unacc
288 | 286,4,2,4,4,3,2,acc
289 | 287,4,2,4,4,3,3,acc
290 | 288,4,2,4,6,1,1,unacc
291 | 289,4,2,4,6,1,2,unacc
292 | 290,4,2,4,6,1,3,acc
293 | 291,4,2,4,6,2,1,unacc
294 | 292,4,2,4,6,2,2,acc
295 | 293,4,2,4,6,2,3,acc
296 | 294,4,2,4,6,3,1,unacc
297 | 295,4,2,4,6,3,2,acc
298 | 296,4,2,4,6,3,3,acc
299 | 297,4,2,5,2,1,1,unacc
300 | 298,4,2,5,2,1,2,unacc
301 | 299,4,2,5,2,1,3,unacc
302 | 300,4,2,5,2,2,1,unacc
303 | 301,4,2,5,2,2,2,unacc
304 | 302,4,2,5,2,2,3,unacc
305 | 303,4,2,5,2,3,1,unacc
306 | 304,4,2,5,2,3,2,unacc
307 | 305,4,2,5,2,3,3,unacc
308 | 306,4,2,5,4,1,1,unacc
309 | 307,4,2,5,4,1,2,unacc
310 | 308,4,2,5,4,1,3,acc
311 | 309,4,2,5,4,2,1,unacc
312 | 310,4,2,5,4,2,2,acc
313 | 311,4,2,5,4,2,3,acc
314 | 312,4,2,5,4,3,1,unacc
315 | 313,4,2,5,4,3,2,acc
316 | 314,4,2,5,4,3,3,acc
317 | 315,4,2,5,6,1,1,unacc
318 | 316,4,2,5,6,1,2,unacc
319 | 317,4,2,5,6,1,3,acc
320 | 318,4,2,5,6,2,1,unacc
321 | 319,4,2,5,6,2,2,acc
322 | 320,4,2,5,6,2,3,acc
323 | 321,4,2,5,6,3,1,unacc
324 | 322,4,2,5,6,3,2,acc
325 | 323,4,2,5,6,3,3,acc
326 | 324,4,1,2,2,1,1,unacc
327 | 325,4,1,2,2,1,2,unacc
328 | 326,4,1,2,2,1,3,unacc
329 | 327,4,1,2,2,2,1,unacc
330 | 328,4,1,2,2,2,2,unacc
331 | 329,4,1,2,2,2,3,unacc
332 | 330,4,1,2,2,3,1,unacc
333 | 331,4,1,2,2,3,2,unacc
334 | 332,4,1,2,2,3,3,unacc
335 | 333,4,1,2,4,1,1,unacc
336 | 334,4,1,2,4,1,2,unacc
337 | 335,4,1,2,4,1,3,acc
338 | 336,4,1,2,4,2,1,unacc
339 | 337,4,1,2,4,2,2,unacc
340 | 338,4,1,2,4,2,3,acc
341 | 339,4,1,2,4,3,1,unacc
342 | 340,4,1,2,4,3,2,acc
343 | 341,4,1,2,4,3,3,acc
344 | 342,4,1,2,6,1,1,unacc
345 | 343,4,1,2,6,1,2,unacc
346 | 344,4,1,2,6,1,3,unacc
347 | 345,4,1,2,6,2,1,unacc
348 | 346,4,1,2,6,2,2,unacc
349 | 347,4,1,2,6,2,3,acc
350 | 348,4,1,2,6,3,1,unacc
351 | 349,4,1,2,6,3,2,acc
352 | 350,4,1,2,6,3,3,acc
353 | 351,4,1,3,2,1,1,unacc
354 | 352,4,1,3,2,1,2,unacc
355 | 353,4,1,3,2,1,3,unacc
356 | 354,4,1,3,2,2,1,unacc
357 | 355,4,1,3,2,2,2,unacc
358 | 356,4,1,3,2,2,3,unacc
359 | 357,4,1,3,2,3,1,unacc
360 | 358,4,1,3,2,3,2,unacc
361 | 359,4,1,3,2,3,3,unacc
362 | 360,4,1,3,4,1,1,unacc
363 | 361,4,1,3,4,1,2,unacc
364 | 362,4,1,3,4,1,3,acc
365 | 363,4,1,3,4,2,1,unacc
366 | 364,4,1,3,4,2,2,unacc
367 | 365,4,1,3,4,2,3,acc
368 | 366,4,1,3,4,3,1,unacc
369 | 367,4,1,3,4,3,2,acc
370 | 368,4,1,3,4,3,3,acc
371 | 369,4,1,3,6,1,1,unacc
372 | 370,4,1,3,6,1,2,unacc
373 | 371,4,1,3,6,1,3,acc
374 | 372,4,1,3,6,2,1,unacc
375 | 373,4,1,3,6,2,2,acc
376 | 374,4,1,3,6,2,3,acc
377 | 375,4,1,3,6,3,1,unacc
378 | 376,4,1,3,6,3,2,acc
379 | 377,4,1,3,6,3,3,acc
380 | 378,4,1,4,2,1,1,unacc
381 | 379,4,1,4,2,1,2,unacc
382 | 380,4,1,4,2,1,3,unacc
383 | 381,4,1,4,2,2,1,unacc
384 | 382,4,1,4,2,2,2,unacc
385 | 383,4,1,4,2,2,3,unacc
386 | 384,4,1,4,2,3,1,unacc
387 | 385,4,1,4,2,3,2,unacc
388 | 386,4,1,4,2,3,3,unacc
389 | 387,4,1,4,4,1,1,unacc
390 | 388,4,1,4,4,1,2,unacc
391 | 389,4,1,4,4,1,3,acc
392 | 390,4,1,4,4,2,1,unacc
393 | 391,4,1,4,4,2,2,acc
394 | 392,4,1,4,4,2,3,acc
395 | 393,4,1,4,4,3,1,unacc
396 | 394,4,1,4,4,3,2,acc
397 | 395,4,1,4,4,3,3,acc
398 | 396,4,1,4,6,1,1,unacc
399 | 397,4,1,4,6,1,2,unacc
400 | 398,4,1,4,6,1,3,acc
401 | 399,4,1,4,6,2,1,unacc
402 | 400,4,1,4,6,2,2,acc
403 | 401,4,1,4,6,2,3,acc
404 | 402,4,1,4,6,3,1,unacc
405 | 403,4,1,4,6,3,2,acc
406 | 404,4,1,4,6,3,3,acc
407 | 405,4,1,5,2,1,1,unacc
408 | 406,4,1,5,2,1,2,unacc
409 | 407,4,1,5,2,1,3,unacc
410 | 408,4,1,5,2,2,1,unacc
411 | 409,4,1,5,2,2,2,unacc
412 | 410,4,1,5,2,2,3,unacc
413 | 411,4,1,5,2,3,1,unacc
414 | 412,4,1,5,2,3,2,unacc
415 | 413,4,1,5,2,3,3,unacc
416 | 414,4,1,5,4,1,1,unacc
417 | 415,4,1,5,4,1,2,unacc
418 | 416,4,1,5,4,1,3,acc
419 | 417,4,1,5,4,2,1,unacc
420 | 418,4,1,5,4,2,2,acc
421 | 419,4,1,5,4,2,3,acc
422 | 420,4,1,5,4,3,1,unacc
423 | 421,4,1,5,4,3,2,acc
424 | 422,4,1,5,4,3,3,acc
425 | 423,4,1,5,6,1,1,unacc
426 | 424,4,1,5,6,1,2,unacc
427 | 425,4,1,5,6,1,3,acc
428 | 426,4,1,5,6,2,1,unacc
429 | 427,4,1,5,6,2,2,acc
430 | 428,4,1,5,6,2,3,acc
431 | 429,4,1,5,6,3,1,unacc
432 | 430,4,1,5,6,3,2,acc
433 | 431,4,1,5,6,3,3,acc
434 | 432,3,4,2,2,1,1,unacc
435 | 433,3,4,2,2,1,2,unacc
436 | 434,3,4,2,2,1,3,unacc
437 | 435,3,4,2,2,2,1,unacc
438 | 436,3,4,2,2,2,2,unacc
439 | 437,3,4,2,2,2,3,unacc
440 | 438,3,4,2,2,3,1,unacc
441 | 439,3,4,2,2,3,2,unacc
442 | 440,3,4,2,2,3,3,unacc
443 | 441,3,4,2,4,1,1,unacc
444 | 442,3,4,2,4,1,2,unacc
445 | 443,3,4,2,4,1,3,unacc
446 | 444,3,4,2,4,2,1,unacc
447 | 445,3,4,2,4,2,2,unacc
448 | 446,3,4,2,4,2,3,unacc
449 | 447,3,4,2,4,3,1,unacc
450 | 448,3,4,2,4,3,2,unacc
451 | 449,3,4,2,4,3,3,unacc
452 | 450,3,4,2,6,1,1,unacc
453 | 451,3,4,2,6,1,2,unacc
454 | 452,3,4,2,6,1,3,unacc
455 | 453,3,4,2,6,2,1,unacc
456 | 454,3,4,2,6,2,2,unacc
457 | 455,3,4,2,6,2,3,unacc
458 | 456,3,4,2,6,3,1,unacc
459 | 457,3,4,2,6,3,2,unacc
460 | 458,3,4,2,6,3,3,unacc
461 | 459,3,4,3,2,1,1,unacc
462 | 460,3,4,3,2,1,2,unacc
463 | 461,3,4,3,2,1,3,unacc
464 | 462,3,4,3,2,2,1,unacc
465 | 463,3,4,3,2,2,2,unacc
466 | 464,3,4,3,2,2,3,unacc
467 | 465,3,4,3,2,3,1,unacc
468 | 466,3,4,3,2,3,2,unacc
469 | 467,3,4,3,2,3,3,unacc
470 | 468,3,4,3,4,1,1,unacc
471 | 469,3,4,3,4,1,2,unacc
472 | 470,3,4,3,4,1,3,unacc
473 | 471,3,4,3,4,2,1,unacc
474 | 472,3,4,3,4,2,2,unacc
475 | 473,3,4,3,4,2,3,unacc
476 | 474,3,4,3,4,3,1,unacc
477 | 475,3,4,3,4,3,2,unacc
478 | 476,3,4,3,4,3,3,unacc
479 | 477,3,4,3,6,1,1,unacc
480 | 478,3,4,3,6,1,2,unacc
481 | 479,3,4,3,6,1,3,unacc
482 | 480,3,4,3,6,2,1,unacc
483 | 481,3,4,3,6,2,2,unacc
484 | 482,3,4,3,6,2,3,unacc
485 | 483,3,4,3,6,3,1,unacc
486 | 484,3,4,3,6,3,2,unacc
487 | 485,3,4,3,6,3,3,unacc
488 | 486,3,4,4,2,1,1,unacc
489 | 487,3,4,4,2,1,2,unacc
490 | 488,3,4,4,2,1,3,unacc
491 | 489,3,4,4,2,2,1,unacc
492 | 490,3,4,4,2,2,2,unacc
493 | 491,3,4,4,2,2,3,unacc
494 | 492,3,4,4,2,3,1,unacc
495 | 493,3,4,4,2,3,2,unacc
496 | 494,3,4,4,2,3,3,unacc
497 | 495,3,4,4,4,1,1,unacc
498 | 496,3,4,4,4,1,2,unacc
499 | 497,3,4,4,4,1,3,unacc
500 | 498,3,4,4,4,2,1,unacc
501 | 499,3,4,4,4,2,2,unacc
502 | 500,3,4,4,4,2,3,unacc
503 | 501,3,4,4,4,3,1,unacc
504 | 502,3,4,4,4,3,2,unacc
505 | 503,3,4,4,4,3,3,unacc
506 | 504,3,4,4,6,1,1,unacc
507 | 505,3,4,4,6,1,2,unacc
508 | 506,3,4,4,6,1,3,unacc
509 | 507,3,4,4,6,2,1,unacc
510 | 508,3,4,4,6,2,2,unacc
511 | 509,3,4,4,6,2,3,unacc
512 | 510,3,4,4,6,3,1,unacc
513 | 511,3,4,4,6,3,2,unacc
514 | 512,3,4,4,6,3,3,unacc
515 | 513,3,4,5,2,1,1,unacc
516 | 514,3,4,5,2,1,2,unacc
517 | 515,3,4,5,2,1,3,unacc
518 | 516,3,4,5,2,2,1,unacc
519 | 517,3,4,5,2,2,2,unacc
520 | 518,3,4,5,2,2,3,unacc
521 | 519,3,4,5,2,3,1,unacc
522 | 520,3,4,5,2,3,2,unacc
523 | 521,3,4,5,2,3,3,unacc
524 | 522,3,4,5,4,1,1,unacc
525 | 523,3,4,5,4,1,2,unacc
526 | 524,3,4,5,4,1,3,unacc
527 | 525,3,4,5,4,2,1,unacc
528 | 526,3,4,5,4,2,2,unacc
529 | 527,3,4,5,4,2,3,unacc
530 | 528,3,4,5,4,3,1,unacc
531 | 529,3,4,5,4,3,2,unacc
532 | 530,3,4,5,4,3,3,unacc
533 | 531,3,4,5,6,1,1,unacc
534 | 532,3,4,5,6,1,2,unacc
535 | 533,3,4,5,6,1,3,unacc
536 | 534,3,4,5,6,2,1,unacc
537 | 535,3,4,5,6,2,2,unacc
538 | 536,3,4,5,6,2,3,unacc
539 | 537,3,4,5,6,3,1,unacc
540 | 538,3,4,5,6,3,2,unacc
541 | 539,3,4,5,6,3,3,unacc
542 | 540,3,3,2,2,1,1,unacc
543 | 541,3,3,2,2,1,2,unacc
544 | 542,3,3,2,2,1,3,unacc
545 | 543,3,3,2,2,2,1,unacc
546 | 544,3,3,2,2,2,2,unacc
547 | 545,3,3,2,2,2,3,unacc
548 | 546,3,3,2,2,3,1,unacc
549 | 547,3,3,2,2,3,2,unacc
550 | 548,3,3,2,2,3,3,unacc
551 | 549,3,3,2,4,1,1,unacc
552 | 550,3,3,2,4,1,2,unacc
553 | 551,3,3,2,4,1,3,acc
554 | 552,3,3,2,4,2,1,unacc
555 | 553,3,3,2,4,2,2,unacc
556 | 554,3,3,2,4,2,3,acc
557 | 555,3,3,2,4,3,1,unacc
558 | 556,3,3,2,4,3,2,acc
559 | 557,3,3,2,4,3,3,acc
560 | 558,3,3,2,6,1,1,unacc
561 | 559,3,3,2,6,1,2,unacc
562 | 560,3,3,2,6,1,3,unacc
563 | 561,3,3,2,6,2,1,unacc
564 | 562,3,3,2,6,2,2,unacc
565 | 563,3,3,2,6,2,3,acc
566 | 564,3,3,2,6,3,1,unacc
567 | 565,3,3,2,6,3,2,acc
568 | 566,3,3,2,6,3,3,acc
569 | 567,3,3,3,2,1,1,unacc
570 | 568,3,3,3,2,1,2,unacc
571 | 569,3,3,3,2,1,3,unacc
572 | 570,3,3,3,2,2,1,unacc
573 | 571,3,3,3,2,2,2,unacc
574 | 572,3,3,3,2,2,3,unacc
575 | 573,3,3,3,2,3,1,unacc
576 | 574,3,3,3,2,3,2,unacc
577 | 575,3,3,3,2,3,3,unacc
578 | 576,3,3,3,4,1,1,unacc
579 | 577,3,3,3,4,1,2,unacc
580 | 578,3,3,3,4,1,3,acc
581 | 579,3,3,3,4,2,1,unacc
582 | 580,3,3,3,4,2,2,unacc
583 | 581,3,3,3,4,2,3,acc
584 | 582,3,3,3,4,3,1,unacc
585 | 583,3,3,3,4,3,2,acc
586 | 584,3,3,3,4,3,3,acc
587 | 585,3,3,3,6,1,1,unacc
588 | 586,3,3,3,6,1,2,unacc
589 | 587,3,3,3,6,1,3,acc
590 | 588,3,3,3,6,2,1,unacc
591 | 589,3,3,3,6,2,2,acc
592 | 590,3,3,3,6,2,3,acc
593 | 591,3,3,3,6,3,1,unacc
594 | 592,3,3,3,6,3,2,acc
595 | 593,3,3,3,6,3,3,acc
596 | 594,3,3,4,2,1,1,unacc
597 | 595,3,3,4,2,1,2,unacc
598 | 596,3,3,4,2,1,3,unacc
599 | 597,3,3,4,2,2,1,unacc
600 | 598,3,3,4,2,2,2,unacc
601 | 599,3,3,4,2,2,3,unacc
602 | 600,3,3,4,2,3,1,unacc
603 | 601,3,3,4,2,3,2,unacc
604 | 602,3,3,4,2,3,3,unacc
605 | 603,3,3,4,4,1,1,unacc
606 | 604,3,3,4,4,1,2,unacc
607 | 605,3,3,4,4,1,3,acc
608 | 606,3,3,4,4,2,1,unacc
609 | 607,3,3,4,4,2,2,acc
610 | 608,3,3,4,4,2,3,acc
611 | 609,3,3,4,4,3,1,unacc
612 | 610,3,3,4,4,3,2,acc
613 | 611,3,3,4,4,3,3,acc
614 | 612,3,3,4,6,1,1,unacc
615 | 613,3,3,4,6,1,2,unacc
616 | 614,3,3,4,6,1,3,acc
617 | 615,3,3,4,6,2,1,unacc
618 | 616,3,3,4,6,2,2,acc
619 | 617,3,3,4,6,2,3,acc
620 | 618,3,3,4,6,3,1,unacc
621 | 619,3,3,4,6,3,2,acc
622 | 620,3,3,4,6,3,3,acc
623 | 621,3,3,5,2,1,1,unacc
624 | 622,3,3,5,2,1,2,unacc
625 | 623,3,3,5,2,1,3,unacc
626 | 624,3,3,5,2,2,1,unacc
627 | 625,3,3,5,2,2,2,unacc
628 | 626,3,3,5,2,2,3,unacc
629 | 627,3,3,5,2,3,1,unacc
630 | 628,3,3,5,2,3,2,unacc
631 | 629,3,3,5,2,3,3,unacc
632 | 630,3,3,5,4,1,1,unacc
633 | 631,3,3,5,4,1,2,unacc
634 | 632,3,3,5,4,1,3,acc
635 | 633,3,3,5,4,2,1,unacc
636 | 634,3,3,5,4,2,2,acc
637 | 635,3,3,5,4,2,3,acc
638 | 636,3,3,5,4,3,1,unacc
639 | 637,3,3,5,4,3,2,acc
640 | 638,3,3,5,4,3,3,acc
641 | 639,3,3,5,6,1,1,unacc
642 | 640,3,3,5,6,1,2,unacc
643 | 641,3,3,5,6,1,3,acc
644 | 642,3,3,5,6,2,1,unacc
645 | 643,3,3,5,6,2,2,acc
646 | 644,3,3,5,6,2,3,acc
647 | 645,3,3,5,6,3,1,unacc
648 | 646,3,3,5,6,3,2,acc
649 | 647,3,3,5,6,3,3,acc
650 | 648,3,2,2,2,1,1,unacc
651 | 649,3,2,2,2,1,2,unacc
652 | 650,3,2,2,2,1,3,unacc
653 | 651,3,2,2,2,2,1,unacc
654 | 652,3,2,2,2,2,2,unacc
655 | 653,3,2,2,2,2,3,unacc
656 | 654,3,2,2,2,3,1,unacc
657 | 655,3,2,2,2,3,2,unacc
658 | 656,3,2,2,2,3,3,unacc
659 | 657,3,2,2,4,1,1,unacc
660 | 658,3,2,2,4,1,2,unacc
661 | 659,3,2,2,4,1,3,acc
662 | 660,3,2,2,4,2,1,unacc
663 | 661,3,2,2,4,2,2,unacc
664 | 662,3,2,2,4,2,3,acc
665 | 663,3,2,2,4,3,1,unacc
666 | 664,3,2,2,4,3,2,acc
667 | 665,3,2,2,4,3,3,acc
668 | 666,3,2,2,6,1,1,unacc
669 | 667,3,2,2,6,1,2,unacc
670 | 668,3,2,2,6,1,3,unacc
671 | 669,3,2,2,6,2,1,unacc
672 | 670,3,2,2,6,2,2,unacc
673 | 671,3,2,2,6,2,3,acc
674 | 672,3,2,2,6,3,1,unacc
675 | 673,3,2,2,6,3,2,acc
676 | 674,3,2,2,6,3,3,acc
677 | 675,3,2,3,2,1,1,unacc
678 | 676,3,2,3,2,1,2,unacc
679 | 677,3,2,3,2,1,3,unacc
680 | 678,3,2,3,2,2,1,unacc
681 | 679,3,2,3,2,2,2,unacc
682 | 680,3,2,3,2,2,3,unacc
683 | 681,3,2,3,2,3,1,unacc
684 | 682,3,2,3,2,3,2,unacc
685 | 683,3,2,3,2,3,3,unacc
686 | 684,3,2,3,4,1,1,unacc
687 | 685,3,2,3,4,1,2,unacc
688 | 686,3,2,3,4,1,3,acc
689 | 687,3,2,3,4,2,1,unacc
690 | 688,3,2,3,4,2,2,unacc
691 | 689,3,2,3,4,2,3,acc
692 | 690,3,2,3,4,3,1,unacc
693 | 691,3,2,3,4,3,2,acc
694 | 692,3,2,3,4,3,3,acc
695 | 693,3,2,3,6,1,1,unacc
696 | 694,3,2,3,6,1,2,unacc
697 | 695,3,2,3,6,1,3,acc
698 | 696,3,2,3,6,2,1,unacc
699 | 697,3,2,3,6,2,2,acc
700 | 698,3,2,3,6,2,3,acc
701 | 699,3,2,3,6,3,1,unacc
702 | 700,3,2,3,6,3,2,acc
703 | 701,3,2,3,6,3,3,acc
704 | 702,3,2,4,2,1,1,unacc
705 | 703,3,2,4,2,1,2,unacc
706 | 704,3,2,4,2,1,3,unacc
707 | 705,3,2,4,2,2,1,unacc
708 | 706,3,2,4,2,2,2,unacc
709 | 707,3,2,4,2,2,3,unacc
710 | 708,3,2,4,2,3,1,unacc
711 | 709,3,2,4,2,3,2,unacc
712 | 710,3,2,4,2,3,3,unacc
713 | 711,3,2,4,4,1,1,unacc
714 | 712,3,2,4,4,1,2,unacc
715 | 713,3,2,4,4,1,3,acc
716 | 714,3,2,4,4,2,1,unacc
717 | 715,3,2,4,4,2,2,acc
718 | 716,3,2,4,4,2,3,acc
719 | 717,3,2,4,4,3,1,unacc
720 | 718,3,2,4,4,3,2,acc
721 | 719,3,2,4,4,3,3,acc
722 | 720,3,2,4,6,1,1,unacc
723 | 721,3,2,4,6,1,2,unacc
724 | 722,3,2,4,6,1,3,acc
725 | 723,3,2,4,6,2,1,unacc
726 | 724,3,2,4,6,2,2,acc
727 | 725,3,2,4,6,2,3,acc
728 | 726,3,2,4,6,3,1,unacc
729 | 727,3,2,4,6,3,2,acc
730 | 728,3,2,4,6,3,3,acc
731 | 729,3,2,5,2,1,1,unacc
732 | 730,3,2,5,2,1,2,unacc
733 | 731,3,2,5,2,1,3,unacc
734 | 732,3,2,5,2,2,1,unacc
735 | 733,3,2,5,2,2,2,unacc
736 | 734,3,2,5,2,2,3,unacc
737 | 735,3,2,5,2,3,1,unacc
738 | 736,3,2,5,2,3,2,unacc
739 | 737,3,2,5,2,3,3,unacc
740 | 738,3,2,5,4,1,1,unacc
741 | 739,3,2,5,4,1,2,unacc
742 | 740,3,2,5,4,1,3,acc
743 | 741,3,2,5,4,2,1,unacc
744 | 742,3,2,5,4,2,2,acc
745 | 743,3,2,5,4,2,3,acc
746 | 744,3,2,5,4,3,1,unacc
747 | 745,3,2,5,4,3,2,acc
748 | 746,3,2,5,4,3,3,acc
749 | 747,3,2,5,6,1,1,unacc
750 | 748,3,2,5,6,1,2,unacc
751 | 749,3,2,5,6,1,3,acc
752 | 750,3,2,5,6,2,1,unacc
753 | 751,3,2,5,6,2,2,acc
754 | 752,3,2,5,6,2,3,acc
755 | 753,3,2,5,6,3,1,unacc
756 | 754,3,2,5,6,3,2,acc
757 | 755,3,2,5,6,3,3,acc
758 | 756,3,1,2,2,1,1,unacc
759 | 757,3,1,2,2,1,2,unacc
760 | 758,3,1,2,2,1,3,unacc
761 | 759,3,1,2,2,2,1,unacc
762 | 760,3,1,2,2,2,2,unacc
763 | 761,3,1,2,2,2,3,unacc
764 | 762,3,1,2,2,3,1,unacc
765 | 763,3,1,2,2,3,2,unacc
766 | 764,3,1,2,2,3,3,unacc
767 | 765,3,1,2,4,1,1,unacc
768 | 766,3,1,2,4,1,2,unacc
769 | 767,3,1,2,4,1,3,acc
770 | 768,3,1,2,4,2,1,unacc
771 | 769,3,1,2,4,2,2,unacc
772 | 770,3,1,2,4,2,3,acc
773 | 771,3,1,2,4,3,1,unacc
774 | 772,3,1,2,4,3,2,acc
775 | 773,3,1,2,4,3,3,acc
776 | 774,3,1,2,6,1,1,unacc
777 | 775,3,1,2,6,1,2,unacc
778 | 776,3,1,2,6,1,3,unacc
779 | 777,3,1,2,6,2,1,unacc
780 | 778,3,1,2,6,2,2,unacc
781 | 779,3,1,2,6,2,3,acc
782 | 780,3,1,2,6,3,1,unacc
783 | 781,3,1,2,6,3,2,acc
784 | 782,3,1,2,6,3,3,acc
785 | 783,3,1,3,2,1,1,unacc
786 | 784,3,1,3,2,1,2,unacc
787 | 785,3,1,3,2,1,3,unacc
788 | 786,3,1,3,2,2,1,unacc
789 | 787,3,1,3,2,2,2,unacc
790 | 788,3,1,3,2,2,3,unacc
791 | 789,3,1,3,2,3,1,unacc
792 | 790,3,1,3,2,3,2,unacc
793 | 791,3,1,3,2,3,3,unacc
794 | 792,3,1,3,4,1,1,unacc
795 | 793,3,1,3,4,1,2,unacc
796 | 794,3,1,3,4,1,3,acc
797 | 795,3,1,3,4,2,1,unacc
798 | 796,3,1,3,4,2,2,unacc
799 | 797,3,1,3,4,2,3,acc
800 | 798,3,1,3,4,3,1,unacc
801 | 799,3,1,3,4,3,2,acc
802 | 800,3,1,3,4,3,3,acc
803 | 801,3,1,3,6,1,1,unacc
804 | 802,3,1,3,6,1,2,unacc
805 | 803,3,1,3,6,1,3,acc
806 | 804,3,1,3,6,2,1,unacc
807 | 805,3,1,3,6,2,2,acc
808 | 806,3,1,3,6,2,3,acc
809 | 807,3,1,3,6,3,1,unacc
810 | 808,3,1,3,6,3,2,acc
811 | 809,3,1,3,6,3,3,acc
812 | 810,3,1,4,2,1,1,unacc
813 | 811,3,1,4,2,1,2,unacc
814 | 812,3,1,4,2,1,3,unacc
815 | 813,3,1,4,2,2,1,unacc
816 | 814,3,1,4,2,2,2,unacc
817 | 815,3,1,4,2,2,3,unacc
818 | 816,3,1,4,2,3,1,unacc
819 | 817,3,1,4,2,3,2,unacc
820 | 818,3,1,4,2,3,3,unacc
821 | 819,3,1,4,4,1,1,unacc
822 | 820,3,1,4,4,1,2,unacc
823 | 821,3,1,4,4,1,3,acc
824 | 822,3,1,4,4,2,1,unacc
825 | 823,3,1,4,4,2,2,acc
826 | 824,3,1,4,4,2,3,acc
827 | 825,3,1,4,4,3,1,unacc
828 | 826,3,1,4,4,3,2,acc
829 | 827,3,1,4,4,3,3,acc
830 | 828,3,1,4,6,1,1,unacc
831 | 829,3,1,4,6,1,2,unacc
832 | 830,3,1,4,6,1,3,acc
833 | 831,3,1,4,6,2,1,unacc
834 | 832,3,1,4,6,2,2,acc
835 | 833,3,1,4,6,2,3,acc
836 | 834,3,1,4,6,3,1,unacc
837 | 835,3,1,4,6,3,2,acc
838 | 836,3,1,4,6,3,3,acc
839 | 837,3,1,5,2,1,1,unacc
840 | 838,3,1,5,2,1,2,unacc
841 | 839,3,1,5,2,1,3,unacc
842 | 840,3,1,5,2,2,1,unacc
843 | 841,3,1,5,2,2,2,unacc
844 | 842,3,1,5,2,2,3,unacc
845 | 843,3,1,5,2,3,1,unacc
846 | 844,3,1,5,2,3,2,unacc
847 | 845,3,1,5,2,3,3,unacc
848 | 846,3,1,5,4,1,1,unacc
849 | 847,3,1,5,4,1,2,unacc
850 | 848,3,1,5,4,1,3,acc
851 | 849,3,1,5,4,2,1,unacc
852 | 850,3,1,5,4,2,2,acc
853 | 851,3,1,5,4,2,3,acc
854 | 852,3,1,5,4,3,1,unacc
855 | 853,3,1,5,4,3,2,acc
856 | 854,3,1,5,4,3,3,acc
857 | 855,3,1,5,6,1,1,unacc
858 | 856,3,1,5,6,1,2,unacc
859 | 857,3,1,5,6,1,3,acc
860 | 858,3,1,5,6,2,1,unacc
861 | 859,3,1,5,6,2,2,acc
862 | 860,3,1,5,6,2,3,acc
863 | 861,3,1,5,6,3,1,unacc
864 | 862,3,1,5,6,3,2,acc
865 | 863,3,1,5,6,3,3,acc
866 | 864,2,4,2,2,1,1,unacc
867 | 865,2,4,2,2,1,2,unacc
868 | 866,2,4,2,2,1,3,unacc
869 | 867,2,4,2,2,2,1,unacc
870 | 868,2,4,2,2,2,2,unacc
871 | 869,2,4,2,2,2,3,unacc
872 | 870,2,4,2,2,3,1,unacc
873 | 871,2,4,2,2,3,2,unacc
874 | 872,2,4,2,2,3,3,unacc
875 | 873,2,4,2,4,1,1,unacc
876 | 874,2,4,2,4,1,2,unacc
877 | 875,2,4,2,4,1,3,acc
878 | 876,2,4,2,4,2,1,unacc
879 | 877,2,4,2,4,2,2,unacc
880 | 878,2,4,2,4,2,3,acc
881 | 879,2,4,2,4,3,1,unacc
882 | 880,2,4,2,4,3,2,acc
883 | 881,2,4,2,4,3,3,acc
884 | 882,2,4,2,6,1,1,unacc
885 | 883,2,4,2,6,1,2,unacc
886 | 884,2,4,2,6,1,3,unacc
887 | 885,2,4,2,6,2,1,unacc
888 | 886,2,4,2,6,2,2,unacc
889 | 887,2,4,2,6,2,3,acc
890 | 888,2,4,2,6,3,1,unacc
891 | 889,2,4,2,6,3,2,acc
892 | 890,2,4,2,6,3,3,acc
893 | 891,2,4,3,2,1,1,unacc
894 | 892,2,4,3,2,1,2,unacc
895 | 893,2,4,3,2,1,3,unacc
896 | 894,2,4,3,2,2,1,unacc
897 | 895,2,4,3,2,2,2,unacc
898 | 896,2,4,3,2,2,3,unacc
899 | 897,2,4,3,2,3,1,unacc
900 | 898,2,4,3,2,3,2,unacc
901 | 899,2,4,3,2,3,3,unacc
902 | 900,2,4,3,4,1,1,unacc
903 | 901,2,4,3,4,1,2,unacc
904 | 902,2,4,3,4,1,3,acc
905 | 903,2,4,3,4,2,1,unacc
906 | 904,2,4,3,4,2,2,unacc
907 | 905,2,4,3,4,2,3,acc
908 | 906,2,4,3,4,3,1,unacc
909 | 907,2,4,3,4,3,2,acc
910 | 908,2,4,3,4,3,3,acc
911 | 909,2,4,3,6,1,1,unacc
912 | 910,2,4,3,6,1,2,unacc
913 | 911,2,4,3,6,1,3,acc
914 | 912,2,4,3,6,2,1,unacc
915 | 913,2,4,3,6,2,2,acc
916 | 914,2,4,3,6,2,3,acc
917 | 915,2,4,3,6,3,1,unacc
918 | 916,2,4,3,6,3,2,acc
919 | 917,2,4,3,6,3,3,acc
920 | 918,2,4,4,2,1,1,unacc
921 | 919,2,4,4,2,1,2,unacc
922 | 920,2,4,4,2,1,3,unacc
923 | 921,2,4,4,2,2,1,unacc
924 | 922,2,4,4,2,2,2,unacc
925 | 923,2,4,4,2,2,3,unacc
926 | 924,2,4,4,2,3,1,unacc
927 | 925,2,4,4,2,3,2,unacc
928 | 926,2,4,4,2,3,3,unacc
929 | 927,2,4,4,4,1,1,unacc
930 | 928,2,4,4,4,1,2,unacc
931 | 929,2,4,4,4,1,3,acc
932 | 930,2,4,4,4,2,1,unacc
933 | 931,2,4,4,4,2,2,acc
934 | 932,2,4,4,4,2,3,acc
935 | 933,2,4,4,4,3,1,unacc
936 | 934,2,4,4,4,3,2,acc
937 | 935,2,4,4,4,3,3,acc
938 | 936,2,4,4,6,1,1,unacc
939 | 937,2,4,4,6,1,2,unacc
940 | 938,2,4,4,6,1,3,acc
941 | 939,2,4,4,6,2,1,unacc
942 | 940,2,4,4,6,2,2,acc
943 | 941,2,4,4,6,2,3,acc
944 | 942,2,4,4,6,3,1,unacc
945 | 943,2,4,4,6,3,2,acc
946 | 944,2,4,4,6,3,3,acc
947 | 945,2,4,5,2,1,1,unacc
948 | 946,2,4,5,2,1,2,unacc
949 | 947,2,4,5,2,1,3,unacc
950 | 948,2,4,5,2,2,1,unacc
951 | 949,2,4,5,2,2,2,unacc
952 | 950,2,4,5,2,2,3,unacc
953 | 951,2,4,5,2,3,1,unacc
954 | 952,2,4,5,2,3,2,unacc
955 | 953,2,4,5,2,3,3,unacc
956 | 954,2,4,5,4,1,1,unacc
957 | 955,2,4,5,4,1,2,unacc
958 | 956,2,4,5,4,1,3,acc
959 | 957,2,4,5,4,2,1,unacc
960 | 958,2,4,5,4,2,2,acc
961 | 959,2,4,5,4,2,3,acc
962 | 960,2,4,5,4,3,1,unacc
963 | 961,2,4,5,4,3,2,acc
964 | 962,2,4,5,4,3,3,acc
965 | 963,2,4,5,6,1,1,unacc
966 | 964,2,4,5,6,1,2,unacc
967 | 965,2,4,5,6,1,3,acc
968 | 966,2,4,5,6,2,1,unacc
969 | 967,2,4,5,6,2,2,acc
970 | 968,2,4,5,6,2,3,acc
971 | 969,2,4,5,6,3,1,unacc
972 | 970,2,4,5,6,3,2,acc
973 | 971,2,4,5,6,3,3,acc
974 | 972,2,3,2,2,1,1,unacc
975 | 973,2,3,2,2,1,2,unacc
976 | 974,2,3,2,2,1,3,unacc
977 | 975,2,3,2,2,2,1,unacc
978 | 976,2,3,2,2,2,2,unacc
979 | 977,2,3,2,2,2,3,unacc
980 | 978,2,3,2,2,3,1,unacc
981 | 979,2,3,2,2,3,2,unacc
982 | 980,2,3,2,2,3,3,unacc
983 | 981,2,3,2,4,1,1,unacc
984 | 982,2,3,2,4,1,2,unacc
985 | 983,2,3,2,4,1,3,acc
986 | 984,2,3,2,4,2,1,unacc
987 | 985,2,3,2,4,2,2,unacc
988 | 986,2,3,2,4,2,3,acc
989 | 987,2,3,2,4,3,1,unacc
990 | 988,2,3,2,4,3,2,acc
991 | 989,2,3,2,4,3,3,acc
992 | 990,2,3,2,6,1,1,unacc
993 | 991,2,3,2,6,1,2,unacc
994 | 992,2,3,2,6,1,3,unacc
995 | 993,2,3,2,6,2,1,unacc
996 | 994,2,3,2,6,2,2,unacc
997 | 995,2,3,2,6,2,3,acc
998 | 996,2,3,2,6,3,1,unacc
999 | 997,2,3,2,6,3,2,acc
1000 | 998,2,3,2,6,3,3,acc
1001 | 999,2,3,3,2,1,1,unacc
1002 | 1000,2,3,3,2,1,2,unacc
1003 | 1001,2,3,3,2,1,3,unacc
1004 | 1002,2,3,3,2,2,1,unacc
1005 | 1003,2,3,3,2,2,2,unacc
1006 | 1004,2,3,3,2,2,3,unacc
1007 | 1005,2,3,3,2,3,1,unacc
1008 | 1006,2,3,3,2,3,2,unacc
1009 | 1007,2,3,3,2,3,3,unacc
1010 | 1008,2,3,3,4,1,1,unacc
1011 | 1009,2,3,3,4,1,2,unacc
1012 | 1010,2,3,3,4,1,3,acc
1013 | 1011,2,3,3,4,2,1,unacc
1014 | 1012,2,3,3,4,2,2,unacc
1015 | 1013,2,3,3,4,2,3,acc
1016 | 1014,2,3,3,4,3,1,unacc
1017 | 1015,2,3,3,4,3,2,acc
1018 | 1016,2,3,3,4,3,3,acc
1019 | 1017,2,3,3,6,1,1,unacc
1020 | 1018,2,3,3,6,1,2,unacc
1021 | 1019,2,3,3,6,1,3,acc
1022 | 1020,2,3,3,6,2,1,unacc
1023 | 1021,2,3,3,6,2,2,acc
1024 | 1022,2,3,3,6,2,3,acc
1025 | 1023,2,3,3,6,3,1,unacc
1026 | 1024,2,3,3,6,3,2,acc
1027 | 1025,2,3,3,6,3,3,acc
1028 | 1026,2,3,4,2,1,1,unacc
1029 | 1027,2,3,4,2,1,2,unacc
1030 | 1028,2,3,4,2,1,3,unacc
1031 | 1029,2,3,4,2,2,1,unacc
1032 | 1030,2,3,4,2,2,2,unacc
1033 | 1031,2,3,4,2,2,3,unacc
1034 | 1032,2,3,4,2,3,1,unacc
1035 | 1033,2,3,4,2,3,2,unacc
1036 | 1034,2,3,4,2,3,3,unacc
1037 | 1035,2,3,4,4,1,1,unacc
1038 | 1036,2,3,4,4,1,2,unacc
1039 | 1037,2,3,4,4,1,3,acc
1040 | 1038,2,3,4,4,2,1,unacc
1041 | 1039,2,3,4,4,2,2,acc
1042 | 1040,2,3,4,4,2,3,acc
1043 | 1041,2,3,4,4,3,1,unacc
1044 | 1042,2,3,4,4,3,2,acc
1045 | 1043,2,3,4,4,3,3,acc
1046 | 1044,2,3,4,6,1,1,unacc
1047 | 1045,2,3,4,6,1,2,unacc
1048 | 1046,2,3,4,6,1,3,acc
1049 | 1047,2,3,4,6,2,1,unacc
1050 | 1048,2,3,4,6,2,2,acc
1051 | 1049,2,3,4,6,2,3,acc
1052 | 1050,2,3,4,6,3,1,unacc
1053 | 1051,2,3,4,6,3,2,acc
1054 | 1052,2,3,4,6,3,3,acc
1055 | 1053,2,3,5,2,1,1,unacc
1056 | 1054,2,3,5,2,1,2,unacc
1057 | 1055,2,3,5,2,1,3,unacc
1058 | 1056,2,3,5,2,2,1,unacc
1059 | 1057,2,3,5,2,2,2,unacc
1060 | 1058,2,3,5,2,2,3,unacc
1061 | 1059,2,3,5,2,3,1,unacc
1062 | 1060,2,3,5,2,3,2,unacc
1063 | 1061,2,3,5,2,3,3,unacc
1064 | 1062,2,3,5,4,1,1,unacc
1065 | 1063,2,3,5,4,1,2,unacc
1066 | 1064,2,3,5,4,1,3,acc
1067 | 1065,2,3,5,4,2,1,unacc
1068 | 1066,2,3,5,4,2,2,acc
1069 | 1067,2,3,5,4,2,3,acc
1070 | 1068,2,3,5,4,3,1,unacc
1071 | 1069,2,3,5,4,3,2,acc
1072 | 1070,2,3,5,4,3,3,acc
1073 | 1071,2,3,5,6,1,1,unacc
1074 | 1072,2,3,5,6,1,2,unacc
1075 | 1073,2,3,5,6,1,3,acc
1076 | 1074,2,3,5,6,2,1,unacc
1077 | 1075,2,3,5,6,2,2,acc
1078 | 1076,2,3,5,6,2,3,acc
1079 | 1077,2,3,5,6,3,1,unacc
1080 | 1078,2,3,5,6,3,2,acc
1081 | 1079,2,3,5,6,3,3,acc
1082 | 1080,2,2,2,2,1,1,unacc
1083 | 1081,2,2,2,2,1,2,unacc
1084 | 1082,2,2,2,2,1,3,unacc
1085 | 1083,2,2,2,2,2,1,unacc
1086 | 1084,2,2,2,2,2,2,unacc
1087 | 1085,2,2,2,2,2,3,unacc
1088 | 1086,2,2,2,2,3,1,unacc
1089 | 1087,2,2,2,2,3,2,unacc
1090 | 1088,2,2,2,2,3,3,unacc
1091 | 1089,2,2,2,4,1,1,unacc
1092 | 1090,2,2,2,4,1,2,acc
1093 | 1091,2,2,2,4,1,3,acc
1094 | 1092,2,2,2,4,2,1,unacc
1095 | 1093,2,2,2,4,2,2,acc
1096 | 1094,2,2,2,4,2,3,acc
1097 | 1095,2,2,2,4,3,1,unacc
1098 | 1096,2,2,2,4,3,2,acc
1099 | 1097,2,2,2,4,3,3,vgood
1100 | 1098,2,2,2,6,1,1,unacc
1101 | 1099,2,2,2,6,1,2,unacc
1102 | 1100,2,2,2,6,1,3,unacc
1103 | 1101,2,2,2,6,2,1,unacc
1104 | 1102,2,2,2,6,2,2,acc
1105 | 1103,2,2,2,6,2,3,acc
1106 | 1104,2,2,2,6,3,1,unacc
1107 | 1105,2,2,2,6,3,2,acc
1108 | 1106,2,2,2,6,3,3,vgood
1109 | 1107,2,2,3,2,1,1,unacc
1110 | 1108,2,2,3,2,1,2,unacc
1111 | 1109,2,2,3,2,1,3,unacc
1112 | 1110,2,2,3,2,2,1,unacc
1113 | 1111,2,2,3,2,2,2,unacc
1114 | 1112,2,2,3,2,2,3,unacc
1115 | 1113,2,2,3,2,3,1,unacc
1116 | 1114,2,2,3,2,3,2,unacc
1117 | 1115,2,2,3,2,3,3,unacc
1118 | 1116,2,2,3,4,1,1,unacc
1119 | 1117,2,2,3,4,1,2,acc
1120 | 1118,2,2,3,4,1,3,acc
1121 | 1119,2,2,3,4,2,1,unacc
1122 | 1120,2,2,3,4,2,2,acc
1123 | 1121,2,2,3,4,2,3,acc
1124 | 1122,2,2,3,4,3,1,unacc
1125 | 1123,2,2,3,4,3,2,acc
1126 | 1124,2,2,3,4,3,3,vgood
1127 | 1125,2,2,3,6,1,1,unacc
1128 | 1126,2,2,3,6,1,2,acc
1129 | 1127,2,2,3,6,1,3,acc
1130 | 1128,2,2,3,6,2,1,unacc
1131 | 1129,2,2,3,6,2,2,acc
1132 | 1130,2,2,3,6,2,3,vgood
1133 | 1131,2,2,3,6,3,1,unacc
1134 | 1132,2,2,3,6,3,2,acc
1135 | 1133,2,2,3,6,3,3,vgood
1136 | 1134,2,2,4,2,1,1,unacc
1137 | 1135,2,2,4,2,1,2,unacc
1138 | 1136,2,2,4,2,1,3,unacc
1139 | 1137,2,2,4,2,2,1,unacc
1140 | 1138,2,2,4,2,2,2,unacc
1141 | 1139,2,2,4,2,2,3,unacc
1142 | 1140,2,2,4,2,3,1,unacc
1143 | 1141,2,2,4,2,3,2,unacc
1144 | 1142,2,2,4,2,3,3,unacc
1145 | 1143,2,2,4,4,1,1,unacc
1146 | 1144,2,2,4,4,1,2,acc
1147 | 1145,2,2,4,4,1,3,acc
1148 | 1146,2,2,4,4,2,1,unacc
1149 | 1147,2,2,4,4,2,2,acc
1150 | 1148,2,2,4,4,2,3,vgood
1151 | 1149,2,2,4,4,3,1,unacc
1152 | 1150,2,2,4,4,3,2,acc
1153 | 1151,2,2,4,4,3,3,vgood
1154 | 1152,2,2,4,6,1,1,unacc
1155 | 1153,2,2,4,6,1,2,acc
1156 | 1154,2,2,4,6,1,3,acc
1157 | 1155,2,2,4,6,2,1,unacc
1158 | 1156,2,2,4,6,2,2,acc
1159 | 1157,2,2,4,6,2,3,vgood
1160 | 1158,2,2,4,6,3,1,unacc
1161 | 1159,2,2,4,6,3,2,acc
1162 | 1160,2,2,4,6,3,3,vgood
1163 | 1161,2,2,5,2,1,1,unacc
1164 | 1162,2,2,5,2,1,2,unacc
1165 | 1163,2,2,5,2,1,3,unacc
1166 | 1164,2,2,5,2,2,1,unacc
1167 | 1165,2,2,5,2,2,2,unacc
1168 | 1166,2,2,5,2,2,3,unacc
1169 | 1167,2,2,5,2,3,1,unacc
1170 | 1168,2,2,5,2,3,2,unacc
1171 | 1169,2,2,5,2,3,3,unacc
1172 | 1170,2,2,5,4,1,1,unacc
1173 | 1171,2,2,5,4,1,2,acc
1174 | 1172,2,2,5,4,1,3,acc
1175 | 1173,2,2,5,4,2,1,unacc
1176 | 1174,2,2,5,4,2,2,acc
1177 | 1175,2,2,5,4,2,3,vgood
1178 | 1176,2,2,5,4,3,1,unacc
1179 | 1177,2,2,5,4,3,2,acc
1180 | 1178,2,2,5,4,3,3,vgood
1181 | 1179,2,2,5,6,1,1,unacc
1182 | 1180,2,2,5,6,1,2,acc
1183 | 1181,2,2,5,6,1,3,acc
1184 | 1182,2,2,5,6,2,1,unacc
1185 | 1183,2,2,5,6,2,2,acc
1186 | 1184,2,2,5,6,2,3,vgood
1187 | 1185,2,2,5,6,3,1,unacc
1188 | 1186,2,2,5,6,3,2,acc
1189 | 1187,2,2,5,6,3,3,vgood
1190 | 1188,2,1,2,2,1,1,unacc
1191 | 1189,2,1,2,2,1,2,unacc
1192 | 1190,2,1,2,2,1,3,unacc
1193 | 1191,2,1,2,2,2,1,unacc
1194 | 1192,2,1,2,2,2,2,unacc
1195 | 1193,2,1,2,2,2,3,unacc
1196 | 1194,2,1,2,2,3,1,unacc
1197 | 1195,2,1,2,2,3,2,unacc
1198 | 1196,2,1,2,2,3,3,unacc
1199 | 1197,2,1,2,4,1,1,unacc
1200 | 1198,2,1,2,4,1,2,acc
1201 | 1199,2,1,2,4,1,3,good
1202 | 1200,2,1,2,4,2,1,unacc
1203 | 1201,2,1,2,4,2,2,acc
1204 | 1202,2,1,2,4,2,3,good
1205 | 1203,2,1,2,4,3,1,unacc
1206 | 1204,2,1,2,4,3,2,good
1207 | 1205,2,1,2,4,3,3,vgood
1208 | 1206,2,1,2,6,1,1,unacc
1209 | 1207,2,1,2,6,1,2,unacc
1210 | 1208,2,1,2,6,1,3,unacc
1211 | 1209,2,1,2,6,2,1,unacc
1212 | 1210,2,1,2,6,2,2,acc
1213 | 1211,2,1,2,6,2,3,good
1214 | 1212,2,1,2,6,3,1,unacc
1215 | 1213,2,1,2,6,3,2,good
1216 | 1214,2,1,2,6,3,3,vgood
1217 | 1215,2,1,3,2,1,1,unacc
1218 | 1216,2,1,3,2,1,2,unacc
1219 | 1217,2,1,3,2,1,3,unacc
1220 | 1218,2,1,3,2,2,1,unacc
1221 | 1219,2,1,3,2,2,2,unacc
1222 | 1220,2,1,3,2,2,3,unacc
1223 | 1221,2,1,3,2,3,1,unacc
1224 | 1222,2,1,3,2,3,2,unacc
1225 | 1223,2,1,3,2,3,3,unacc
1226 | 1224,2,1,3,4,1,1,unacc
1227 | 1225,2,1,3,4,1,2,acc
1228 | 1226,2,1,3,4,1,3,good
1229 | 1227,2,1,3,4,2,1,unacc
1230 | 1228,2,1,3,4,2,2,acc
1231 | 1229,2,1,3,4,2,3,good
1232 | 1230,2,1,3,4,3,1,unacc
1233 | 1231,2,1,3,4,3,2,good
1234 | 1232,2,1,3,4,3,3,vgood
1235 | 1233,2,1,3,6,1,1,unacc
1236 | 1234,2,1,3,6,1,2,acc
1237 | 1235,2,1,3,6,1,3,good
1238 | 1236,2,1,3,6,2,1,unacc
1239 | 1237,2,1,3,6,2,2,good
1240 | 1238,2,1,3,6,2,3,vgood
1241 | 1239,2,1,3,6,3,1,unacc
1242 | 1240,2,1,3,6,3,2,good
1243 | 1241,2,1,3,6,3,3,vgood
1244 | 1242,2,1,4,2,1,1,unacc
1245 | 1243,2,1,4,2,1,2,unacc
1246 | 1244,2,1,4,2,1,3,unacc
1247 | 1245,2,1,4,2,2,1,unacc
1248 | 1246,2,1,4,2,2,2,unacc
1249 | 1247,2,1,4,2,2,3,unacc
1250 | 1248,2,1,4,2,3,1,unacc
1251 | 1249,2,1,4,2,3,2,unacc
1252 | 1250,2,1,4,2,3,3,unacc
1253 | 1251,2,1,4,4,1,1,unacc
1254 | 1252,2,1,4,4,1,2,acc
1255 | 1253,2,1,4,4,1,3,good
1256 | 1254,2,1,4,4,2,1,unacc
1257 | 1255,2,1,4,4,2,2,good
1258 | 1256,2,1,4,4,2,3,vgood
1259 | 1257,2,1,4,4,3,1,unacc
1260 | 1258,2,1,4,4,3,2,good
1261 | 1259,2,1,4,4,3,3,vgood
1262 | 1260,2,1,4,6,1,1,unacc
1263 | 1261,2,1,4,6,1,2,acc
1264 | 1262,2,1,4,6,1,3,good
1265 | 1263,2,1,4,6,2,1,unacc
1266 | 1264,2,1,4,6,2,2,good
1267 | 1265,2,1,4,6,2,3,vgood
1268 | 1266,2,1,4,6,3,1,unacc
1269 | 1267,2,1,4,6,3,2,good
1270 | 1268,2,1,4,6,3,3,vgood
1271 | 1269,2,1,5,2,1,1,unacc
1272 | 1270,2,1,5,2,1,2,unacc
1273 | 1271,2,1,5,2,1,3,unacc
1274 | 1272,2,1,5,2,2,1,unacc
1275 | 1273,2,1,5,2,2,2,unacc
1276 | 1274,2,1,5,2,2,3,unacc
1277 | 1275,2,1,5,2,3,1,unacc
1278 | 1276,2,1,5,2,3,2,unacc
1279 | 1277,2,1,5,2,3,3,unacc
1280 | 1278,2,1,5,4,1,1,unacc
1281 | 1279,2,1,5,4,1,2,acc
1282 | 1280,2,1,5,4,1,3,good
1283 | 1281,2,1,5,4,2,1,unacc
1284 | 1282,2,1,5,4,2,2,good
1285 | 1283,2,1,5,4,2,3,vgood
1286 | 1284,2,1,5,4,3,1,unacc
1287 | 1285,2,1,5,4,3,2,good
1288 | 1286,2,1,5,4,3,3,vgood
1289 | 1287,2,1,5,6,1,1,unacc
1290 | 1288,2,1,5,6,1,2,acc
1291 | 1289,2,1,5,6,1,3,good
1292 | 1290,2,1,5,6,2,1,unacc
1293 | 1291,2,1,5,6,2,2,good
1294 | 1292,2,1,5,6,2,3,vgood
1295 | 1293,2,1,5,6,3,1,unacc
1296 | 1294,2,1,5,6,3,2,good
1297 | 1295,2,1,5,6,3,3,vgood
1298 | 1296,1,4,2,2,1,1,unacc
1299 | 1297,1,4,2,2,1,2,unacc
1300 | 1298,1,4,2,2,1,3,unacc
1301 | 1299,1,4,2,2,2,1,unacc
1302 | 1300,1,4,2,2,2,2,unacc
1303 | 1301,1,4,2,2,2,3,unacc
1304 | 1302,1,4,2,2,3,1,unacc
1305 | 1303,1,4,2,2,3,2,unacc
1306 | 1304,1,4,2,2,3,3,unacc
1307 | 1305,1,4,2,4,1,1,unacc
1308 | 1306,1,4,2,4,1,2,unacc
1309 | 1307,1,4,2,4,1,3,acc
1310 | 1308,1,4,2,4,2,1,unacc
1311 | 1309,1,4,2,4,2,2,unacc
1312 | 1310,1,4,2,4,2,3,acc
1313 | 1311,1,4,2,4,3,1,unacc
1314 | 1312,1,4,2,4,3,2,acc
1315 | 1313,1,4,2,4,3,3,acc
1316 | 1314,1,4,2,6,1,1,unacc
1317 | 1315,1,4,2,6,1,2,unacc
1318 | 1316,1,4,2,6,1,3,unacc
1319 | 1317,1,4,2,6,2,1,unacc
1320 | 1318,1,4,2,6,2,2,unacc
1321 | 1319,1,4,2,6,2,3,acc
1322 | 1320,1,4,2,6,3,1,unacc
1323 | 1321,1,4,2,6,3,2,acc
1324 | 1322,1,4,2,6,3,3,acc
1325 | 1323,1,4,3,2,1,1,unacc
1326 | 1324,1,4,3,2,1,2,unacc
1327 | 1325,1,4,3,2,1,3,unacc
1328 | 1326,1,4,3,2,2,1,unacc
1329 | 1327,1,4,3,2,2,2,unacc
1330 | 1328,1,4,3,2,2,3,unacc
1331 | 1329,1,4,3,2,3,1,unacc
1332 | 1330,1,4,3,2,3,2,unacc
1333 | 1331,1,4,3,2,3,3,unacc
1334 | 1332,1,4,3,4,1,1,unacc
1335 | 1333,1,4,3,4,1,2,unacc
1336 | 1334,1,4,3,4,1,3,acc
1337 | 1335,1,4,3,4,2,1,unacc
1338 | 1336,1,4,3,4,2,2,unacc
1339 | 1337,1,4,3,4,2,3,acc
1340 | 1338,1,4,3,4,3,1,unacc
1341 | 1339,1,4,3,4,3,2,acc
1342 | 1340,1,4,3,4,3,3,acc
1343 | 1341,1,4,3,6,1,1,unacc
1344 | 1342,1,4,3,6,1,2,unacc
1345 | 1343,1,4,3,6,1,3,acc
1346 | 1344,1,4,3,6,2,1,unacc
1347 | 1345,1,4,3,6,2,2,acc
1348 | 1346,1,4,3,6,2,3,acc
1349 | 1347,1,4,3,6,3,1,unacc
1350 | 1348,1,4,3,6,3,2,acc
1351 | 1349,1,4,3,6,3,3,acc
1352 | 1350,1,4,4,2,1,1,unacc
1353 | 1351,1,4,4,2,1,2,unacc
1354 | 1352,1,4,4,2,1,3,unacc
1355 | 1353,1,4,4,2,2,1,unacc
1356 | 1354,1,4,4,2,2,2,unacc
1357 | 1355,1,4,4,2,2,3,unacc
1358 | 1356,1,4,4,2,3,1,unacc
1359 | 1357,1,4,4,2,3,2,unacc
1360 | 1358,1,4,4,2,3,3,unacc
1361 | 1359,1,4,4,4,1,1,unacc
1362 | 1360,1,4,4,4,1,2,unacc
1363 | 1361,1,4,4,4,1,3,acc
1364 | 1362,1,4,4,4,2,1,unacc
1365 | 1363,1,4,4,4,2,2,acc
1366 | 1364,1,4,4,4,2,3,acc
1367 | 1365,1,4,4,4,3,1,unacc
1368 | 1366,1,4,4,4,3,2,acc
1369 | 1367,1,4,4,4,3,3,acc
1370 | 1368,1,4,4,6,1,1,unacc
1371 | 1369,1,4,4,6,1,2,unacc
1372 | 1370,1,4,4,6,1,3,acc
1373 | 1371,1,4,4,6,2,1,unacc
1374 | 1372,1,4,4,6,2,2,acc
1375 | 1373,1,4,4,6,2,3,acc
1376 | 1374,1,4,4,6,3,1,unacc
1377 | 1375,1,4,4,6,3,2,acc
1378 | 1376,1,4,4,6,3,3,acc
1379 | 1377,1,4,5,2,1,1,unacc
1380 | 1378,1,4,5,2,1,2,unacc
1381 | 1379,1,4,5,2,1,3,unacc
1382 | 1380,1,4,5,2,2,1,unacc
1383 | 1381,1,4,5,2,2,2,unacc
1384 | 1382,1,4,5,2,2,3,unacc
1385 | 1383,1,4,5,2,3,1,unacc
1386 | 1384,1,4,5,2,3,2,unacc
1387 | 1385,1,4,5,2,3,3,unacc
1388 | 1386,1,4,5,4,1,1,unacc
1389 | 1387,1,4,5,4,1,2,unacc
1390 | 1388,1,4,5,4,1,3,acc
1391 | 1389,1,4,5,4,2,1,unacc
1392 | 1390,1,4,5,4,2,2,acc
1393 | 1391,1,4,5,4,2,3,acc
1394 | 1392,1,4,5,4,3,1,unacc
1395 | 1393,1,4,5,4,3,2,acc
1396 | 1394,1,4,5,4,3,3,acc
1397 | 1395,1,4,5,6,1,1,unacc
1398 | 1396,1,4,5,6,1,2,unacc
1399 | 1397,1,4,5,6,1,3,acc
1400 | 1398,1,4,5,6,2,1,unacc
1401 | 1399,1,4,5,6,2,2,acc
1402 | 1400,1,4,5,6,2,3,acc
1403 | 1401,1,4,5,6,3,1,unacc
1404 | 1402,1,4,5,6,3,2,acc
1405 | 1403,1,4,5,6,3,3,acc
1406 | 1404,1,3,2,2,1,1,unacc
1407 | 1405,1,3,2,2,1,2,unacc
1408 | 1406,1,3,2,2,1,3,unacc
1409 | 1407,1,3,2,2,2,1,unacc
1410 | 1408,1,3,2,2,2,2,unacc
1411 | 1409,1,3,2,2,2,3,unacc
1412 | 1410,1,3,2,2,3,1,unacc
1413 | 1411,1,3,2,2,3,2,unacc
1414 | 1412,1,3,2,2,3,3,unacc
1415 | 1413,1,3,2,4,1,1,unacc
1416 | 1414,1,3,2,4,1,2,acc
1417 | 1415,1,3,2,4,1,3,acc
1418 | 1416,1,3,2,4,2,1,unacc
1419 | 1417,1,3,2,4,2,2,acc
1420 | 1418,1,3,2,4,2,3,acc
1421 | 1419,1,3,2,4,3,1,unacc
1422 | 1420,1,3,2,4,3,2,acc
1423 | 1421,1,3,2,4,3,3,vgood
1424 | 1422,1,3,2,6,1,1,unacc
1425 | 1423,1,3,2,6,1,2,unacc
1426 | 1424,1,3,2,6,1,3,unacc
1427 | 1425,1,3,2,6,2,1,unacc
1428 | 1426,1,3,2,6,2,2,acc
1429 | 1427,1,3,2,6,2,3,acc
1430 | 1428,1,3,2,6,3,1,unacc
1431 | 1429,1,3,2,6,3,2,acc
1432 | 1430,1,3,2,6,3,3,vgood
1433 | 1431,1,3,3,2,1,1,unacc
1434 | 1432,1,3,3,2,1,2,unacc
1435 | 1433,1,3,3,2,1,3,unacc
1436 | 1434,1,3,3,2,2,1,unacc
1437 | 1435,1,3,3,2,2,2,unacc
1438 | 1436,1,3,3,2,2,3,unacc
1439 | 1437,1,3,3,2,3,1,unacc
1440 | 1438,1,3,3,2,3,2,unacc
1441 | 1439,1,3,3,2,3,3,unacc
1442 | 1440,1,3,3,4,1,1,unacc
1443 | 1441,1,3,3,4,1,2,acc
1444 | 1442,1,3,3,4,1,3,acc
1445 | 1443,1,3,3,4,2,1,unacc
1446 | 1444,1,3,3,4,2,2,acc
1447 | 1445,1,3,3,4,2,3,acc
1448 | 1446,1,3,3,4,3,1,unacc
1449 | 1447,1,3,3,4,3,2,acc
1450 | 1448,1,3,3,4,3,3,vgood
1451 | 1449,1,3,3,6,1,1,unacc
1452 | 1450,1,3,3,6,1,2,acc
1453 | 1451,1,3,3,6,1,3,acc
1454 | 1452,1,3,3,6,2,1,unacc
1455 | 1453,1,3,3,6,2,2,acc
1456 | 1454,1,3,3,6,2,3,vgood
1457 | 1455,1,3,3,6,3,1,unacc
1458 | 1456,1,3,3,6,3,2,acc
1459 | 1457,1,3,3,6,3,3,vgood
1460 | 1458,1,3,4,2,1,1,unacc
1461 | 1459,1,3,4,2,1,2,unacc
1462 | 1460,1,3,4,2,1,3,unacc
1463 | 1461,1,3,4,2,2,1,unacc
1464 | 1462,1,3,4,2,2,2,unacc
1465 | 1463,1,3,4,2,2,3,unacc
1466 | 1464,1,3,4,2,3,1,unacc
1467 | 1465,1,3,4,2,3,2,unacc
1468 | 1466,1,3,4,2,3,3,unacc
1469 | 1467,1,3,4,4,1,1,unacc
1470 | 1468,1,3,4,4,1,2,acc
1471 | 1469,1,3,4,4,1,3,acc
1472 | 1470,1,3,4,4,2,1,unacc
1473 | 1471,1,3,4,4,2,2,acc
1474 | 1472,1,3,4,4,2,3,vgood
1475 | 1473,1,3,4,4,3,1,unacc
1476 | 1474,1,3,4,4,3,2,acc
1477 | 1475,1,3,4,4,3,3,vgood
1478 | 1476,1,3,4,6,1,1,unacc
1479 | 1477,1,3,4,6,1,2,acc
1480 | 1478,1,3,4,6,1,3,acc
1481 | 1479,1,3,4,6,2,1,unacc
1482 | 1480,1,3,4,6,2,2,acc
1483 | 1481,1,3,4,6,2,3,vgood
1484 | 1482,1,3,4,6,3,1,unacc
1485 | 1483,1,3,4,6,3,2,acc
1486 | 1484,1,3,4,6,3,3,vgood
1487 | 1485,1,3,5,2,1,1,unacc
1488 | 1486,1,3,5,2,1,2,unacc
1489 | 1487,1,3,5,2,1,3,unacc
1490 | 1488,1,3,5,2,2,1,unacc
1491 | 1489,1,3,5,2,2,2,unacc
1492 | 1490,1,3,5,2,2,3,unacc
1493 | 1491,1,3,5,2,3,1,unacc
1494 | 1492,1,3,5,2,3,2,unacc
1495 | 1493,1,3,5,2,3,3,unacc
1496 | 1494,1,3,5,4,1,1,unacc
1497 | 1495,1,3,5,4,1,2,acc
1498 | 1496,1,3,5,4,1,3,acc
1499 | 1497,1,3,5,4,2,1,unacc
1500 | 1498,1,3,5,4,2,2,acc
1501 | 1499,1,3,5,4,2,3,vgood
1502 | 1500,1,3,5,4,3,1,unacc
1503 | 1501,1,3,5,4,3,2,acc
1504 | 1502,1,3,5,4,3,3,vgood
1505 | 1503,1,3,5,6,1,1,unacc
1506 | 1504,1,3,5,6,1,2,acc
1507 | 1505,1,3,5,6,1,3,acc
1508 | 1506,1,3,5,6,2,1,unacc
1509 | 1507,1,3,5,6,2,2,acc
1510 | 1508,1,3,5,6,2,3,vgood
1511 | 1509,1,3,5,6,3,1,unacc
1512 | 1510,1,3,5,6,3,2,acc
1513 | 1511,1,3,5,6,3,3,vgood
1514 | 1512,1,2,2,2,1,1,unacc
1515 | 1513,1,2,2,2,1,2,unacc
1516 | 1514,1,2,2,2,1,3,unacc
1517 | 1515,1,2,2,2,2,1,unacc
1518 | 1516,1,2,2,2,2,2,unacc
1519 | 1517,1,2,2,2,2,3,unacc
1520 | 1518,1,2,2,2,3,1,unacc
1521 | 1519,1,2,2,2,3,2,unacc
1522 | 1520,1,2,2,2,3,3,unacc
1523 | 1521,1,2,2,4,1,1,unacc
1524 | 1522,1,2,2,4,1,2,acc
1525 | 1523,1,2,2,4,1,3,good
1526 | 1524,1,2,2,4,2,1,unacc
1527 | 1525,1,2,2,4,2,2,acc
1528 | 1526,1,2,2,4,2,3,good
1529 | 1527,1,2,2,4,3,1,unacc
1530 | 1528,1,2,2,4,3,2,good
1531 | 1529,1,2,2,4,3,3,vgood
1532 | 1530,1,2,2,6,1,1,unacc
1533 | 1531,1,2,2,6,1,2,unacc
1534 | 1532,1,2,2,6,1,3,unacc
1535 | 1533,1,2,2,6,2,1,unacc
1536 | 1534,1,2,2,6,2,2,acc
1537 | 1535,1,2,2,6,2,3,good
1538 | 1536,1,2,2,6,3,1,unacc
1539 | 1537,1,2,2,6,3,2,good
1540 | 1538,1,2,2,6,3,3,vgood
1541 | 1539,1,2,3,2,1,1,unacc
1542 | 1540,1,2,3,2,1,2,unacc
1543 | 1541,1,2,3,2,1,3,unacc
1544 | 1542,1,2,3,2,2,1,unacc
1545 | 1543,1,2,3,2,2,2,unacc
1546 | 1544,1,2,3,2,2,3,unacc
1547 | 1545,1,2,3,2,3,1,unacc
1548 | 1546,1,2,3,2,3,2,unacc
1549 | 1547,1,2,3,2,3,3,unacc
1550 | 1548,1,2,3,4,1,1,unacc
1551 | 1549,1,2,3,4,1,2,acc
1552 | 1550,1,2,3,4,1,3,good
1553 | 1551,1,2,3,4,2,1,unacc
1554 | 1552,1,2,3,4,2,2,acc
1555 | 1553,1,2,3,4,2,3,good
1556 | 1554,1,2,3,4,3,1,unacc
1557 | 1555,1,2,3,4,3,2,good
1558 | 1556,1,2,3,4,3,3,vgood
1559 | 1557,1,2,3,6,1,1,unacc
1560 | 1558,1,2,3,6,1,2,acc
1561 | 1559,1,2,3,6,1,3,good
1562 | 1560,1,2,3,6,2,1,unacc
1563 | 1561,1,2,3,6,2,2,good
1564 | 1562,1,2,3,6,2,3,vgood
1565 | 1563,1,2,3,6,3,1,unacc
1566 | 1564,1,2,3,6,3,2,good
1567 | 1565,1,2,3,6,3,3,vgood
1568 | 1566,1,2,4,2,1,1,unacc
1569 | 1567,1,2,4,2,1,2,unacc
1570 | 1568,1,2,4,2,1,3,unacc
1571 | 1569,1,2,4,2,2,1,unacc
1572 | 1570,1,2,4,2,2,2,unacc
1573 | 1571,1,2,4,2,2,3,unacc
1574 | 1572,1,2,4,2,3,1,unacc
1575 | 1573,1,2,4,2,3,2,unacc
1576 | 1574,1,2,4,2,3,3,unacc
1577 | 1575,1,2,4,4,1,1,unacc
1578 | 1576,1,2,4,4,1,2,acc
1579 | 1577,1,2,4,4,1,3,good
1580 | 1578,1,2,4,4,2,1,unacc
1581 | 1579,1,2,4,4,2,2,good
1582 | 1580,1,2,4,4,2,3,vgood
1583 | 1581,1,2,4,4,3,1,unacc
1584 | 1582,1,2,4,4,3,2,good
1585 | 1583,1,2,4,4,3,3,vgood
1586 | 1584,1,2,4,6,1,1,unacc
1587 | 1585,1,2,4,6,1,2,acc
1588 | 1586,1,2,4,6,1,3,good
1589 | 1587,1,2,4,6,2,1,unacc
1590 | 1588,1,2,4,6,2,2,good
1591 | 1589,1,2,4,6,2,3,vgood
1592 | 1590,1,2,4,6,3,1,unacc
1593 | 1591,1,2,4,6,3,2,good
1594 | 1592,1,2,4,6,3,3,vgood
1595 | 1593,1,2,5,2,1,1,unacc
1596 | 1594,1,2,5,2,1,2,unacc
1597 | 1595,1,2,5,2,1,3,unacc
1598 | 1596,1,2,5,2,2,1,unacc
1599 | 1597,1,2,5,2,2,2,unacc
1600 | 1598,1,2,5,2,2,3,unacc
1601 | 1599,1,2,5,2,3,1,unacc
1602 | 1600,1,2,5,2,3,2,unacc
1603 | 1601,1,2,5,2,3,3,unacc
1604 | 1602,1,2,5,4,1,1,unacc
1605 | 1603,1,2,5,4,1,2,acc
1606 | 1604,1,2,5,4,1,3,good
1607 | 1605,1,2,5,4,2,1,unacc
1608 | 1606,1,2,5,4,2,2,good
1609 | 1607,1,2,5,4,2,3,vgood
1610 | 1608,1,2,5,4,3,1,unacc
1611 | 1609,1,2,5,4,3,2,good
1612 | 1610,1,2,5,4,3,3,vgood
1613 | 1611,1,2,5,6,1,1,unacc
1614 | 1612,1,2,5,6,1,2,acc
1615 | 1613,1,2,5,6,1,3,good
1616 | 1614,1,2,5,6,2,1,unacc
1617 | 1615,1,2,5,6,2,2,good
1618 | 1616,1,2,5,6,2,3,vgood
1619 | 1617,1,2,5,6,3,1,unacc
1620 | 1618,1,2,5,6,3,2,good
1621 | 1619,1,2,5,6,3,3,vgood
1622 | 1620,1,1,2,2,1,1,unacc
1623 | 1621,1,1,2,2,1,2,unacc
1624 | 1622,1,1,2,2,1,3,unacc
1625 | 1623,1,1,2,2,2,1,unacc
1626 | 1624,1,1,2,2,2,2,unacc
1627 | 1625,1,1,2,2,2,3,unacc
1628 | 1626,1,1,2,2,3,1,unacc
1629 | 1627,1,1,2,2,3,2,unacc
1630 | 1628,1,1,2,2,3,3,unacc
1631 | 1629,1,1,2,4,1,1,unacc
1632 | 1630,1,1,2,4,1,2,acc
1633 | 1631,1,1,2,4,1,3,good
1634 | 1632,1,1,2,4,2,1,unacc
1635 | 1633,1,1,2,4,2,2,acc
1636 | 1634,1,1,2,4,2,3,good
1637 | 1635,1,1,2,4,3,1,unacc
1638 | 1636,1,1,2,4,3,2,good
1639 | 1637,1,1,2,4,3,3,vgood
1640 | 1638,1,1,2,6,1,1,unacc
1641 | 1639,1,1,2,6,1,2,unacc
1642 | 1640,1,1,2,6,1,3,unacc
1643 | 1641,1,1,2,6,2,1,unacc
1644 | 1642,1,1,2,6,2,2,acc
1645 | 1643,1,1,2,6,2,3,good
1646 | 1644,1,1,2,6,3,1,unacc
1647 | 1645,1,1,2,6,3,2,good
1648 | 1646,1,1,2,6,3,3,vgood
1649 | 1647,1,1,3,2,1,1,unacc
1650 | 1648,1,1,3,2,1,2,unacc
1651 | 1649,1,1,3,2,1,3,unacc
1652 | 1650,1,1,3,2,2,1,unacc
1653 | 1651,1,1,3,2,2,2,unacc
1654 | 1652,1,1,3,2,2,3,unacc
1655 | 1653,1,1,3,2,3,1,unacc
1656 | 1654,1,1,3,2,3,2,unacc
1657 | 1655,1,1,3,2,3,3,unacc
1658 | 1656,1,1,3,4,1,1,unacc
1659 | 1657,1,1,3,4,1,2,acc
1660 | 1658,1,1,3,4,1,3,good
1661 | 1659,1,1,3,4,2,1,unacc
1662 | 1660,1,1,3,4,2,2,acc
1663 | 1661,1,1,3,4,2,3,good
1664 | 1662,1,1,3,4,3,1,unacc
1665 | 1663,1,1,3,4,3,2,good
1666 | 1664,1,1,3,4,3,3,vgood
1667 | 1665,1,1,3,6,1,1,unacc
1668 | 1666,1,1,3,6,1,2,acc
1669 | 1667,1,1,3,6,1,3,good
1670 | 1668,1,1,3,6,2,1,unacc
1671 | 1669,1,1,3,6,2,2,good
1672 | 1670,1,1,3,6,2,3,vgood
1673 | 1671,1,1,3,6,3,1,unacc
1674 | 1672,1,1,3,6,3,2,good
1675 | 1673,1,1,3,6,3,3,vgood
1676 | 1674,1,1,4,2,1,1,unacc
1677 | 1675,1,1,4,2,1,2,unacc
1678 | 1676,1,1,4,2,1,3,unacc
1679 | 1677,1,1,4,2,2,1,unacc
1680 | 1678,1,1,4,2,2,2,unacc
1681 | 1679,1,1,4,2,2,3,unacc
1682 | 1680,1,1,4,2,3,1,unacc
1683 | 1681,1,1,4,2,3,2,unacc
1684 | 1682,1,1,4,2,3,3,unacc
1685 | 1683,1,1,4,4,1,1,unacc
1686 | 1684,1,1,4,4,1,2,acc
1687 | 1685,1,1,4,4,1,3,good
1688 | 1686,1,1,4,4,2,1,unacc
1689 | 1687,1,1,4,4,2,2,good
1690 | 1688,1,1,4,4,2,3,vgood
1691 | 1689,1,1,4,4,3,1,unacc
1692 | 1690,1,1,4,4,3,2,good
1693 | 1691,1,1,4,4,3,3,vgood
1694 | 1692,1,1,4,6,1,1,unacc
1695 | 1693,1,1,4,6,1,2,acc
1696 | 1694,1,1,4,6,1,3,good
1697 | 1695,1,1,4,6,2,1,unacc
1698 | 1696,1,1,4,6,2,2,good
1699 | 1697,1,1,4,6,2,3,vgood
1700 | 1698,1,1,4,6,3,1,unacc
1701 | 1699,1,1,4,6,3,2,good
1702 | 1700,1,1,4,6,3,3,vgood
1703 | 1701,1,1,5,2,1,1,unacc
1704 | 1702,1,1,5,2,1,2,unacc
1705 | 1703,1,1,5,2,1,3,unacc
1706 | 1704,1,1,5,2,2,1,unacc
1707 | 1705,1,1,5,2,2,2,unacc
1708 | 1706,1,1,5,2,2,3,unacc
1709 | 1707,1,1,5,2,3,1,unacc
1710 | 1708,1,1,5,2,3,2,unacc
1711 | 1709,1,1,5,2,3,3,unacc
1712 | 1710,1,1,5,4,1,1,unacc
1713 | 1711,1,1,5,4,1,2,acc
1714 | 1712,1,1,5,4,1,3,good
1715 | 1713,1,1,5,4,2,1,unacc
1716 | 1714,1,1,5,4,2,2,good
1717 | 1715,1,1,5,4,2,3,vgood
1718 | 1716,1,1,5,4,3,1,unacc
1719 | 1717,1,1,5,4,3,2,good
1720 | 1718,1,1,5,4,3,3,vgood
1721 | 1719,1,1,5,6,1,1,unacc
1722 | 1720,1,1,5,6,1,2,acc
1723 | 1721,1,1,5,6,1,3,good
1724 | 1722,1,1,5,6,2,1,unacc
1725 | 1723,1,1,5,6,2,2,good
1726 | 1724,1,1,5,6,2,3,vgood
1727 | 1725,1,1,5,6,3,1,unacc
1728 | 1726,1,1,5,6,3,2,good
1729 | 1727,1,1,5,6,3,3,vgood
1730 |
--------------------------------------------------------------------------------
/DecisionTree/decisionTree.py:
--------------------------------------------------------------------------------
1 | # This program uses the machine learning algorithm called a decision tree to classify a car as
2 | # 'unacceptable', 'acceptable', 'good', or 'very good'.
3 | # The classification is based on other features/attributes like the car's
4 | # 'buying price', 'maintenance price', 'number of doors', 'number of passengers that fit in the car', 'size of luggage capacity', and 'safety'.
5 |
6 |
7 | # The original dataset is the car evaluation dataset from http://archive.ics.uci.edu/ml/datasets/Car+Evaluation,
8 | # more specifically, it is originally a .data file, and you can download it from http://archive.ics.uci.edu/ml/machine-learning-databases/car/car.data
9 | # We will classify the quality ('values') column of the car, after converting all of the values in the original dataset to integers,
10 | # except for our dependent variable, the 'values' column. I've already done this conversion and saved the result as a new dataset.
11 | # The new dataset with the integer values is called 'car_integer_exceptY.csv'
12 | # Get the 'car_integer_exceptY.csv' dataset here: https://github.com/randerson112358/Python/blob/master/DecisionTree/car_integer_exceptY.csv
13 |
14 | # Each attribute/feature described below:
15 | # buying (buying price): vhigh (4), high (3), med (2), low (1)
16 | # maint (maintenance price): vhigh (4), high (3), med (2), low (1)
17 | # doors (number of doors): 2, 3, 4, 5-more (5)
18 | # persons (number of passengers that fit in the car): 2, 4, more (6)
19 | # lug_boot (size of luggage capacity): small (1), med (2), big (3)
20 | # safety: low (1), med (2), high (3)
21 | # values: unacc = unacceptable, acc = acceptable, good = good, vgood = very good
22 |
23 | # How did I create the new data set?
24 | # By doing the following for every attribute/feature/column in the original data set (except 'values').
25 | # The following is just one example, for the 'buying' column. Note: there are other ways to convert the categorical data to integers.
26 | #data.buying[ data.buying == 'low'] = 1
27 | #data.buying[ data.buying == 'med'] = 2
28 | #data.buying[ data.buying == 'high'] = 3
29 | #data.buying[ data.buying == 'vhigh'] = 4
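# A minimal, more idiomatic sketch of the same conversion (a sketch only, assuming
# the raw 'car.data' columns; the exact conversion script isn't shown here):
# build one mapping dict and apply it with Series.map, which avoids the pandas
# chained-assignment warnings the pattern above can trigger.
#   price_map = {'low': 1, 'med': 2, 'high': 3, 'vhigh': 4}
#   data['buying'] = data['buying'].map(price_map)
#   data['maint'] = data['maint'].map(price_map)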
30 |
31 | #read more: https://medium.com/machine-learning-guy/using-decision-tree-method-for-car-selection-problem-5272675451f9
32 |
33 | # Import the dependencies / libraries
34 | import numpy as np
35 | import pandas as pd
36 | from sklearn.tree import DecisionTreeClassifier
37 |
38 | #Create a dataframe from the cars dataset / csv file
39 | df = pd.read_csv('car_integer_exceptY.csv') # the CSV lives in this same DecisionTree folder
40 |
41 | #print the first 5 rows of the data set
42 | print(df.head())
43 |
44 | # Split your data into the independent variable(s) and dependent variable
45 | X_train = df.loc[:,'buying':'safety'] #Gets all the rows in the dataset from column 'buying' to column 'safety'
46 | Y_train = df.loc[:,'values'] #Gets all of the rows in the dataset from column 'values'
47 |
48 | # The actual decision tree classifier
49 | tree = DecisionTreeClassifier(max_leaf_nodes=3, random_state=0)
50 |
51 | # Train the model
52 | tree.fit(X_train, Y_train)
53 |
54 | # Make your prediction
55 | # input: buying=vhigh, maint=high, doors=2, persons=2, lug_boot=med, safety=high
56 | # integer conversion of input: 4,3,2,2,2,3
57 | prediction = tree.predict([[4,3,2,2,2,3]])
58 |
59 | #Print the prediction
60 | print('Printing the prediction: ')
61 | print(prediction)
62 |
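# A hedged sketch (assuming the same X_train/Y_train as above) of how you might
# gauge accuracy with a held-out test split instead of predicting a single row:
#   from sklearn.model_selection import train_test_split
#   X_tr, X_te, y_tr, y_te = train_test_split(X_train, Y_train, random_state=0)
#   tree.fit(X_tr, y_tr)
#   print(tree.score(X_te, y_te)) # mean accuracy on the held-out rows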
--------------------------------------------------------------------------------
/LinearRegression.py:
--------------------------------------------------------------------------------
1 | # Import the libraries
2 | from random import randint
3 | from sklearn.linear_model import LinearRegression
4 |
5 | # Create a range limit for random numbers in the training set, and a count of the number of rows in the training set
6 | TRAIN_SET_LIMIT = 1000
7 | TRAIN_SET_COUNT = 100
8 |
9 |
10 | # Create an empty list for the training inputs 'X' and an empty list for the training outputs 'Y'
11 | TRAIN_INPUT = list()
12 | TRAIN_OUTPUT = list()
13 |
14 | #Create and append a randomly generated data set to the input and output
15 | for i in range(TRAIN_SET_COUNT):
16 | a = randint(0, TRAIN_SET_LIMIT)
17 | b = randint(0, TRAIN_SET_LIMIT)
18 | c = randint(0, TRAIN_SET_LIMIT)
19 | #Create a linear function for the output dataset 'Y'
20 | op = (10*a) + (2*b) + (3*c)
21 | TRAIN_INPUT.append([a,b,c])
22 | TRAIN_OUTPUT.append(op)
23 |
24 | predictor = LinearRegression(n_jobs=-1) #Create a linear regression object NOTE n_jobs = the number of jobs to use for computation, -1 means use all processors
25 | predictor.fit(X=TRAIN_INPUT, y=TRAIN_OUTPUT) #fit the linear model (approximate a target function)
26 |
27 | X_TEST = [[10,20,30]] #Create our testing data set; the output should be 10*10 + 2*20 + 3*30 = 230
28 | outcome = predictor.predict(X=X_TEST) # Predict the output of the test data using the linear model
29 |
30 | coefficients = predictor.coef_ #The estimated coefficients for the linear regression problem.
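# Since the training targets follow an exact, noise-free linear function,
# the fitted coefficients should come out very close to [10, 2, 3].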
31 |
32 | print('Outcome: {} \n Coefficients: {}'.format(outcome, coefficients))
33 |
--------------------------------------------------------------------------------
/Logistic_Regression.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Untitled",
7 | "version": "0.3.2",
8 | "provenance": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | }
14 | },
15 | "cells": [
16 | {
17 | "cell_type": "markdown",
18 | "metadata": {
19 | "id": "27OPLjn73LVf",
20 | "colab_type": "text"
21 | },
22 | "source": [
23 | "\n",
24 | "# Iris Species Classification\n",
25 | "***Classify the Iris species using Logistic Regression***\n",
26 | "\n",
27 | "\n",
28 | "\n",
29 |         "This is a simple logistic regression program to classify an iris species as virginica, setosa, or versicolor, based on the petal length, petal width, sepal length, and sepal width, using a machine learning algorithm called Logistic Regression.\n",
30 | "\n",
31 | "\n",
32 | "\n",
33 | "**Resources:**\n",
34 | "\n",
35 | " (1) https://towardsdatascience.com/building-a-logistic-regression-in-python-301d27367c24\n",
36 | " \n",
37 | " (2) https://towardsdatascience.com/logistic-regression-a-simplified-approach-using-python-c4bc81a87c31\n",
38 | " \n",
39 | " (3) https://scikit-learn.org/stable/modules/generated/sklearn.metrics.classification_report.html\n",
40 | "\n",
41 | "\n"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "metadata": {
47 | "id": "wv9Yp1dSs9Zn",
48 | "colab_type": "code",
49 | "colab": {}
50 | },
51 | "source": [
52 | "\"\"\"\n",
53 | "This Is A Simple Logistic Regression Program To Classify Iris Species\n",
54 | "\n",
55 | "Resources:\n",
56 | " (1) https://towardsdatascience.com/building-a-logistic-regression-in-python-301d27367c24\n",
57 | " (2) https://towardsdatascience.com/logistic-regression-a-simplified-approach-using-python-c4bc81a87c31\n",
58 | " (3) https://scikit-learn.org/stable/modules/generated/sklearn.metrics.classification_report.html\n",
59 | "\"\"\"\n",
60 | "\n",
61 | "\n",
62 | "# Import the dependencies\n",
63 | "import matplotlib.pyplot as plt\n",
64 | "import seaborn as sns\n",
65 | "from sklearn.linear_model import LogisticRegression\n",
66 | "from sklearn.metrics import classification_report\n",
67 | "from sklearn.metrics import accuracy_score\n",
68 | "from sklearn.model_selection import train_test_split"
69 | ],
70 | "execution_count": 0,
71 | "outputs": []
72 | },
73 | {
74 | "cell_type": "code",
75 | "metadata": {
76 | "id": "QykzHudMt480",
77 | "colab_type": "code",
78 | "outputId": "179af60b-22a8-457e-db34-62fa8795f350",
79 | "colab": {
80 | "base_uri": "https://localhost:8080/",
81 | "height": 207
82 | }
83 | },
84 | "source": [
85 | "#Load the data set\n",
86 | "data = sns.load_dataset(\"iris\")\n",
87 | "data.head()"
88 | ],
89 | "execution_count": 26,
90 | "outputs": [
91 | {
92 | "output_type": "execute_result",
93 | "data": {
94 | "text/html": [
95 | "
\n",
96 | "\n",
109 | "
\n",
110 | " \n",
111 | " \n",
112 | " \n",
113 | " sepal_length \n",
114 | " sepal_width \n",
115 | " petal_length \n",
116 | " petal_width \n",
117 | " species \n",
118 | " \n",
119 | " \n",
120 | " \n",
121 | " \n",
122 | " 0 \n",
123 | " 5.1 \n",
124 | " 3.5 \n",
125 | " 1.4 \n",
126 | " 0.2 \n",
127 | " setosa \n",
128 | " \n",
129 | " \n",
130 | " 1 \n",
131 | " 4.9 \n",
132 | " 3.0 \n",
133 | " 1.4 \n",
134 | " 0.2 \n",
135 | " setosa \n",
136 | " \n",
137 | " \n",
138 | " 2 \n",
139 | " 4.7 \n",
140 | " 3.2 \n",
141 | " 1.3 \n",
142 | " 0.2 \n",
143 | " setosa \n",
144 | " \n",
145 | " \n",
146 | " 3 \n",
147 | " 4.6 \n",
148 | " 3.1 \n",
149 | " 1.5 \n",
150 | " 0.2 \n",
151 | " setosa \n",
152 | " \n",
153 | " \n",
154 | " 4 \n",
155 | " 5.0 \n",
156 | " 3.6 \n",
157 | " 1.4 \n",
158 | " 0.2 \n",
159 | " setosa \n",
160 | " \n",
161 | " \n",
162 | "
\n",
163 | "
"
164 | ],
165 | "text/plain": [
166 | " sepal_length sepal_width petal_length petal_width species\n",
167 | "0 5.1 3.5 1.4 0.2 setosa\n",
168 | "1 4.9 3.0 1.4 0.2 setosa\n",
169 | "2 4.7 3.2 1.3 0.2 setosa\n",
170 | "3 4.6 3.1 1.5 0.2 setosa\n",
171 | "4 5.0 3.6 1.4 0.2 setosa"
172 | ]
173 | },
174 | "metadata": {
175 | "tags": []
176 | },
177 | "execution_count": 26
178 | }
179 | ]
180 | },
181 | {
182 | "cell_type": "code",
183 | "metadata": {
184 | "id": "SkmpiAj_uE4w",
185 | "colab_type": "code",
186 | "colab": {}
187 | },
188 | "source": [
189 | "#Prepare the training set\n",
190 | "\n",
191 | "# X = feature values, all the columns except the last column\n",
192 | "X = data.iloc[:, :-1]\n",
193 | "\n",
194 | "# y = target values, last column of the data frame\n",
195 | "y = data.iloc[:, -1]\n"
196 | ],
197 | "execution_count": 0,
198 | "outputs": []
199 | },
200 | {
201 | "cell_type": "code",
202 | "metadata": {
203 | "id": "DkNWmIQNumf1",
204 | "colab_type": "code",
205 | "outputId": "548670c1-b325-4a64-c625-4bec2039723c",
206 | "colab": {
207 | "base_uri": "https://localhost:8080/",
208 | "height": 283
209 | }
210 | },
211 | "source": [
212 | "# Plot the relation of each feature with each species\n",
213 | "\n",
214 | "plt.xlabel('Features')\n",
215 | "plt.ylabel('Species')\n",
216 | "\n",
217 | "pltX = data.loc[:, 'sepal_length']\n",
218 | "pltY = data.loc[:,'species']\n",
219 | "plt.scatter(pltX, pltY, color='blue', label='sepal_length')\n",
220 | "\n",
221 | "pltX = data.loc[:, 'sepal_width']\n",
222 | "pltY = data.loc[:,'species']\n",
223 | "plt.scatter(pltX, pltY, color='green', label='sepal_width')\n",
224 | "\n",
225 | "pltX = data.loc[:, 'petal_length']\n",
226 | "pltY = data.loc[:,'species']\n",
227 | "plt.scatter(pltX, pltY, color='red', label='petal_length')\n",
228 | "\n",
229 | "pltX = data.loc[:, 'petal_width']\n",
230 | "pltY = data.loc[:,'species']\n",
231 | "plt.scatter(pltX, pltY, color='black', label='petal_width')\n",
232 | "\n",
233 | "plt.legend(loc=4, prop={'size':8})\n",
234 | "plt.show()"
235 | ],
236 | "execution_count": 28,
237 | "outputs": [
238 | {
239 | "output_type": "display_data",
240 | "data": {
241 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaYAAAEKCAYAAABZr/GWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xt4VfWd7/H3NzsR2UoJUryAZiet\nHacRqJZ4GfASjXIsFavV0jphZpiiaZOeTtt5tF7wcdTTtHrK6ZRTRU3B6tSMtVo8o9aOFzStqK0F\nwUarnUMliYBHqJRbopTE7/ljrYS9w97J3puEvSSf1/PsJ6y1fmut37qQT9Zav/1b5u6IiIhERVGh\nKyAiIpJMwSQiIpGiYBIRkUhRMImISKQomEREJFIUTCIiEikKJhERiRQFk4iIRIqCSUREIqW40BX4\nIPrwhz/s5eXlha6GiMgHyqpVq/7k7hMGK6dgykN5eTkrV64sdDVERD5QzKw9m3K6lSciIpGiYBIR\nkUhRMImISKQomEREJFIUTCIiEikKJhERiRQFk4iIRIqCSUREIqXgwWRmE83swTzmW2JmlYOU+bKZ\n/X3+tZOBNDc3U15eTlFREeXl5ZxzzjkUFxdjZhQXF3POOeekTG9oaEgZNrP8P0U5lh/db/gjht1o\nFN9UzLibx2E32oCf4puKU34O9IndGMs4bdL/mpSyrIafN6Tu09Zmyr9fTtGNRZR/v5zm1uaU6Q0/\nbxhw/iE+wFBeDkVFwc/m5sxlGxqguBjMoLiYneMm4WZ9n/fi44JpmT6xWMr8HH985uF+nx6Lpayr\nu9/wn21cyvDG2CS6rTgsW8wf48enDG+Mpdb9ufg5fav+W2umzcp534pos3JutYaU4VprTqnelZNS\n92HLQeekrOuOWEPKZn5lXOryv338nn3e/3Acf3zqrjj++NTpDQ0DH77k5fXftccfn/+pMBTM3Yd3\nDXkys2J37y50PdKpqqrykd7zQ3NzM3V1dXR1dRW6KvmrAP6h0JWA+qp6Fn96Mc2tzdQ9UkfX7j37\nNF4Sp2l2E7VTamn4eQO3r7w94/xDqrkZ6uog+fjG49DUBLW1qWUbGuD21Ho5YAMMD6XB1jUUw09Q\nwz38Iz+kjkPoyli2kziX08R91HIpzYOWd+A26vkqi9OW7yTOosomEtfW7nU4cpV8+NId3v4qK+HV\nV3M7FQZjZqvcvWrQcvszmMzsZuBNd78tHL4B2AnMc/fJZjYP+CxwKBADzgJuBc4G3gR2A3e5+4Nm\n1gJc4e4rzWwnsAg4H3gX+Iy7v927fHdfaGbHAncAE4Ae4HPA28B/AOOAEuA6d/+PwbZDwRR0y9Te\nnlXvItF2Q6ErADGL0X19N+XfL6d92977NDE2QdvX2yi+qZge78k4/5AqL4d0xzeRgLa21HHFxdCz\nd70OJA60k6Ccwc/5NhJU0MY6yrMq302MErozlm8jQXWiLe3hyFXv4ct0ePtzz+1UGEy2wbS/b+Xd\nD8xJGp4D/KZfmU8Cl7j7mQQhVQ5UAn8H/E2G5R4C/NrdPwH8Crg8TZlm4LawzHTgLeA94CJ3/yRB\nCP4vM0v7h52Z1ZnZSjNbuXnz5kE39EDX0dFR6CocMHrDpmNb+n3aOz5dKA00fp9kOr7pxh/godSr\njOzO+d5y2ZaP0TNg+TI6Mh6OXPUuJ5fl5XIqDJX9Gkzuvho4PHyu9AngzwRXQsmedPct4b9PAx5w\n9/fd/f8Bz2RY9F+AR8N/ryIIsz5mNgaY5O4PhfV4z927CK6qv21mvwOeAiYBR2Soe5O7V7l71YQJ\ng3aOe8ArKysrdBUOGDGLAVA2Nv0+7R3fWy7T/EMq0/FNNz42DOuPoA6yO+d7y2VbvofYgOU7KMt4\nOHLVu5xclpfLqTBUCtH44QHgEuDzBFdQ/XXmsczdvueeZA/Z95peS3Brb5q7n0Bwa+/gPNY/4jQ2\nNhKPxwtdjX1TUegKBOqm1QHQWNNIvCR1n8ZL4jTWNKaUyzT/kGpsDB4kpFQmHozfqwJ7r7//A4Lh\nfGAw2LqGYvgJariWRjqJD1i2kzjXEuyjbMo7cAd1Gct3EuffKxvTHo5cJR++bJZXWZm5bKZTYagU\nIpjuB75AEE4PDFL2OeBiMysysyOA6nxW6O47gPVmdiGAmY0yszgwFtjk7rvN7Cwgkc/yR6La2lqa\nmppIJBKYGYlEgpqaGmLhX8+xWIyampqU6fX19SnD+yTXJ+n9/9wIGz7ELEbpqNJBZ++9Ksnm6qRo\ngP9WEw+dmLKs5IYLtVNqaZrdRGJsAsNIjE30NXwAWPzpxdRX1Wecf0jV1gZPtxOJoJlWIpH5affi\nxVBfv+fKKRajs3QiDn2fXaMH2cdFRSnzU1mZebif9ylKWVdPv+GtlKYMv1U0kW5iOMHznTdGV6YM\nv1WUWvfnR9fw6dhT3Ectl9NEGwnex2gjwW3UpwzXhQ0fAO6jlhsmpu7DX5bUpKzrzqJ6vh4Ljt9P\nY7V8szR1+Ysqm7j21dq0h6OyX5vkysrU6fX1mQ9f/+X137W9DR/SlR3oVBgqBWmVZ2atwJ/c/Swz\nKwceTWr8UOXu/z0sVwQsJgikNwl+Hd3i7k/2b/zg7oeG81wCnO/u8/o1fvgYcCfwYYJGFJ8DtgOP\nEDS2WAmcCnzK3dsGqr8aP4iI5C6SrfLyYWaHuvtOMxsPvAjMCJ83FYyCSUQkd9kG0wfhDbaPmlkp\ncBDwPwodSiIiMrwiH0zuXl3oOoiIyP5T8C6JREREkimYREQkUhRMIiISKQomERGJFAWTiIhEioJJ\nREQiRcEkIiKRomASEZFIUTCJiEikKJhERCRSFEwiIhIpCiYREYkUBZOIiESKgklERCJFwSQiIpGi\nYBIRkUhRMImISKQomEREJFIUTCIiEikKJhERiRQFk4iIRIqCSUREIkXBJCIikaJgEhGRSFEwiYhI\npCiYREQkUhRMIiISKQomERGJFAWTiIhEioJJREQiRcEkIiKRomASEZFIUTCJiEikKJhERCRSFEwi\nIhIpCiYREYkUBZOIiESKgklERCJFwSQiIpGiYBIRkUhRMImISKQomEREJFIUTCIiEikKJhERiRQF\nk4iIRIqCSUREIkXBJCIikaJgEhGRSFEwiYhIpCiYREQkUhRMIiISKQomERGJFAWTiIhEioJJREQi\nRcEkIiKRomASEZFIUTCJiEikKJhERCRSFEwiIhIpCiYREYkUBZOIiESKgklERCJFwSQiIpGiYBIR\nkUhRMImISKQomEREJFIUTCIiEikKJhERiRQFk4iIRIqCSUREIkXBJCIikaJgEhGRSMkqmMzsa2b2\nIQssNbOXzGzmcFdORERGnmyvmL7o7tuBmcA44O+Am4etViIiMmJlG0wW/pwF/NjdX00aJyIiMmSy\nDaZVZvYEQTA9bmZjgPeHr1oiI
jJSFWdZbj5wAvCGu3eZ2XjgH4evWiIiMlJle8XkQCXwT+HwIcDB\nw1IjEREZ0bINpsXA3wCXhsM7gNuGpUYiIjKiZXsr7xR3/6SZrQZw9z+b2UHDWC8RERmhsr1i2m1m\nMYJbepjZBNT4QUREhkG2wfS/gYeAw82sEVgBfHvYaiUiIiNWVrfy3L3ZzFYBNQTfX7rQ3V8b1pqJ\niMiINGAwmdmH3H27mR0GbALuS5p2mLtvGe4KiojIyDLYrbx/D3+uAlYmfXqH9xszu8nMzsljvmoz\ne3Q46iQiIkNvwCsmdz8//FmxPypjZgaYu+/VsMLdr99PdSh29+79sa4oiMfjvPvuu2mnlZSUsHv3\n7iFbV2VlJZ2dnXR0dFBWVsasWbN47LHH+oYbGxupra0FoLm1mQXLF9CxrYOysWU01jRSO6W2b1kN\nP2+gaVUTPd5DzGIcccgRbNy5sW966ahStu7a2jc8Ojaad3sybKeVsNuz385LfwffXg5l26BjLFxb\nA/dN3TM9ed0/eBS+vApiDj1FsOK8Suad29m3Xf/5wrH89c9aoKcHYjGoroa1a6GjA8rKYNYseOyx\nzMOHHAK//33SxpRApmM2ejQkH+vSUtixY8+6x4yBrVvTz5t2x2VYVyzGliOO40Mb/0CMHnqIsbq0\nmsO3ruUYOniTMlZPnEXV248xsaeDjbEynqhu5Ka1tX2b1dgItXsON83NsGBBsNkA7vlXc+JEePvt\nzJvdv3zybuqv/7L6H75jj4WWlj3TjzgCNu45TamshM7O9If3sMOCMlu2pN8nBzLzLI6wmV0EPO3u\n28LhUqDa3f9PhvI3A2+6+23h8A3AToLnU3OAUcBD7v4vZlYOPA78BphG0O3RjUAVQSvAu9z9X83s\nbuBRd3/QzE4CFhF80XcXwbOv3cDt4XzdwD+7+zNmVg1c4e7nh7ck7wI+AnQBde7+u7B+Hw3Hd7h7\n7/e10qqqqvKVK/frBeOwGCiUCiEej9PU1ARToe6ROrp2d+2ZVhKnaXYTtVNqafh5A7evvL0gdbz0\nd/DDR+CQpF9cnSVw+ezUcIIglL6yMrVTSQduq4Kvnp9++oHC2Xu7BxruJM7lNHEfwW/eeByamoJf\nxM3NUFcHXV2MaMn75IPKzFa5e9Wg5bIMpjXufkK/cavd/cQM5U8Evu/uZ4bDvwduAWYAXyI4Jx8G\n/ifQAbwBTHf3X5vZNOBmdz83nLfU3bf2BlM43+vA5939t2b2IYKQ+RpwvLt/0cz+GngC+CvgVPYE\n0w+AP7n7jWZ2NvA9dz8hDKbZwGnuPuhv6gMlmIIL1GhJJBLwdWjf1r73tLEJ2r7eRvFNxfR4mj9f\n94N1/wrl2/Ye3zYWKr6ROm73jVCc5r9Xt0HJv2SePlK1kaCCtr7hRALa2qC8HNr3Ph1GpN598kGV\nbTBl+wXbdM+iMs7r7qvN7HAzmwhMAP4MTCF4bcbqsNihwMcIgqnd3X8djn8D+EgYIj8nCJhkxwFv\nuftvw3VtBzCz04AfhONeN7N2gmBKdhpwcVjmaTMbHwYbwMMDhZKZ1QF1AGVlZZmKyT7q6OiANL/4\nATq2BfdxChVKENy+y3Z8LEPo9I7PNH2kKqMjZbj3tl1HR5rCI9RI2RfZfo9ppZl9z8w+Gn6+R9AA\nYiAPAJcAnwfuJ7hK+o67nxB+jnX3pWHZzt6Z3P3PwCeAFuDLwJLsN2efdA400d2b3L3K3asmTJiw\nn6o08pSVlVE2Nn3w946PWWx/VilFx9jsx/dkuCDtHZ9p+kjVQepx7/37T38H7jFS9kW2wfRV4C8E\nAfMT4D3gK4PMcz/wBYJweoDgOdIXzexQADObZGaH95/JzD4MFLn7z4DrgE/2K/IH4KjwORNmNsbM\nioFnIbhBbWZ/BZSFZZMll6kmuK23fbCNP1CNHj260FVIEY/HaWxspLGmkXhJPHVaSZzGmkYA6qbV\nFaJ6QNDQobMkdVxnSTC+vzumhV2lJPFwfKbpB4p02z3QcCdxrqWxbzgeDx72Q/AzHmfES94nB7qs\ngsndO939auBMdz/J3a9198GuMF4FxgAb3P0td3+CoPn5C2bWCjwYTu9vEtBiZmuAe4Fr+i33LwRX\nYT8ws5eBJwl6Ol8MFIXLvh+Y5+67+i37BmCamf2O4A28/5DN9h+ourq6BgynkpKSjNPyUVlZSSKR\nwMxIJBLU19enDDc1NVFbW0vtlFqaZjeRGJvAMBJjE30NHwAWf3ox9VX1fVdOMYsx8dCJKesqHVWa\nMjw6NsB2Wvbbed/UoKFD29igT662sXs3fOhd91fPDxo6dFvwi7i7CH45q5KFtcF2LaxN8Ic5NUFz\nLQh+1tQEDxLMgp/19QMPV1b225gBtqX/sS4tTV13aene8wwk07piMf48sZJuYsF2E2NlaQ3tJHgf\no50E/zGxnvWxYHh9LMH9NU08n6jt26zkh/y1tcFw72bn+mi0fzUnThx4s/uXT95N/fVfVv/DV9Pv\n8E5MPU2prMx8eMePDz7p9smBLtvGD9MJbqkd6u5lZvYJ4Evu3jDcFYyiA6Xxg4jI/pRt44dsb+X9\nK/DfgHcA3P1l4Iz8qyciIpJetsGEu7/Zb1ThmkaJiMgBK9vm4m+Gt/PczEoIvjOkTlxFRGTIZXvF\n9GWCVniTgI3ACQzeKk9ERCRn2b724k+EzaxFRESGU1ZXTGb2ETN7xMw2m9kmM/sPM/vIcFdORERG\nnmxv5f078FPgKGAiwRdm7xtwDhERkTxkG0xxd/+xu3eHn3sJvtQqIiIypLJtlfcLM7uaoDsiJ+h5\n4bHwNRLoTbYiIjJUsg2mOeHP3k7KejsF+QJBUOl5k4iIDIkBgynsKPXN3jfYmtk/ELw2og24QVdK\nIiIy1AZ7xnQnQa/imNkZwHeAewjemNM0vFUTEZGRaLBbebGkq6LPA03h6yh+Fvb+LSIiMqQGu2KK\nhe86AqgBnk6alu3zKRERkawNFi73Ab80sz8B7xK8aA8zO5aML8AWERHJ34DB5O6NZrac4Iu1T/ie\nlzcVEbzVVkREZEgNejvO3X+dZtx/DU91RERkpMv6fUwiIiL7g4JJREQiRcEkIiKRomASEZFIUTCJ\niEikKJhERCRSFEwiIhIpCiYREYkUBZOIiESKgklERCJFwSQiIpGiYBIRkUhRMImISKQomEREJFIU\nTCIiEikKJhERiRQFk4iIRIqCSUREIkXBJCIikaJgEhGRSFEwiYhIpCiYREQkUhRMIiISKQomERGJ\nFAWTiIhEioJJREQiRcEkIiKRomASEZFIKS50BUREomr37t2sX7+e9957r9BV+cA5+OCDOfrooykp\nKcl5XgWTiEgG69evZ8yYMZSXl2Nmha7OB4a7884777B+/XoqKipynl+38kREMnjvvfcYP368QilH\nZsb48ePzvtJUMImIDEChlJ992W8KJhGRA8hpp52Wdnx1dTXd3d37tOw1a9bw0ksvAdDS0s
J11123\nT8vLRMEkIiJZSQ6m4aTGDyIi+6C5GRYsgI4OKCuDxkaorc1tGc8//zzf+MY3iMfjzJ07lw0bNvD0\n009TVFTEXXfdBUBtbS3jx49n06ZN3HfffVRUVDBnzhzefvttRo0axYMPPsiHPvShQde1efNm5s+f\nz44dO/j4xz/O4sWLueGGG3jjjTfYuHEj5eXlLFmyhD/+8Y/87d/+LUceeSQ7d+5k6dKlNDU18c47\n7/DMM89w+eWXs2rVKmbPns2WLVt4/PHHOfTQQ/PZhXvRFZOISJ6am6GuDtrbwT34WVcXjM/FL37x\nC2655RaeeeYZqqqq2LBhAy0tLdx222185zvfAWDLli0sW7aMRYsWccsttwBw991388tf/pI5c+Zw\n//33Z7Wum2++mWuuuYZnnnmGMWPG8MILLwBw4okn8tRTT9HR0cHWrVtZuHAhixYtYtmyZWzatAmA\nuro6rrzySprDDTzooIN45JFHmDVrFsuXL89towegKyYRkTwtWABdXanjurqC8blcNdXX1/Otb32L\nJUuWMHXqVFpaWqiurgbgqKOOAmDKlCkUFxdzwgknsHbtWnp6erjyyitpbW1l+/btXHTRRVmt67XX\nXuPqq6/GzNi5cycnn3wyAJMnTwZg4sSJbNu2jXXr1jF16lRisVjftP56x0+aNImtW7dmv8GDUDCJ\niOSpoyO38ZmMGzeOxYsXs3HjRubPn8/MmTP5wQ9+AARf8t2wYQOvvPIKPT09vPzyy3z0ox9lzZo1\ndHZ28qtf/Yof/vCHbNiwIat1HXfcccydO5dp06YB0N3dTWtra0orOnenoqKC1tZWqqqqePXVVwEo\nKSlh165dfeX6zzNUFEwiInkqKwtu36Ubn4s777yTZcuWsXPnTq666irWrl1LdXU1Zsall17KzJkz\nOfzww7nwwgvZvHkzzc3NHHHEEaxdu5bzzjuPY445hkmTJmW1rmuvvZa6ujq2bdtGUVERS5YsSVvu\niiuuoLa2lsMPP5xx48ZRUlLCqaeeyrx583jllVe4+OKLc9vIHNhQptxIUVVV5StXrix0NURkmL32\n2mt8/OMfzzi99xlT8u28eByamnJvADGQtrY2rrvuOu69996hW+gguru7KS4upqenhxkzZrBixQqK\ni3O7lum//8xslbtXDTafrphERPLUGz772ipvqC1atIiHHnqob/iiiy7ia1/7Wk7LeOONN7jsssvo\n7Oxk/vz5OYfSvtAVUx50xSQyMgx2xSQDy/eKSc3FRUQkUhRMIiISKQomERGJFAWTiMgBJFMnrpms\nWbOGpUuXpoxra2tj3rx5AH1dIsHQdASbDQWTiMgIdsIJJzB//vyM05ODaX9RMImI7IPm1mbKv19O\n0Y1FlH+/nObWHDvKI+jE9ZRTTuGss85i6dKl3HTTTVRXV3P22WfT1tZGW1sbM2bM4IILLuDUU09l\n3bp1AMyZM4czzzyTmTNnsn379ozL37FjB3PmzAGCK6qHHnqIdevW0dDQkPL6iuuvv57TTz+9r3++\nhx9+mNbWVqqrq3nyyScBuOqqqzjppJP2usoaSgomEZE8Nbc2U/dIHe3b2nGc9m3t1D1Sl3M4DXcn\nrmPGjGHHjh3s2rWLww47jBdeeIHnnnuO6dOn95V56623ePHFF3n22Wc588wzAbjggguYMmUKLS0t\nnHvuuQDMnTuXFStWcM899+S0jblQMImI5GnB8gV07U7txbVrdxcLli/IaTn19fX89Kc/Ze7cuTz+\n+ON9nbjW19f3XQll6sT1jDPO4NZbb2Xjxo0DrqOiooIHHniAWbNmsWnTJp5//nlmzJjRN729vZ2p\nU6cC9PWjl87kyZMZNWoURUXDFx/q+UFEJE8d29L31pppfCb7oxPX6dOns3DhQn784x+zevVqXn/9\ndSoqKmgPO/tLJBK0trYCsHr16r75+r8ifX+8al5XTCIieSobm7631kzjM7nzzjs544wzOP/885k3\nbx5HHnkk1dXVnHXWWfzoRz8C6OvE9Z/+6Z/45je/yXHHHdfXieuLL7446DpmzJhBe3s7kydPpqqq\ninHjxqVMP+qoo5g2bRqnn346K1as6Bt/8sknc+GFF/Lss8/mtE37Ql0S5UFdEomMDIN24ho+Y0q+\nnRcvidM0u4naKUPXYV4hOnEdCurEVURkP+sNnwXLF9CxrYOysWU01jQOaSjlYyg6cS0kXTHlQVdM\nIiODOnHdN+rEVUREDggKJhERiRQFk4iIRIqCSUTkA2Dr1q0sW7ZswDKZOnAdis5X16xZw0svvQSQ\n0o3RcFAwiYh8AGQTTMMpOZiGm4JJRGRfNDdDeTkUFQU/m3PvxLWlpYWZM2fyqU99irPPPpstW7bs\n1ZFrU1MTTz75JNXV1WzevDnrDlyTbd68mQsuuICzzjqLhoYGAG644Qb+/u//nnPOOYfLLrsMgD/+\n8Y+ccsopfOYzn6GmpqZv/d/97neprQ2awq9atYrZs2czY8YMdu7cmfM2D0TBJCKSr+ZmqKuD9nZw\nD37W1eUVTu7OL37xC770pS9x66237tWRa11dHeeeey4tLS1MmDAh6w5ck918881cc801PPPMM4wZ\nM4YXXngBgBNPPJGnnnqKjo4Otm7dysKFC1m0aBHLli1j06ZNANTV1XHllVfSHG7bQQcdxCOPPMKs\nWbNYvnx5zts7EH3BVkQkXwsWQFdqJ650dQXja3P7ku2JJ54IBO9HuuaaaygpKaG6uhoIugtK1tuB\na2trK9u3b+eiiy7Kah2vvfYaV199NWbGzp07Ofnkk4GgY1aAiRMnsm3bNtatW8fUqVOJxWJ90/rr\nHT9p0iS2bt2a07YORsEkIpKvjgydtWYaP4CXX3657+fll1/Oxo0bUzpy3bRpEz09PQA5d+Da67jj\njmPu3Ll9vYd3d3fT2tqa0jGru1NRUUFraytVVVW8+uqrAJSUlLBr166+cv3nGUoKJhGRfJWVBbfv\n0o3PUUlJCeeddx7vvfceP/vZz7jjjjuorq7GzLj00kuZP38+W7Zs4ZJLLuF73/teXweuxxxzDJMm\nTcpqHddeey11dXVs27aNoqIilixZkrbcFVdcQW1tLYcffjjjxo2jpKSEU089lXnz5vHKK69w8cUX\n57x9uVCXRHlQl0QiI8OgXRL1PmNKvp0Xj0NTU0638lpaWnjqqaf41re+tQ+1HTrd3d0UFxfT09PD\njBkzWLFiBcXFuV/HqBNXEZH9rTd8FiwIbt+VlUFjY87Pl4bSUHTg+sYbb3DZZZfR2dnJ/Pnz8wql\nfaErpjzoiklkZFAnrvtGnbiKiMgBQcEkIiKRckAEk5nNM7OJha6HiIjsuwOl8cM84BVgY4HrkZVJ\nkyaxcWN+VS0pKWHixIl0dHRQVlbGrFmzeOyxx/qGGxsb+7oMyUpz854Ht4cdFozbsiV4iHvssdDS\nAj09EIsF32x///286p1WZSWE35GgoSFoydS7r
ro6WLw4+81obR62t4j2X/amnZt4t+fdjOWLKOJ9\n9t5PMYvR4z0Drqv/vKWjStm6a8+XF0fHRqesu3RUKTv+soMe7yFmMarLq1m7ZS0d2zo4bHRwPLe8\nu4WysWXM+tgsHvu/j/Vtx6xRjTx2S23fM/vdf93Mxo8vgLEdsK2Mia81UvJ60vTdkHzalpbCjh3B\nIdtrO4r27VSprITOzj3tCbZtg3Tf4Ux3qvQ/laqrYe3ayLRNyNvWrVt5+umn+exnP5uxzGmnncaK\nFSuyXuaaNWtYtWoV8+fP7xvX1tbGDTfcwN13381dd93FF7/4RSDoCPapp57aLw0hItv4wcwOAX4K\nHA3EgP8BrAW+BxwK/IkgkGYAdwMbgHeBvwGmAwsJgve3QL277zKzm4ELgG7gCXe/wsxmA9cBBwHv\nALXu/vZAdduXxg/7EkrZiMfjNDU1ZRdO6Zq67m+VlXDmmXD77XtPq6/PKpyaW5upe6SOrt17tiNe\nEqdpdtM+h1O6ZR8wdsfh4SZorYUpzTC7Dg5K2s6/xOGRcHrE9Z4qDQ3pT6VkubTmjlLjh7a2Nq67\n7jruvffejGVyDaZM6+kNpuTl5RNMB2Ljh/OAje7+CXefDPwn8APgEnefBtwFNLr7g8BKgkA5AXCC\noPq8u08hCKd6MxsPXAQc7+5Tgd4vDKwATnX3E4GfAN8czo0azlAC6OrqYsGCBdkVTtedyv72+98H\nvyXSyTS+nwXLF+wVHF27u1iM+3dmAAALs0lEQVSwPMv9kOOyDxglXVAT7qOaBamhBMFwzb7vw/2h\n91TJ5pTp7TFoqDQ3N1NeXk5RURHl5eV9fcnlYjg7cd2xYwdz5swBguB66KGHWLduHQ0NDSmvr7j+\n+us5/fTT+c53vgPAww8/TGtrK9XV1Tz55JMAXHXVVZx00kksXbo0523MRZSDqRU418xuMbPTgWOA\nycCTZraG4Crn6DTzHQesc/f/CofvAc4AtgHvAUvN7LNA7//Co4HHzawVuBI4Pl1lzKzOzFaa2crN\nmzcPzRYOk45su0PJo9uUYZHuXtBA4/vp2JZ+OzKNz8VQLCPSxnak/sw0PeJ6T5UsT5khO/Wbm5up\nq6ujvb0dd6e9vZ26urq8wmm4OnEdM2YMO3bsYNeuXRx22GG88MILPPfcc0yfPr2vzFtvvcWLL77I\ns88+y5lnngnABRdcwJQpU2hpaeHcc88FYO7cuaxYsYJ77rkn5+3LRWSDKQyWTxIE1LeAi4FX3f2E\n8DPF3WfmsLxu4GTgQeB8giswCK7Cbg2vrr4EHJxh/iZ3r3L3qgkTJuS9XftDWbbdoeTRbcqwiMVy\nG99P2dj025FpfC6GYhmRtq0s9Wem6RHXe6pkecoM2am/YMECuvrddcjprkWS5E5c7777blpaWqiu\nrqa+vn6vK6LeTlzPOOMMbr311kHvxFRUVPDAAw8wa9YsNm3axPPPP8+MGTP6pre3tzN16lSAvn70\n0pk8eTKjRo2iqGh4oyOywRS2suty93uB7wKnABPM7G/C6SVm1nt1swMYE/77D0C5mR0bDv8d8Esz\nOxQY6+6PAd8APhFOH0vwfArgH4ZzmyDovXc4xeNxGhsbsyvc2BjccC+kysrgOVc6mcb301jTSLwk\ndTviJXEaa7LcDzku+4CxOw7Lw320vDF4ppTsL0nTI673VMnmlInHg1N/KGS6O5H1XYsk/TtxnTlz\nJi0tLbS0tPBv//ZvlJSUpO3E9Stf+cqgnahOnz6dhQsXMmPGDEaNGsXrr79ORUVF3/REIkFraysA\nq1ev7huf3FFruuHhEtlgAqYAL4a37f4FuB64BLjFzF4G1hA0coDgmdIdYVkD/hF4ILw99z5wB0Fw\nPWpmvyN4rvTP4bw3hGVXETSoGFYbNmzYp3AqKSkhkUhgZiQSCerr61OGs274AMHT36YmSCTADMaP\nDz5mwbiamtQ/RYf6r6TeVnmLFwdPr5PXlWXDB4DaKbU0zW4iMTaBYSTGJoak4UOmZY+OjR5wnqIM\n/61iNvif8/3nLR1VmjLcf92lo0r7lhuzGDUVNX11HT96PONHj++rd31Vfcp21E9qIrG9Njjc22uZ\nuKoJtibADbYmmLgqaXoC+p+2paWZr1D29VSprNxzWiYSwbrS6X+qpDuVampSl5VjN3YDynR3Iuu7\nFkl6O3FdvHgxdXV1HHnkkVRXV3PWWWfxox/9iCOPPLKvE9cJEyb0deL64osvDrrsGTNm0N7ezuTJ\nk6mqqmLcuHEp04866iimTZvG6aefntJ44uSTT+bCCy/k2WefzXl79kVkW+VFmbokEhkZBmuV1/uM\nKfl2Xk4tY0NR68R1qByIrfJERCKttraWpqam/O9aDINFixZRXV3d91m0aFHB6pIvXTHlQVdMIiND\nlL7H9EGkKyYRkWGgP97zsy/7TcEkIpLBwQcfzDvvvKNwypG7884773DwwWm/fTOoA6WvPBGRIXf0\n0Uezfv16ov6l+ig6+OCDOfrodH0gDE7BJCKSQUlJScr3fWT/0K08ERGJFAWTiIhEipqL58HMNgPt\necz6YfZD7xJ5imrdolovUN3yEdV6QXTrFtV6Qe51S7j7oJ2NKpj2IzNbmU0b/kKIat2iWi9Q3fIR\n1XpBdOsW1XrB8NVNt/JERCRSFEwiIhIpCqb9K7tXshZGVOsW1XqB6paPqNYLolu3qNYLhqluesYk\nIiKRoismERGJFAXTfmBm55nZH8xsrZldXej6JDOzu8xsk5m9Uui6JDOzY8zsGTP7vZm9amZfK3Sd\nAMzsYDN70cxeDut1Y6Hr1J+ZxcxstZk9Wui6JDOzNjNrNbM1ZhaZ7vnNrNTMHjSz183std63ZBea\nmR0X7qvez3Yz+3qh69XLzL4R/h94xczuM7P8OsZLt2zdyhteZhYD/gs4F1gP/Ba41N1/X9CKhczs\nDGAn8G/uPrnQ9ellZkcBR7n7S2Y2BlgFXFjo/WbBu6UPcfedZlZC8Dbkr7n7rwtZr2Rm9s9AFfAh\ndz+/0PXpZWZtQJW7R+o7OWZ2D/Csuy8xs4OAuLtvLXS9koW/RzYAp7h7Pt+hHOr6TCI49yvd/V0z\n+ynwmLvfPRTL1xXT8DsZWOvub7j7X4CfAJ8pcJ36uPuvgC2Frkd/7v6Wu78U/nsH8BowqbC1Ag/s\nDAdLwk9k/rozs6OBTwNLCl2XDwIzGwucASwFcPe/RC2UQjXAH6MQSkmKgdFmVgzEgY1DtWAF0/Cb\nBLyZNLyeCPyC/SAxs3LgROA3ha1JILxVtgbYBDzp7pGoV+j7wDeB9wtdkTQceMLMVplZXaErE6oA\nNgM/Cm9/LjGzQwpdqTS+ANxX6Er0cvcNwEKgA3gL2ObuTwzV8hVMEmlmdijwM+Dr7r690PUBcPce\ndz8BOBo42cwicQvUzM4HNrn7qkLXJYPT3P2TwKeAr4S3kQutGPgkcLu7nwh0AlF7DnwQcAHwQKHr\n0svM
xhHc+akAJgKHmNncoVq+gmn4bQCOSRo+Ohwngwif4fwMaHb3ZYWuT3/hLZ9ngPMKXZfQDOCC\n8FnOT4CzzezewlZpj/CvbNx9E/AQwW3uQlsPrE+66n2QIKii5FPAS+7+dqErkuQcYJ27b3b33cAy\nYPpQLVzBNPx+C3zMzCrCv3y+ADxc4DpFXtjIYCnwmrt/r9D16WVmE8ysNPz3aIJGLa8XtlYBd7/G\n3Y9293KC8+xpdx+yv2L3hZkdEjZiIbxVNhMoeEtQd/9/wJtmdlw4qgaIRMOkJJcSodt4oQ7gVDOL\nh/9XawieAw8JvShwmLl7t5n9d+BxIAbc5e6vFrhafczsPqAa+LCZrQf+xd2XFrZWQPDX/98BreHz\nHIBr3f2xAtYJ4CjgnrCVVBHwU3ePVLPsiDoCeCj4HUYx8O/u/p+FrVKfrwLN4R+ObwD/WOD69AlD\n/FzgS4WuSzJ3/42ZPQi8BHQDqxnCXiDUXFxERCJFt/JERCRSFEwiIhIpCiYREYkUBZOIiESKgklE\nRCJFwSRSIGbW06/36PI8llFqZg1DXzuRwlFzcZECMbOd7n7oPi6jHHg0157hzSzm7j37sm6R4aIr\nJpEICTuI/a6Z/dbMfmdmXwrHH2pmy83spfCdRr091N8MfDS84vqumVUnv4fJzG41s3nhv9vM7BYz\newn4nJl91Mz+M+xU9Vkz++uw3OfCd+y8bGa/2r97QEQ9P4gU0uikXi3WuftFwHyCnppPMrNRwHNm\n9gRBD/UXuft2M/sw8Gsze5igw9HJYaeymFn1IOt8J+xIFTNbDnzZ3f+vmZ0CLAbOBq4H/pu7b+jt\nfklkf1IwiRTOu72BkmQmMNXMLgmHxwIfI+hs9Nthj9zvE7w65Yg81nk/9PXaPh14IOwmCGBU+PM5\n4O7w5W+R6zxXDnwKJpFoMeCr7v54ysjgdtwEYJq77w57EE/3KutuUm/R9y/TGf4sAramCUbc/cvh\nFdSngVVmNs3d38lnY0TyoWdMItHyOFAfvvIDM/ursCPPsQTvWtptZmcBibD8DmBM0vztQKWZjQpv\nw9WkW0n4bqt1Zva5cD1mZp8I//1Rd/+Nu19P8BK9Y9ItQ2S46IpJJFqWAOXAS+HrBDYDFwLNwCNm\n1gqsJHzVhru/Y2bPmdkrwC/c/crwFtwrwDqCXp8zqQVuN7PrCF4R/xPgZeC7ZvYxgqu35eE4kf1G\nzcVFRCRSdCtPREQiRcEkIiKRomASEZFIUTCJiEikKJhERCRSFEwiIhIpCiYREYkUBZOIiETK/wfI\nYQxYjIKT7AAAAABJRU5ErkJggg==\n",
242 | "text/plain": [
243 | ""
244 | ]
245 | },
246 | "metadata": {
247 | "tags": []
248 | }
249 | }
250 | ]
251 | },
252 | {
253 | "cell_type": "code",
254 | "metadata": {
255 | "id": "JpxxN8UKwBOh",
256 | "colab_type": "code",
257 | "colab": {}
258 | },
259 | "source": [
260 | "#Split the data into 80% training and 20% testing\n",
261 | "x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
262 | ],
263 | "execution_count": 0,
264 | "outputs": []
265 | },
266 | {
267 | "cell_type": "code",
268 | "metadata": {
269 | "id": "yI7TRVRAwaPU",
270 | "colab_type": "code",
271 | "colab": {}
272 | },
273 | "source": [
274 | "#Train the model\n",
275 | "model = LogisticRegression()\n",
276 | "model.fit(x_train, y_train) #Training the model"
277 | ],
278 | "execution_count": 0,
279 | "outputs": []
280 | },
281 | {
282 | "cell_type": "code",
283 | "metadata": {
284 | "id": "qDKGcVVSwmul",
285 | "colab_type": "code",
286 | "outputId": "b6fe702d-71a7-4a17-b282-28d6f328acdd",
287 | "colab": {
288 | "base_uri": "https://localhost:8080/",
289 | "height": 316
290 | }
291 | },
292 | "source": [
293 | "#Test the model\n",
294 | "predictions = model.predict(x_test)\n",
295 | "print(predictions)# printing predictions\n",
296 | "\n",
297 | "print()# Printing new line\n",
298 | "\n",
299 | "#Check precision, recall, f1-score\n",
300 | "print( classification_report(y_test, predictions) )\n",
301 | "\n",
302 | "print( accuracy_score(y_test, predictions))"
303 | ],
304 | "execution_count": 32,
305 | "outputs": [
306 | {
307 | "output_type": "stream",
308 | "text": [
309 | "['versicolor' 'setosa' 'virginica' 'versicolor' 'versicolor' 'setosa'\n",
310 | " 'versicolor' 'virginica' 'versicolor' 'versicolor' 'virginica' 'setosa'\n",
311 | " 'setosa' 'setosa' 'setosa' 'versicolor' 'virginica' 'versicolor'\n",
312 | " 'versicolor' 'virginica' 'setosa' 'virginica' 'setosa' 'virginica'\n",
313 | " 'virginica' 'virginica' 'virginica' 'virginica' 'setosa' 'setosa']\n",
314 | "\n",
315 | " precision recall f1-score support\n",
316 | "\n",
317 | " setosa 1.00 1.00 1.00 10\n",
318 | " versicolor 1.00 1.00 1.00 9\n",
319 | " virginica 1.00 1.00 1.00 11\n",
320 | "\n",
321 | " accuracy 1.00 30\n",
322 | " macro avg 1.00 1.00 1.00 30\n",
323 | "weighted avg 1.00 1.00 1.00 30\n",
324 | "\n",
325 | "1.0\n"
326 | ],
327 | "name": "stdout"
328 | }
329 | ]
330 | }
331 | ]
332 | }
--------------------------------------------------------------------------------
/Logistic_Regression.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | This Is A Simple Logistic Regression Program To Classify Iris Species
4 |
5 | Resources:
6 | (1) https://towardsdatascience.com/building-a-logistic-regression-in-python-301d27367c24
7 | (2) https://towardsdatascience.com/logistic-regression-a-simplified-approach-using-python-c4bc81a87c31
8 | (3) https://scikit-learn.org/stable/modules/generated/sklearn.metrics.classification_report.html
9 | """
10 |
11 | # Import the dependencies
12 | import matplotlib.pyplot as plt
13 | import seaborn as sns
14 | from sklearn.linear_model import LogisticRegression
15 | from sklearn.metrics import classification_report
16 | from sklearn.metrics import accuracy_score
17 | from sklearn.model_selection import train_test_split
18 |
19 | #Load the data set
20 | data = sns.load_dataset("iris")
21 | print(data.head()) # show the first five rows (a bare data.head() does nothing in a script)
22 |
23 | #Prepare the training set
24 |
25 | # X = feature values, all the columns except the last column
26 | X = data.iloc[:, :-1] # or data.loc[:, 'sepal_length':'petal_width']
27 | 
28 | # y = target values, last column of the data frame
29 | y = data.iloc[:, -1] # or data.loc[:, 'species']
30 |
31 | # Plot the relation of each feature with each species
32 |
33 | plt.xlabel('Features')
34 | plt.ylabel('Species')
35 |
36 | pltX = data.loc[:, 'sepal_length']
37 | pltY = data.loc[:,'species']
38 | plt.scatter(pltX, pltY, color='blue', label='sepal_length')
39 |
40 | pltX = data.loc[:, 'sepal_width']
41 | pltY = data.loc[:,'species']
42 | plt.scatter(pltX, pltY, color='green', label='sepal_width')
43 |
44 | pltX = data.loc[:, 'petal_length']
45 | pltY = data.loc[:,'species']
46 | plt.scatter(pltX, pltY, color='red', label='petal_length')
47 |
48 | pltX = data.loc[:, 'petal_width']
49 | pltY = data.loc[:,'species']
50 | plt.scatter(pltX, pltY, color='black', label='petal_width')
51 |
52 | plt.legend(loc=4, prop={'size':8})
53 | plt.show()
54 |
55 | #Split the data into 80% training and 20% testing
56 | x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
57 |
58 | #Train the model
59 | model = LogisticRegression()
60 | model.fit(x_train, y_train) #Training the model
61 |
62 | #Test the model
63 | predictions = model.predict(x_test)
64 | print(predictions)
65 |
66 | #Check precision, recall, f1-score
67 | print(classification_report(y_test, predictions))
68 | 
69 | print(accuracy_score(y_test, predictions))
70 |
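71 | # Added example (a sketch, not part of the original script): classify one new,
72 | # hypothetical flower measurement. The four values are sepal_length, sepal_width,
73 | # petal_length and petal_width, in the same order as the training columns.
74 | sample = [[5.0, 3.4, 1.5, 0.2]]
75 | print(model.predict(sample)) # measurements this small typically come out as 'setosa'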
--------------------------------------------------------------------------------
/MNIST_ANN.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "MNIST_ANN.ipynb",
7 | "version": "0.3.2",
8 | "provenance": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | }
14 | },
15 | "cells": [
16 | {
17 | "cell_type": "code",
18 | "metadata": {
19 | "id": "cEUq8cIBTeiX",
20 | "colab_type": "code",
21 | "colab": {
22 | "base_uri": "https://localhost:8080/",
23 | "height": 510
24 | },
25 | "outputId": "ecb56659-6540-4ae5-aaea-8dbff9a38922"
26 | },
27 | "source": [
28 | "# Description: This program classifies the MNIST handwritten digit images\n",
29 | "# as a number 0 - 9\n",
30 | "\n",
31 | "# Install packages\n",
32 | "pip install tensorflow keras numpy mnist matplotlib"
33 | ],
34 | "execution_count": 26,
35 | "outputs": [
36 | {
37 | "output_type": "stream",
38 | "text": [
39 | "Requirement already satisfied: tensorflow in /usr/local/lib/python3.6/dist-packages (1.14.0rc1)\n",
40 | "Requirement already satisfied: keras in /usr/local/lib/python3.6/dist-packages (2.2.4)\n",
41 | "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (1.16.4)\n",
42 | "Requirement already satisfied: mnist in /usr/local/lib/python3.6/dist-packages (0.2.2)\n",
43 | "Requirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (3.0.3)\n",
44 | "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.1.0)\n",
45 | "Requirement already satisfied: tensorboard<1.14.0,>=1.13.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.13.1)\n",
46 | "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (0.33.4)\n",
47 | "Requirement already satisfied: keras-applications>=1.0.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.0.8)\n",
48 | "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.1.0)\n",
49 | "Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (3.7.1)\n",
50 | "Requirement already satisfied: google-pasta>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (0.1.7)\n",
51 | "Requirement already satisfied: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.11.1)\n",
52 | "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.12.0)\n",
53 | "Requirement already satisfied: gast>=0.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (0.2.2)\n",
54 | "Requirement already satisfied: tensorflow-estimator<1.15.0rc0,>=1.14.0rc0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.14.0rc1)\n",
55 | "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (1.15.0)\n",
56 | "Requirement already satisfied: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (0.7.1)\n",
57 | "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow) (0.8.0)\n",
58 | "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from keras) (3.13)\n",
59 | "Requirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from keras) (1.3.0)\n",
60 | "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras) (2.8.0)\n",
61 | "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (0.10.0)\n",
62 | "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (1.1.0)\n",
63 | "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (2.5.3)\n",
64 | "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (2.4.0)\n",
65 | "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.14.0,>=1.13.0->tensorflow) (0.15.4)\n",
66 | "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<1.14.0,>=1.13.0->tensorflow) (3.1.1)\n",
67 | "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.6.1->tensorflow) (41.0.1)\n"
68 | ],
69 | "name": "stdout"
70 | }
71 | ]
72 | },
73 | {
74 | "cell_type": "code",
75 | "metadata": {
76 | "id": "ehnsueDUUI1r",
77 | "colab_type": "code",
78 | "colab": {}
79 | },
80 | "source": [
81 | "#import the packages / dependecies\n",
82 | "import numpy as np \n",
83 | "import mnist # Get data set from\n",
84 | "from keras.models import Sequential #ANN architecture\n",
85 | "from keras. layers import Dense # The layers in the ANN\n",
86 | "from keras.utils import to_categorical\n",
87 | "import matplotlib.pyplot as plt # Graph"
88 | ],
89 | "execution_count": 0,
90 | "outputs": []
91 | },
92 | {
93 | "cell_type": "code",
94 | "metadata": {
95 | "id": "bNiQWH_NUhTy",
96 | "colab_type": "code",
97 | "colab": {}
98 | },
99 | "source": [
100 | "#Load the data set\n",
101 | "train_images = mnist.train_images() # training data of images\n",
102 | "train_labels = mnist.train_labels() # training data of the labels\n",
103 | "test_images = mnist. test_images() # testing data images\n",
104 | "test_labels = mnist.test_labels() # testing data labels"
105 | ],
106 | "execution_count": 0,
107 | "outputs": []
108 | },
109 | {
110 | "cell_type": "code",
111 | "metadata": {
112 | "id": "VY0-HNI-VDgR",
113 | "colab_type": "code",
114 | "colab": {
115 | "base_uri": "https://localhost:8080/",
116 | "height": 51
117 | },
118 | "outputId": "49555bf2-1dd4-407e-8579-e8824dd48b07"
119 | },
120 | "source": [
121 | "#Normalize the images \n",
122 | "#Normalize the pixel values from [0, 255] to [-0.5 to 0.5]\n",
123 | "#This make the network easier to train\n",
124 | "train_images = (train_images / 255) - 0.5\n",
125 | "test_images = (test_images/ 255) - 0.5\n",
126 | "\n",
127 | "#Flatten the images. Flatten each 28 x 28 image into a 784= 28^2 \n",
128 | "#dimensional vector and pass into the neural network\n",
129 | "train_images = train_images.reshape((-1, 784))\n",
130 | "test_images = test_images.reshape((-1,784))\n",
131 | "#print the new image shape\n",
132 | "print(train_images.shape) #60,000 rows and 784 cols\n",
133 | "print(test_images.shape) #10,000 rows and 784 cols"
134 | ],
135 | "execution_count": 29,
136 | "outputs": [
137 | {
138 | "output_type": "stream",
139 | "text": [
140 | "(60000, 784)\n",
141 | "(10000, 784)\n"
142 | ],
143 | "name": "stdout"
144 | }
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "metadata": {
150 | "id": "eYpbz8nsWerD",
151 | "colab_type": "code",
152 | "colab": {}
153 | },
154 | "source": [
155 | "#Build the ANN model\n",
156 | "#With 3 layers, 2 with 64 neurons and activation function = relu\n",
157 | "# and 1 layer with 10 neurons with activation function= softmax\n",
158 | "model = Sequential()\n",
159 | "model.add(Dense(64, activation='relu', input_dim=784))\n",
160 | "model.add(Dense(64, activation='relu'))\n",
161 | "model.add(Dense(10, activation='softmax'))"
162 | ],
163 | "execution_count": 0,
164 | "outputs": []
165 | },
166 | {
167 | "cell_type": "code",
168 | "metadata": {
169 | "id": "NG9dfatZXlTk",
170 | "colab_type": "code",
171 | "colab": {}
172 | },
173 | "source": [
174 | "#Compile the model\n",
175 | "# loss measures how well the model did on training, and then tries to improve on\n",
176 | "# it using the optimizer\n",
177 | "model.compile(\n",
178 | " optimizer= 'adam',\n",
179 | " loss = 'categorical_crossentropy', #loss function for classes > 2\n",
180 | " metrics = ['accuracy']\n",
181 | ")"
182 | ],
183 | "execution_count": 0,
184 | "outputs": []
185 | },
186 | {
187 | "cell_type": "code",
188 | "metadata": {
189 | "id": "yhMQSC4AYBjt",
190 | "colab_type": "code",
191 | "colab": {
192 | "base_uri": "https://localhost:8080/",
193 | "height": 204
194 | },
195 | "outputId": "18664389-7bd8-498f-90a4-b8033efd8d8e"
196 | },
197 | "source": [
198 | "#Train the model\n",
199 | "model.fit(\n",
200 | " train_images, #The training data images\n",
201 | " to_categorical(train_labels),#The trainind data labels, label data only returns a single digit representing the class of each label Ex: train_labels = 2,to_categorical(2)= [0,0,1,0,0,0,0,0,0,0]\n",
202 | " epochs=5, #Number of iterations over the entire data set to train on\n",
203 | " batch_size = 3 #The number of samples per gradient update for training\n",
204 | ")"
205 | ],
206 | "execution_count": 32,
207 | "outputs": [
208 | {
209 | "output_type": "stream",
210 | "text": [
211 | "Epoch 1/5\n",
212 | "60000/60000 [==============================] - 31s 518us/step - loss: 0.3158 - acc: 0.9014\n",
213 | "Epoch 2/5\n",
214 | "60000/60000 [==============================] - 31s 513us/step - loss: 0.1876 - acc: 0.9433\n",
215 | "Epoch 3/5\n",
216 | "60000/60000 [==============================] - 31s 517us/step - loss: 0.1603 - acc: 0.9516\n",
217 | "Epoch 4/5\n",
218 | "60000/60000 [==============================] - 31s 511us/step - loss: 0.1446 - acc: 0.9570\n",
219 | "Epoch 5/5\n",
220 | "60000/60000 [==============================] - 30s 498us/step - loss: 0.1365 - acc: 0.9601\n"
221 | ],
222 | "name": "stdout"
223 | },
224 | {
225 | "output_type": "execute_result",
226 | "data": {
227 | "text/plain": [
228 | ""
229 | ]
230 | },
231 | "metadata": {
232 | "tags": []
233 | },
234 | "execution_count": 32
235 | }
236 | ]
237 | },
238 | {
239 | "cell_type": "code",
240 | "metadata": {
241 | "id": "_6Df_KcYaPsd",
242 | "colab_type": "code",
243 | "colab": {
244 | "base_uri": "https://localhost:8080/",
245 | "height": 51
246 | },
247 | "outputId": "2a0472dc-1a26-4e58-c350-c276553361a5"
248 | },
249 | "source": [
250 | "#Evaluate the model\n",
251 | "model.evaluate(\n",
252 | " test_images,\n",
253 | " to_categorical(test_labels)\n",
254 | ")"
255 | ],
256 | "execution_count": 33,
257 | "outputs": [
258 | {
259 | "output_type": "stream",
260 | "text": [
261 | "10000/10000 [==============================] - 0s 28us/step\n"
262 | ],
263 | "name": "stdout"
264 | },
265 | {
266 | "output_type": "execute_result",
267 | "data": {
268 | "text/plain": [
269 | "[0.15349477015165613, 0.9551]"
270 | ]
271 | },
272 | "metadata": {
273 | "tags": []
274 | },
275 | "execution_count": 33
276 | }
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "metadata": {
282 | "id": "3kTAapL9akr0",
283 | "colab_type": "code",
284 | "colab": {}
285 | },
286 | "source": [
287 | "#save the model to disk\n",
288 | "model.save_weights('model.h5')\n",
289 | "# Load the model from disk later using:\n",
290 | "# Build the model.\n",
291 | "#model = Sequential([\n",
292 | "# Dense(64, activation='relu', input_shape=(784,)),\n",
293 | "# Dense(64, activation='relu'),\n",
294 | "# Dense(10, activation='softmax'),\n",
295 | "#])\n",
296 | "# model.load_weights('model.h5')"
297 | ],
298 | "execution_count": 0,
299 | "outputs": []
300 | },
301 | {
302 | "cell_type": "code",
303 | "metadata": {
304 | "id": "LBcV8923a0MW",
305 | "colab_type": "code",
306 | "colab": {
307 | "base_uri": "https://localhost:8080/",
308 | "height": 51
309 | },
310 | "outputId": "c04291a7-92ae-4b06-a1e7-39849fd5712e"
311 | },
312 | "source": [
313 | "#Make predictions\n",
314 | "# Predict on the first 5 test images.\n",
315 | "# Keep in mind that the output of our network is 10 probabilities, \n",
316 | "# so we'll use np.argmax()to turn those into actual digits\n",
317 | "predictions = model.predict(test_images[:5])\n",
318 | "#print(predictions)\n",
319 | "print (np.argmax(predictions, axis =1))\n",
320 | "print(test_labels[:5])"
321 | ],
322 | "execution_count": 35,
323 | "outputs": [
324 | {
325 | "output_type": "stream",
326 | "text": [
327 | "[7 2 1 0 4]\n",
328 | "[7 2 1 0 4]\n"
329 | ],
330 | "name": "stdout"
331 | }
332 | ]
333 | },
334 | {
335 | "cell_type": "code",
336 | "metadata": {
337 | "id": "zm4BrF2obdyr",
338 | "colab_type": "code",
339 | "colab": {
340 | "base_uri": "https://localhost:8080/",
341 | "height": 1277
342 | },
343 | "outputId": "b53000a7-733a-495d-b67c-4a97dd9ef900"
344 | },
345 | "source": [
346 | "import matplotlib.pyplot as plt\n",
347 | "for i in range(0,5):\n",
348 | " first_image = test_images[i]\n",
349 | " first_image = np.array(first_image, dtype='float')\n",
350 | " pixels = first_image.reshape((28, 28))\n",
351 | " plt.imshow(pixels, cmap='gray')\n",
352 | " plt.show()"
353 | ],
354 | "execution_count": 36,
355 | "outputs": [
356 | {
357 | "output_type": "display_data",
358 | "data": {
359 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAADO5JREFUeJzt3V2IXfW5x/Hf76QpiOlFYjUMNpqe\nogerSKKjCMYS9VhyYiEWg9SLkkLJ9CJKCyVU7EVzWaQv1JvAlIbGkmMrpNUoYmNjMQ1qcSJqEmNi\nElIzMW9lhCaCtNGnF7Nsp3H2f+/st7XH5/uBYfZez3p52Mxv1lp77bX/jggByOe/6m4AQD0IP5AU\n4QeSIvxAUoQfSIrwA0kRfiApwg8kRfiBpD7Vz43Z5uOEQI9FhFuZr6M9v+1ltvfZPmD7gU7WBaC/\n3O5n+23PkrRf0h2SxiW9LOneiHijsAx7fqDH+rHnv1HSgYg4FBF/l/RrSSs6WB+APuok/JdKOjLl\n+Xg17T/YHrE9Znusg20B6LKev+EXEaOSRiUO+4FB0sme/6ikBVOef66aBmAG6CT8L0u6wvbnbX9a\n0tckbelOWwB6re3D/og4a/s+Sb+XNEvShojY07XOAPRU25f62toY5/xAz/XlQz4AZi7CDyRF+IGk\nCD+QFOEHkiL8QFKEH0iK8ANJEX4gKcIPJEX4gaQIP5AU4QeSIvxAUoQfSIrwA0kRfiApwg8kRfiB\npAg/kBThB5Ii/EBShB9IivADSRF+ICnCDyRF+IGkCD+QFOEHkmp7iG5Jsn1Y0mlJH0g6GxHD3WgK\nQO91FP7KrRHx1y6sB0AfcdgPJNVp+EPSVts7bY90oyEA/dHpYf+SiDhq+xJJz9p+MyK2T52h+qfA\nPwZgwDgiurMie52kMxHxo8I83dkYgIYiwq3M1/Zhv+0LbX/mo8eSvixpd7vrA9BfnRz2z5f0O9sf\nref/I+KZrnQFoOe6dtjf0sY47Ad6rueH/QBmNsIPJEX4gaQIP5AU4QeSIvxAUt24qy+FlStXNqyt\nXr26uOw777xTrL///vvF+qZNm4r148ePN6wdOHCguCzyYs8PJEX4gaQIP5AU4QeSIvxAUoQfSIrw\nA0lxS2+LDh061LC2cOHC/jUyjdOnTzes7dmzp4+dDJbx8fGGtYceeqi47NjYWLfb6Rtu6QVQRPiB\npAg/kBThB5Ii/EBShB9IivADSXE/f4tK9+xfe+21xWX37t1brF911VXF+nXXXVesL126tGHtpptu\nKi575MiRYn3BggXFeifOnj1brJ86dapYHxoaanvbb7/9drE+k6/zt4o9P5AU4QeSIvxAUoQfSIrw\nA0kRfiApwg8k1fR+ftsbJH1F0smIuKaaNk/SbyQtlHRY0j0R8W7Tjc3g+/kH2dy5cxvWFi1aVFx2\n586dxfoNN9zQVk+taDZewf79+4v1Zp+fmDdvXsPamjVrisuuX7++WB9k3byf/5eSlp0z7QFJ2yLi\nCknbqucAZpCm4Y+I7ZImzpm8QtLG6vFGSXd1uS8APdbuOf/8iDhWPT4uaX6X+gHQJx1/tj8ionQu\nb3tE0kin2wHQXe3u+U/YHpKk6vfJRjNGxGhEDEfEcJvbAtAD7YZ/i6RV1eNVkp7oTjsA+qVp+G0/\nKulFSf9je9z2NyX9UNIdtt+S9L/VcwAzCN/bj4F19913F+uPPfZYsb579+6GtVtvvbW47MTEuRe4\nZg6+tx9AEeEHkiL8QFKEH0iK8ANJEX4gKS71oTaXXHJJsb5r166Oll+5cmXD2ubNm4vLzmRc6gNQ\nRPiBpAg/kBThB5Ii/EBShB9IivADSTFEN2rT7OuzL7744mL93XfL3xa/b9++8+4pE/b8QFKEH0iK\n8ANJEX4gKcIPJEX4gaQIP5AU9/Ojp26++eaGteeee6647OzZs4v1pUuXFuvbt28v1j+puJ8fQBHh\nB5Ii/EBShB9IivADSRF+ICnCDyTV9H5+2xskfUXSyYi4ppq2TtJqSaeq2R6MiKd71SRmruXLlzes\nNbuOv23btmL9xRdfbKsnTGplz/9LScummf7TiFhU/RB8YIZpGv6I2C5pog+9AOijTs7577P9uu0N\ntud2rSMAfdFu+NdL+oKkRZKOSfpxoxltj9gesz3W5rYA9EBb4Y+IExHxQUR8KOnnkm4szDsaEcMR\nMdxukwC6r63w2x6a8vSrknZ3px0A/dLKpb5HJS2V9Fnb45J+IGmp7UWSQtJhSd/qYY8AeoD7+dGR\nCy64oFjfsWNHw9rVV19dXPa2224r1l944YViPSvu5wdQRPiBpAg/kBThB5Ii/EBShB9IiiG60ZG1\na9cW64sXL25Ye+aZZ4rLcimvt9jzA0kRfiApwg8kRfiBpAg/kBThB5Ii/EBS3NKLojvvvLNYf/zx\nx4v19957r2Ft2bLpvhT631566aViHdPjll4ARYQfSIrwA0kRfiApwg8kRfiBpAg/kBT38yd30UUX\nFesPP/xwsT5r1qxi/emnGw/gzHX8erHnB5Ii/EBShB9IivADSRF+ICnCDyRF+IGkmt7Pb3uBpEck\nzZcUkkYj4me250n6jaSFkg5Luici3m2yLu7n77Nm1+GbXWu//vrri/WDBw8W66V79psti/Z0837+\ns5K+GxFflHSTpDW2vyjpAUnbIuIKSduq5wBmiKbhj4hjEfFK9fi0pL2SLpW0QtLGaraNku7qVZMA\nuu+8zvltL5S0WNKfJc2PiGNV6bgmTwsAzBAtf7bf9hxJmyV9JyL+Zv/7tCIiotH5vO0RSSOdNgqg\nu1ra89uercngb4qI31aTT9gequpDkk5Ot2xEjEbEcEQMd6NhAN3RNPye3MX/QtLeiPjJlNIWSauq\nx6skPdH99gD0SiuX+pZI+pOkXZI+rCY/qMnz/sckXSbpL5q81DfRZF1c6uuzK6+8slh/8803O1r/\nihUrivUnn3yyo/Xj/LV6qa/pOX9E7JDUaGW3n09TAAYHn/ADkiL8QFKEH0iK8ANJEX4gKcIPJMVX\nd38CXH755Q1rW7du7Wjda9euLdafeuqpjtaP+rDnB5Ii/EBShB9IivADSRF+ICnCDyRF+IGkuM7/\nCTAy0vhb0i677LKO1v38888X682+DwKDiz0/kBThB5Ii/EBShB9IivADSRF+ICnCDyTFdf4ZYMmS\nJcX6/fff36dO8EnCnh9IivADSRF+ICnCDyRF+IGkCD+QFOEHkmp6nd/2AkmPSJovKSSNRsTPbK+T\ntFrSqWrWByPi6V41mtktt9xSrM+ZM6ftdR88eLBYP3PmTNvrxmBr5UM+ZyV9NyJesf0ZSTttP1vV\nfhoRP+pdewB6pWn4I+KYpGPV49O290q6tNeNAeit8zrnt71Q0mJJf64m3Wf7ddsbbM9tsMyI7THb\nYx11CqCrWg6/7TmSNkv6TkT8T
dJ6SV+QtEiTRwY/nm65iBiNiOGIGO5CvwC6pKXw256tyeBviojf\nSlJEnIiIDyLiQ0k/l3Rj79oE0G1Nw2/bkn4haW9E/GTK9KEps31V0u7utwegV1p5t/9mSV+XtMv2\nq9W0ByXda3uRJi//HZb0rZ50iI689tprxfrtt99erE9MTHSzHQyQVt7t3yHJ05S4pg/MYHzCD0iK\n8ANJEX4gKcIPJEX4gaQIP5CU+znEsm3GcwZ6LCKmuzT/Mez5gaQIP5AU4QeSIvxAUoQfSIrwA0kR\nfiCpfg/R/VdJf5ny/LPVtEE0qL0Nal8SvbWrm71d3uqMff2Qz8c2bo8N6nf7DWpvg9qXRG/tqqs3\nDvuBpAg/kFTd4R+tefslg9rboPYl0Vu7aumt1nN+APWpe88PoCa1hN/2Mtv7bB+w/UAdPTRi+7Dt\nXbZfrXuIsWoYtJO2d0+ZNs/2s7bfqn5PO0xaTb2ts320eu1etb28pt4W2P6j7Tds77H97Wp6ra9d\noa9aXre+H/bbniVpv6Q7JI1LelnSvRHxRl8bacD2YUnDEVH7NWHbX5J0RtIjEXFNNe0hSRMR8cPq\nH+fciPjegPS2TtKZukdurgaUGZo6srSkuyR9QzW+doW+7lENr1sde/4bJR2IiEMR8XdJv5a0ooY+\nBl5EbJd07qgZKyRtrB5v1OQfT9816G0gRMSxiHilenxa0kcjS9f62hX6qkUd4b9U0pEpz8c1WEN+\nh6SttnfaHqm7mWnMr4ZNl6TjkubX2cw0mo7c3E/njCw9MK9dOyNedxtv+H3ckoi4TtL/SVpTHd4O\npJg8ZxukyzUtjdzcL9OMLP0vdb527Y543W11hP+opAVTnn+umjYQIuJo9fukpN9p8EYfPvHRIKnV\n75M19/MvgzRy83QjS2sAXrtBGvG6jvC/LOkK25+3/WlJX5O0pYY+Psb2hdUbMbJ9oaQva/BGH94i\naVX1eJWkJ2rs5T8MysjNjUaWVs2v3cCNeB0Rff+RtFyT7/gflPT9Onpo0Nd/S3qt+tlTd2+SHtXk\nYeA/NPneyDclXSRpm6S3JP1B0rwB6u1XknZJel2TQRuqqbclmjykf13Sq9XP8rpfu0JftbxufMIP\nSIo3/ICkCD+QFOEHkiL8QFKEH0iK8ANJEX4gKcIPJPVP82g/p9/JjhUAAAAASUVORK5CYII=\n",
360 | "text/plain": [
361 | ""
362 | ]
363 | },
364 | "metadata": {
365 | "tags": []
366 | }
367 | },
368 | {
369 | "output_type": "display_data",
370 | "data": {
371 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAADXZJREFUeJzt3X+IHPUZx/HPU5uAaFGT0uMwttGo\nhSj+CKcUCaVFjVZiYkA0wT9SWnr9o0LF+ItUUChiKf1B/wpEDCba2jRcjFFL0zZUTSEJOSVGo1ET\nuWjCJdcQ0QSRmuTpHzvXXvXmu5uZ2Z29PO8XHLc7z+7Mw3Kfm5md3e/X3F0A4vlS3Q0AqAfhB4Ii\n/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1Jc7uTEz4+OEQJu5u7XyuFJ7fjO70czeNrPdZvZAmXUB\n6Cwr+tl+MztN0juSrpe0T9I2SYvc/c3Ec9jzA23WiT3/1ZJ2u/t77v5vSX+UNL/E+gB0UJnwnyvp\ngzH392XL/o+Z9ZvZoJkNltgWgIq1/Q0/d18uabnEYT/QTcrs+fdLOm/M/WnZMgATQJnwb5N0kZmd\nb2aTJS2UtL6atgC0W+HDfnc/ZmZ3Stog6TRJK9x9Z2WdAWirwpf6Cm2Mc36g7TryIR8AExfhB4Ii\n/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBdXTobhRzzz33JOunn356bu2y\nyy5LPvfWW28t1NOoZcuWJeubN2/OrT355JOlto1y2PMDQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCM\n3tsFVq9enayXvRZfpz179uTWrrvuuuRz33///arbCYHRewEkEX4gKMIPBEX4gaAIPxAU4QeCIvxA\nUKW+z29mQ5KOSDou6Zi791XR1Kmmzuv4u3btStY3bNiQrF9wwQXJ+s0335ysz5gxI7d2xx13JJ/7\n6KOPJusop4rBPL7r7ocqWA+ADuKwHwiqbPhd0l/N7BUz66+iIQCdUfawf7a77zezr0n6m5ntcveX\nxz4g+6fAPwagy5Ta87v7/uz3iKRnJF09zmOWu3sfbwYC3aVw+M3sDDP7yuhtSXMkvVFVYwDaq8xh\nf4+kZ8xsdD1/cPe/VNIVgLYrHH53f0/S5RX2MmH19aXPaBYsWFBq/Tt37kzW582bl1s7dCh9Ffbo\n0aPJ+uTJk5P1LVu2JOuXX57/JzJ16tTkc9FeXOoDgiL8QFCEHwiK8ANBEX4gKMIPBMUU3RXo7e1N\n1rPPQuRqdinvhhtuSNaHh4eT9TKWLFmSrM+cObPwul944YXCz0V57PmBoAg/EBThB4Ii/EBQhB8I\nivADQRF+ICiu81fgueeeS9YvvPDCZP3IkSPJ+uHDh0+6p6osXLgwWZ80aVKHOkHV2PMDQRF+ICjC\nDwRF+IGgCD8QFOEHgiL8QFBc5++AvXv31t1CrnvvvTdZv/jii0utf+vWrYVqaD/2/EBQhB8IivAD\nQRF+ICjCDwRF+IGgCD8QlLl7+gFmKyTNlTTi7pdmy6ZIWi1puqQhSbe5+4dNN2aW3hgqN3fu3GR9\nzZo1yXqzKbpHRkaS9dR4AC+99FLyuSjG3dMTRWRa2fM/IenGzy17QNJGd79I0sbsPoAJpGn43f1l\nSZ8fSma+pJXZ7ZWSbqm4LwBtVvScv8fdR+eIOiCpp6J+AHRI6c/2u7unzuXNrF9Sf9ntAKhW0T3/\nQTPrlaTsd+67Pu6+3N373L2v4LYAtEHR8K+XtDi7vVjSs9W0A6BTmobfzJ6WtFnSN81sn5n9UNIv\nJF1vZu9Kui67D2ACaXrO7+6LckrXVtwL2qCvL3221ew6fjOrV69O1rmW3734hB8QFOEHgiL8QFCE\nHwiK8ANBEX4gKIbuPgWsW7cutzZnzpxS6161alWy/uCDD5ZaP+rDnh8IivADQRF+ICjCDwRF+IGg\nCD8QFOEHgmo6dHelG2Po7kJ6e3uT9ddeey23NnXq1ORzDx06lKxfc801yfqePXuSdXRelUN3AzgF\nEX4gKMIPBEX4gaAIPxAU4QeCIvxAUHyffwIYGBhI1ptdy0956qmnknWu45+62PMDQRF+ICjCDwRF\n+IGgCD8QFOEHgiL8QFBNr/Ob2QpJcyWNuPul2bKHJf1I0r+yhy119z+3q8lT3bx585L1WbNmFV73\niy++mKw/9NBDhdeNia2VPf8Tkm4cZ/lv3f2K7IfgAxNM0/C7+8uSDnegFwAdVOac/04z22FmK8zs\nnMo6AtARRcO/TNIMSVdIGpb067wHmlm/mQ2a2WDBbQFog0Lhd/eD7n7c3U9IekzS1YnHLnf3Pnfv\nK9okgOoVCr+ZjR1OdoGkN6ppB0CntHKp72lJ35H0VTPbJ+khSd8xsyskuaQhST9uY48A2qBp+N19\n0TiLH29DL6esZt+3X7p0abI+adKkwtvevn17sn706NHC68bExif8gKAIPxAU4QeCIvxAUIQfCIrw\nA0ExdHcHLFmyJFm/6qqrSq1/3bp1uTW+sos87PmBoAg/EBThB4Ii/EBQhB8IivADQRF+IChz985t\nzKxzG+sin376abJe5iu7kjRt2rTc2vDwcKl1Y+Jxd2vlcez5gaAIPxAU4QeCIvxAUIQfCIrwA0ER\nfiAovs9/CpgyZUpu7bPPPutgJ1/00Ucf5daa9dbs8w9nnXVWoZ4k6eyzz07W77777sLrbsXx48dz\na/fff3/yuZ988kklPbDnB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgml7nN7PzJK2S1CPJJS1399+Z\n2RRJqyVNlzQk6TZ3/7B9rSLPjh076m4h15o1a3JrzcYa6OnpSdZvv/32Qj11uwMHDiTrjzzySCXb\naWXPf0zSEnefKelbkn5iZjMlPSBpo7tfJGljdh/ABNE0/O4+7O6vZrePSHpL0rmS5ktamT1spaRb\n2tUkgOqd1Dm/mU2XdKWkrZJ63H30uO2AGqcFACaIlj/bb2ZnShqQdJe7f2z2v2HC3N3zxuczs35J\n/WUbBVCtlvb8ZjZJjeD/3t3XZosPmllvVu+VNDLec919ubv3uXtfFQ0DqEbT8FtjF/+4pLfc/Tdj\nSuslLc5uL5b0bPXtAWiXpkN3m9lsSZskvS7pRLZ4qRrn/X+S9HVJe9W41He4ybpCDt29du3aZH3+\n/Pkd6iSWY8eO5dZOnDiRW2vF+vXrk/XBwcHC6960aVOyvmXLlmS91aG7m57zu/s/JeWt7NpWNgKg\n+/AJPyAowg8ERfiBoAg/EBThB4Ii/EBQTNHdBe67775kvewU3imXXHJJst7Or82uWLEiWR8aGiq1\n/oGBgdzarl27Sq27mzFFN4Akwg8ERfiBoAg/EBThB4Ii/EBQhB8Iiuv8wCmG6/wAkgg/EBThB4Ii\n/EBQhB8IivADQRF+ICjCDwRF+
IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gqKbhN7PzzOwfZvamme00\ns59myx82s/1mtj37uan97QKoStPBPMysV1Kvu79qZl+R9IqkWyTdJumou/+q5Y0xmAfQdq0O5vHl\nFlY0LGk4u33EzN6SdG659gDU7aTO+c1suqQrJW3NFt1pZjvMbIWZnZPznH4zGzSzwVKdAqhUy2P4\nmdmZkl6S9Ii7rzWzHkmHJLmkn6txavCDJuvgsB9os1YP+1sKv5lNkvS8pA3u/ptx6tMlPe/ulzZZ\nD+EH2qyyATzNzCQ9LumtscHP3ggctUDSGyfbJID6tPJu/2xJmyS9LulEtnippEWSrlDjsH9I0o+z\nNwdT62LPD7RZpYf9VSH8QPsxbj+AJMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiB\noAg/EBThB4Ii/EBQTQfwrNghSXvH3P9qtqwbdWtv3dqXRG9FVdnbN1p9YEe/z/+FjZsNuntfbQ0k\ndGtv3dqXRG9F1dUbh/1AUIQfCKru8C+vefsp3dpbt/Yl0VtRtfRW6zk/gPrUvecHUJNawm9mN5rZ\n22a228weqKOHPGY2ZGavZzMP1zrFWDYN2oiZvTFm2RQz+5uZvZv9HneatJp664qZmxMzS9f62nXb\njNcdP+w3s9MkvSPpekn7JG2TtMjd3+xoIznMbEhSn7vXfk3YzL4t6aikVaOzIZnZLyUddvdfZP84\nz3H3+7ukt4d1kjM3t6m3vJmlv68aX7sqZ7yuQh17/qsl7Xb399z935L+KGl+DX10PXd/WdLhzy2e\nL2lldnulGn88HZfTW1dw92F3fzW7fUTS6MzStb52ib5qUUf4z5X0wZj7+9RdU367pL+a2Stm1l93\nM+PoGTMz0gFJPXU2M46mMzd30udmlu6a167IjNdV4w2/L5rt7rMkfU/ST7LD267kjXO2brpcs0zS\nDDWmcRuW9Os6m8lmlh6QdJe7fzy2VudrN05ftbxudYR/v6Tzxtyfli3rCu6+P/s9IukZNU5TusnB\n0UlSs98jNffzX+5+0N2Pu/sJSY+pxtcum1l6QNLv3X1ttrj21268vup63eoI/zZJF5nZ+WY2WdJC\nSetr6OMLzOyM7I0YmdkZkuao+2YfXi9pcXZ7saRna+zl/3TLzM15M0ur5teu62a8dveO/0i6SY13\n/PdI+lkdPeT0dYGk17KfnXX3JulpNQ4DP1PjvZEfSpoqaaOkdyX9XdKULurtSTVmc96hRtB6a+pt\nthqH9Dskbc9+bqr7tUv0Vcvrxif8gKB4ww8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFD/Abw9\nWv8QfFP9AAAAAElFTkSuQmCC\n",
372 | "text/plain": [
373 | ""
374 | ]
375 | },
376 | "metadata": {
377 | "tags": []
378 | }
379 | },
380 | {
381 | "output_type": "display_data",
382 | "data": {
383 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAADCRJREFUeJzt3X/oXfV9x/Hne1n6h2n/MKvGYMV0\nRaclYjK+iGCYHdXiRND8I1UYkcnSPxqwsD8m7o8JYyCydgz/KKQ0NJXOZkSDWqdtJ8N0MKpRM383\nOvmWJsREUahVpDN574/viXzV7z33m3vPvecm7+cDLt9zz+eee94c8srn/LrnE5mJpHr+oO8CJPXD\n8EtFGX6pKMMvFWX4paIMv1SU4ZeKMvxSUYZfKuoPp7myiPB2QmnCMjOW87mxev6IuCYifhURr0XE\n7eN8l6TpilHv7Y+IFcAB4GrgIPAUcFNmvtSyjD2/NGHT6PkvA17LzNcz8/fAj4Hrx/g+SVM0TvjP\nBX6z6P3BZt7HRMTWiNgXEfvGWJekjk38hF9mbge2g7v90iwZp+c/BJy36P0XmnmSTgHjhP8p4IKI\n+GJEfAb4OvBQN2VJmrSRd/sz88OI2Ab8FFgB7MjMFzurTNJEjXypb6SVecwvTdxUbvKRdOoy/FJR\nhl8qyvBLRRl+qSjDLxVl+KWiDL9UlOGXijL8UlGGXyrK8EtFGX6pKMMvFWX4paIMv1SU4ZeKMvxS\nUYZfKsrwS0UZfqmoqQ7RrXouvPDCgW2vvPJK67K33XZba/s999wzUk1aYM8vFWX4paIMv1SU4ZeK\nMvxSUYZfKsrwS0WNdZ0/IuaBd4FjwIeZOddFUTp9bNy4cWDb8ePHW5c9ePBg1+VokS5u8vnzzHyr\ng++RNEXu9ktFjRv+BH4WEU9HxNYuCpI0HePu9m/KzEMRcTbw84h4JTP3Lv5A85+C/zFIM2asnj8z\nDzV/jwJ7gMuW+Mz2zJzzZKA0W0YOf0SsiojPnZgGvga80FVhkiZrnN3+NcCeiDjxPf+amY91UpWk\niRs5/Jn5OnBph7XoNLRhw4aBbe+9917rsnv27Om6HC3ipT6pKMMvFWX4paIMv1SU4ZeKMvxSUT66\nW2NZv359a/u2bdsGtt17771dl6OTYM8vFWX4paIMv1SU4ZeKMvxSUYZfKsrwS0V5nV9jueiii1rb\nV61aNbBt165dXZejk2DPLxVl+KWiDL9UlOGXijL8UlGGXyrK8EtFRWZOb2UR01uZpuLJJ59sbT/r\nrLMGtg17FsCwR3traZkZy/mcPb9UlOGXijL8UlGGXyrK8EtFGX6pKMMvFTX09/wRsQO4Djiameub\neauBXcA6YB64MTPfmVyZ6su6deta2+fm5lrbDxw4MLDN6/j9Wk7P/wPgmk/Mux14PDMvAB5v3ks6\nhQwNf2buBd7+xOzrgZ3N9E7gho7rkjRhox7zr8nMw830G8CajuqRNCVjP8MvM7Ptnv2I2ApsHXc9\nkro1as9/JCLWAjR/jw76YGZuz8y5zGw/MyRpqkYN/0PAlmZ6C/BgN+VImpah4Y+I+4D/Bv4kIg5G\nxK3AXcDVEfEqcFXzXtIpZOgxf2beNKDpqx3Xohl05ZVXjrX8m2++2VEl6pp3+ElFGX6pKMMvFWX4\npaIMv1SU4ZeKcohutbrkkkvGWv7uu+/uqBJ1zZ5fKsrwS0UZfqkowy8VZfilogy/VJThl4pyiO7i\nLr/88tb2Rx55pLV9fn6+tf2KK64Y2PbBBx+0LqvROES3pFaGXyrK8EtFGX6pKMMvFWX4paIMv1SU\nv+cv7qqrrmptX716dWv7Y4891trutfzZZc8vFWX4paIMv1SU4ZeKMvxSUYZfKsrwS0UNvc4fETuA\n64Cjmbm+mXcn8NfAifGX78jMf59UkZqcSy+9tLV92PMedu/e3WU5mqLl9Pw/AK5ZYv4/Z+aG5mXw\npVPM0PBn5l7g7SnUImmKxjnm3xYRz0XEjog4s7OKJE3FqOH/LvAlYANwGPj2oA9GxNaI2BcR+0Zc\nl6QJGCn8mXkkM49l5nHge8BlLZ/dnplzmTk3apGSujdS+CNi7aK3m4EXuilH0rQs51LffcBXgM9H\nxEHg74GvRMQGIIF54BsTrFHSBPjc/tPcOeec09q+f//+1vZ33nmntf3iiy8+6Zo0WT63X1Irwy8V\nZfilogy/VJThl4oy/FJRPrr7NHfLLbe0tp999tmt7Y8++miH1WiW2PNLRRl+qSjDLxVl+KWiDL9U\nlOGXijL8UlFe5z/NnX/++WMtP+wnvTp12fNLRRl+qSjDLxVl+KWiDL9UlOGXijL8UlFe5z/NXXfd\ndWMt//DDD3dUiWaNPb9UlOGXijL8UlGGXyrK8EtFGX6pKMMvFTX0On9EnAf8EFgDJLA9M/8lIlYD\nu4B1wDxwY2b64+8ebNq0aWDbsCG6Vddyev4Pgb/JzC8DlwPfjIgvA7cDj2fmBcDjzXtJp4ih4c/M\nw5n5TDP9LvAycC5wPbCz+dhO4IZJFSmpeyd1zB8R64CNwC+BNZl5uGl6g4XDAkmniGXf2x8RnwXu\nB76Vmb+NiI/aMjMjIgcstxXYOm6hkrq1rJ4/IlayEPwfZeYDzewjEbG2aV8LHF1q2czcnplzmTnX\nRcGSujE0/LHQxX8feDkzv7Oo6SFgSzO9BXiw+/IkTcpydvuvAP4SeD4i9jfz7gDuAv4tIm4Ffg3c\nOJkSNczmzZsHtq1YsaJ12Weffba1fe/evSPVpNk3NPyZ+V9ADGj+arflSJoW7/CTijL8UlGGXyrK\n8EtFGX6pKMMvFeWju08BZ5xxRmv7tddeO/J37969u7X92LFjI3+3Zps9v1SU4ZeKMvxSUYZfKsrw\nS0UZfqkowy8VFZlLPn1rMisb8KgvtVu5cmVr+xNPPDGw7ejRJR+w9JGbb765tf39999vbdfsycxB\nP8H/GHt+qSjDLxVl+KWiDL9UlOGXijL8UlGGXyrK6/zSacbr/JJaGX6pKMMvFWX4paIMv1SU4ZeK\nMvxSUUPDHxHnRcR/RsRLEfFiRNzWzL8zIg5FxP7mNfrD4yVN3dCbfCJiLbA2M5+JiM8BTwM3ADcC\nv8vMf1r2yrzJR5q45d7kM3TEnsw8DBxupt+NiJeBc8crT1LfTuqYPyLWARuBXzaztkXEcxGxIyLO\nHLDM1ojYFxH7xqpUUqeWfW9/RHwWeAL4x8x8ICLWAG8BCfwDC4cGfzXkO9ztlyZsubv9ywp/RKwE\nfgL8NDO/s0T7OuAnmbl+yPcYfmnCOvthT0QE8H3g5cXBb04EnrAZeOFki5TUn+Wc7d8E/AJ4Hjje\nzL4DuAnYwMJu/zzwjebkYNt32fNLE9bpbn9XDL80ef6eX1Irwy8VZfilogy/VJThl4oy/FJRhl8q\nyvBLRRl+qSjDLxVl+KWiDL9Ul
OGXijL8UlFDH+DZsbeAXy96//lm3iya1dpmtS6wtlF1Wdv5y/3g\nVH/P/6mVR+zLzLneCmgxq7XNal1gbaPqqzZ3+6WiDL9UVN/h397z+tvMam2zWhdY26h6qa3XY35J\n/em755fUk17CHxHXRMSvIuK1iLi9jxoGiYj5iHi+GXm41yHGmmHQjkbEC4vmrY6In0fEq83fJYdJ\n66m2mRi5uWVk6V633ayNeD313f6IWAEcAK4GDgJPATdl5ktTLWSAiJgH5jKz92vCEfFnwO+AH54Y\nDSki7gbezsy7mv84z8zMv52R2u7kJEdunlBtg0aWvoUet12XI153oY+e/zLgtcx8PTN/D/wYuL6H\nOmZeZu4F3v7E7OuBnc30Thb+8UzdgNpmQmYezsxnmul3gRMjS/e67Vrq6kUf4T8X+M2i9weZrSG/\nE/hZRDwdEVv7LmYJaxaNjPQGsKbPYpYwdOTmafrEyNIzs+1GGfG6a57w+7RNmfmnwF8A32x2b2dS\nLhyzzdLlmu8CX2JhGLfDwLf7LKYZWfp+4FuZ+dvFbX1uuyXq6mW79RH+Q8B5i95/oZk3EzLzUPP3\nKLCHhcOUWXLkxCCpzd+jPdfzkcw8kpnHMvM48D163HbNyNL3Az/KzAea2b1vu6Xq6mu79RH+p4AL\nIuKLEfEZ4OvAQz3U8SkRsao5EUNErAK+xuyNPvwQsKWZ3gI82GMtHzMrIzcPGlmanrfdzI14nZlT\nfwHXsnDG/3+Bv+ujhgF1/THwP83rxb5rA+5jYTfw/1g4N3Ir8EfA48CrwH8Aq2eotntZGM35ORaC\ntran2jaxsEv/HLC/eV3b97ZrqauX7eYdflJRnvCTijL8UlGGXyrK8EtFGX6pKMMvFWX4paIMv1TU\n/wNRj+er2ohshAAAAABJRU5ErkJggg==\n",
384 | "text/plain": [
385 | ""
386 | ]
387 | },
388 | "metadata": {
389 | "tags": []
390 | }
391 | },
392 | {
393 | "output_type": "display_data",
394 | "data": {
395 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAADbdJREFUeJzt3W+MFPUdx/HPF2qfYB9ouRL8U7DF\nYIhJpTmxDwi2thowGvCBijGGRtNDg2KTPqiBxGKaJo22NE0kkGskPRtrbYLGCyGVlphSE9J4mPrv\nrv7NQSEniDQqIaYI3z7YufaU298suzM7c3zfr+Ryu/Pdnf068rmZ3d/M/szdBSCeaVU3AKAahB8I\nivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFBf6OaLmRmnEwIlc3dr5XEd7fnNbKmZvWFmb5vZA52s\nC0B3Wbvn9pvZdElvSrpW0gFJL0q6zd2HE89hzw+UrBt7/kWS3nb3d939P5L+IGl5B+sD0EWdhP9C\nSf+acP9AtuwzzKzPzIbMbKiD1wJQsNI/8HP3fkn9Eof9QJ10suc/KOniCfcvypYBmAI6Cf+Lki41\ns0vM7IuSVkoaLKYtAGVr+7Df3T81s3slPSdpuqSt7v56YZ0BKFXbQ31tvRjv+YHSdeUkHwBTF+EH\ngiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EBTh\nB4Ii/EBQhB8IivADQXV1im5034wZM5L1Rx55JFlfvXp1sr53795k/eabb25a27dvX/K5KBd7fiAo\nwg8ERfiBoAg/EBThB4Ii/EBQhB8IqqNZes1sVNLHkk5K+tTde3Mezyy9XTZv3rxkfWRkpKP1T5uW\n3n+sXbu2aW3Tpk0dvTYm1+osvUWc5PMddz9SwHoAdBGH/UBQnYbfJe00s71m1ldEQwC6o9PD/sXu\nftDMviLpz2b2T3ffPfEB2R8F/jAANdPRnt/dD2a/D0t6RtKiSR7T7+69eR8GAuiutsNvZjPM7Evj\ntyVdJ+m1ohoDUK5ODvtnSXrGzMbX83t3/1MhXQEoXdvhd/d3JX2jwF7Qpp6enqa1gYGBLnaCqYSh\nPiAowg8ERfiBoAg/EBThB4Ii/EBQfHX3FJC6LFaSVqxY0bS2aNFpJ1121ZIlS5rW8i4Hfvnll5P1\n3bt3J+tIY88PBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0F19NXdZ/xifHV3W06ePJmsnzp1qkudnC5v\nrL6T3vKm8L711luT9bzpw89WrX51N3t+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiKcf4a2LFjR7K+\nbNmyZL3Kcf4PPvggWT927FjT2pw5c4pu5zOmT59e6vrrinF+AEmEHwiK8ANBEX4gKMIPBEX4gaAI\nPxBU7vf2m9lWSTdIOuzul2fLzpf0lKS5kkYl3eLu/y6vzant6quvTtbnz5+frOeN45c5zr9ly5Zk\nfefOncn6hx9+2LR2zTXXJJ+7fv36ZD3PPffc07S2efPmjtZ9Nmhlz/9bSUs/t+wBSbvc/VJJu7L7\nAKaQ3PC7+25JRz+3eLmkgez2gKTmU8YAqKV23/PPcvex7PZ7kmYV1A+ALul4rj5399Q5+2bWJ6mv\n09cBUKx29/yHzGy2JGW/Dzd7oLv3u3uvu/e2+VoAStBu+Aclrcpur5L0bDHtAOiW3PCb2ZOS9kia\nb2YHzOwuST+XdK2ZvSXpe9l9AFMI1/MXYO7cucn6nj17kvWZM2cm6518N37ed99v27YtWX/ooYeS\n9ePHjyfrKXnX8+dtt56enmT9k08+aVp78MEHk8999NFHk/UTJ04k61Xien4ASYQfCIrwA0ERfiAo\nwg8ERfiBoBjqK8C8efOS9ZGRkY7WnzfU9/zzzzetrVy5MvncI0eOtNVTN9x3333J+saNG5P11HbL\nuwz6sssuS9bfeeedZL1KDPUBSCL8QFCEHwiK8ANBEX4gKMIPBEX4gaA6/hovlG9oaChZv/POO5vW\n6jyOn2dwcDBZv/3225P1K6+8ssh2zjrs+YGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMb5uyDvevw8\nV111VUGdTC1m6cvS87ZrJ9t9w4YNyfodd9zR9rrrgj0/EBThB4Ii/EBQhB8IivADQRF+ICjCDwSV\nO85vZlsl3SDpsLtfni3bIOkHkt7PHrbO3XeU1WTd3X333cl63nfEY3I33nhjsr5w4cJkPbXd8/6f\n5I3znw1a2fP/VtLSSZb/yt2vyH7CBh+YqnLD7+67JR3tQi8AuqiT9/z3mtkrZrbVzM4rrCMAXdFu\n+DdL+rqkKySNSfplsweaWZ+ZDZlZ+ovoAHRVW+F390PuftLdT0n6jaRFicf2u3uvu/e22ySA4rUV\nfjObPeHuTZJeK6YdAN3SylDfk5K+LWmmmR2Q9BNJ3zazKyS5pFFJq0vsEUAJcsPv7rdNsvixEnqZ\nsvLGoyPr6elpWluwYEHyuevWrSu6nf95//33k/UTJ06U9tp1wRl+QFCEHwiK8ANBEX4gKMIPBEX4\ngaD46m6Uav369U1ra9asKfW1R0dHm9ZWrVqVfO7+/fsL7qZ+2PMDQRF+ICjCDwRF+IGgCD8QFOEH\ngiL8QFCM86MjO3akv7h5/vz5XerkdMPDw01rL7zwQhc7qSf2/EBQhB8IivADQRF+ICjCDwRF+IGg\nCD8QFOP8BTCzZH3atM7+xi5btqzt5/b39yfrF1xwQdvrlvL/26qcnpyvVE9jzw8ERfiBoAg/EBTh\nB4Ii/EBQhB8IivADQeWO85vZxZIelzRLkkvqd/dfm9n5kp6SNFfSqKRb3P3f5bVaX5s3b07WH374\n4Y7Wv3379mS9k7H0ssfhy1z/li1bSlt3BK3s+T+V9CN3XyDpW5LWmNkCSQ9I2uXul0rald0HMEXk\nht/dx9z9pez2x5JGJF0oabmkgexhA5JWlNUkgOKd0Xt+M5sraaGkv0ua5e5jWek9Nd4WAJgiWj63\n38zOlbRN0g/d/aOJ57O7u5uZN3len6S+ThsFUKyW9vxmdo4awX/C3Z/OFh8ys9lZfbakw5M91937\n3b3X3XuLaBhAMXLDb41d/GOSRtx944TSoKTxqU5XSXq2+PYAlMXcJz1a//8DzBZL+pukVyWNj9us\nU+N9/x8lfVXSPjWG+o7mrCv9YlPUnDlzkvU9e/Yk6z09Pcl6nS+bzevt0KFDTWsjIyPJ5/b1pd8t\njo2NJevHjx9P1s9W7p6+xjyT+57f3V+Q1Gxl3z2TpgDUB2f4AUERfiAowg8ERfiBoAg/EBThB4LK\nHecv9MXO0nH+PEuWLEnWV6xIXxN1//33J+t1Hudfu3Zt09qmTZuKbgdqfZyfPT8QFOEHgiL8QFCE\nHwiK8ANBEX4gKMIPBMU4/xSwd
OnSZD113XveNNWDg4PJet4U33nTkw8PDzet7d+/P/lctIdxfgBJ\nhB8IivADQRF+ICjCDwRF+IGgCD8QFOP8wFmGcX4ASYQfCIrwA0ERfiAowg8ERfiBoAg/EFRu+M3s\nYjN73syGzex1M7s/W77BzA6a2T+yn+vLbxdAUXJP8jGz2ZJmu/tLZvYlSXslrZB0i6Rj7v6Lll+M\nk3yA0rV6ks8XWljRmKSx7PbHZjYi6cLO2gNQtTN6z29mcyUtlPT3bNG9ZvaKmW01s/OaPKfPzIbM\nbKijTgEUquVz+83sXEl/lfQzd3/azGZJOiLJJf1UjbcGd+asg8N+oGStHva3FH4zO0fSdknPufvG\nSepzJW1398tz1kP4gZIVdmGPNb6e9TFJIxODn30QOO4mSa+daZMAqtPKp/2LJf1N0quSxueCXifp\nNklXqHHYPyppdfbhYGpd7PmBkhV62F8Uwg+Uj+v5ASQRfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8I\nivADQRF+ICjCDwRF+IGgCD8QFOEHgsr9As+CHZG0b8L9mdmyOqprb3XtS6K3dhXZ25xWH9jV6/lP\ne3GzIXfvrayBhLr2Vte+JHprV1W9cdgPBEX4gaCqDn9/xa+fUtfe6tqXRG/tqqS3St/zA6hO1Xt+\nABWpJPxmttTM3jCzt83sgSp6aMbMRs3s1Wzm4UqnGMumQTtsZq9NWHa+mf3ZzN7Kfk86TVpFvdVi\n5ubEzNKVbru6zXjd9cN+M5su6U1J10o6IOlFSbe5+3BXG2nCzEYl9bp75WPCZrZE0jFJj4/PhmRm\nD0s66u4/z/5wnufuP65Jbxt0hjM3l9Rbs5mlv68Kt12RM14XoYo9/yJJb7v7u+7+H0l/kLS8gj5q\nz913Szr6ucXLJQ1ktwfU+MfTdU16qwV3H3P3l7LbH0san1m60m2X6KsSVYT/Qkn/mnD/gOo15bdL\n2mlme82sr+pmJjFrwsxI70maVWUzk8idubmbPjezdG22XTszXheND/xOt9jdvylpmaQ12eFtLXnj\nPVudhms2S/q6GtO4jUn6ZZXNZDNLb5P0Q3f/aGKtym03SV+VbLcqwn9Q0sUT7l+ULasFdz+Y/T4s\n6Rk13qbUyaHxSVKz34cr7ud/3P2Qu59091OSfqMKt102s/Q2SU+4+9PZ4sq33WR9VbXdqgj/i5Iu\nNbNLzOyLklZKGqygj9OY2YzsgxiZ2QxJ16l+sw8PSlqV3V4l6dkKe/mMuszc3GxmaVW87Wo347W7\nd/1H0vVqfOL/jqT1VfTQpK+vSXo5+3m96t4kPanGYeAJNT4buUvSlyXtkvSWpL9IOr9Gvf1Ojdmc\nX1EjaLMr6m2xGof0r0j6R/ZzfdXbLtFXJduNM/yAoPjADwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeC\nIvxAUP8FAfaK+yOWZZUAAAAASUVORK5CYII=\n",
396 | "text/plain": [
397 | ""
398 | ]
399 | },
400 | "metadata": {
401 | "tags": []
402 | }
403 | },
404 | {
405 | "output_type": "display_data",
406 | "data": {
407 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAADXVJREFUeJzt3W+oXPWdx/HPZ00bMQ2Su8FwScPe\nGmUlBDfViygb1krXmI2VWPxDQliyKr19UGGL+2BFhRV1QWSbpU8MpBgal27aRSOGWvpnQ1xXWEpu\nJKvRu60xpCQh5o9paCKBau53H9wTuSZ3ztzMnJkzc7/vF1zuzPmeM/PlJJ/7O2fOzPwcEQKQz5/U\n3QCAehB+ICnCDyRF+IGkCD+QFOEHkiL8QFKEH0iK8ANJzermk9nm7YRAh0WEp7NeWyO/7ZW2f2N7\nn+1H23ksAN3lVt/bb/sySb+VdLukQ5J2SVobEe+VbMPID3RYN0b+myTti4j9EfFHST+WtLqNxwPQ\nRe2Ef6Gkg5PuHyqWfY7tEdujtkfbeC4AFev4C34RsUnSJonDfqCXtDPyH5a0aNL9LxfLAPSBdsK/\nS9K1tr9i+4uS1kjaXk1bADqt5cP+iPjU9sOSfiHpMkmbI+LdyjoD0FEtX+pr6ck45wc6ritv8gHQ\nvwg/kBThB5Ii/EBShB9IivADSRF+ICnCDyRF+IGkCD+QFOEHkiL8QFKEH0iK8ANJEX4gKcIPJEX4\ngaQIP5AU4QeSIvxAUoQfSIrwA0kRfiApwg8kRfiBpAg/kBThB5Ii/EBShB9IquUpuiXJ9gFJpyWd\nk/RpRAxX0RQ+74Ybbiitb9u2rWFtaGio4m56x4oVK0rrY2NjDWsHDx6sup2+01b4C7dFxIkKHgdA\nF3HYDyTVbvhD0i9t77Y9UkVDALqj3cP+5RFx2PZVkn5l+/8i4o3JKxR/FPjDAPSYtkb+iDhc/D4m\n6RVJN02xzqaIGObFQKC3tBx+23Nszz1/W9IKSXuragxAZ7Vz2L9A0iu2zz/Ov0fEzyvpCkDHtRz+\niNgv6S8q7AUN3HHHHaX12bNnd6mT3nLXXXeV1h988MGGtTVr1lTdTt/hUh+QFOEHkiL8QFKEH0iK\n8ANJEX4gqSo+1Yc2zZpV/s+watWqLnXSX3bv3l1af+SRRxrW5syZU7rtxx9/3FJP/YSRH0iK8ANJ\nEX4gKcIPJEX4gaQIP5AU4QeS4jp/D7jttttK67fccktp/bnnnquynb4xb9680vqSJUsa1q644orS\nbbnOD2DGIvxAUoQfSIrwA0kRfiApwg8kRfiBpBwR3Xsyu3tP1kOWLl1aWn/99ddL6x999FFp/cYb\nb2xYO3PmTOm2/azZflu+fHnD2uDgYOm2x48fb6WlnhARns56jPxAUoQfSIrwA0kRfiApwg8kRfiB\npAg/kFTTz/Pb3izpG5KORcTSYtmApJ9IGpJ0QNL9EfH7zrXZ35544onSerPvkF+5cmVpfaZeyx8Y\nGCit33rrraX18fHxKtuZcaYz8v9Q0oX/+x6VtCMirpW0o7gPoI80DX9EvCHp5AWLV0vaUtzeIunu\nivsC0GGtnvMviIgjxe0PJS2oqB8AXdL2d/hFRJS9Z9/2iKSRdp8HQLVaHfmP2h6UpOL3sUYrRsSm\niBiOiOEWnwtAB7Qa/u2S1he310t6tZp2AHRL0/Db3irpfyT9ue1Dth+S9Kyk222/L+mvi/sA+kjT\nc/6IWNug9PWKe+lb9957b2l91apVpfV9+/aV1kdHRy+5p5ng8ccfL603u45f9nn/U6dOtdLSjMI7\n/ICkCD+QFOEHkiL8QFKEH0iK8ANJMUV3Be67777SerPpoJ9//vkq2+kbQ0NDpfV169aV1s+dO1da\nf+aZZxrWPvnkk9JtM2DkB5Ii/EBShB9IivADSRF+ICnCDyRF+IGkuM4/TVdeeWXD2s0339zWY2/c\nuLGt7fvVyEj5t7vNnz+/tD42NlZa37lz5yX3lAkjP5AU4QeSIvxAUoQfSIrwA0kRfiApwg8kxXX+\naZo9e3bD2sKFC0u33bp1a9XtzAiLFy9ua/u9e/dW1ElOjPxAUoQfSIrwA0kRfiApwg8kRfiBpAg/\nkFTT6/y2N0v6hqRjEbG0WPakpG9JOl6s9lhE/KxTTfaC06dPN6zt2bOndNvrr7++tD4wMFBaP3ny\nZGm9l1111VUNa82mNm/mzTffbGv77KYz8v9Q0soplv9rRCwrfmZ08IGZqGn4I+INSf079ACYUjvn\n/A/bftv2ZtvzKusIQFe0Gv6NkhZLWibpiKTvNVrR9ojtUdujLT4XgA5oKfwRcTQizkXEuKQfSLqp\nZN1NETEcEcOtNgmgei2F3/bgpLvflMTHq4A+M51LfVslfU3SfNuHJP2TpK/ZXiYpJB2Q9O0O9gig\nA5qGPyLWTrH4hQ700tPOnj3bsPbBBx+UbnvPPfeU1l977bXS+oYNG0rrnbR06dLS+tVXX11aHxoa\naliLiFZa+sz4+Hhb22fHO/yApAg/kBThB5Ii/EBShB9IivADSbndyy2X9GR2956si6677rrS+lNP\nPVVav/POO0vrZV8b3mknTpworTf7/1M2zbbtlno6b+7cuaX1ssuzM1lETGvHMvIDSRF+ICnCDyRF\n+IGkCD+QFOEHkiL8QFJc5+8By5YtK61fc801XerkYi+99FJb22/ZsqVhbd26dW099qxZzDA/Fa7z\nAyhF+IGkCD+QFOEHkiL8QFKEH0iK8ANJcaG0BzSb4rtZvZft37+/Y4/d7GvF9+5lLpkyjPxAUoQf\nSIrwA0kRfiApwg8kRfiBpAg/kFTT6/y2F0l6UdICSSFpU0R83/aApJ9IGpJ0QNL9EfH7zrWKflT2\n3fztfm8/1/HbM52R/1NJ/xARSyTdLOk7tpdIelTSjoi4VtKO4j6APtE0/BFxJCLeKm6fljQmaaGk\n1ZLOf03LFkl3d6pJANW7pHN+20OSvirp15IWRMSRovShJk4LAPSJab+33/aXJL0s6bsR8YfJ52sR\nEY2+n8/2iKSRdhsFUK1pjfy2v6CJ4P8oIrYVi4/aHizqg5KOTbVtRGyKiOGIGK6iYQDVaBp+Twzx\nL0gai4gNk0rbJa0vbq+X9Gr17QHolOkc9v+lpL+V9I7t858tfUzSs5L+w/ZDkn4n6f7OtIh+VvbV\n8N382nhcrGn4I+JNSY0uyH692nYAdAvv8AOSIvxAUoQfSIrwA0kRfiApwg8kxVd3o6Muv/zylrc9\ne/ZshZ3gQoz8QFKEH0iK8ANJEX4gKcIPJEX4gaQIP5AU1/nRUQ888EDD2qlTp0q3ffrpp6tuB5Mw\n8gNJEX4gKcIPJEX4gaQIP5AU4QeSIvxAUlznR0ft2rWrYW3Dhg0Na5K0c+fOqtvBJIz8QFKEH0iK\n8ANJEX4gKcIPJEX4gaQIP5CUm
82RbnuRpBclLZAUkjZFxPdtPynpW5KOF6s+FhE/a/JYTMgOdFhE\neDrrTSf8g5IGI+It23Ml7ZZ0t6T7JZ2JiH+ZblOEH+i86Ya/6Tv8IuKIpCPF7dO2xyQtbK89AHW7\npHN+20OSvirp18Wih22/bXuz7XkNthmxPWp7tK1OAVSq6WH/ZyvaX5L0X5L+OSK22V4g6YQmXgd4\nWhOnBg82eQwO+4EOq+ycX5Jsf0HSTyX9IiIu+jRGcUTw04hY2uRxCD/QYdMNf9PDftuW9IKkscnB\nL14IPO+bkvZeapMA6jOdV/uXS/pvSe9IGi8WPyZpraRlmjjsPyDp28WLg2WPxcgPdFilh/1VIfxA\n51V22A9gZiL8QFKEH0iK8ANJEX4gKcIPJEX4gaQIP5AU4QeSIvxAUoQfSIrwA0kRfiApwg8k1e0p\nuk9I+t2k+/OLZb2oV3vr1b4kemtVlb392XRX7Orn+S96cns0IoZra6BEr/bWq31J9NaqunrjsB9I\nivADSdUd/k01P3+ZXu2tV/uS6K1VtfRW6zk/gPrUPfIDqEkt4be90vZvbO+z/WgdPTRi+4Dtd2zv\nqXuKsWIatGO2905aNmD7V7bfL35POU1aTb09aftwse/22F5VU2+LbO+0/Z7td23/fbG81n1X0lct\n+63rh/22L5P0W0m3SzokaZektRHxXlcbacD2AUnDEVH7NWHbfyXpjKQXz8+GZPs5SScj4tniD+e8\niPjHHuntSV3izM0d6q3RzNJ/pxr3XZUzXlehjpH/Jkn7ImJ/RPxR0o8lra6hj54XEW9IOnnB4tWS\nthS3t2jiP0/XNeitJ0TEkYh4q7h9WtL5maVr3XclfdWijvAvlHRw0v1D6q0pv0PSL23vtj1SdzNT\nWDBpZqQPJS2os5kpNJ25uZsumFm6Z/ZdKzNeV40X/C62PCJukPQ3kr5THN72pJg4Z+ulyzUbJS3W\nxDRuRyR9r85mipmlX5b03Yj4w+Ranftuir5q2W91hP+wpEWT7n+5WNYTIuJw8fuYpFc0cZrSS46e\nnyS1+H2s5n4+ExFHI+JcRIxL+oFq3HfFzNIvS/pRRGwrFte+76bqq679Vkf4d0m61vZXbH9R0hpJ\n22vo4yK25xQvxMj2HEkr1HuzD2+XtL64vV7SqzX28jm9MnNzo5mlVfO+67kZryOi6z+SVmniFf8P\nJD1eRw8N+rpa0v8WP+/W3ZukrZo4DPxEE6+NPCTpTyXtkPS+pP+UNNBDvf2bJmZzflsTQRusqbfl\nmjikf1vSnuJnVd37rqSvWvYb7/ADkuIFPyApwg8kRfiBpAg/kBThB5Ii/EBShB9IivADSf0/fhI1\nni26LDgAAAAASUVORK5CYII=\n",
408 | "text/plain": [
409 | ""
410 | ]
411 | },
412 | "metadata": {
413 | "tags": []
414 | }
415 | }
416 | ]
417 | }
418 | ]
419 | }
--------------------------------------------------------------------------------
/NBA_Basketball_Exploration/basketball_data_exploration.py:
--------------------------------------------------------------------------------
1 |
2 | # This code explores NBA players from the 2013 - 2014 basketball season and uses
3 | # a machine learning algorithm called KMeans to group them into clusters; this
4 | # shows which players are most similar to one another
5 |
6 | #Stat Glossaries: https://www.basketball-reference.com/about/glossary.html
7 | # https://stats.nba.com/help/glossary/#fta
8 |
9 | #Resource: https://www.dataquest.io/blog/python-vs-r/
10 |
11 | #import the dependencies
12 | import pandas as pd
13 | import seaborn as sns
14 | import matplotlib.pyplot as plt
15 |
16 | #load the data
17 | #from google.colab import files #Only use for Google Colab
18 | #uploaded = files.upload() #Only use for Google Colab
19 | nba = pd.read_csv('nba_2013.csv')# the nba_2013.csv data contains data on NBA players from 2013 - 2014 season
20 | nba.head(7) # Show the first 7 rows of data, i.e. the first 7 players
21 |
22 | #Get the number of rows and columns (481 rows or players , and 31 columns containing data on the players)
23 | nba.shape
24 |
25 | # Find the average value for each numeric column / feature
26 | nba.mean() # The columns have names like 'fg' (field goals made) and 'ast' (assists); here is a glossary of all of the stats https://stats.nba.com/help/glossary/
27 |
28 | #Get the mean / average of specific columns
29 | # mean of the specific column
30 | nba.loc[:,"fg"].mean()
31 |
32 | #Make pairwise scatter plots
33 | # This is a common way to explore a data set to see how different columns correlate
34 | # to others; we'll compare ast (assists), fg (field goals), and trb (total rebounds)
35 | #NOTE: pairplot of all columns ==> sns.pairplot(nba)
36 | sns.pairplot(nba[["ast", "fg", "trb"]])
37 | plt.show()
38 |
39 | #Make Heat Maps to see correlations
40 | #Note: Heat Map of all columns ==> sns.heatmap(nba.corr())
41 | #Note: Heat Map of all columns with annotation/numbers ==> sns.heatmap(nba.corr(), annot=True)
42 | #Heat Map of the columns ast (assists), fg (field goals), trb (total rebounds) with annotation
43 | correlation = nba[["ast", "fg", "trb"]].corr()
44 | sns.heatmap(correlation, annot=True)
45 |
46 | # Make clusters of the players using a machine learning model called kMeans
47 | #One good way to explore this kind of data is to generate cluster plots.
48 | #This will show which players are most similar.
49 |
50 | from sklearn.cluster import KMeans
51 | kmeans_model = KMeans(n_clusters=5, random_state=1)# Create a 5 cluster kmeans model
52 | good_columns = nba._get_numeric_data().dropna(axis=1)#remove any non-numeric columns, or columns with missing values (NA, Nan, etc).
53 | kmeans_model.fit(good_columns)# Train the model
54 | labels = kmeans_model.labels_ # Get the labels or (cluster label for each player)
55 | labels
56 |
57 | #Plot players by cluster
58 | #We can now plot out the players by cluster to discover patterns.
59 | #One way to do this is to first use PCA to make our data 2-dimensional,
60 | #then plot it, and shade each point according to cluster association
61 |
62 | #NOTE: PCA Principal component analysis (PCA) is a statistical procedure that uses an orthogonal transformation to convert a set of
63 | #observations of possibly correlated variables (entities each of which takes on various numerical values) into a set of values of linearly
64 | #uncorrelated variables called principal components. A Dimensionality Reducing Algorithm
65 |
66 | from sklearn.decomposition import PCA
67 | pca_2 = PCA(2)
68 | plot_columns = pca_2.fit_transform(good_columns)
69 | plt.scatter(x=plot_columns[:,0], y=plot_columns[:,1], c=labels)
70 | plt.show()
71 |
72 | #Show plot points
73 | plot_columns
74 |
75 | #Find the 'good columns' data for the player LeBron James
76 | #again by 'good columns' I mean with only numeric values, and no missing values (NA, Nan, etc).
77 |
78 | # Find player LeBron
79 | LeBron = good_columns.loc[ nba['player'] == 'LeBron James',: ]
80 |
81 | #Find player Durant
82 | Durant = good_columns.loc[ nba['player'] == 'Kevin Durant',: ]
83 |
84 | #print the players
85 | print(LeBron)
86 | print(Durant)
87 |
88 | #Convert the LeBron and Durant DataFrames to lists so the kmeans model can be used to predict their groups
89 | Lebron_list = LeBron.values.tolist()
90 | Durant_list = Durant.values.tolist()
91 |
92 | #Predict which group/cluster LeBron James and Kevin Durant belong to
93 | LeBron_Cluster_Label = kmeans_model.predict(Lebron_list)
94 | Durant_Cluster_Label = kmeans_model.predict(Durant_list)
95 |
96 | print(LeBron_Cluster_Label)
97 | print(Durant_Cluster_Label)
98 |
99 | # Look at all of the column correlations
100 | nba.corr() #Note there is a positive correlation between minutes played (mp) and points (pts)
101 |
102 | # Let’s say we want to predict the number of assists per player from the field goals made per player.
103 | #Split the data into 80% training and 20% testing
104 | from sklearn.model_selection import train_test_split
105 | x_train, x_test, y_train, y_test = train_test_split(nba[['fg']], nba[['ast']], test_size=0.2, random_state=42)
106 |
107 | #Create the Linear Regression Model
108 | from sklearn.linear_model import LinearRegression
109 | lr = LinearRegression() # Create the model
110 | lr.fit(x_train, y_train) #Train the model
111 | predictions = lr.predict(x_test) #Make predictions on the test data
112 |
113 | print(predictions)
114 | print(y_test)
115 |
116 | # Testing Model: Score returns the coefficient of determination R^2 of the prediction.
117 | # The best possible score is 1.0 (58.78% of the variance for assists is explained by the field goals players made)
118 | lr_confidence = lr.score(x_test, y_test)
119 | print("lr confidence: ", lr_confidence)
120 |
121 | # mean squared error which tells you how close a regression line is to a set of points.
122 | from sklearn.metrics import mean_squared_error
123 | print("Mean Squared Error (MSE): ",mean_squared_error(y_test, predictions))
124 |
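125 | # For reference, MSE = (1/n) * sum((y_i - y_hat_i)^2); taking its square root gives the RMSE,
126 | # which is in the same units as the target (assists) and so is easier to interpret:
127 | print("Root Mean Squared Error (RMSE): ", mean_squared_error(y_test, predictions) ** 0.5)
128 |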
--------------------------------------------------------------------------------
/Predict_Boston_Housing_Price.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Predict Boston Housing Price.ipynb",
7 | "version": "0.3.2",
8 | "provenance": [],
9 | "collapsed_sections": [],
10 | "include_colab_link": true
11 | },
12 | "kernelspec": {
13 | "name": "python3",
14 | "display_name": "Python 3"
15 | }
16 | },
17 | "cells": [
18 | {
19 | "cell_type": "markdown",
20 | "metadata": {
21 | "id": "view-in-github",
22 | "colab_type": "text"
23 | },
24 | "source": [
25 | " "
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {
31 | "id": "8g4rFjz83o6J",
32 | "colab_type": "text"
33 | },
34 | "source": [
35 | "# Predict Boston Housing Prices\n",
36 | "\n",
37 | "This python program predicts the price of houses in Boston using a machine learning algorithm called a Linear Regression.\n",
38 | "\n",
39 | "\n",
40 | " \n",
41 | "
\n",
42 | "\n",
43 | "\n",
44 | "# Linear Regression\n",
45 | "Linear regression is a linear approach to modeling the relationship between a scalar response (or dependent variable) and one or more explanatory variables (or independent variables).\n",
46 | "\n",
47 | "## Pros:\n",
48 | "1. Simple to implement.\n",
49 | "2. Used to predict numeric values.\n",
50 | "\n",
51 | "## Cons:\n",
52 | "1. Prone to overfitting.\n",
53 | "2. Cannot be used when the relation between independent and dependent variable are non linear.\n",
54 | "\n",
55 | "\n",
56 | "##Resources:\n",
57 | "\n",
58 | "* https://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_boston.html\n",
59 | "* https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LinearRegression.html\n",
60 | "* https://youtu.be/gOXoFDrseis\n",
61 | "\n",
62 | "\n",
63 | "\n"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "metadata": {
69 | "id": "SpnJzKHkswHf",
70 | "colab_type": "code",
71 | "colab": {}
72 | },
73 | "source": [
74 | "import pandas as pd\n",
75 | "import numpy as np\n",
76 | "from sklearn import linear_model\n",
77 | "from sklearn.model_selection import train_test_split\n"
78 | ],
79 | "execution_count": 0,
80 | "outputs": []
81 | },
82 | {
83 | "cell_type": "code",
84 | "metadata": {
85 | "id": "GS7O0-iAu7pO",
86 | "colab_type": "code",
87 | "outputId": "ca2223e3-9c64-4dc2-bc46-351e16d45055",
88 | "colab": {
89 | "base_uri": "https://localhost:8080/",
90 | "height": 1040
91 | }
92 | },
93 | "source": [
94 | "#Load the Boston Housing Data Set from sklearn.datasets and print it\n",
95 | "from sklearn.datasets import load_boston\n",
96 | "boston = load_boston()\n",
97 | "print(boston)"
98 | ],
99 | "execution_count": 3,
100 | "outputs": [
101 | {
102 | "output_type": "stream",
103 | "text": [
104 | "{'data': array([[6.3200e-03, 1.8000e+01, 2.3100e+00, ..., 1.5300e+01, 3.9690e+02,\n",
105 | " 4.9800e+00],\n",
106 | " [2.7310e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9690e+02,\n",
107 | " 9.1400e+00],\n",
108 | " [2.7290e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9283e+02,\n",
109 | " 4.0300e+00],\n",
110 | " ...,\n",
111 | " [6.0760e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,\n",
112 | " 5.6400e+00],\n",
113 | " [1.0959e-01, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9345e+02,\n",
114 | " 6.4800e+00],\n",
115 | " [4.7410e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,\n",
116 | " 7.8800e+00]]), 'target': array([24. , 21.6, 34.7, 33.4, 36.2, 28.7, 22.9, 27.1, 16.5, 18.9, 15. ,\n",
117 | " 18.9, 21.7, 20.4, 18.2, 19.9, 23.1, 17.5, 20.2, 18.2, 13.6, 19.6,\n",
118 | " 15.2, 14.5, 15.6, 13.9, 16.6, 14.8, 18.4, 21. , 12.7, 14.5, 13.2,\n",
119 | " 13.1, 13.5, 18.9, 20. , 21. , 24.7, 30.8, 34.9, 26.6, 25.3, 24.7,\n",
120 | " 21.2, 19.3, 20. , 16.6, 14.4, 19.4, 19.7, 20.5, 25. , 23.4, 18.9,\n",
121 | " 35.4, 24.7, 31.6, 23.3, 19.6, 18.7, 16. , 22.2, 25. , 33. , 23.5,\n",
122 | " 19.4, 22. , 17.4, 20.9, 24.2, 21.7, 22.8, 23.4, 24.1, 21.4, 20. ,\n",
123 | " 20.8, 21.2, 20.3, 28. , 23.9, 24.8, 22.9, 23.9, 26.6, 22.5, 22.2,\n",
124 | " 23.6, 28.7, 22.6, 22. , 22.9, 25. , 20.6, 28.4, 21.4, 38.7, 43.8,\n",
125 | " 33.2, 27.5, 26.5, 18.6, 19.3, 20.1, 19.5, 19.5, 20.4, 19.8, 19.4,\n",
126 | " 21.7, 22.8, 18.8, 18.7, 18.5, 18.3, 21.2, 19.2, 20.4, 19.3, 22. ,\n",
127 | " 20.3, 20.5, 17.3, 18.8, 21.4, 15.7, 16.2, 18. , 14.3, 19.2, 19.6,\n",
128 | " 23. , 18.4, 15.6, 18.1, 17.4, 17.1, 13.3, 17.8, 14. , 14.4, 13.4,\n",
129 | " 15.6, 11.8, 13.8, 15.6, 14.6, 17.8, 15.4, 21.5, 19.6, 15.3, 19.4,\n",
130 | " 17. , 15.6, 13.1, 41.3, 24.3, 23.3, 27. , 50. , 50. , 50. , 22.7,\n",
131 | " 25. , 50. , 23.8, 23.8, 22.3, 17.4, 19.1, 23.1, 23.6, 22.6, 29.4,\n",
132 | " 23.2, 24.6, 29.9, 37.2, 39.8, 36.2, 37.9, 32.5, 26.4, 29.6, 50. ,\n",
133 | " 32. , 29.8, 34.9, 37. , 30.5, 36.4, 31.1, 29.1, 50. , 33.3, 30.3,\n",
134 | " 34.6, 34.9, 32.9, 24.1, 42.3, 48.5, 50. , 22.6, 24.4, 22.5, 24.4,\n",
135 | " 20. , 21.7, 19.3, 22.4, 28.1, 23.7, 25. , 23.3, 28.7, 21.5, 23. ,\n",
136 | " 26.7, 21.7, 27.5, 30.1, 44.8, 50. , 37.6, 31.6, 46.7, 31.5, 24.3,\n",
137 | " 31.7, 41.7, 48.3, 29. , 24. , 25.1, 31.5, 23.7, 23.3, 22. , 20.1,\n",
138 | " 22.2, 23.7, 17.6, 18.5, 24.3, 20.5, 24.5, 26.2, 24.4, 24.8, 29.6,\n",
139 | " 42.8, 21.9, 20.9, 44. , 50. , 36. , 30.1, 33.8, 43.1, 48.8, 31. ,\n",
140 | " 36.5, 22.8, 30.7, 50. , 43.5, 20.7, 21.1, 25.2, 24.4, 35.2, 32.4,\n",
141 | " 32. , 33.2, 33.1, 29.1, 35.1, 45.4, 35.4, 46. , 50. , 32.2, 22. ,\n",
142 | " 20.1, 23.2, 22.3, 24.8, 28.5, 37.3, 27.9, 23.9, 21.7, 28.6, 27.1,\n",
143 | " 20.3, 22.5, 29. , 24.8, 22. , 26.4, 33.1, 36.1, 28.4, 33.4, 28.2,\n",
144 | " 22.8, 20.3, 16.1, 22.1, 19.4, 21.6, 23.8, 16.2, 17.8, 19.8, 23.1,\n",
145 | " 21. , 23.8, 23.1, 20.4, 18.5, 25. , 24.6, 23. , 22.2, 19.3, 22.6,\n",
146 | " 19.8, 17.1, 19.4, 22.2, 20.7, 21.1, 19.5, 18.5, 20.6, 19. , 18.7,\n",
147 | " 32.7, 16.5, 23.9, 31.2, 17.5, 17.2, 23.1, 24.5, 26.6, 22.9, 24.1,\n",
148 | " 18.6, 30.1, 18.2, 20.6, 17.8, 21.7, 22.7, 22.6, 25. , 19.9, 20.8,\n",
149 | " 16.8, 21.9, 27.5, 21.9, 23.1, 50. , 50. , 50. , 50. , 50. , 13.8,\n",
150 | " 13.8, 15. , 13.9, 13.3, 13.1, 10.2, 10.4, 10.9, 11.3, 12.3, 8.8,\n",
151 | " 7.2, 10.5, 7.4, 10.2, 11.5, 15.1, 23.2, 9.7, 13.8, 12.7, 13.1,\n",
152 | " 12.5, 8.5, 5. , 6.3, 5.6, 7.2, 12.1, 8.3, 8.5, 5. , 11.9,\n",
153 | " 27.9, 17.2, 27.5, 15. , 17.2, 17.9, 16.3, 7. , 7.2, 7.5, 10.4,\n",
154 | " 8.8, 8.4, 16.7, 14.2, 20.8, 13.4, 11.7, 8.3, 10.2, 10.9, 11. ,\n",
155 | " 9.5, 14.5, 14.1, 16.1, 14.3, 11.7, 13.4, 9.6, 8.7, 8.4, 12.8,\n",
156 | " 10.5, 17.1, 18.4, 15.4, 10.8, 11.8, 14.9, 12.6, 14.1, 13. , 13.4,\n",
157 | " 15.2, 16.1, 17.8, 14.9, 14.1, 12.7, 13.5, 14.9, 20. , 16.4, 17.7,\n",
158 | " 19.5, 20.2, 21.4, 19.9, 19. , 19.1, 19.1, 20.1, 19.9, 19.6, 23.2,\n",
159 | " 29.8, 13.8, 13.3, 16.7, 12. , 14.6, 21.4, 23. , 23.7, 25. , 21.8,\n",
160 | " 20.6, 21.2, 19.1, 20.6, 15.2, 7. , 8.1, 13.6, 20.1, 21.8, 24.5,\n",
161 | " 23.1, 19.7, 18.3, 21.2, 17.5, 16.8, 22.4, 20.6, 23.9, 22. , 11.9]), 'feature_names': array(['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD',\n",
162 | " 'TAX', 'PTRATIO', 'B', 'LSTAT'], dtype='\n",
208 | "\n",
221 | "\n",
222 | " \n",
223 | " \n",
224 | " \n",
225 | " CRIM \n",
226 | " ZN \n",
227 | " INDUS \n",
228 | " CHAS \n",
229 | " NOX \n",
230 | " RM \n",
231 | " AGE \n",
232 | " DIS \n",
233 | " RAD \n",
234 | " TAX \n",
235 | " PTRATIO \n",
236 | " B \n",
237 | " LSTAT \n",
238 | " \n",
239 | " \n",
240 | " \n",
241 | " \n",
242 | " count \n",
243 | " 506.000000 \n",
244 | " 506.000000 \n",
245 | " 506.000000 \n",
246 | " 506.000000 \n",
247 | " 506.000000 \n",
248 | " 506.000000 \n",
249 | " 506.000000 \n",
250 | " 506.000000 \n",
251 | " 506.000000 \n",
252 | " 506.000000 \n",
253 | " 506.000000 \n",
254 | " 506.000000 \n",
255 | " 506.000000 \n",
256 | " \n",
257 | " \n",
258 | " mean \n",
259 | " 3.613524 \n",
260 | " 11.363636 \n",
261 | " 11.136779 \n",
262 | " 0.069170 \n",
263 | " 0.554695 \n",
264 | " 6.284634 \n",
265 | " 68.574901 \n",
266 | " 3.795043 \n",
267 | " 9.549407 \n",
268 | " 408.237154 \n",
269 | " 18.455534 \n",
270 | " 356.674032 \n",
271 | " 12.653063 \n",
272 | " \n",
273 | " \n",
274 | " std \n",
275 | " 8.601545 \n",
276 | " 23.322453 \n",
277 | " 6.860353 \n",
278 | " 0.253994 \n",
279 | " 0.115878 \n",
280 | " 0.702617 \n",
281 | " 28.148861 \n",
282 | " 2.105710 \n",
283 | " 8.707259 \n",
284 | " 168.537116 \n",
285 | " 2.164946 \n",
286 | " 91.294864 \n",
287 | " 7.141062 \n",
288 | " \n",
289 | " \n",
290 | " min \n",
291 | " 0.006320 \n",
292 | " 0.000000 \n",
293 | " 0.460000 \n",
294 | " 0.000000 \n",
295 | " 0.385000 \n",
296 | " 3.561000 \n",
297 | " 2.900000 \n",
298 | " 1.129600 \n",
299 | " 1.000000 \n",
300 | " 187.000000 \n",
301 | " 12.600000 \n",
302 | " 0.320000 \n",
303 | " 1.730000 \n",
304 | " \n",
305 | " \n",
306 | " 25% \n",
307 | " 0.082045 \n",
308 | " 0.000000 \n",
309 | " 5.190000 \n",
310 | " 0.000000 \n",
311 | " 0.449000 \n",
312 | " 5.885500 \n",
313 | " 45.025000 \n",
314 | " 2.100175 \n",
315 | " 4.000000 \n",
316 | " 279.000000 \n",
317 | " 17.400000 \n",
318 | " 375.377500 \n",
319 | " 6.950000 \n",
320 | " \n",
321 | " \n",
322 | " 50% \n",
323 | " 0.256510 \n",
324 | " 0.000000 \n",
325 | " 9.690000 \n",
326 | " 0.000000 \n",
327 | " 0.538000 \n",
328 | " 6.208500 \n",
329 | " 77.500000 \n",
330 | " 3.207450 \n",
331 | " 5.000000 \n",
332 | " 330.000000 \n",
333 | " 19.050000 \n",
334 | " 391.440000 \n",
335 | " 11.360000 \n",
336 | " \n",
337 | " \n",
338 | " 75% \n",
339 | " 3.677083 \n",
340 | " 12.500000 \n",
341 | " 18.100000 \n",
342 | " 0.000000 \n",
343 | " 0.624000 \n",
344 | " 6.623500 \n",
345 | " 94.075000 \n",
346 | " 5.188425 \n",
347 | " 24.000000 \n",
348 | " 666.000000 \n",
349 | " 20.200000 \n",
350 | " 396.225000 \n",
351 | " 16.955000 \n",
352 | " \n",
353 | " \n",
354 | " max \n",
355 | " 88.976200 \n",
356 | " 100.000000 \n",
357 | " 27.740000 \n",
358 | " 1.000000 \n",
359 | " 0.871000 \n",
360 | " 8.780000 \n",
361 | " 100.000000 \n",
362 | " 12.126500 \n",
363 | " 24.000000 \n",
364 | " 711.000000 \n",
365 | " 22.000000 \n",
366 | " 396.900000 \n",
367 | " 37.970000 \n",
368 | " \n",
369 | " \n",
370 | "
\n",
371 | ""
372 | ],
373 | "text/plain": [
374 | " CRIM ZN INDUS ... PTRATIO B LSTAT\n",
375 | "count 506.000000 506.000000 506.000000 ... 506.000000 506.000000 506.000000\n",
376 | "mean 3.613524 11.363636 11.136779 ... 18.455534 356.674032 12.653063\n",
377 | "std 8.601545 23.322453 6.860353 ... 2.164946 91.294864 7.141062\n",
378 | "min 0.006320 0.000000 0.460000 ... 12.600000 0.320000 1.730000\n",
379 | "25% 0.082045 0.000000 5.190000 ... 17.400000 375.377500 6.950000\n",
380 | "50% 0.256510 0.000000 9.690000 ... 19.050000 391.440000 11.360000\n",
381 | "75% 3.677083 12.500000 18.100000 ... 20.200000 396.225000 16.955000\n",
382 | "max 88.976200 100.000000 27.740000 ... 22.000000 396.900000 37.970000\n",
383 | "\n",
384 | "[8 rows x 13 columns]"
385 | ]
386 | },
387 | "metadata": {
388 | "tags": []
389 | },
390 | "execution_count": 7
391 | }
392 | ]
393 | },
394 | {
395 | "cell_type": "code",
396 | "metadata": {
397 | "id": "HrZKX4J_wtgg",
398 | "colab_type": "code",
399 | "colab": {}
400 | },
401 | "source": [
402 | "#Initialize the linear regression model\n",
403 | "reg = linear_model.LinearRegression()"
404 | ],
405 | "execution_count": 0,
406 | "outputs": []
407 | },
408 | {
409 | "cell_type": "code",
410 | "metadata": {
411 | "id": "xWqM85e6w7G_",
412 | "colab_type": "code",
413 | "colab": {}
414 | },
415 | "source": [
416 | "#Split the data into 67% training and 33% testing data\n",
417 | "#NOTE: We have to split the dependent variables (x) and the target or independent variable (y)\n",
418 | "x_train, x_test, y_train, y_test = train_test_split(df_x, df_y, test_size=0.33, random_state=42)"
419 | ],
420 | "execution_count": 0,
421 | "outputs": []
422 | },
423 | {
424 | "cell_type": "code",
425 | "metadata": {
426 | "id": "36zhLfzKxthN",
427 | "colab_type": "code",
428 | "outputId": "10bd8f5f-9281-47e0-dec9-d1e2ca2035a3",
429 | "colab": {
430 | "base_uri": "https://localhost:8080/",
431 | "height": 34
432 | }
433 | },
434 | "source": [
435 | "#Train our model with the training data\n",
436 | "reg.fit(x_train, y_train)"
437 | ],
438 | "execution_count": 13,
439 | "outputs": [
440 | {
441 | "output_type": "execute_result",
442 | "data": {
443 | "text/plain": [
444 | "LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None, normalize=False)"
445 | ]
446 | },
447 | "metadata": {
448 | "tags": []
449 | },
450 | "execution_count": 13
451 | }
452 | ]
453 | },
454 | {
455 | "cell_type": "code",
456 | "metadata": {
457 | "id": "s0vgePv-x5jC",
458 | "colab_type": "code",
459 | "outputId": "e5e26f9a-a99e-4e00-dc85-282b5ed35a73",
460 | "colab": {
461 | "base_uri": "https://localhost:8080/",
462 | "height": 85
463 | }
464 | },
465 | "source": [
466 | "#Print the coefecients/weights for each feature/column of our model\n",
467 | "print(reg.coef_)"
468 | ],
469 | "execution_count": 14,
470 | "outputs": [
471 | {
472 | "output_type": "stream",
473 | "text": [
474 | "[[-1.28749718e-01 3.78232228e-02 5.82109233e-02 3.23866812e+00\n",
475 | " -1.61698120e+01 3.90205116e+00 -1.28507825e-02 -1.42222430e+00\n",
476 | " 2.34853915e-01 -8.21331947e-03 -9.28722459e-01 1.17695921e-02\n",
477 | " -5.47566338e-01]]\n"
478 | ],
479 | "name": "stdout"
480 | }
481 | ]
482 | },
483 | {
484 | "cell_type": "code",
485 | "metadata": {
486 | "id": "A8VH_VYsyHF0",
487 | "colab_type": "code",
488 | "colab": {
489 | "base_uri": "https://localhost:8080/",
490 | "height": 2856
491 | },
492 | "outputId": "4a0fd61f-3512-48cc-cecd-1fba768881b9"
493 | },
494 | "source": [
495 | "#print our price predictions on our test data\n",
496 | "y_pred = reg.predict(x_test)\n",
497 | "print(y_pred)"
498 | ],
499 | "execution_count": 15,
500 | "outputs": [
501 | {
502 | "output_type": "stream",
503 | "text": [
504 | "[[28.53469469]\n",
505 | " [36.6187006 ]\n",
506 | " [15.63751079]\n",
507 | " [25.5014496 ]\n",
508 | " [18.7096734 ]\n",
509 | " [23.16471591]\n",
510 | " [17.31011035]\n",
511 | " [14.07736367]\n",
512 | " [23.01064388]\n",
513 | " [20.54223482]\n",
514 | " [24.91632351]\n",
515 | " [18.41098052]\n",
516 | " [-6.52079687]\n",
517 | " [21.83372604]\n",
518 | " [19.14903064]\n",
519 | " [26.0587322 ]\n",
520 | " [20.30232625]\n",
521 | " [ 5.74943567]\n",
522 | " [40.33137811]\n",
523 | " [17.45791446]\n",
524 | " [27.47486665]\n",
525 | " [30.2170757 ]\n",
526 | " [10.80555625]\n",
527 | " [23.87721728]\n",
528 | " [17.99492211]\n",
529 | " [16.02608791]\n",
530 | " [23.268288 ]\n",
531 | " [14.36825207]\n",
532 | " [22.38116971]\n",
533 | " [19.3092068 ]\n",
534 | " [22.17284576]\n",
535 | " [25.05925441]\n",
536 | " [25.13780726]\n",
537 | " [18.46730198]\n",
538 | " [16.60405712]\n",
539 | " [17.46564046]\n",
540 | " [30.71367733]\n",
541 | " [20.05106788]\n",
542 | " [23.9897768 ]\n",
543 | " [24.94322408]\n",
544 | " [13.97945355]\n",
545 | " [31.64706967]\n",
546 | " [42.48057206]\n",
547 | " [17.70042814]\n",
548 | " [26.92507869]\n",
549 | " [17.15897719]\n",
550 | " [13.68918087]\n",
551 | " [26.14924245]\n",
552 | " [20.2782306 ]\n",
553 | " [29.99003492]\n",
554 | " [21.21260347]\n",
555 | " [34.03649185]\n",
556 | " [15.41837553]\n",
557 | " [25.95781061]\n",
558 | " [39.13897274]\n",
559 | " [22.96118424]\n",
560 | " [18.80310558]\n",
561 | " [33.07865362]\n",
562 | " [24.74384155]\n",
563 | " [12.83640958]\n",
564 | " [22.41963398]\n",
565 | " [30.64804979]\n",
566 | " [31.59567111]\n",
567 | " [16.34088197]\n",
568 | " [20.9504304 ]\n",
569 | " [16.70145875]\n",
570 | " [20.23215646]\n",
571 | " [26.1437865 ]\n",
572 | " [31.12160889]\n",
573 | " [11.89762768]\n",
574 | " [20.45432404]\n",
575 | " [27.48356359]\n",
576 | " [10.89034224]\n",
577 | " [16.77707214]\n",
578 | " [24.02593714]\n",
579 | " [ 5.44691807]\n",
580 | " [21.35152331]\n",
581 | " [41.27267175]\n",
582 | " [18.13447647]\n",
583 | " [ 9.8012101 ]\n",
584 | " [21.24024342]\n",
585 | " [13.02644969]\n",
586 | " [21.80198374]\n",
587 | " [ 9.48201752]\n",
588 | " [22.99183857]\n",
589 | " [31.90465631]\n",
590 | " [18.95594718]\n",
591 | " [25.48515032]\n",
592 | " [29.49687019]\n",
593 | " [20.07282539]\n",
594 | " [25.5616062 ]\n",
595 | " [ 5.59584382]\n",
596 | " [20.18410904]\n",
597 | " [15.08773299]\n",
598 | " [14.34562117]\n",
599 | " [20.85155407]\n",
600 | " [24.80149389]\n",
601 | " [-0.19785401]\n",
602 | " [13.57649004]\n",
603 | " [15.64401679]\n",
604 | " [22.03765773]\n",
605 | " [24.70314482]\n",
606 | " [10.86409112]\n",
607 | " [19.60231067]\n",
608 | " [23.73429161]\n",
609 | " [12.08082177]\n",
610 | " [18.40997903]\n",
611 | " [25.4366158 ]\n",
612 | " [20.76506636]\n",
613 | " [24.68588237]\n",
614 | " [ 7.4995836 ]\n",
615 | " [18.93015665]\n",
616 | " [21.70801764]\n",
617 | " [27.14350579]\n",
618 | " [31.93765208]\n",
619 | " [15.19483586]\n",
620 | " [34.01357428]\n",
621 | " [12.85763091]\n",
622 | " [21.06646184]\n",
623 | " [28.58470042]\n",
624 | " [15.77437534]\n",
625 | " [24.77512495]\n",
626 | " [ 3.64655689]\n",
627 | " [23.91169589]\n",
628 | " [25.82292925]\n",
629 | " [23.03339677]\n",
630 | " [25.35158335]\n",
631 | " [33.05655447]\n",
632 | " [20.65930467]\n",
633 | " [38.18917361]\n",
634 | " [14.04714297]\n",
635 | " [25.26034469]\n",
636 | " [17.6138723 ]\n",
637 | " [20.60883766]\n",
638 | " [ 9.8525544 ]\n",
639 | " [21.06756951]\n",
640 | " [22.20145587]\n",
641 | " [32.2920276 ]\n",
642 | " [31.57638342]\n",
643 | " [15.29265938]\n",
644 | " [16.7100235 ]\n",
645 | " [29.10550932]\n",
646 | " [25.17762329]\n",
647 | " [16.88159225]\n",
648 | " [ 6.32621877]\n",
649 | " [26.70210263]\n",
650 | " [23.3525851 ]\n",
651 | " [17.24168182]\n",
652 | " [13.22815696]\n",
653 | " [39.49907507]\n",
654 | " [16.53528575]\n",
655 | " [18.14635902]\n",
656 | " [25.06620426]\n",
657 | " [23.70640231]\n",
658 | " [22.20167772]\n",
659 | " [21.22272327]\n",
660 | " [16.89825921]\n",
661 | " [23.15518273]\n",
662 | " [28.69699805]\n",
663 | " [ 6.65526482]\n",
664 | " [23.98399958]\n",
665 | " [17.21004545]\n",
666 | " [21.0574427 ]\n",
667 | " [25.01734597]\n",
668 | " [27.65461859]\n",
669 | " [20.70205823]\n",
670 | " [40.38214871]]\n"
671 | ],
672 | "name": "stdout"
673 | }
674 | ]
675 | },
676 | {
677 | "cell_type": "code",
678 | "metadata": {
679 | "id": "7DjKafwryYcP",
680 | "colab_type": "code",
681 | "outputId": "cd7323b7-0d9b-4439-b654-628c47dbe6c8",
682 | "colab": {
683 | "base_uri": "https://localhost:8080/",
684 | "height": 34
685 | }
686 | },
687 | "source": [
688 | "#Print the the prediction for the third row of our test data actual price = 13.6\n",
689 | "y_pred[2]"
690 | ],
691 | "execution_count": 0,
692 | "outputs": [
693 | {
694 | "output_type": "execute_result",
695 | "data": {
696 | "text/plain": [
697 | "array([15.63751079])"
698 | ]
699 | },
700 | "metadata": {
701 | "tags": []
702 | },
703 | "execution_count": 109
704 | }
705 | ]
706 | },
707 | {
708 | "cell_type": "code",
709 | "metadata": {
710 | "id": "PO_z7lVwydrh",
711 | "colab_type": "code",
712 | "outputId": "dad60000-ede2-40ee-949e-ee13d195f4bf",
713 | "colab": {
714 | "base_uri": "https://localhost:8080/",
715 | "height": 1071
716 | }
717 | },
718 | "source": [
719 | "#print the actual price of houses from the testing data set\n",
720 | "y_test[0]"
721 | ],
722 | "execution_count": 0,
723 | "outputs": [
724 | {
725 | "output_type": "execute_result",
726 | "data": {
727 | "text/plain": [
728 | "173 23.6\n",
729 | "274 32.4\n",
730 | "491 13.6\n",
731 | "72 22.8\n",
732 | "452 16.1\n",
733 | "76 20.0\n",
734 | "316 17.8\n",
735 | "140 14.0\n",
736 | "471 19.6\n",
737 | "500 16.8\n",
738 | "218 21.5\n",
739 | "9 18.9\n",
740 | "414 7.0\n",
741 | "78 21.2\n",
742 | "323 18.5\n",
743 | "473 29.8\n",
744 | "124 18.8\n",
745 | "388 10.2\n",
746 | "195 50.0\n",
747 | "448 14.1\n",
748 | "271 25.2\n",
749 | "278 29.1\n",
750 | "30 12.7\n",
751 | "501 22.4\n",
752 | "421 14.2\n",
753 | "474 13.8\n",
754 | "79 20.3\n",
755 | "454 14.9\n",
756 | "210 21.7\n",
757 | "497 18.3\n",
758 | " ... \n",
759 | "57 31.6\n",
760 | "194 29.1\n",
761 | "24 15.6\n",
762 | "17 17.5\n",
763 | "298 22.5\n",
764 | "66 19.4\n",
765 | "211 19.3\n",
766 | "404 8.5\n",
767 | "94 20.6\n",
768 | "154 17.0\n",
769 | "441 17.1\n",
770 | "23 14.5\n",
771 | "225 50.0\n",
772 | "433 14.3\n",
773 | "447 12.6\n",
774 | "5 28.7\n",
775 | "116 21.2\n",
776 | "45 19.3\n",
777 | "16 23.1\n",
778 | "468 19.1\n",
779 | "360 25.0\n",
780 | "3 33.4\n",
781 | "405 5.0\n",
782 | "185 29.6\n",
783 | "60 18.7\n",
784 | "110 21.7\n",
785 | "321 23.1\n",
786 | "265 22.8\n",
787 | "29 21.0\n",
788 | "262 48.8\n",
789 | "Name: 0, Length: 167, dtype: float64"
790 | ]
791 | },
792 | "metadata": {
793 | "tags": []
794 | },
795 | "execution_count": 108
796 | }
797 | ]
798 | },
799 | {
800 | "cell_type": "code",
801 | "metadata": {
802 | "id": "MA-oMX41y7sB",
803 | "colab_type": "code",
804 | "outputId": "606dcf97-a454-4dc9-f729-28ccc421a1fb",
805 | "colab": {
806 | "base_uri": "https://localhost:8080/",
807 | "height": 68
808 | }
809 | },
810 | "source": [
811 | "# Two different ways to check model performance/accuracy using,\n",
812 | "# mean squared error which tells you how close a regression line is to a set of points.\n",
813 | "\n",
814 | "# 1. Mean squared error by numpy\n",
815 | "print(np.mean((y_pred-y_test)**2))\n",
816 | "\n",
817 | "# 2. Mean squared error by sklearn \n",
818 | "# Resource: https://stackoverflow.com/questions/42453875/precision-score-and-accuracy-score-showing-value-error?rq=1\n",
819 | "from sklearn.metrics import mean_squared_error\n",
820 | "print(mean_squared_error(y_test, y_pred))"
821 | ],
822 | "execution_count": 17,
823 | "outputs": [
824 | {
825 | "output_type": "stream",
826 | "text": [
827 | "0 20.724023\n",
828 | "dtype: float64\n",
829 | "20.724023437339717\n"
830 | ],
831 | "name": "stdout"
832 | }
833 | ]
834 | }
835 | ]
836 | }
837 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Python
2 | This is a repository that holds my Python programs
3 |
4 |
5 |
6 | To see me programming in Python, check out the YouTube channel: [Go To YouTube Channel](https://www.youtube.com/channel/UCbmb5IoBtHZTpYZCDBOC1CA)
7 |
8 |
9 |
10 | ## Python Programs / Data Cleaning Videos
11 | Program Name | Algorithm Name| Link to Program | Blog | YouTube
12 | --- | --- | --- | --- | ---
13 | concatenate_file.py | Concatenate Multiple CSV files | [Program](https://github.com/randerson112358/Python/blob/master/concatenate_file.py) | [Blog](http://everythingcomputerscience.com/) | [YouTubeX](https://www.youtube.com/channel/UCbmb5IoBtHZTpYZCDBOC1CA)
14 | remove_empty_row.py | Removes Empty Rows | [Program](https://github.com/randerson112358/Python/blob/master/remove_empty_row.py) | [Blog](http://everythingcomputerscience.com/) | [YouTubeX](https://www.youtube.com/channel/UCbmb5IoBtHZTpYZCDBOC1CA)
15 | replace_strings_with_numbers.py | Changes Strings in CSV to Numbers | [Program](https://github.com/randerson112358/Python/blob/master/Replace_Strings_With_Numbers/replace_strings_with_numbers.py) | [Blog](http://everythingcomputerscience.com/) | [YouTube](https://youtu.be/zv_fzW2iA_U)
16 | ## Web Scraping
17 | Program Name | Algorithm Name| Link to Program | Blog | YouTube
18 | --- | --- | --- | --- | ---
19 | scrape.py | Scrape Website Links | [Program](https://github.com/randerson112358/Python/blob/master/scrape.py) | [Blog](https://medium.com/@randerson112358/scrape-website-using-python-90619cac7c97) | [YouTube](https://youtu.be/LGZEn1OYUTk)
20 |
21 | ## Machine Learning Programs
22 | Project Name |Program Name | Algorithm Name| Link to Program | Blog | YouTube
23 | --- | --- | --- | --- | --- | ---
24 | Sentiment Analysis |sentiment.py | Sentiment Analysis | [Program](https://github.com/randerson112358/Python/blob/master/sentiment.py) | [Blog](https://medium.com/@randerson112358/sentiment-analysis-e2e4442bac13) | [YouTube](https://youtu.be/1VHhDSOwJPw)
25 | Simple Linear Regression Ex| LinearRegression.py | Linear Regression | [Program](https://github.com/randerson112358/Python/blob/master/LinearRegression.py) | [Blog](https://medium.com/@randerson112358/a-simple-machine-learning-python-program-bf5d156d2cda) | [YouTube](https://youtu.be/z7jEJY8FbA8)
26 | Car Classification |decisionTree.py | Decision Tree | [Program](https://github.com/randerson112358/Python/blob/master/DecisionTree/decisionTree.py) | [Blog](https://medium.com/@randerson112358/car-classification-89ad60204acf) | [YouTube](https://youtu.be/U-Jm8ugN0Ps)
27 | Golf Predictions |Golf_Predictions.ipynb | Decision Tree | [Program](https://github.com/randerson112358/Python/blob/master/Golf_Predictions.ipynb) | [Blog](https://medium.com/@randerson112358/python-decision-tree-classifier-example-d73bc3aeca6) | [YouTube](https://youtu.be/bT-43kgYI3o)
28 | Predict Boston House Price|Predict_Boston_Housing_Price.ipynb | Linear Regression | [Program](https://github.com/randerson112358/Python/blob/master/Predict_Boston_Housing_Price.ipynb) | [Blog](https://medium.com/@randerson112358/predict-boston-house-prices-using-python-linear-regression-90469e0a341) | [YouTube](https://youtu.be/gOXoFDrseis)
29 | Predict Stock Price|stock.ipynb | Linear Regression & SVR | [Program](https://github.com/randerson112358/Python/blob/master/stock.ipynb) | [Blog](https://medium.com/@randerson112358/predict-stock-prices-using-python-machine-learning-53aa024da20a) | [YouTube](https://youtu.be/EYnC4ACIt2g)
30 | Classify Iris Species|Logistic_Regression.ipynb | Logistic Regression | [Program](https://github.com/randerson112358/Python/blob/master/Logistic_Regression.ipynb) | [Blog](https://medium.com/@randerson112358/python-logistic-regression-program-5e1b32f964db) | [YouTube](https://youtu.be/ACdBKML9l4s)
31 | Predict Median House Price|Neural_Networks.ipynb | Deep Neural Networks | [Program](https://github.com/randerson112358/Python/blob/master/Neural_Networks/Neural_Networks.ipynb) | [Blog](https://medium.com/@randerson112358/predict-house-median-prices-5f1a768dd256?postPublishedType=repub) | [YouTube](https://youtu.be/vSzou5zRwNQ)
32 | Classify Handwritten Digits|MNIST_ANN.ipynb | Artificial Neural Networks | [Program](https://github.com/randerson112358/Python/blob/master/MNIST_ANN.ipynb) | [Blog](https://medium.com/@randerson112358/classify-hand-written-digits-5fdbe5d99ee7) | [YouTube](https://youtu.be/kOFUQB7u5Ck)
33 | Cluster NBA Basketball Players|Basketball_Data_Exploration.ipynb | KMeans | [Program](https://github.com/randerson112358/Python/blob/master/NBA_Basketball_Exploration/Basketball_Data_Exploration.ipynb) | [Blog](https://medium.com/@randerson112358/nba-data-analysis-exploration-9293f311e0e8) | [YouTube](https://youtu.be/2Pmf6Kqak3w)
34 | Predict FB Stock Price|SVM.ipynb | Support Vector Regression (SVR) | [Program](https://github.com/randerson112358/Python/blob/master/SVM_Stock/SVM.ipynb) | [Blog](https://medium.com/@randerson112358/facebook-stock-prediction-bcfc676bc611) | [YouTube](https://youtu.be/tMPfZV_ipOg)
35 | Breast Cancer Detection|Breast_Cancer_Detection.ipynb | Random Forest Classifier & Gaussian Naive Bayes & Logistic Regression & Decision Tree Classifier & SVC | [Program](https://github.com/randerson112358/Python/blob/master/breast_cancer_detection/Breast_Cancer_Detection.ipynb) | [Blog](https://medium.com/@randerson112358/breast-cancer-detection-using-machine-learning-38820fe98982) | [YouTube](https://youtu.be/NSSOyhJBmWY)
36 |
37 |
38 | # Relevant Books On Amazon
39 | * [Learning Python, 5th Edition](https://www.amazon.com/gp/product/1449355730/ref=as_li_tl?ie=UTF8&tag=github01d-20&camp=1789&creative=9325&linkCode=as2&creativeASIN=1449355730&linkId=95e6eaf8c12b9fcd483dd06c1dd53e48)
40 | * [Hands-On Machine Learning with Scikit-Learn and TensorFlow: Concepts, Tools, and Techniques to Build Intelligent Systems](https://www.amazon.com/gp/product/1491962291/ref=as_li_tl?ie=UTF8&tag=github01d-20&camp=1789&creative=9325&linkCode=as2&creativeASIN=1491962291&linkId=9dec6584d63a7cfcbc32af1ff9737bbf)
41 | * [Python Data Science Handbook: Essential Tools for Working with Data](https://www.amazon.com/gp/product/1491912057/ref=as_li_tl?ie=UTF8&tag=github01d-20&camp=1789&creative=9325&linkCode=as2&creativeASIN=1491912057&linkId=af650651a6d71fdea49cd5aa95653e1c)
42 | * [Introduction to Machine Learning with Python: A Guide for Data Scientists](https://www.amazon.com/gp/product/1449369413/ref=as_li_tl?ie=UTF8&tag=github01d-20&camp=1789&creative=9325&linkCode=as2&creativeASIN=1449369413&linkId=7b6ad9375121575c83af505f2a3ed6f3)
43 |
44 |
--------------------------------------------------------------------------------
/Replace_Strings_With_Numbers/replace_strings_with_numbers.py:
--------------------------------------------------------------------------------
1 | #import pandas library
2 | import pandas as pd
3 |
4 | #Create a file handler for our csv file in read mode
5 | file_handler = open("car.csv", "r")
6 |
7 | #Create a Pandas DataFrame using read_csv function that reads from a csv file
8 | data = pd.read_csv(file_handler, sep=",")
9 |
10 | #close file_handler
11 | file_handler.close()
12 |
13 | # Replace the values in the buying column of the dataframe where the conditions match
14 | # buying values: low, med, high, vhigh
15 |
16 | data.loc[ data.buying == 'low', 'buying'] = 1 # use .loc to avoid pandas' chained-assignment warning
17 | data.loc[ data.buying == 'med', 'buying'] = 2
18 | data.loc[ data.buying == 'high', 'buying'] = 3
19 | data.loc[ data.buying == 'vhigh', 'buying'] = 4
20 |
21 | #write the dataframe to a csv file
22 | data.to_csv("car_example.csv")
23 |
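24 | #NOTE: an equivalent, vectorized sketch of the same mapping using pandas' Series.map:
25 | #data['buying'] = data['buying'].map({'low': 1, 'med': 2, 'high': 3, 'vhigh': 4})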
--------------------------------------------------------------------------------
/SVM_Stock/FB_30_days.csv:
--------------------------------------------------------------------------------
1 | Date,Open,High,Low,Close,Adj Close,Volume
2 | 2019-05-01,194.779999,196.179993,193.009995,193.029999,193.029999,15996600
3 | 2019-05-02,193.000000,194.000000,189.750000,192.529999,192.529999,13209500
4 | 2019-05-03,194.380005,196.160004,193.710007,195.470001,195.470001,14575400
5 | 2019-05-06,191.240005,194.279999,190.550003,193.880005,193.880005,13994900
6 | 2019-05-07,192.539993,192.899994,187.850006,189.770004,189.770004,16253000
7 | 2019-05-08,189.389999,190.720001,188.550003,189.539993,189.539993,12505700
8 | 2019-05-09,187.199997,189.770004,186.259995,188.649994,188.649994,12967000
9 | 2019-05-10,188.250000,190.000000,184.589996,188.339996,188.339996,12578500
10 | 2019-05-13,183.500000,185.429993,180.839996,181.539993,181.539993,16833300
11 | 2019-05-14,182.520004,183.490005,178.100006,180.729996,180.729996,17628100
12 | 2019-05-15,180.419998,187.279999,180.020004,186.270004,186.270004,16746900
13 | 2019-05-16,185.050003,188.580002,185.050003,186.990005,186.990005,12953100
14 | 2019-05-17,184.839996,187.580002,184.279999,185.300003,185.300003,10485400
15 | 2019-05-20,181.880005,184.229996,181.369995,182.720001,182.720001,10352000
16 | 2019-05-21,184.570007,185.699997,183.889999,184.820007,184.820007,7502800
17 | 2019-05-22,184.729996,186.740005,183.610001,185.320007,185.320007,9213800
18 | 2019-05-23,182.419998,183.899994,179.669998,180.869995,180.869995,12768800
19 | 2019-05-24,182.330002,183.630005,180.830002,181.059998,181.059998,8807700
20 | 2019-05-28,181.539993,184.710007,181.449997,184.309998,184.309998,14843300
21 | 2019-05-29,183.500000,184.559998,181.350006,182.190002,182.190002,12797700
22 | 2019-05-30,183.080002,183.479996,180.889999,183.009995,183.009995,8581500
23 | 2019-05-31,180.279999,180.539993,177.160004,177.470001,177.470001,15226500
24 |
--------------------------------------------------------------------------------
/SVM_Stock/svm.py:
--------------------------------------------------------------------------------
1 |
2 | #Description: This program predicts the price of FB stock for a specific day
3 | # using the Machine Learning algorithm called
4 | # Support Vector Regression (SVR) Model
5 |
6 | #import the packages
7 | import pandas as pd
8 | import numpy as np
9 | from sklearn.svm import SVR
10 | import matplotlib.pyplot as plt
11 |
12 | #Load the data
13 | #from google.colab import files # Use to load data on Google Colab
14 | #uploaded = files.upload() # Use to load data on Google Colab
15 | df = pd.read_csv('FB_30_days.csv')
16 | df.head(7)
17 |
18 | #Create the lists / X and y data set
19 | dates = []
20 | prices = []
21 |
22 | #Get the number of rows and columns in the data set
23 | df.shape
24 |
25 | #Print the last row of data (this will be the row that we test on)
26 | df.tail(1)
27 |
28 | #Get all of the data except for the last row
29 | df = df.head(len(df)-1)
30 | print(df.shape)
31 |
32 | df_dates = df.loc[:,'Date'] # Get all of the rows from the Date column
33 | df_open = df.loc[:,'Open'] #Get all of the rows from the Open column
34 |
35 | #Create the independent data set 'X' as dates
36 | for date in df_dates:
37 | dates.append( [int(date.split('-')[2])] )
38 |
39 | #Create the dependent data set 'y' as prices
40 | for open_price in df_open:
41 | prices.append(float(open_price))
42 |
43 | #See what days were recorded in the data set
44 | print(dates)
45 |
46 | #Function to make predictions using 3 different support vector regression models with 3 different kernels
47 | def predict_prices(dates, prices, x):
48 |
49 | #Create 3 Support Vector Regression Models
50 | svr_lin = SVR(kernel='linear', C=1e3)
51 | svr_poly = SVR(kernel='poly', C=1e3, degree=2)
52 | svr_rbf = SVR(kernel='rbf', C=1e3, gamma=0.1)
53 |
54 | #Train the models on the dates and prices
55 | svr_lin.fit(dates,prices)
56 | svr_poly.fit(dates, prices)
57 | svr_rbf.fit(dates, prices)
58 |
59 | #Plot the models on a graph to see which has the best fit
60 | plt.scatter(dates, prices, color = 'black', label='Data')
61 | plt.plot(dates, svr_rbf.predict(dates), color = 'red', label='RBF model')
62 | plt.plot(dates, svr_lin.predict(dates), color = 'green', label='Linear model')
63 | plt.plot(dates, svr_poly.predict(dates), color = 'blue', label='Polynomial model')
64 | plt.xlabel('Date')
65 | plt.ylabel('Price')
66 | plt.title('Support Vector Regression')
67 | plt.legend()
68 | plt.show()
69 |
70 | #return all three model predictions
71 | return svr_rbf.predict(x)[0], svr_lin.predict(x)[0], svr_poly.predict(x)[0]
72 |
73 | #Predict the price of FB on day 31
74 | predicted_price = predict_prices(dates, prices, [[31]])
75 | print(predicted_price)
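76 |
77 | #NOTE: C=1e3, degree=2 and gamma=0.1 above are illustrative choices; in practice the
78 | #hyperparameters are usually tuned, e.g. with a grid search (a sketch, assuming scikit-learn):
79 | #from sklearn.model_selection import GridSearchCV
80 | #grid = GridSearchCV(SVR(kernel='rbf'), {'C': [1, 10, 100, 1000], 'gamma': [0.01, 0.1, 1]})
81 | #grid.fit(dates, prices)
82 | #print(grid.best_params_)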
76 |
--------------------------------------------------------------------------------
/breast_cancer_detection/breast_cancer_detection.py:
--------------------------------------------------------------------------------
1 | #Description: This program detects breast cancer based on data. Breast Cancer (BC) is a common cancer for women around the world.
2 | #             Early detection of BC can greatly improve prognosis and survival chances by promoting timely clinical treatment.
3 |
4 |
5 | # Resources: (1)https://github.com/mwaskom/seaborn/issues/917
6 | # (2)https://seaborn.pydata.org/tutorial/axis_grids.html
7 | # (3)https://seaborn.pydata.org/generated/seaborn.pairplot.html
8 | # (4)https://seaborn.pydata.org/generated/seaborn.heatmap.html
9 | # (5)https://towardsdatascience.com/building-a-simple-machine-learning-model-on-breast-cancer-data-eca4b3b99fa3
10 | # (6)Original Data Set: http://archive.ics.uci.edu/ml/datasets/breast+cancer+wisconsin+%28diagnostic%29 (has been updated)
11 | # (7)Confusion Matrix: https://tatwan.github.io/How-To-Plot-A-Confusion-Matrix-In-Python/
12 | # (8)Data Set I used: https://www.kaggle.com/uciml/breast-cancer-wisconsin-data
13 |
14 | #import libraries
15 | import numpy as np
16 | import pandas as pd
17 | import matplotlib.pyplot as plt
18 | import seaborn as sns
19 |
20 | #Load the data
21 | from google.colab import files # Use to load data on Google Colab
22 | uploaded = files.upload() # Use to load data on Google Colab
23 | df = pd.read_csv('data.csv')
24 | df.head(7)
25 |
26 | #Count the number of rows and columns in the data set
27 | df.shape
28 |
29 | #Count the empty (NaN, NAN, na) values in each column
30 | df.isna().sum()
31 |
32 | #Drop the column with all missing values (na, NAN, NaN)
33 | #NOTE: This drops the column Unnamed
34 | df = df.dropna(axis=1)
35 |
36 | #Get the new count of the number of rows and cols
37 | df.shape
38 |
39 | #Get a count of the number of Malignant (M, harmful) and Benign (B, not harmful) cells
40 | df['diagnosis'].value_counts()
41 |
42 | #Visualize this count
43 | sns.countplot(df['diagnosis'],label="Count")
44 |
45 | #Look at the data types to see which columns need to be transformed / encoded to a number
46 | df.dtypes
47 |
48 | #Transform/ Encode the column diagnosis
49 | #dictionary = {'M':1, 'B':0}#Create a dictionary file
50 | #df.diagnosis = [dictionary[item] for item in df.diagnosis] #Change all 'M' to 1 and all 'B' to 0 in the diagnosis col
51 |
52 | #Encoding categorical data values (Transforming categorical data/ Strings to integers)
53 | from sklearn.preprocessing import LabelEncoder
54 | labelencoder_Y = LabelEncoder()
55 | df.iloc[:,1]= labelencoder_Y.fit_transform(df.iloc[:,1].values)
56 | print(labelencoder_Y.fit_transform(df.iloc[:,1].values))
57 |
58 | #A “pairs plot” (scatterplot matrix) plots every variable against every other variable, one scatterplot per pair of columns
59 | sns.pairplot(df, hue="diagnosis")
60 | #sns.pairplot(df.iloc[:,1:6], hue="diagnosis") #plot a sample of the columns
61 |
62 | #Print the first 5 rows of the new data set
63 | df.head(5)
64 |
65 | #Get the correlation of the columns
66 | df.corr()
67 | #df.iloc[:,1:12].corr() #Get a sample of correlated column info
68 |
69 | #Visualize the correlation
70 | #NOTE: To see the numbers within the cell ==> sns.heatmap(df.corr(), annot=True)
71 | plt.figure(figsize=(20,20)) #This is used to change the size of the figure/ heatmap
72 | sns.heatmap(df.corr(), annot=True, fmt='.0%')
73 | #plt.figure(figsize=(10,10)) #This is used to change the size of the figure/ heatmap
74 | #sns.heatmap(df.iloc[:,1:12].corr(), annot=True, fmt='.0%') #Get a heap map of 11 columns, index 1-11, note index 0 is just the id column and is left out.
75 |
76 | #Split the data into independent 'X' and dependent 'Y' variables
77 | X = df.iloc[:, 2:32].values #Take the columns at index 2 through 31, i.e. all 30 features, removing the id & diagnosis columns
78 | Y = df.iloc[:, 1].values #Get the target variable 'diagnosis' located at index=1
79 |
80 | # Split the dataset into 75% Training set and 25% Testing set
81 | from sklearn.model_selection import train_test_split
82 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.25, random_state = 0)
83 |
84 | # Scale the data to bring all features to the same level of magnitude
85 | # StandardScaler standardizes each feature to mean 0 and unit variance (z = (x - mean) / std); it does not bound values to a fixed range
86 |
87 | #Feature Scaling
88 | from sklearn.preprocessing import StandardScaler
89 | sc = StandardScaler()
90 | X_train = sc.fit_transform(X_train)
91 | X_test = sc.transform(X_test)
92 |
93 | #Create a function within many Machine Learning Models
94 | def models(X_train,Y_train):
95 |
96 | #Using Logistic Regression Algorithm to the Training Set
97 | from sklearn.linear_model import LogisticRegression
98 | log = LogisticRegression(random_state = 0)
99 | log.fit(X_train, Y_train)
100 |
101 | #Using KNeighborsClassifier Method of neighbors class to use Nearest Neighbor algorithm
102 | from sklearn.neighbors import KNeighborsClassifier
103 | knn = KNeighborsClassifier(n_neighbors = 5, metric = 'minkowski', p = 2)
104 | knn.fit(X_train, Y_train)
105 |
106 | #Using SVC method of svm class to use Support Vector Machine Algorithm
107 | from sklearn.svm import SVC
108 | svc_lin = SVC(kernel = 'linear', random_state = 0)
109 | svc_lin.fit(X_train, Y_train)
110 |
111 | #Using SVC method of svm class to use Kernel SVM Algorithm
112 | from sklearn.svm import SVC
113 | svc_rbf = SVC(kernel = 'rbf', random_state = 0)
114 | svc_rbf.fit(X_train, Y_train)
115 |
116 | #Using GaussianNB method of the naive_bayes class to use the Naive Bayes Algorithm
117 | from sklearn.naive_bayes import GaussianNB
118 | gauss = GaussianNB()
119 | gauss.fit(X_train, Y_train)
120 |
121 | #Using DecisionTreeClassifier of tree class to use Decision Tree Algorithm
122 | from sklearn.tree import DecisionTreeClassifier
123 | tree = DecisionTreeClassifier(criterion = 'entropy', random_state = 0)
124 | tree.fit(X_train, Y_train)
125 |
126 | #Using RandomForestClassifier method of ensemble class to use Random Forest Classification algorithm
127 | from sklearn.ensemble import RandomForestClassifier
128 | forest = RandomForestClassifier(n_estimators = 10, criterion = 'entropy', random_state = 0)
129 | forest.fit(X_train, Y_train)
130 |
131 | #print model accuracy on the training data.
132 | print('[0]Logistic Regression Training Accuracy:', log.score(X_train, Y_train))
133 | print('[1]K Nearest Neighbor Training Accuracy:', knn.score(X_train, Y_train))
134 | print('[2]Support Vector Machine (Linear Classifier) Training Accuracy:', svc_lin.score(X_train, Y_train))
135 | print('[3]Support Vector Machine (RBF Classifier) Training Accuracy:', svc_rbf.score(X_train, Y_train))
136 | print('[4]Gaussian Naive Bayes Training Accuracy:', gauss.score(X_train, Y_train))
137 | print('[5]Decision Tree Classifier Training Accuracy:', tree.score(X_train, Y_train))
138 | print('[6]Random Forest Classifier Training Accuracy:', forest.score(X_train, Y_train))
139 |
140 | return log, knn, svc_lin, svc_rbf, gauss, tree, forest
141 |
142 | model = models(X_train,Y_train)
143 |
144 | #Show the confusion matrix and accuracy for all of the models on the test data
145 | #Classification accuracy is the ratio of correct predictions to total predictions made.
146 | from sklearn.metrics import confusion_matrix
147 | for i in range(len(model)):
148 | cm = confusion_matrix(Y_test, model[i].predict(X_test))
149 |
150 | TN = cm[0][0] # true negatives: benign correctly classified
151 | FP = cm[0][1] # false positives: benign classified as malignant
152 | FN = cm[1][0] # false negatives: malignant classified as benign
153 | TP = cm[1][1] # true positives: malignant correctly classified
154 |
155 | print(cm)
156 | print('Model[{}] Testing Accuracy = "{}!"'.format(i, (TP + TN) / (TP + TN + FN + FP)))
157 | print()# Print a new line
158 |
159 | #Show other ways to get the classification accuracy & other metrics
160 |
161 | from sklearn.metrics import classification_report
162 | from sklearn.metrics import accuracy_score
163 |
164 | for i in range(len(model)):
165 | print('Model ',i)
166 | #Check precision, recall, f1-score
167 | print( classification_report(Y_test, model[i].predict(X_test)) )
168 | #Another way to get the models accuracy on the test data
169 | print( accuracy_score(Y_test, model[i].predict(X_test)))
170 | print()#Print a new line
171 |
172 |
173 | #Print Prediction of Random Forest Classifier model
174 | pred = model[6].predict(X_test)
175 | print(pred)
176 | #Print a space
177 | print()
178 | #Print the actual values
179 | print(Y_test)
180 |
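181 | #NOTE (sketch): for a medical task the error types matter, not just accuracy. From the
182 | #confusion matrix of, say, the Random Forest model[6], sensitivity and specificity are:
183 | #cm = confusion_matrix(Y_test, model[6].predict(X_test))
184 | #sensitivity = cm[1][1] / (cm[1][1] + cm[1][0]) # TP / (TP + FN): malignant cases caught
185 | #specificity = cm[0][0] / (cm[0][0] + cm[0][1]) # TN / (TN + FP): benign cases cleared
186 | #print('Sensitivity:', sensitivity, 'Specificity:', specificity)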
--------------------------------------------------------------------------------
/concatenate_file.py:
--------------------------------------------------------------------------------
1 | # This program concatenates multiple files in the form fit1, fit2, fit3, ... fit24
2 | # into one file called out.csv
3 |
4 | fout=open("out.csv","a")
5 | # first file:
6 | for line in open("/fit1.csv"):
7 | fout.write(line)
8 | # now the rest:
9 | for num in range(2,25): #You can change the number of files to concatenate by changing 25 to a different number Note: range(2,25) is exclusive [2,25)
10 | f = open("/fit"+str(num)+".csv")
11 | next(f) # skip the header
12 | for line in f:
13 | fout.write(line)
14 | f.close() # not really needed
15 | fout.close()
16 |
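17 | # A pandas-based alternative (a sketch; assumes all of the files fit in memory):
18 | #import pandas as pd
19 | #pd.concat([pd.read_csv("/fit" + str(n) + ".csv") for n in range(1, 25)]).to_csv("out.csv", index=False)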
--------------------------------------------------------------------------------
/mnist_ann.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """MNIST_ANN.ipynb
3 |
4 | Automatically generated by Colaboratory.
5 |
6 | Original file is located at
7 | https://colab.research.google.com/drive/1IYoctAFrXbyWW_H6FZ2W2QD8heZnMvi-
8 | """
9 |
10 | # Description: This program classifies the MNIST handwritten digit images
11 | # as a number 0 - 9
12 |
13 | # Install packages (run this in a Colab cell or terminal, not as a Python statement):
14 | # pip install tensorflow keras numpy mnist matplotlib
15 |
16 | #import the packages / dependencies
17 | import numpy as np
18 | import mnist # Get the MNIST data set
19 | from keras.models import Sequential #ANN architecture
20 | from keras.layers import Dense # The layers in the ANN
21 | from keras.utils import to_categorical
22 | import matplotlib.pyplot as plt # Graph
23 |
24 | #Load the data set
25 | train_images = mnist.train_images() # training data of images
26 | train_labels = mnist.train_labels() # training data of the labels
27 | test_images = mnist.test_images() # testing data images
28 | test_labels = mnist.test_labels() # testing data labels
29 |
30 | #Normalize the images
31 | #Normalize the pixel values from [0, 255] to [-0.5, 0.5]
32 | #This makes the network easier to train
33 | train_images = (train_images / 255) - 0.5
34 | test_images = (test_images/ 255) - 0.5
35 |
36 | #Flatten the images. Flatten each 28 x 28 image into a 784 (= 28^2)
37 | #dimensional vector and pass it into the neural network
38 | train_images = train_images.reshape((-1, 784))
39 | test_images = test_images.reshape((-1,784))
40 | #print the new image shape
41 | print(train_images.shape) #60,000 rows and 784 cols
42 | print(test_images.shape) #10,000 rows and 784 cols
43 |
44 | #Build the ANN model
45 | # Three layers: two Dense layers with 64 neurons each (relu activation)
46 | # and one output layer with 10 neurons (softmax activation)
47 | model = Sequential()
48 | model.add(Dense(64, activation='relu', input_dim=784))
49 | model.add(Dense(64, activation='relu'))
50 | model.add(Dense(10, activation='softmax'))
51 |
52 | #Compile the model
53 | # The loss measures how well the model did during training; the optimizer
54 | # then tries to improve on it
55 | model.compile(
56 | optimizer= 'adam',
57 | loss = 'categorical_crossentropy', #loss function when there are more than 2 classes
58 | metrics = ['accuracy']
59 | )
60 |
61 | #Train the model
62 | model.fit(
63 | train_images, #The training data images
64 | to_categorical(train_labels), #The training data labels, one-hot encoded: e.g. train_labels = 2 -> to_categorical(2) = [0,0,1,0,0,0,0,0,0,0]
65 | epochs=5, #Number of iterations over the entire data set to train on
66 | batch_size = 3 #The number of samples per gradient update for training
67 | )
68 |
69 | #Evaluate the model
70 | model.evaluate(
71 | test_images,
72 | to_categorical(test_labels)
73 | )
74 |
75 | #Save the model's weights to disk (save_weights stores only the weights, not the architecture)
76 | model.save_weights('model.h5')
77 | # Load the model from disk later using:
78 | # Build the model.
79 | #model = Sequential([
80 | # Dense(64, activation='relu', input_shape=(784,)),
81 | # Dense(64, activation='relu'),
82 | # Dense(10, activation='softmax'),
83 | #])
84 | # model.load_weights('model.h5')
85 |
86 | #Make predictions
87 | # Predict on the first 5 test images.
88 | # Keep in mind that the output of our network is 10 probabilities,
89 | # so we'll use np.argmax() to turn those into actual digits
90 | predictions = model.predict(test_images[:5])
91 | #print(predictions)
92 | print(np.argmax(predictions, axis=1))
93 | print(test_labels[:5])
94 |
95 | # Show the first 5 test images as 28 x 28 grayscale plots (plt is already imported above)
96 | for i in range(0,5):
97 |     image = np.array(test_images[i], dtype='float')
98 |     pixels = image.reshape((28, 28))
99 |     plt.imshow(pixels, cmap='gray')
100 |     plt.show()
--------------------------------------------------------------------------------
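The load path sketched in the comments above can be made concrete. A minimal sketch, assuming the same architecture and the model.h5 file written by save_weights():

from keras.models import Sequential
from keras.layers import Dense

# Rebuild the exact same architecture, then load the trained weights into it
model = Sequential([
    Dense(64, activation='relu', input_shape=(784,)),
    Dense(64, activation='relu'),
    Dense(10, activation='softmax'),
])
model.load_weights('model.h5')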
/remove_empty_row.py:
--------------------------------------------------------------------------------
1 | #This program reads in a .csv file called out.csv, removes all empty rows, and writes the remaining rows to a new file called FitBit.csv
2 |
3 | import csv
4 | input1 = open('out.csv', 'r')
5 | output = open('FitBit.csv', 'w', newline='')
6 | writer = csv.writer(output)
7 | for row in csv.reader(input1):
8 |     if any(row): # any() is False only when every cell in the row is empty
9 |         writer.writerow(row)
10 | input1.close()
11 | output.close()
12 |
--------------------------------------------------------------------------------
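The same filter written with context managers closes both files even if an exception is raised mid-copy; a minimal sketch under the same file-name assumptions:

import csv

# Copy out.csv to FitBit.csv, dropping rows where every cell is empty
with open('out.csv', 'r', newline='') as infile, \
     open('FitBit.csv', 'w', newline='') as outfile:
    writer = csv.writer(outfile)
    for row in csv.reader(infile):
        if any(row):
            writer.writerow(row)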
/scrape.py:
--------------------------------------------------------------------------------
1 | #This program scrapes all of the links from a given web page.
2 |
3 | from bs4 import BeautifulSoup
4 |
5 | import requests
6 |
7 | #Ask the user for the input URL, for example https://www.census.gov/data/tables/2016/demo/popest/state-total.html
8 | url = input("Enter a website to extract the URL's from: ")
9 |
10 | #Request the page from the server with an HTTP GET request
11 | r = requests.get(url)
12 | #Convert the raw response to text to retrieve the data from the response object
13 | data = r.text
14 |
15 | #Parse the HTML with Python's built-in parser
16 | soup = BeautifulSoup(data, 'html.parser')
17 |
18 | links = ''
19 | 
20 | # Get all URLs from <a> tags with an href attribute
21 | for link in soup.find_all('a'):
22 |     print(link.get('href'))
23 |     #links += str(link.get('href')) + '\n'
24 | 
25 | 
26 | 
27 | #print(links)
28 |
--------------------------------------------------------------------------------
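Two caveats with the loop above: href values are often relative (e.g. /data/tables/...), and anchors without an href make link.get('href') return None. A minimal sketch that handles both, assuming the same url and soup objects from the script:

from urllib.parse import urljoin

# href=True keeps only <a> tags that actually carry an href attribute
for link in soup.find_all('a', href=True):
    print(urljoin(url, link['href']))  # resolve relative links against the page URL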
/sentiment.py:
--------------------------------------------------------------------------------
1 | #pip install TextBlob
2 |
3 | #import TextBlob
4 | from textblob import TextBlob
5 |
6 | text = "Python is a very good language to learn"
7 |
8 | obj = TextBlob(text)
9 |
10 | #Get the sentiment polarity of the text:
11 | #a value between -1.0 (negative) and 1.0 (positive)
12 | sentiment = obj.sentiment.polarity
13 |
14 | print(sentiment)
15 |
--------------------------------------------------------------------------------
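TextBlob's sentiment also exposes a subjectivity score alongside polarity. A short sketch that prints both and maps polarity onto a coarse label; the zero threshold is an illustrative choice, not part of the library:

from textblob import TextBlob

obj = TextBlob("Python is a very good language to learn")
polarity = obj.sentiment.polarity          # in [-1.0, 1.0]
subjectivity = obj.sentiment.subjectivity  # in [0.0, 1.0]; higher = more opinionated

label = 'positive' if polarity > 0 else 'negative' if polarity < 0 else 'neutral'
print(polarity, subjectivity, label)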
/stock.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Untitled11.ipynb",
7 | "version": "0.3.2",
8 | "provenance": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | }
14 | },
15 | "cells": [
16 | {
17 | "cell_type": "code",
18 | "metadata": {
19 | "id": "twOJzFIIuF0w",
20 | "colab_type": "code",
21 | "colab": {}
22 | },
23 | "source": [
24 | "# This program predicts stock prices by using machine learning models\n",
25 | "\n",
26 | "#Import the dependencies\n",
27 | "import quandl\n",
28 | "import numpy as np \n",
29 | "from sklearn.linear_model import LinearRegression\n",
30 | "from sklearn.svm import SVR\n",
31 | "from sklearn.model_selection import train_test_split"
32 | ],
33 | "execution_count": 0,
34 | "outputs": []
35 | },
36 | {
37 | "cell_type": "code",
38 | "metadata": {
39 | "id": "8l3KwJoovJLC",
40 | "colab_type": "code",
41 | "colab": {
42 | "base_uri": "https://localhost:8080/",
43 | "height": 175
44 | },
45 | "outputId": "0b2c989f-3db8-48bc-ede8-03f1bf8e8ec1"
46 | },
47 | "source": [
48 | "#Get the stock data\n",
49 | "df = quandl.get(\"WIKI/AMZN\")\n",
50 | "# Take a look at the data\n",
51 | "print(df.head())"
52 | ],
53 | "execution_count": 15,
54 | "outputs": [
55 | {
56 | "output_type": "stream",
57 | "text": [
58 | " Open High Low ... Adj. Low Adj. Close Adj. Volume\n",
59 | "Date ... \n",
60 | "1997-05-16 22.38 23.75 20.50 ... 1.708333 1.729167 14700000.0\n",
61 | "1997-05-19 20.50 21.25 19.50 ... 1.625000 1.708333 6106800.0\n",
62 | "1997-05-20 20.75 21.00 19.63 ... 1.635833 1.635833 5467200.0\n",
63 | "1997-05-21 19.25 19.75 16.50 ... 1.375000 1.427500 18853200.0\n",
64 | "1997-05-22 17.25 17.38 15.75 ... 1.312500 1.395833 11776800.0\n",
65 | "\n",
66 | "[5 rows x 12 columns]\n"
67 | ],
68 | "name": "stdout"
69 | }
70 | ]
71 | },
72 | {
73 | "cell_type": "code",
74 | "metadata": {
75 | "id": "lT9a4KJxvi67",
76 | "colab_type": "code",
77 | "colab": {
78 | "base_uri": "https://localhost:8080/",
79 | "height": 140
80 | },
81 | "outputId": "52b7a080-adf7-4743-8ced-23688173dd2f"
82 | },
83 | "source": [
84 | "# Get the Adjusted Close Price\n",
85 | "df = df[['Adj. Close']]\n",
86 | "#Take a look at the new data\n",
87 | "print(df.head())"
88 | ],
89 | "execution_count": 16,
90 | "outputs": [
91 | {
92 | "output_type": "stream",
93 | "text": [
94 | " Adj. Close\n",
95 | "Date \n",
96 | "1997-05-16 1.729167\n",
97 | "1997-05-19 1.708333\n",
98 | "1997-05-20 1.635833\n",
99 | "1997-05-21 1.427500\n",
100 | "1997-05-22 1.395833\n"
101 | ],
102 | "name": "stdout"
103 | }
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "metadata": {
109 | "id": "jI5ZFdugvz0Z",
110 | "colab_type": "code",
111 | "colab": {
112 | "base_uri": "https://localhost:8080/",
113 | "height": 140
114 | },
115 | "outputId": "02e07ebf-8e83-404c-9282-0aa2b15f3130"
116 | },
117 | "source": [
118 | "# A variable for predicting 'n' days out into the future\n",
119 | "forecast_out = 30 #'n=30' days\n",
120 | "#Create another column (the target or dependent variable) shifted 'n' units up\n",
121 | "df['Prediction'] = df[['Adj. Close']].shift(-forecast_out)\n",
122 | "#print the new data set\n",
123 | "print(df.tail())\n"
124 | ],
125 | "execution_count": 17,
126 | "outputs": [
127 | {
128 | "output_type": "stream",
129 | "text": [
130 | " Adj. Close Prediction\n",
131 | "Date \n",
132 | "2018-03-21 1581.86 NaN\n",
133 | "2018-03-22 1544.10 NaN\n",
134 | "2018-03-23 1495.56 NaN\n",
135 | "2018-03-26 1555.86 NaN\n",
136 | "2018-03-27 1497.05 NaN\n"
137 | ],
138 | "name": "stdout"
139 | }
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "metadata": {
145 | "id": "FyJ8Juj0xfQG",
146 | "colab_type": "code",
147 | "colab": {
148 | "base_uri": "https://localhost:8080/",
149 | "height": 140
150 | },
151 | "outputId": "303ae60f-cd59-4356-9245-76a24da345ce"
152 | },
153 | "source": [
154 | "### Create the independent data set (X) #######\n",
155 | "# Convert the dataframe to a numpy array\n",
156 | "X = np.array(df.drop(['Prediction'], axis=1))\n",
157 | "\n",
158 | "#Remove the last 'n' rows\n",
159 | "X = X[:-forecast_out]\n",
160 | "print(X)"
161 | ],
162 | "execution_count": 18,
163 | "outputs": [
164 | {
165 | "output_type": "stream",
166 | "text": [
167 | "[[ 1.72916667]\n",
168 | " [ 1.70833333]\n",
169 | " [ 1.63583333]\n",
170 | " ...\n",
171 | " [1350.47 ]\n",
172 | " [1338.99 ]\n",
173 | " [1386.23 ]]\n"
174 | ],
175 | "name": "stdout"
176 | }
177 | ]
178 | },
179 | {
180 | "cell_type": "code",
181 | "metadata": {
182 | "id": "XSFFcCUCyJGl",
183 | "colab_type": "code",
184 | "colab": {
185 | "base_uri": "https://localhost:8080/",
186 | "height": 52
187 | },
188 | "outputId": "d3bf26d7-67c3-4cc1-e854-60e7636364b0"
189 | },
190 | "source": [
191 | "### Create the dependent data set (y) #####\n",
192 | "# Convert the dataframe to a numpy array (All of the values including the NaN's)\n",
193 | "y = np.array(df['Prediction'])\n",
194 | "# Get all of the y values except the last 'n' rows\n",
195 | "y = y[:-forecast_out]\n",
196 | "print(y)"
197 | ],
198 | "execution_count": 19,
199 | "outputs": [
200 | {
201 | "output_type": "stream",
202 | "text": [
203 | "[1.54166667e+00 1.51583333e+00 1.58833333e+00 ... 1.49556000e+03\n",
204 | " 1.55586000e+03 1.49705000e+03]\n"
205 | ],
206 | "name": "stdout"
207 | }
208 | ]
209 | },
210 | {
211 | "cell_type": "code",
212 | "metadata": {
213 | "id": "uTs969WyzAVv",
214 | "colab_type": "code",
215 | "colab": {}
216 | },
217 | "source": [
218 | "# Split the data into 80% training and 20% testing\n",
219 | "x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2)"
220 | ],
221 | "execution_count": 0,
222 | "outputs": []
223 | },
224 | {
225 | "cell_type": "code",
226 | "metadata": {
227 | "id": "mOU8vGVyzdtk",
228 | "colab_type": "code",
229 | "colab": {
230 | "base_uri": "https://localhost:8080/",
231 | "height": 52
232 | },
233 | "outputId": "bc0e6671-92f9-4793-d6cd-aef197ac5962"
234 | },
235 | "source": [
236 | "# Create and train the Support Vector Machine (Regressor)\n",
237 | "svr_rbf = SVR(kernel='rbf', C=1e3, gamma=0.1)\n",
238 | "svr_rbf.fit(x_train, y_train)"
239 | ],
240 | "execution_count": 21,
241 | "outputs": [
242 | {
243 | "output_type": "execute_result",
244 | "data": {
245 | "text/plain": [
246 | "SVR(C=1000.0, cache_size=200, coef0=0.0, degree=3, epsilon=0.1, gamma=0.1,\n",
247 | " kernel='rbf', max_iter=-1, shrinking=True, tol=0.001, verbose=False)"
248 | ]
249 | },
250 | "metadata": {
251 | "tags": []
252 | },
253 | "execution_count": 21
254 | }
255 | ]
256 | },
257 | {
258 | "cell_type": "code",
259 | "metadata": {
260 | "id": "dE99HjCa0Ft9",
261 | "colab_type": "code",
262 | "colab": {
263 | "base_uri": "https://localhost:8080/",
264 | "height": 35
265 | },
266 | "outputId": "d7a24fc9-a364-4d5e-848f-be7856f2dad5"
267 | },
268 | "source": [
269 | "# Testing Model: Score returns the coefficient of determination R^2 of the prediction. \n",
270 | "# The best possible score is 1.0\n",
271 | "svm_confidence = svr_rbf.score(x_test, y_test)\n",
272 | "print(\"svm confidence: \", svm_confidence)"
273 | ],
274 | "execution_count": 22,
275 | "outputs": [
276 | {
277 | "output_type": "stream",
278 | "text": [
279 | "svm confidence: 0.9274190417518909\n"
280 | ],
281 | "name": "stdout"
282 | }
283 | ]
284 | },
285 | {
286 | "cell_type": "code",
287 | "metadata": {
288 | "id": "ThPLji_30jUh",
289 | "colab_type": "code",
290 | "colab": {
291 | "base_uri": "https://localhost:8080/",
292 | "height": 35
293 | },
294 | "outputId": "6979bf00-01cc-4a99-936a-e29cf7d14bcb"
295 | },
296 | "source": [
297 | "# Create and train the Linear Regression Model\n",
298 | "lr = LinearRegression()\n",
299 | "# Train the model\n",
300 | "lr.fit(x_train, y_train)"
301 | ],
302 | "execution_count": 23,
303 | "outputs": [
304 | {
305 | "output_type": "execute_result",
306 | "data": {
307 | "text/plain": [
308 | "LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None, normalize=False)"
309 | ]
310 | },
311 | "metadata": {
312 | "tags": []
313 | },
314 | "execution_count": 23
315 | }
316 | ]
317 | },
318 | {
319 | "cell_type": "code",
320 | "metadata": {
321 | "id": "jCPA1KZj0z-A",
322 | "colab_type": "code",
323 | "colab": {
324 | "base_uri": "https://localhost:8080/",
325 | "height": 35
326 | },
327 | "outputId": "c22f33b3-a840-4666-8861-3b4d3f049776"
328 | },
329 | "source": [
330 | "# Testing Model: Score returns the coefficient of determination R^2 of the prediction. \n",
331 | "# The best possible score is 1.0\n",
332 | "lr_confidence = lr.score(x_test, y_test)\n",
333 | "print(\"lr confidence: \", lr_confidence)"
334 | ],
335 | "execution_count": 24,
336 | "outputs": [
337 | {
338 | "output_type": "stream",
339 | "text": [
340 | "lr confidence: 0.9874918531515935\n"
341 | ],
342 | "name": "stdout"
343 | }
344 | ]
345 | },
346 | {
347 | "cell_type": "code",
348 | "metadata": {
349 | "id": "zJ2PwS_k1EmG",
350 | "colab_type": "code",
351 | "colab": {
352 | "base_uri": "https://localhost:8080/",
353 | "height": 543
354 | },
355 | "outputId": "fddf4529-790f-44cc-fef2-bb17df3933c5"
356 | },
357 | "source": [
358 | "# Set x_forecast equal to the last 30 rows of the original data set from Adj. Close column\n",
359 | "x_forecast = np.array(df.drop(['Prediction'], axis=1))[-forecast_out:]\n",
360 | "print(x_forecast)"
361 | ],
362 | "execution_count": 25,
363 | "outputs": [
364 | {
365 | "output_type": "stream",
366 | "text": [
367 | "[[1414.51]\n",
368 | " [1451.05]\n",
369 | " [1461.76]\n",
370 | " [1448.69]\n",
371 | " [1468.35]\n",
372 | " [1482.92]\n",
373 | " [1484.76]\n",
374 | " [1500. ]\n",
375 | " [1521.95]\n",
376 | " [1511.98]\n",
377 | " [1512.45]\n",
378 | " [1493.45]\n",
379 | " [1500.25]\n",
380 | " [1523.61]\n",
381 | " [1537.64]\n",
382 | " [1545. ]\n",
383 | " [1551.86]\n",
384 | " [1578.89]\n",
385 | " [1598.39]\n",
386 | " [1588.18]\n",
387 | " [1591. ]\n",
388 | " [1582.32]\n",
389 | " [1571.68]\n",
390 | " [1544.93]\n",
391 | " [1586.51]\n",
392 | " [1581.86]\n",
393 | " [1544.1 ]\n",
394 | " [1495.56]\n",
395 | " [1555.86]\n",
396 | " [1497.05]]\n"
397 | ],
398 | "name": "stdout"
399 | }
400 | ]
401 | },
402 | {
403 | "cell_type": "code",
404 | "metadata": {
405 | "id": "6FAGkNOr1l64",
406 | "colab_type": "code",
407 | "colab": {
408 | "base_uri": "https://localhost:8080/",
409 | "height": 228
410 | },
411 | "outputId": "8449bdff-96ae-4884-d7df-e753acb96915"
412 | },
413 | "source": [
414 | "# Print linear regression model predictions for the next 'n' days\n",
415 | "lr_prediction = lr.predict(x_forecast)\n",
416 | "print(lr_prediction)\n",
417 | "\n",
418 | "# Print support vector regressor model predictions for the next 'n' days\n",
419 | "svm_prediction = svr_rbf.predict(x_forecast)\n",
420 | "print(svm_prediction)"
421 | ],
422 | "execution_count": 26,
423 | "outputs": [
424 | {
425 | "output_type": "stream",
426 | "text": [
427 | "[1494.09445102 1532.76646354 1544.10136377 1530.26876377 1551.07587287\n",
428 | " 1566.4959939 1568.44335304 1584.5725668 1607.80329135 1597.25156817\n",
429 | " 1597.74899143 1577.64039159 1584.83715364 1609.56014796 1624.40876142\n",
430 | " 1632.19819799 1639.45846087 1668.06559001 1688.70336352 1677.89763698\n",
431 | " 1680.88217653 1671.69572145 1660.43490554 1632.12411367 1676.13019689\n",
432 | " 1671.20888167 1631.24568536 1579.87350452 1643.69185031 1581.45044209]\n",
433 | "[1048.26903008 660.32920232 659.32078508 687.16846237 659.32078508\n",
434 | " 659.32078508 659.32078508 659.32078508 659.32078508 659.32078508\n",
435 | " 659.32078508 659.32078508 659.32078508 659.32078508 659.32078508\n",
436 | " 659.32078508 659.32078508 659.32078508 659.32078508 659.32078508\n",
437 | " 659.32078508 659.32078508 659.32078508 659.32078508 659.32078508\n",
438 | " 659.32078508 659.32078508 659.32078508 659.32078508 659.32078508]\n"
439 | ],
440 | "name": "stdout"
441 | }
442 | ]
443 | }
444 | ]
445 | }
446 |
--------------------------------------------------------------------------------
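The SVR forecast above collapses to the same value (659.32...) for nearly every row, which is a typical symptom of feeding an RBF kernel unscaled prices: with inputs on the order of 10^3 and gamma=0.1, test points far from the support vectors all receive essentially the same prediction. A common remedy, sketched here as a suggestion rather than part of the original notebook, is to standardize the features inside a pipeline:

from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVR

# Scale X to zero mean / unit variance before the RBF kernel sees it
svr_scaled = make_pipeline(StandardScaler(), SVR(kernel='rbf', C=1e3, gamma=0.1))
svr_scaled.fit(x_train, y_train)
print(svr_scaled.score(x_test, y_test))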
/stock.py:
--------------------------------------------------------------------------------
1 |
2 | # This program predicts stock prices by using machine learning models
3 |
4 | #Import the dependencies
5 | import quandl
6 | import numpy as np
7 | from sklearn.linear_model import LinearRegression
8 | from sklearn.svm import SVR
9 | from sklearn.model_selection import train_test_split
10 |
11 | #Get the stock data
12 | df = quandl.get("WIKI/AMZN")
13 | # Take a look at the data
14 | print(df.head())
15 |
16 | # Get the Adjusted Close Price
17 | df = df[['Adj. Close']]
18 | #Take a look at the new data
19 | print(df.head())
20 |
21 | # A variable for predicting 'n' days out into the future
22 | forecast_out = 30 #forecast_out = 'n=30' days
23 | #Create another column (the target or dependent variable) shifted 'n' units up
24 | df['Prediction'] = df[['Adj. Close']].shift(-forecast_out)
25 | #print the new data set
26 | print(df.tail())
27 |
28 | ### Create the independent data set (X) #######
29 | # Convert the dataframe to a numpy array
30 | X = np.array(df.drop(['Prediction'], axis=1))
31 |
32 | #Remove the last 'n' rows
33 | X = X[:-forecast_out]
34 | print(X)
35 |
36 | ### Create the dependent data set (y) #####
37 | # Convert the dataframe to a numpy array (All of the values including the NaN's)
38 | y = np.array(df['Prediction'])
39 | # Get all of the y values except the last 'n' rows
40 | y = y[:-forecast_out]
41 | print(y)
42 |
43 | # Split the data into 80% training and 20% testing
44 | x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
45 |
46 | # Create and train the Support Vector Machine (Regressor)
47 | svr_rbf = SVR(kernel='rbf', C=1e3, gamma=0.1)
48 | svr_rbf.fit(x_train, y_train)
49 |
50 | # Testing Model: Score returns the coefficient of determination R^2 of the prediction.
51 | # The best possible score is 1.0
52 | svm_confidence = svr_rbf.score(x_test, y_test)
53 | print("svm confidence: ", svm_confidence)
54 |
55 | # Create and train the Linear Regression Model
56 | lr = LinearRegression()
57 | # Train the model
58 | lr.fit(x_train, y_train)
59 |
60 | # Testing Model: Score returns the coefficient of determination R^2 of the prediction.
61 | # The best possible score is 1.0
62 | lr_confidence = lr.score(x_test, y_test)
63 | print("lr confidence: ", lr_confidence)
64 |
65 | # Set x_forecast equal to the last 30 rows of the original data set from Adj. Close column
66 | x_forecast = np.array(df.drop(['Prediction'], axis=1))[-forecast_out:]
67 | print(x_forecast)
68 |
69 | # Print linear regression model predictions for the next 'n' days
70 | lr_prediction = lr.predict(x_forecast)
71 | print(lr_prediction)
72 |
73 | # Print support vector regressor model predictions for the next 'n' days
74 | svm_prediction = svr_rbf.predict(x_forecast)
75 | print(svm_prediction)
76 |
--------------------------------------------------------------------------------
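One caveat worth flagging for both stock scripts: train_test_split shuffles by default, so the models train on prices randomly interleaved with the test period, which tends to inflate the reported confidence for time-series data. A minimal sketch of a chronological split instead (shuffle=False is a standard train_test_split parameter; holding out the most recent 20% is the illustrative choice here):

from sklearn.model_selection import train_test_split

# Keep temporal order: train on the earliest 80%, test on the most recent 20%
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=False)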