├── .gitignore
├── AutoEncoder
│   ├── autoencoder.ipynb
│   └── model.png
├── Control
│   ├── PID.ipynb
│   ├── SLAM.ipynb
│   └── basics.ipynb
├── Dynamic-Time-Warping
│   ├── README.md
│   ├── dynamic-time-warping.ipynb
│   └── dynamic-time-warping.py
├── GP
│   └── gp.ipynb
├── Kaggle-NCAA
│   ├── Match-Prediction.ipynb
│   ├── test.ipynb
│   ├── test2.ipynb
│   └── womens-machine-learning-competition-2019
│       ├── .DS_Store
│       ├── Stage2WDataFiles.zip
│       ├── Stage2WDataFiles
│       │   ├── WCities.csv
│       │   ├── WGameCities.csv
│       │   ├── WNCAATourneyCompactResults.csv
│       │   ├── WNCAATourneyDetailedResults.csv
│       │   ├── WNCAATourneySeeds.csv
│       │   ├── WNCAATourneySlots.csv
│       │   ├── WRegularSeasonCompactResults.csv
│       │   ├── WRegularSeasonDetailedResults.csv
│       │   ├── WSeasons.csv
│       │   ├── WTeamSpellings.csv
│       │   └── WTeams.csv
│       ├── WDataFiles.zip
│       ├── WDataFiles
│       │   ├── WCities.csv
│       │   ├── WGameCities.csv
│       │   ├── WNCAATourneyCompactResults.csv
│       │   ├── WNCAATourneyDetailedResults.csv
│       │   ├── WNCAATourneySeeds.csv
│       │   ├── WNCAATourneySlots.csv
│       │   ├── WRegularSeasonCompactResults.csv
│       │   ├── WRegularSeasonDetailedResults.csv
│       │   ├── WSeasons.csv
│       │   ├── WTeamSpellings.csv
│       │   └── WTeams.csv
│       ├── WSampleSubmissionStage1.csv
│       └── WSampleSubmissionStage2.csv
├── Localisation
│   ├── Basics.ipynb
│   ├── basics.py
│   ├── kalman-filter.ipynb
│   ├── localization.ipynb
│   ├── particle-filters.ipynb
│   └── pics
│       ├── bayes.png
│       └── sense-move.png
├── Optimisation
│   ├── adagrad.py
│   ├── adam.py
│   ├── gradient-descent.ipynb
│   ├── rmsprop.py
│   ├── sgd.py
│   └── vanilla-gd.py
├── README.md
├── SMOTE
│   └── SMOTE.ipynb
├── Search
│   ├── A-star.ipynb
│   ├── dynamic-programming.ipynb
│   └── first_search.ipynb
├── UNet
│   ├── model-tgs-salt.h5
│   └── test.ipynb
├── Uplift
│   ├── blift-local.ipynb
│   ├── blift-spark.ipynb
│   └── quickstart.ipynb
├── quantile-regression
│   ├── lgb-quantile-regression.ipynb
│   └── pinball_loss.png
└── survival-analysis
    └── demo.ipynb
/.gitignore:
--------------------------------------------------------------------------------
1 | */.ipynb_checkpoints
2 | .ipynb_checkpoints
3 | */*.csv
4 | */data
5 | .idea
--------------------------------------------------------------------------------
/AutoEncoder/model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/AutoEncoder/model.png
--------------------------------------------------------------------------------
/Control/SLAM.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from math import *\n",
10 | "import random"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 2,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "steering_noise = 0.1\n",
20 | "distance_noise = 0.03\n",
21 | "measurement_noise = 0.3"
22 | ]
23 | },
24 | {
25 | "cell_type": "markdown",
26 | "metadata": {},
27 | "source": [
28 | "## Planning"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 3,
34 | "metadata": {
35 | "code_folding": [
36 | 26
37 | ]
38 | },
39 | "outputs": [],
40 | "source": [
41 | "class plan:\n",
42 | " def __init__(self, grid, init, goal, cost = 1):\n",
43 | " self.cost = cost\n",
44 | " self.grid = grid\n",
45 | " self.init = init\n",
46 | " self.goal = goal\n",
47 | " self.make_heuristic(grid, goal, self.cost)\n",
48 | " self.path = []\n",
49 | " self.spath = []\n",
50 | "\n",
51 | " # --------\n",
52 | " #\n",
53 | " # make heuristic function for a grid\n",
54 | " \n",
55 | " def make_heuristic(self, grid, goal, cost):\n",
56 | " self.heuristic = [[0 for row in range(len(grid[0]))] for col in range(len(grid))]\n",
57 | " for i in range(len(self.grid)): \n",
58 | " for j in range(len(self.grid[0])):\n",
59 | " self.heuristic[i][j] = abs(i - self.goal[0]) + abs(j - self.goal[1])\n",
60 | "\n",
61 | " # ------------------------------------------------\n",
62 | " # \n",
63 | " # A* for searching a path to the goal\n",
64 | " #\n",
65 | " #\n",
66 | "\n",
67 | " def astar(self):\n",
68 | "\n",
69 | " if self.heuristic == []:\n",
70 | " raise ValueError(\"Heuristic must be defined to run A*\")\n",
71 | "\n",
72 | " # internal motion parameters\n",
73 | " delta = [[-1, 0], # go up\n",
74 | " [ 0, -1], # go left\n",
75 | " [ 1, 0], # go down\n",
76 | " [ 0, 1]] # do right\n",
77 | "\n",
78 | "\n",
79 | " # open list elements are of the type: [f, g, h, x, y]\n",
80 | "\n",
81 | " closed = [[0 for row in range(len(self.grid[0]))] for col in range(len(self.grid))]\n",
82 | " action = [[0 for row in range(len(self.grid[0]))] for col in range(len(self.grid))]\n",
83 | "\n",
84 | " closed[self.init[0]][self.init[1]] = 1\n",
85 | "\n",
86 | " x = self.init[0]\n",
87 | " y = self.init[1]\n",
88 | " h = self.heuristic[x][y]\n",
89 | " g = 0\n",
90 | " f = g + h\n",
91 | "\n",
92 | " open = [[f, g, h, x, y]]\n",
93 | "\n",
94 | " found = False # flag that is set when search complete\n",
95 | " resign = False # flag set if we can't find expand\n",
96 | " count = 0\n",
97 | "\n",
98 | " while not found and not resign:\n",
99 | "\n",
100 | " # check if we still have elements on the open list\n",
101 | " if len(open) == 0:\n",
102 | " resign = True\n",
103 | " print('###### Search terminated without success')\n",
104 | " \n",
105 | " else:\n",
106 | " # remove node from list\n",
107 | " open.sort()\n",
108 | " open.reverse()\n",
109 | " next = open.pop()\n",
110 | " x = next[3]\n",
111 | " y = next[4]\n",
112 | " g = next[1]\n",
113 | "\n",
114 | " # check if we are done\n",
115 | " if x == goal[0] and y == goal[1]:\n",
116 | " found = True\n",
117 | " else:\n",
118 | " # expand winning element and add to new open list\n",
119 | " for i in range(len(delta)):\n",
120 | " x2 = x + delta[i][0]\n",
121 | " y2 = y + delta[i][1]\n",
122 | " if x2 >= 0 and x2 < len(self.grid) and y2 >= 0 and y2 < len(self.grid[0]):\n",
123 | " if closed[x2][y2] == 0 and self.grid[x2][y2] == 0:\n",
124 | " g2 = g + self.cost\n",
125 | " h2 = self.heuristic[x2][y2]\n",
126 | " f2 = g2 + h2\n",
127 | " open.append([f2, g2, h2, x2, y2])\n",
128 | " closed[x2][y2] = 1\n",
129 | " action[x2][y2] = i\n",
130 | " count += 1\n",
131 | "\n",
132 | " # extract the path\n",
133 | " invpath = []\n",
134 | " x = self.goal[0]\n",
135 | " y = self.goal[1]\n",
136 | " invpath.append([x, y])\n",
137 | " while x != self.init[0] or y != self.init[1]:\n",
138 | " x2 = x - delta[action[x][y]][0]\n",
139 | " y2 = y - delta[action[x][y]][1]\n",
140 | " x = x2\n",
141 | " y = y2\n",
142 | " invpath.append([x, y])\n",
143 | "\n",
144 | " self.path = []\n",
145 | " for i in range(len(invpath)):\n",
146 | " self.path.append(invpath[len(invpath) - 1 - i])\n",
147 | "\n",
148 | "\n",
149 | " # ------------------------------------------------\n",
150 | " # \n",
151 | " # this is the smoothing function\n",
152 | " #\n",
153 | "\n",
154 | " def smooth(self, weight_data=0.1, weight_smooth=0.1, tolerance=0.000001):\n",
155 | "\n",
156 | " if self.path == []:\n",
157 | " raise ValueError(\"Run A* first before smoothing path\")\n",
158 | "\n",
159 | " self.spath = [[0 for row in range(len(self.path[0]))] for col in range(len(self.path))]\n",
160 | " for i in range(len(self.path)):\n",
161 | " for j in range(len(self.path[0])):\n",
162 | " self.spath[i][j] = self.path[i][j]\n",
163 | "\n",
164 | " change = tolerance\n",
165 | " while change >= tolerance:\n",
166 | " change = 0.0\n",
167 | " for i in range(1, len(self.path)-1):\n",
168 | " for j in range(len(self.path[0])):\n",
169 | " aux = self.spath[i][j]\n",
170 | " \n",
171 | " self.spath[i][j] += weight_data * (self.path[i][j] - self.spath[i][j])\n",
172 | " self.spath[i][j] += weight_smooth * (self.spath[i-1][j] + self.spath[i+1][j] - (2.0 * self.spath[i][j]))\n",
173 | " if i >= 2:\n",
174 | " self.spath[i][j] += 0.5 * weight_smooth * \\\n",
175 | " (2.0 * self.spath[i-1][j] - self.spath[i-2][j] - self.spath[i][j])\n",
176 | " if i <= len(self.path) - 3:\n",
177 | " self.spath[i][j] += 0.5 * weight_smooth * \\\n",
178 | " (2.0 * self.spath[i+1][j] - self.spath[i+2][j] - self.spath[i][j])\n",
179 | " change += abs(aux - self.spath[i][j])"
180 | ]
181 | },
182 | {
183 | "cell_type": "markdown",
184 | "metadata": {},
185 | "source": [
186 | "## Robot"
187 | ]
188 | },
189 | {
190 | "cell_type": "code",
191 | "execution_count": 4,
192 | "metadata": {},
193 | "outputs": [],
194 | "source": [
195 | "# ------------------------------------------------\n",
196 | "# \n",
197 | "# this is the robot class\n",
198 | "#\n",
199 | "\n",
200 | "class robot:\n",
201 | "\n",
202 | " # --------\n",
203 | " # init: \n",
204 | " #\tcreates robot and initializes location/orientation to 0, 0, 0\n",
205 | " #\n",
206 | "\n",
207 | " def __init__(self, length = 0.5):\n",
208 | " self.x = 0.0\n",
209 | " self.y = 0.0\n",
210 | " self.orientation = 0.0\n",
211 | " self.length = length\n",
212 | " self.steering_noise = 0.0\n",
213 | " self.distance_noise = 0.0\n",
214 | " self.measurement_noise = 0.0\n",
215 | " self.num_collisions = 0\n",
216 | " self.num_steps = 0\n",
217 | "\n",
218 | " # --------\n",
219 | " # set: \n",
220 | " #\tsets a robot coordinate\n",
221 | " #\n",
222 | "\n",
223 | " def set(self, new_x, new_y, new_orientation):\n",
224 | "\n",
225 | " self.x = float(new_x)\n",
226 | " self.y = float(new_y)\n",
227 | " self.orientation = float(new_orientation) % (2.0 * pi)\n",
228 | "\n",
229 | "\n",
230 | " # --------\n",
231 | " # set_noise: \n",
232 | " #\tsets the noise parameters\n",
233 | " #\n",
234 | "\n",
235 | " def set_noise(self, new_s_noise, new_d_noise, new_m_noise):\n",
236 | " # makes it possible to change the noise parameters\n",
237 | " # this is often useful in particle filters\n",
238 | " self.steering_noise = float(new_s_noise)\n",
239 | " self.distance_noise = float(new_d_noise)\n",
240 | " self.measurement_noise = float(new_m_noise)\n",
241 | "\n",
242 | " # --------\n",
243 | " # check: \n",
244 | " # checks of the robot pose collides with an obstacle, or\n",
245 | " # is too far outside the plane\n",
246 | "\n",
247 | " def check_collision(self, grid):\n",
248 | " for i in range(len(grid)):\n",
249 | " for j in range(len(grid[0])):\n",
250 | " if grid[i][j] == 1:\n",
251 | " dist = sqrt((self.x - float(i)) ** 2 + (self.y - float(j)) ** 2)\n",
252 | " if dist < 0.5:\n",
253 | " self.num_collisions += 1\n",
254 | " return False\n",
255 | " return True\n",
256 | " \n",
257 | " def check_goal(self, goal, threshold=1.0):\n",
258 | " dist = sqrt((float(goal[0]) - self.x) ** 2 + (float(goal[1]) - self.y) ** 2)\n",
259 | " return dist < threshold\n",
260 | " \n",
261 | " # --------\n",
262 | " # move: \n",
263 | " # steering = front wheel steering angle, limited by max_steering_angle\n",
264 | " # distance = total distance driven, most be non-negative\n",
265 | "\n",
266 | " def move(self, grid, steering, distance, tolerance=0.001, max_steering_angle=pi / 4.0):\n",
267 | " if steering > max_steering_angle:\n",
268 | " steering = max_steering_angle\n",
269 | " if steering < -max_steering_angle:\n",
270 | " steering = -max_steering_angle\n",
271 | " if distance < 0.0:\n",
272 | " distance = 0.0\n",
273 | "\n",
274 | " # make a new copy\n",
275 | " res = robot()\n",
276 | " res.length = self.length\n",
277 | " res.steering_noise = self.steering_noise\n",
278 | " res.distance_noise = self.distance_noise\n",
279 | " res.measurement_noise = self.measurement_noise\n",
280 | " res.num_collisions = self.num_collisions\n",
281 | " res.num_steps = self.num_steps + 1\n",
282 | "\n",
283 | " # apply noise\n",
284 | " steering2 = random.gauss(steering, self.steering_noise)\n",
285 | " distance2 = random.gauss(distance, self.distance_noise)\n",
286 | "\n",
287 | " # Execute motion\n",
288 | " turn = tan(steering2) * distance2 / res.length\n",
289 | "\n",
290 | " if abs(turn) < tolerance:\n",
291 | " # approximate by straight line motion\n",
292 | " res.x = self.x + (distance2 * cos(self.orientation))\n",
293 | " res.y = self.y + (distance2 * sin(self.orientation))\n",
294 | " res.orientation = (self.orientation + turn) % (2.0 * pi)\n",
295 | " else:\n",
296 | " # approximate bicycle model for motion\n",
297 | " radius = distance2 / turn\n",
298 | " cx = self.x - (sin(self.orientation) * radius)\n",
299 | " cy = self.y + (cos(self.orientation) * radius)\n",
300 | " res.orientation = (self.orientation + turn) % (2.0 * pi)\n",
301 | " res.x = cx + (sin(res.orientation) * radius)\n",
302 | " res.y = cy - (cos(res.orientation) * radius)\n",
303 | " # check for collision\n",
304 | " # res.check_collision(grid)\n",
305 | " return res\n",
306 | "\n",
307 | " # --------\n",
308 | " # sense: \n",
309 | " # \n",
310 | " def sense(self):\n",
311 | " return [random.gauss(self.x, self.measurement_noise), random.gauss(self.y, self.measurement_noise)]\n",
312 | "\n",
313 | " # --------\n",
314 | " # measurement_prob\n",
315 | " # computes the probability of a measurement\n",
316 | " # \n",
317 | " def measurement_prob(self, measurement):\n",
318 | " # compute errors\n",
319 | " error_x = measurement[0] - self.x\n",
320 | " error_y = measurement[1] - self.y\n",
321 | "\n",
322 | " # calculate Gaussian\n",
323 | " error = exp(-(error_x ** 2) / (self.measurement_noise ** 2) / 2.0) \\\n",
324 | " / sqrt(2.0 * pi * (self.measurement_noise ** 2))\n",
325 | " error *= exp(- (error_y ** 2) / (self.measurement_noise ** 2) / 2.0) \\\n",
326 | " / sqrt(2.0 * pi * (self.measurement_noise ** 2))\n",
327 | "\n",
328 | " return error\n",
329 | "\n",
330 | " def __repr__(self):\n",
331 | " return '[%.5f, %.5f]' % (self.x, self.y)"
332 | ]
333 | },
334 | {
335 | "cell_type": "markdown",
336 | "metadata": {},
337 | "source": [
338 | "## Particle Filter"
339 | ]
340 | },
341 | {
342 | "cell_type": "code",
343 | "execution_count": 5,
344 | "metadata": {},
345 | "outputs": [],
346 | "source": [
347 | "# ------------------------------------------------\n",
348 | "# \n",
349 | "# this is the particle filter class\n",
350 | "#\n",
351 | "\n",
352 | "class particles:\n",
353 | "\n",
354 | " # --------\n",
355 | " # init: \n",
356 | " #\tcreates particle set with given initial position\n",
357 | " #\n",
358 | " def __init__(self, x, y, theta, \n",
359 | " steering_noise, distance_noise, measurement_noise, N=100):\n",
360 | " self.N = N\n",
361 | " self.steering_noise = steering_noise\n",
362 | " self.distance_noise = distance_noise\n",
363 | " self.measurement_noise = measurement_noise\n",
364 | " \n",
365 | " self.data = []\n",
366 | " for i in range(self.N):\n",
367 | " r = robot()\n",
368 | " r.set(x, y, theta)\n",
369 | " r.set_noise(steering_noise, distance_noise, measurement_noise)\n",
370 | " self.data.append(r)\n",
371 | "\n",
372 | "\n",
373 | " # --------\n",
374 | " #\n",
375 | " # extract position from a particle set\n",
376 | " # \n",
377 | " \n",
378 | " def get_position(self):\n",
379 | " x = 0.0\n",
380 | " y = 0.0\n",
381 | " orientation = 0.0\n",
382 | "\n",
383 | " for i in range(self.N):\n",
384 | " x += self.data[i].x\n",
385 | " y += self.data[i].y\n",
386 | " # orientation is tricky because it is cyclic. By normalizing\n",
387 | " # around the first particle we are somewhat more robust to\n",
388 | " # the 0=2pi problem\n",
389 | " orientation += (((self.data[i].orientation\n",
390 | " - self.data[0].orientation + pi) % (2.0 * pi)) \n",
391 | " + self.data[0].orientation - pi)\n",
392 | " return [x / self.N, y / self.N, orientation / self.N]\n",
393 | "\n",
394 | " # --------\n",
395 | " #\n",
396 | " # motion of the particles\n",
397 | " # \n",
398 | "\n",
399 | " def move(self, grid, steer, speed):\n",
400 | " newdata = []\n",
401 | "\n",
402 | " for i in range(self.N):\n",
403 | " r = self.data[i].move(grid, steer, speed)\n",
404 | " newdata.append(r)\n",
405 | " self.data = newdata\n",
406 | "\n",
407 | " # --------\n",
408 | " #\n",
409 | " # sensing and resampling\n",
410 | " # \n",
411 | "\n",
412 | " def sense(self, Z):\n",
413 | " w = []\n",
414 | " for i in range(self.N):\n",
415 | " w.append(self.data[i].measurement_prob(Z))\n",
416 | "\n",
417 | " # resampling (careful, this is using shallow copy)\n",
418 | " p3 = []\n",
419 | " index = int(random.random() * self.N)\n",
420 | " beta = 0.0\n",
421 | " mw = max(w)\n",
422 | "\n",
423 | " for i in range(self.N):\n",
424 | " beta += random.random() * 2.0 * mw\n",
425 | " while beta > w[index]:\n",
426 | " beta -= w[index]\n",
427 | " index = (index + 1) % self.N\n",
428 | " p3.append(self.data[index])\n",
429 | " self.data = p3"
430 | ]
431 | },
432 | {
433 | "cell_type": "code",
434 | "execution_count": 6,
435 | "metadata": {},
436 | "outputs": [],
437 | "source": [
438 | "# --------\n",
439 | "#\n",
440 | "# run: runs control program for the robot\n",
441 | "#\n",
442 | "\n",
443 | "def run(grid, goal, spath, params, printflag=False, speed=0.1, timeout=1000):\n",
444 | " myrobot = robot()\n",
445 | " myrobot.set(0., 0., 0.)\n",
446 | " myrobot.set_noise(steering_noise, distance_noise, measurement_noise)\n",
447 | " filter = particles(myrobot.x, myrobot.y, myrobot.orientation,\n",
448 | " steering_noise, distance_noise, measurement_noise) # default 100 particles\n",
449 | "\n",
450 | " cte = 0.0\n",
451 | " err = 0.0\n",
452 | " N = 0\n",
453 | "\n",
454 | " index = 0 # index into the path\n",
455 | " \n",
456 | " while not myrobot.check_goal(goal) and N < timeout:\n",
457 | "\n",
458 | " diff_cte = - cte\n",
459 | "\n",
460 | "\n",
461 | " # ----------------------------------------\n",
462 | " # compute the CTE\n",
463 | "\n",
464 | " # start with the present robot estimate\n",
465 | " estimate = filter.get_position()\n",
466 | "\n",
467 | " ### ENTER CODE HERE\n",
468 | " dx = spath[index+1][0] - spath[index][0]\n",
469 | " dy = spath[index+1][1] - spath[index][1]\n",
470 | " drx = estimate[0] - spath[index][0]\n",
471 | " dry = estimate[1] - spath[index][1]\n",
472 | " \n",
473 | " u = (dx * drx + dy * dry)/(dx * dx + dy * dy)\n",
474 | " cte = (dry * dx - drx * dy)/(dx * dx + dy * dy)\n",
475 | " \n",
476 | " if u > 1:\n",
477 | " index += 1\n",
478 | " # ----------------------------------------\n",
479 | "\n",
480 | " diff_cte += cte\n",
481 | "\n",
482 | " steer = - params[0] * cte - params[1] * diff_cte \n",
483 | "\n",
484 | " myrobot = myrobot.move(grid, steer, speed)\n",
485 | " filter.move(grid, steer, speed)\n",
486 | "\n",
487 | " Z = myrobot.sense()\n",
488 | " filter.sense(Z) # filter and update particles\n",
489 | "\n",
490 | " if not myrobot.check_collision(grid):\n",
491 | " print('##### Collision ####')\n",
492 | "\n",
493 | " err += (cte ** 2)\n",
494 | " N += 1\n",
495 | "\n",
496 | " if printflag:\n",
497 | " print(myrobot, cte, index, u)\n",
498 | "\n",
499 | " return [myrobot.check_goal(goal), myrobot.num_collisions, myrobot.num_steps]"
500 | ]
501 | },
502 | {
503 | "cell_type": "code",
504 | "execution_count": 7,
505 | "metadata": {},
506 | "outputs": [
507 | {
508 | "name": "stdout",
509 | "output_type": "stream",
510 | "text": [
511 | "[True, 0, 143]\n"
512 | ]
513 | }
514 | ],
515 | "source": [
516 | "# ------------------------------------------------\n",
517 | "# \n",
518 | "# this is our main routine\n",
519 | "#\n",
520 | "\n",
521 | "def main(grid, init, goal, steering_noise, distance_noise, measurement_noise, \n",
522 | " weight_data, weight_smooth, p_gain, d_gain):\n",
523 | "\n",
524 | " path = plan(grid, init, goal)\n",
525 | " path.astar() # get astar path\n",
526 | " path.smooth(weight_data, weight_smooth)\n",
527 | " return run(grid, goal, path.spath, [p_gain, d_gain])\n",
528 | "\n",
529 | "\n",
530 | "# ------------------------------------------------\n",
531 | "# \n",
532 | "# input data and parameters\n",
533 | "#\n",
534 | "\n",
535 | "\n",
536 | "# grid format:\n",
537 | "# 0 = navigable space\n",
538 | "# 1 = occupied space\n",
539 | "\n",
540 | "grid = [[0, 1, 0, 0, 0, 0],\n",
541 | " [0, 1, 0, 1, 1, 0],\n",
542 | " [0, 1, 0, 1, 0, 0],\n",
543 | " [0, 0, 0, 1, 0, 1],\n",
544 | " [0, 1, 0, 1, 0, 0]]\n",
545 | "\n",
546 | "\n",
547 | "init = [0, 0]\n",
548 | "goal = [len(grid)-1, len(grid[0])-1]\n",
549 | "\n",
550 | "\n",
551 | "steering_noise = 0.1\n",
552 | "distance_noise = 0.03\n",
553 | "measurement_noise = 0.3\n",
554 | "\n",
555 | "weight_data = 0.1\n",
556 | "weight_smooth = 0.2\n",
557 | "p_gain = 2.0\n",
558 | "d_gain = 6.0\n",
559 | "\n",
560 | " \n",
561 | "print(main(grid, init, goal, steering_noise, distance_noise, measurement_noise, \n",
562 | " weight_data, weight_smooth, p_gain, d_gain))"
563 | ]
564 | },
565 | {
566 | "cell_type": "code",
567 | "execution_count": 10,
568 | "metadata": {},
569 | "outputs": [],
570 | "source": [
571 | "def twiddle(init_params):\n",
572 | " n_params = len(init_params)\n",
573 | " dparams = [1.0 for row in range(n_params)]\n",
574 | " params = [0.0 for row in range(n_params)]\n",
575 | " K = 10\n",
576 | "\n",
577 | " for i in range(n_params):\n",
578 | " params[i] = init_params[i]\n",
579 | "\n",
580 | "\n",
581 | " best_error = 0.0;\n",
582 | " for k in range(K):\n",
583 | " ret = main(grid, init, goal, \n",
584 | " steering_noise, distance_noise, measurement_noise, \n",
585 | " params[0], params[1], params[2], params[3])\n",
586 | " if ret[0]:\n",
587 | " best_error += ret[1] * 100 + ret[2]\n",
588 | " else:\n",
589 | " best_error += 99999\n",
590 | " best_error = float(best_error) / float(k+1)\n",
591 | " print(best_error)\n",
592 | "\n",
593 | " n = 0\n",
594 | " while sum(dparams) > 0.0000001:\n",
595 | " for i in range(len(params)):\n",
596 | " params[i] += dparams[i]\n",
597 | " err = 0\n",
598 | " for k in range(K):\n",
599 | " ret = main(grid, init, goal, \n",
600 | " steering_noise, distance_noise, measurement_noise, \n",
601 | " params[0], params[1], params[2], params[3])\n",
602 | " if ret[0]:\n",
603 | " err += ret[1] * 100 + ret[2]\n",
604 | " else:\n",
605 | " err += 99999\n",
606 | " print(float(err) / float(k+1))\n",
607 | " if err < best_error:\n",
608 | " best_error = float(err) / float(k+1)\n",
609 | " dparams[i] *= 1.1\n",
610 | " else:\n",
611 | " params[i] -= 2.0 * dparams[i] \n",
612 | " err = 0\n",
613 | " for k in range(K):\n",
614 | " ret = main(grid, init, goal, \n",
615 | " steering_noise, distance_noise, measurement_noise, \n",
616 | " params[0], params[1], params[2], params[3])\n",
617 | " if ret[0]:\n",
618 | " err += ret[1] * 100 + ret[2]\n",
619 | " else:\n",
620 | " err += 99999\n",
621 | " print(float(err) / float(k+1))\n",
622 | " if err < best_error:\n",
623 | " best_error = float(err) / float(k+1)\n",
624 | " dparams[i] *= 1.1\n",
625 | " else:\n",
626 | " params[i] += dparams[i]\n",
627 | " dparams[i] *= 0.5\n",
628 | " n += 1\n",
629 | " print('Twiddle #', n, params, ' -> ', best_error)\n",
630 | " print(' ')\n",
631 | " return params"
632 | ]
633 | },
634 | {
635 | "cell_type": "code",
636 | "execution_count": 12,
637 | "metadata": {},
638 | "outputs": [],
639 | "source": [
640 | "# twiddle([weight_data, weight_smooth, p_gain, d_gain])"
641 | ]
642 | },
643 | {
644 | "cell_type": "code",
645 | "execution_count": null,
646 | "metadata": {},
647 | "outputs": [],
648 | "source": []
649 | }
650 | ],
651 | "metadata": {
652 | "kernelspec": {
653 | "display_name": "Python 3",
654 | "language": "python",
655 | "name": "python3"
656 | },
657 | "language_info": {
658 | "codemirror_mode": {
659 | "name": "ipython",
660 | "version": 3
661 | },
662 | "file_extension": ".py",
663 | "mimetype": "text/x-python",
664 | "name": "python",
665 | "nbconvert_exporter": "python",
666 | "pygments_lexer": "ipython3",
667 | "version": "3.6.5"
668 | }
669 | },
670 | "nbformat": 4,
671 | "nbformat_minor": 2
672 | }
673 |
--------------------------------------------------------------------------------
/Control/basics.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 15,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from copy import deepcopy"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 16,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "def printpaths(path, newpath):\n",
19 | " for old, new in zip(path, newpath):\n",
20 | " print('['+ ', '.join('%.3f'%x for x in old) + '] -> ['+ ', '.join('%.3f'%x for x in new) +']')\n",
21 | "\n",
22 | "path = [[0, 0],\n",
23 | " [0, 1],\n",
24 | " [0, 2],\n",
25 | " [1, 2],\n",
26 | " [2, 2],\n",
27 | " [3, 2],\n",
28 | " [4, 2],\n",
29 | " [4, 3],\n",
30 | " [4, 4]]"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": 17,
36 | "metadata": {},
37 | "outputs": [],
38 | "source": [
39 | "def smooth(path, weight_data=0.5, weight_smooth=0.1, tolerance=0.000001):\n",
40 | " # Make a deep copy of path into newpath\n",
41 | " newpath = deepcopy(path)\n",
42 | " err = 1\n",
43 | " while err > tolerance:\n",
44 | " inner_err = 0\n",
45 | " for i in range(1, len(path) - 1):\n",
46 | " for j in range(len(path[0])):\n",
47 | " aux = newpath[i][j]\n",
48 | " newpath[i][j] = newpath[i][j] + weight_data*(path[i][j] - newpath[i][j]) \\\n",
49 | " + weight_smooth*(newpath[i+1][j] + newpath[i-1][j] - 2*newpath[i][j])\n",
50 | " inner_err += abs(newpath[i][j] - aux)\n",
51 | " err = inner_err\n",
52 | " \n",
53 | " return newpath "
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": 20,
59 | "metadata": {},
60 | "outputs": [
61 | {
62 | "name": "stdout",
63 | "output_type": "stream",
64 | "text": [
65 | "[0.000, 0.000] -> [0.000, 0.000]\n",
66 | "[0.000, 1.000] -> [0.021, 0.979]\n",
67 | "[0.000, 2.000] -> [0.149, 1.851]\n",
68 | "[1.000, 2.000] -> [1.021, 1.979]\n",
69 | "[2.000, 2.000] -> [2.000, 2.000]\n",
70 | "[3.000, 2.000] -> [2.979, 2.021]\n",
71 | "[4.000, 2.000] -> [3.851, 2.149]\n",
72 | "[4.000, 3.000] -> [3.979, 3.021]\n",
73 | "[4.000, 4.000] -> [4.000, 4.000]\n"
74 | ]
75 | }
76 | ],
77 | "source": [
78 | "printpaths(path, smooth(path))"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": 26,
84 | "metadata": {},
85 | "outputs": [],
86 | "source": [
87 | "path = [[0, 0], \n",
88 | " [1, 0],\n",
89 | " [2, 0],\n",
90 | " [3, 0],\n",
91 | " [4, 0],\n",
92 | " [5, 0],\n",
93 | " [6, 0],\n",
94 | " [6, 1],\n",
95 | " [6, 2],\n",
96 | " [6, 3],\n",
97 | " [5, 3],\n",
98 | " [4, 3],\n",
99 | " [3, 3],\n",
100 | " [2, 3],\n",
101 | " [1, 3],\n",
102 | " [0, 3],\n",
103 | " [0, 2],\n",
104 | " [0, 1]]\n",
105 | "\n",
106 | "def smooth2(path, weight_data=0.5, weight_smooth=0.1, tolerance=0.000001):\n",
107 | " # Make a deep copy of path into newpath\n",
108 | " newpath = deepcopy(path)\n",
109 | " err = 1\n",
110 | " n = len(path)\n",
111 | " while err > tolerance:\n",
112 | " inner_err = 0\n",
113 | " for i in range(len(path)):\n",
114 | " for j in range(len(path[0])):\n",
115 | " aux = newpath[i][j]\n",
116 | " newpath[i][j] = newpath[i][j] + weight_data*(path[i][j] - newpath[i][j]) \\\n",
117 | " + weight_smooth*(newpath[(i+1)%n][j] + newpath[(i-1)%n][j] - 2*newpath[i][j])\n",
118 | " inner_err += abs(newpath[i][j] - aux)\n",
119 | " err = inner_err\n",
120 | " \n",
121 | " return newpath "
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": 27,
127 | "metadata": {},
128 | "outputs": [
129 | {
130 | "name": "stdout",
131 | "output_type": "stream",
132 | "text": [
133 | "[0.000, 0.000] -> [0.150, 0.149]\n",
134 | "[1.000, 0.000] -> [1.022, 0.022]\n",
135 | "[2.000, 0.000] -> [2.003, 0.003]\n",
136 | "[3.000, 0.000] -> [3.000, 0.001]\n",
137 | "[4.000, 0.000] -> [3.997, 0.003]\n",
138 | "[5.000, 0.000] -> [4.978, 0.022]\n",
139 | "[6.000, 0.000] -> [5.850, 0.149]\n",
140 | "[6.000, 1.000] -> [5.975, 1.019]\n",
141 | "[6.000, 2.000] -> [5.975, 1.981]\n",
142 | "[6.000, 3.000] -> [5.850, 2.851]\n",
143 | "[5.000, 3.000] -> [4.978, 2.978]\n",
144 | "[4.000, 3.000] -> [3.997, 2.997]\n",
145 | "[3.000, 3.000] -> [3.000, 2.999]\n",
146 | "[2.000, 3.000] -> [2.003, 2.997]\n",
147 | "[1.000, 3.000] -> [1.022, 2.978]\n",
148 | "[0.000, 3.000] -> [0.150, 2.851]\n",
149 | "[0.000, 2.000] -> [0.025, 1.981]\n",
150 | "[0.000, 1.000] -> [0.025, 1.019]\n"
151 | ]
152 | }
153 | ],
154 | "source": [
155 | "printpaths(path, smooth2(path))"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": null,
161 | "metadata": {},
162 | "outputs": [],
163 | "source": []
164 | }
165 | ],
166 | "metadata": {
167 | "kernelspec": {
168 | "display_name": "Python 3",
169 | "language": "python",
170 | "name": "python3"
171 | },
172 | "language_info": {
173 | "codemirror_mode": {
174 | "name": "ipython",
175 | "version": 3
176 | },
177 | "file_extension": ".py",
178 | "mimetype": "text/x-python",
179 | "name": "python",
180 | "nbconvert_exporter": "python",
181 | "pygments_lexer": "ipython3",
182 | "version": "3.6.5"
183 | }
184 | },
185 | "nbformat": 4,
186 | "nbformat_minor": 2
187 | }
188 |
--------------------------------------------------------------------------------
/Dynamic-Time-Warping/README.md:
--------------------------------------------------------------------------------
1 | Dynamic Time Warping
2 | ---
3 | https://towardsdatascience.com/dynamic-time-warping-3933f25fcdd
4 |
--------------------------------------------------------------------------------
/Dynamic-Time-Warping/dynamic-time-warping.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "DTW\n",
8 | "---\n",
9 | "It’s a technique used to dynamically compare time series data when the time indices between comparison data points do not sync up perfectly.\n",
10 | "\n",
11 | "In general, DTW is a method that calculates an optimal match between two given sequences (e.g. time series) with certain restriction and rules:\n",
12 | "\n",
13 | "- Every index from the first sequence must be matched with one or more indices from the other sequence, and vice versa\n",
14 | "- The first index from the first sequence must be matched with the first index from the other sequence (but it does not have to be its only match)\n",
15 | "- The last index from the first sequence must be matched with the last index from the other sequence (but it does not have to be its only match)\n",
16 | "- The mapping of the indices from the first sequence to indices from the other sequence must be monotonically increasing, and vice versa.\n",
17 | "\n",
18 | "The optimal match is denoted by the match that satisfies all the restrictions and the rules and that has the minimal cost, where the cost is computed as the sum of absolute differences, for each matched pair of indices, between their values.\n",
19 | "\n",
20 | "---\n",
21 | "Pseudo Code\n",
22 | "\n",
23 | "```\n",
24 | "int DTWDistance(s: array [1..n], t: array [1..m]) {\n",
25 | " DTW := array [0..n, 0..m]\n",
26 | " \n",
27 | " for i := 1 to n\n",
28 | " for j := 1 to m\n",
29 | " DTW[i, j] := infinity\n",
30 | " DTW[0, 0] := 0\n",
31 | " \n",
32 | " for i := 1 to n\n",
33 | " for j := 1 to m\n",
34 | " cost := d(s[i], t[j])\n",
35 | " DTW[i, j] := cost + minimum(DTW[i-1, j ], // insertion\n",
36 | " DTW[i , j-1], // deletion\n",
37 | " DTW[i-1, j-1]) // match\n",
38 | " \n",
39 | " return DTW[n, m]\n",
40 | "}\n",
41 | "```\n",
42 | "where $DTW[i, j]$ is the distance between $s[1:i]$ and $t[1:j]$ with the best alignment.\n"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 1,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "import numpy as np"
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": 2,
57 | "metadata": {},
58 | "outputs": [],
59 | "source": [
60 | "def dtw(s, t):\n",
61 | " n, m = len(s), len(t)\n",
62 | " dtw_matrix = np.zeros((n+1, m+1))\n",
63 | " for i in range(n+1):\n",
64 | " for j in range(m+1):\n",
65 | " dtw_matrix[i, j] = np.inf\n",
66 | " dtw_matrix[0, 0] = 0\n",
67 | " \n",
68 | " for i in range(1, n+1):\n",
69 | " for j in range(1, m+1):\n",
70 | " cost = abs(s[i-1] - t[j-1])\n",
71 | " # take last min from a square box\n",
72 | " last_min = np.min([dtw_matrix[i-1, j], dtw_matrix[i, j-1], dtw_matrix[i-1, j-1]])\n",
73 | " dtw_matrix[i, j] = cost + last_min\n",
74 | " return dtw_matrix"
75 | ]
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": 3,
80 | "metadata": {},
81 | "outputs": [
82 | {
83 | "data": {
84 | "text/plain": [
85 | "array([[ 0., inf, inf, inf, inf, inf],\n",
86 | " [inf, 1., 2., 3., 5., 8.],\n",
87 | " [inf, 1., 1., 1., 2., 4.],\n",
88 | " [inf, 2., 2., 2., 1., 2.]])"
89 | ]
90 | },
91 | "execution_count": 3,
92 | "metadata": {},
93 | "output_type": "execute_result"
94 | }
95 | ],
96 | "source": [
97 | "a = [1, 2, 3]\n",
98 | "b = [2, 2, 2, 3, 4]\n",
99 | "\n",
100 | "dtw(a, b)"
101 | ]
102 | },
103 | {
104 | "cell_type": "markdown",
105 | "metadata": {},
106 | "source": [
107 | "Add Window Constraint\n",
108 | "---\n",
109 | "We sometimes want to add a locality constraint. That is, we require that if $s[i]$ is matched with $t[j]$, then $|i - j|$ is no larger than $w$, a window parameter.\n",
110 | "\n",
111 | "We can easily modify the above algorithm to add a locality constraint (differences marked). However, the above given modification works only if $| n - m |$ is no larger than $w$, i.e. the end point is within the window length from diagonal. In order to make the algorithm work, the window parameter $w$ must be adapted so that $|n-m| \\leq w$\n",
112 | "\n",
113 | "---\n",
114 | "Pseudo Code\n",
115 | "```\n",
116 | "int DTWDistance(s: array [1..n], t: array [1..m], w: int) {\n",
117 | " DTW := array [0..n, 0..m]\n",
118 | "\n",
119 | " w := max(w, abs(n-m)) // adapt window size (*)\n",
120 | "\n",
121 | " for i := 0 to n\n",
122 | " for j:= 0 to m\n",
123 | " DTW[i, j] := infinity\n",
124 | " DTW[0, 0] := 0\n",
125 | " \n",
126 | " for i := 1 to n\n",
127 | " for j := max(1, i-w) to min(m, i+w)\n",
128 | " DTW[i, j] := 0\n",
129 | "\n",
130 | " for i := 1 to n\n",
131 | " for j := max(1, i-w) to min(m, i+w)\n",
132 | " cost := d(s[i], t[j])\n",
133 | " DTW[i, j] := cost + minimum(DTW[i-1, j ], // insertion\n",
134 | " DTW[i , j-1], // deletion\n",
135 | " DTW[i-1, j-1]) // match\n",
136 | "\n",
137 | " return DTW[n, m]\n",
138 | "}\n",
139 | "```"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": 4,
145 | "metadata": {},
146 | "outputs": [],
147 | "source": [
148 | "def dtw(s, t, window):\n",
149 | " n, m = len(s), len(t)\n",
150 | " w = np.max([window, abs(n-m)])\n",
151 | " dtw_matrix = np.zeros((n+1, m+1))\n",
152 | " \n",
153 | " for i in range(n+1):\n",
154 | " for j in range(m+1):\n",
155 | " dtw_matrix[i, j] = np.inf\n",
156 | " dtw_matrix[0, 0] = 0\n",
157 | " \n",
158 | " for i in range(1, n+1):\n",
159 | " for j in range(np.max([1, i-w]), np.min([m, i+w])+1):\n",
160 | " dtw_matrix[i, j] = 0\n",
161 | " \n",
162 | " for i in range(1, n+1):\n",
163 | " for j in range(np.max([1, i-w]), np.min([m, i+w])+1):\n",
164 | " cost = abs(s[i-1] - t[j-1])\n",
165 | " # take last min from a square box\n",
166 | " last_min = np.min([dtw_matrix[i-1, j], dtw_matrix[i, j-1], dtw_matrix[i-1, j-1]])\n",
167 | " dtw_matrix[i, j] = cost + last_min\n",
168 | " return dtw_matrix"
169 | ]
170 | },
171 | {
172 | "cell_type": "code",
173 | "execution_count": 5,
174 | "metadata": {},
175 | "outputs": [
176 | {
177 | "data": {
178 | "text/plain": [
179 | "array([[ 0., inf, inf, inf, inf, inf, inf, inf, inf],\n",
180 | " [inf, 0., 1., 2., 3., inf, inf, inf, inf],\n",
181 | " [inf, 1., 0., 0., 0., 0., inf, inf, inf],\n",
182 | " [inf, 3., 1., 1., 1., 1., 1., inf, inf],\n",
183 | " [inf, 5., 2., 2., 2., 2., 2., 2., inf],\n",
184 | " [inf, inf, 5., 5., 5., 5., 5., 5., 3.]])"
185 | ]
186 | },
187 | "execution_count": 5,
188 | "metadata": {},
189 | "output_type": "execute_result"
190 | }
191 | ],
192 | "source": [
193 | "a = [1, 2, 3, 3, 5]\n",
194 | "b = [1, 2, 2, 2, 2, 2, 2, 4]\n",
195 | "\n",
196 | "dtw(a, b, window=3)"
197 | ]
198 | },
199 | {
200 | "cell_type": "markdown",
201 | "metadata": {},
202 | "source": [
203 | "# Package"
204 | ]
205 | },
206 | {
207 | "cell_type": "code",
208 | "execution_count": 6,
209 | "metadata": {},
210 | "outputs": [],
211 | "source": [
212 | "from fastdtw import fastdtw\n",
213 | "from scipy.spatial.distance import euclidean"
214 | ]
215 | },
216 | {
217 | "cell_type": "code",
218 | "execution_count": 7,
219 | "metadata": {},
220 | "outputs": [
221 | {
222 | "name": "stdout",
223 | "output_type": "stream",
224 | "text": [
225 | "5.0\n",
226 | "[(0, 0), (1, 1), (1, 2), (1, 3), (1, 4), (2, 5), (3, 6), (4, 7)]\n"
227 | ]
228 | }
229 | ],
230 | "source": [
231 | "x = np.array([1, 2, 3, 3, 7])\n",
232 | "y = np.array([1, 2, 2, 2, 2, 2, 2, 4])\n",
233 | "\n",
234 | "distance, path = fastdtw(x, y, dist=euclidean)\n",
235 | "\n",
236 | "print(distance)\n",
237 | "print(path)"
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": null,
243 | "metadata": {},
244 | "outputs": [],
245 | "source": []
246 | }
247 | ],
248 | "metadata": {
249 | "kernelspec": {
250 | "display_name": "Python 3",
251 | "language": "python",
252 | "name": "python3"
253 | },
254 | "language_info": {
255 | "codemirror_mode": {
256 | "name": "ipython",
257 | "version": 3
258 | },
259 | "file_extension": ".py",
260 | "mimetype": "text/x-python",
261 | "name": "python",
262 | "nbconvert_exporter": "python",
263 | "pygments_lexer": "ipython3",
264 | "version": "3.6.5"
265 | }
266 | },
267 | "nbformat": 4,
268 | "nbformat_minor": 2
269 | }
270 |
--------------------------------------------------------------------------------
/Dynamic-Time-Warping/dynamic-time-warping.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def dtw(s, t):
5 | n, m = len(s), len(t)
6 | dtw_matrix = np.zeros((n+1, m+1))
7 | for i in range(n+1):
8 | for j in range(m+1):
9 | dtw_matrix[i, j] = np.inf
10 | dtw_matrix[0, 0] = 0
11 |
12 | for i in range(1, n+1):
13 | for j in range(1, m+1):
14 | cost = abs(s[i-1] - t[j-1])
15 | # take last min from a square box
16 | last_min = np.min([dtw_matrix[i-1, j], dtw_matrix[i, j-1], dtw_matrix[i-1, j-1]])
17 | dtw_matrix[i, j] = cost + last_min
18 | return dtw_matrix
19 |
20 |
21 | def dtw2(s, t, window):
22 | n, m = len(s), len(t)
23 | w = np.max([window, abs(n-m)])
24 | dtw_matrix = np.zeros((n+1, m+1))
25 |
26 | for i in range(n+1):
27 | for j in range(m+1):
28 | dtw_matrix[i, j] = np.inf
29 | dtw_matrix[0, 0] = 0
30 |
31 | for i in range(1, n+1):
32 | for j in range(np.max([1, i-w]), np.min([m, i+w])+1):
33 | dtw_matrix[i, j] = 0
34 |
35 | for i in range(1, n+1):
36 | for j in range(np.max([1, i-w]), np.min([m, i+w])+1):
37 | cost = abs(s[i-1] - t[j-1])
38 | # take last min from a square box
39 | last_min = np.min([dtw_matrix[i-1, j], dtw_matrix[i, j-1], dtw_matrix[i-1, j-1]])
40 | dtw_matrix[i, j] = cost + last_min
41 | return dtw_matrix
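42 | 
43 | 
44 | if __name__ == '__main__':
45 |     # Minimal usage sketch (not part of the original module): the example
46 |     # sequences below mirror the ones used in dynamic-time-warping.ipynb.
47 |     a = [1, 2, 3, 3, 5]
48 |     b = [1, 2, 2, 2, 2, 2, 2, 4]
49 |     print(dtw(a, b))             # full (n+1) x (m+1) accumulated-cost matrix
50 |     print(dtw2(a, b, window=3))  # window-constrained accumulated-cost matrix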
--------------------------------------------------------------------------------
/Kaggle-NCAA/test2.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "A Prediction that totally based on seed difference: \n",
8 | "[kaggle 1](https://www.kaggle.com/ateplyuk/lgbm-str-w)"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "import numpy as np\n",
18 | "import pandas as pd\n",
19 | "from sklearn.model_selection import train_test_split \n",
20 | "import lightgbm as lgb"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 2,
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "t_res = pd.read_csv('womens-machine-learning-competition-2019/WDataFiles/WNCAATourneyCompactResults.csv')\n",
30 | "t_ds = pd.read_csv('womens-machine-learning-competition-2019/WDataFiles/WNCAATourneySeeds.csv')\n",
31 | "sub = pd.read_csv('womens-machine-learning-competition-2019/WSampleSubmissionStage1.csv')"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 3,
37 | "metadata": {},
38 | "outputs": [
39 | {
40 | "name": "stdout",
41 | "output_type": "stream",
42 | "text": [
43 | "(1323, 8)\n"
44 | ]
45 | },
46 | {
47 | "data": {
48 | "text/html": [
49 | "
\n",
50 | "\n",
63 | "
\n",
64 | " \n",
65 | " \n",
66 | " | \n",
67 | " Season | \n",
68 | " DayNum | \n",
69 | " WTeamID | \n",
70 | " WScore | \n",
71 | " LTeamID | \n",
72 | " LScore | \n",
73 | " WLoc | \n",
74 | " NumOT | \n",
75 | "
\n",
76 | " \n",
77 | " \n",
78 | " \n",
79 | " 0 | \n",
80 | " 1998 | \n",
81 | " 137 | \n",
82 | " 3104 | \n",
83 | " 94 | \n",
84 | " 3422 | \n",
85 | " 46 | \n",
86 | " H | \n",
87 | " 0 | \n",
88 | "
\n",
89 | " \n",
90 | " 1 | \n",
91 | " 1998 | \n",
92 | " 137 | \n",
93 | " 3112 | \n",
94 | " 75 | \n",
95 | " 3365 | \n",
96 | " 63 | \n",
97 | " H | \n",
98 | " 0 | \n",
99 | "
\n",
100 | " \n",
101 | " 2 | \n",
102 | " 1998 | \n",
103 | " 137 | \n",
104 | " 3163 | \n",
105 | " 93 | \n",
106 | " 3193 | \n",
107 | " 52 | \n",
108 | " H | \n",
109 | " 0 | \n",
110 | "
\n",
111 | " \n",
112 | " 3 | \n",
113 | " 1998 | \n",
114 | " 137 | \n",
115 | " 3198 | \n",
116 | " 59 | \n",
117 | " 3266 | \n",
118 | " 45 | \n",
119 | " H | \n",
120 | " 0 | \n",
121 | "
\n",
122 | " \n",
123 | " 4 | \n",
124 | " 1998 | \n",
125 | " 137 | \n",
126 | " 3203 | \n",
127 | " 74 | \n",
128 | " 3208 | \n",
129 | " 72 | \n",
130 | " A | \n",
131 | " 0 | \n",
132 | "
\n",
133 | " \n",
134 | "
\n",
135 | "
"
136 | ],
137 | "text/plain": [
138 | " Season DayNum WTeamID WScore LTeamID LScore WLoc NumOT\n",
139 | "0 1998 137 3104 94 3422 46 H 0\n",
140 | "1 1998 137 3112 75 3365 63 H 0\n",
141 | "2 1998 137 3163 93 3193 52 H 0\n",
142 | "3 1998 137 3198 59 3266 45 H 0\n",
143 | "4 1998 137 3203 74 3208 72 A 0"
144 | ]
145 | },
146 | "execution_count": 3,
147 | "metadata": {},
148 | "output_type": "execute_result"
149 | }
150 | ],
151 | "source": [
152 | "print(t_res.shape)\n",
153 | "t_res.head()"
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "execution_count": 4,
159 | "metadata": {},
160 | "outputs": [
161 | {
162 | "data": {
163 | "text/html": [
164 | "\n",
165 | "\n",
178 | "
\n",
179 | " \n",
180 | " \n",
181 | " | \n",
182 | " Season | \n",
183 | " Seed | \n",
184 | " TeamID | \n",
185 | "
\n",
186 | " \n",
187 | " \n",
188 | " \n",
189 | " 0 | \n",
190 | " 1998 | \n",
191 | " W01 | \n",
192 | " 3330 | \n",
193 | "
\n",
194 | " \n",
195 | " 1 | \n",
196 | " 1998 | \n",
197 | " W02 | \n",
198 | " 3163 | \n",
199 | "
\n",
200 | " \n",
201 | " 2 | \n",
202 | " 1998 | \n",
203 | " W03 | \n",
204 | " 3112 | \n",
205 | "
\n",
206 | " \n",
207 | " 3 | \n",
208 | " 1998 | \n",
209 | " W04 | \n",
210 | " 3301 | \n",
211 | "
\n",
212 | " \n",
213 | " 4 | \n",
214 | " 1998 | \n",
215 | " W05 | \n",
216 | " 3272 | \n",
217 | "
\n",
218 | " \n",
219 | "
\n",
220 | "
"
221 | ],
222 | "text/plain": [
223 | " Season Seed TeamID\n",
224 | "0 1998 W01 3330\n",
225 | "1 1998 W02 3163\n",
226 | "2 1998 W03 3112\n",
227 | "3 1998 W04 3301\n",
228 | "4 1998 W05 3272"
229 | ]
230 | },
231 | "execution_count": 4,
232 | "metadata": {},
233 | "output_type": "execute_result"
234 | }
235 | ],
236 | "source": [
237 | "t_ds.head()"
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": 5,
243 | "metadata": {},
244 | "outputs": [
245 | {
246 | "data": {
247 | "text/html": [
248 | "\n",
249 | "\n",
262 | "
\n",
263 | " \n",
264 | " \n",
265 | " | \n",
266 | " ID | \n",
267 | " Pred | \n",
268 | "
\n",
269 | " \n",
270 | " \n",
271 | " \n",
272 | " 0 | \n",
273 | " 2014_3103_3107 | \n",
274 | " 0.5 | \n",
275 | "
\n",
276 | " \n",
277 | " 1 | \n",
278 | " 2014_3103_3113 | \n",
279 | " 0.5 | \n",
280 | "
\n",
281 | " \n",
282 | " 2 | \n",
283 | " 2014_3103_3119 | \n",
284 | " 0.5 | \n",
285 | "
\n",
286 | " \n",
287 | " 3 | \n",
288 | " 2014_3103_3124 | \n",
289 | " 0.5 | \n",
290 | "
\n",
291 | " \n",
292 | " 4 | \n",
293 | " 2014_3103_3140 | \n",
294 | " 0.5 | \n",
295 | "
\n",
296 | " \n",
297 | "
\n",
298 | "
"
299 | ],
300 | "text/plain": [
301 | " ID Pred\n",
302 | "0 2014_3103_3107 0.5\n",
303 | "1 2014_3103_3113 0.5\n",
304 | "2 2014_3103_3119 0.5\n",
305 | "3 2014_3103_3124 0.5\n",
306 | "4 2014_3103_3140 0.5"
307 | ]
308 | },
309 | "execution_count": 5,
310 | "metadata": {},
311 | "output_type": "execute_result"
312 | }
313 | ],
314 | "source": [
315 | "sub.head()"
316 | ]
317 | },
318 | {
319 | "cell_type": "code",
320 | "execution_count": 6,
321 | "metadata": {},
322 | "outputs": [],
323 | "source": [
324 | "# seed to int\n",
325 | "t_ds['seed_int'] = t_ds.Seed.apply(lambda a: int(a[1:3]))"
326 | ]
327 | },
328 | {
329 | "cell_type": "code",
330 | "execution_count": 7,
331 | "metadata": {},
332 | "outputs": [
333 | {
334 | "data": {
335 | "text/html": [
336 | "\n",
337 | "\n",
350 | "
\n",
351 | " \n",
352 | " \n",
353 | " | \n",
354 | " Season | \n",
355 | " WTeamID | \n",
356 | " LTeamID | \n",
357 | "
\n",
358 | " \n",
359 | " \n",
360 | " \n",
361 | " 0 | \n",
362 | " 1998 | \n",
363 | " 3104 | \n",
364 | " 3422 | \n",
365 | "
\n",
366 | " \n",
367 | " 1 | \n",
368 | " 1998 | \n",
369 | " 3112 | \n",
370 | " 3365 | \n",
371 | "
\n",
372 | " \n",
373 | " 2 | \n",
374 | " 1998 | \n",
375 | " 3163 | \n",
376 | " 3193 | \n",
377 | "
\n",
378 | " \n",
379 | " 3 | \n",
380 | " 1998 | \n",
381 | " 3198 | \n",
382 | " 3266 | \n",
383 | "
\n",
384 | " \n",
385 | " 4 | \n",
386 | " 1998 | \n",
387 | " 3203 | \n",
388 | " 3208 | \n",
389 | "
\n",
390 | " \n",
391 | "
\n",
392 | "
"
393 | ],
394 | "text/plain": [
395 | " Season WTeamID LTeamID\n",
396 | "0 1998 3104 3422\n",
397 | "1 1998 3112 3365\n",
398 | "2 1998 3163 3193\n",
399 | "3 1998 3198 3266\n",
400 | "4 1998 3203 3208"
401 | ]
402 | },
403 | "execution_count": 7,
404 | "metadata": {},
405 | "output_type": "execute_result"
406 | }
407 | ],
408 | "source": [
409 | "drop_lbls = ['DayNum', 'WScore', 'LScore', 'WLoc', 'NumOT']\n",
410 | "t_ds.drop(labels=['Seed'], inplace=True, axis=1)\n",
411 | "t_res.drop(labels=drop_lbls, inplace=True, axis=1)\n",
412 | "\n",
413 | "t_res.head()"
414 | ]
415 | },
416 | {
417 | "cell_type": "code",
418 | "execution_count": 8,
419 | "metadata": {},
420 | "outputs": [
421 | {
422 | "data": {
423 | "text/html": [
424 | "\n",
425 | "\n",
438 | "
\n",
439 | " \n",
440 | " \n",
441 | " | \n",
442 | " dff | \n",
443 | " rsl | \n",
444 | "
\n",
445 | " \n",
446 | " \n",
447 | " \n",
448 | " 0 | \n",
449 | " -13 | \n",
450 | " 1 | \n",
451 | "
\n",
452 | " \n",
453 | " 1 | \n",
454 | " -11 | \n",
455 | " 1 | \n",
456 | "
\n",
457 | " \n",
458 | " 2 | \n",
459 | " -13 | \n",
460 | " 1 | \n",
461 | "
\n",
462 | " \n",
463 | " 3 | \n",
464 | " -3 | \n",
465 | " 1 | \n",
466 | "
\n",
467 | " \n",
468 | " 4 | \n",
469 | " 3 | \n",
470 | " 1 | \n",
471 | "
\n",
472 | " \n",
473 | "
\n",
474 | "
"
475 | ],
476 | "text/plain": [
477 | " dff rsl\n",
478 | "0 -13 1\n",
479 | "1 -11 1\n",
480 | "2 -13 1\n",
481 | "3 -3 1\n",
482 | "4 3 1"
483 | ]
484 | },
485 | "execution_count": 8,
486 | "metadata": {},
487 | "output_type": "execute_result"
488 | }
489 | ],
490 | "source": [
491 | "ren1 = {'TeamID':'WTeamID', 'seed_int':'WS'}\n",
492 | "ren2 = {'TeamID':'LTeamID', 'seed_int':'LS'}\n",
493 | "\n",
494 | "df1 = pd.merge(left=t_res, right=t_ds.rename(columns=ren1), how='left', on=['Season', 'WTeamID'])\n",
495 | "df2 = pd.merge(left=df1, right=t_ds.rename(columns=ren2), on=['Season', 'LTeamID'])\n",
496 | "\n",
497 | "df_w = pd.DataFrame()\n",
498 | "df_w['dff'] = df2.WS - df2.LS # seed diff\n",
499 | "df_w['rsl'] = 1\n",
500 | "\n",
501 | "df_l = pd.DataFrame()\n",
502 | "df_l['dff'] = -df_w['dff']\n",
503 | "df_l['rsl'] = 0\n",
504 | "\n",
505 | "df_prd = pd.concat((df_w, df_l))\n",
506 | "\n",
507 | "df_prd.head()"
508 | ]
509 | },
510 | {
511 | "cell_type": "code",
512 | "execution_count": 10,
513 | "metadata": {},
514 | "outputs": [
515 | {
516 | "data": {
517 | "text/html": [
518 | "\n",
519 | "\n",
532 | "
\n",
533 | " \n",
534 | " \n",
535 | " | \n",
536 | " Season | \n",
537 | " WTeamID | \n",
538 | " LTeamID | \n",
539 | " WS | \n",
540 | " LS | \n",
541 | "
\n",
542 | " \n",
543 | " \n",
544 | " \n",
545 | " 0 | \n",
546 | " 1998 | \n",
547 | " 3104 | \n",
548 | " 3422 | \n",
549 | " 2 | \n",
550 | " 15 | \n",
551 | "
\n",
552 | " \n",
553 | " 1 | \n",
554 | " 1998 | \n",
555 | " 3112 | \n",
556 | " 3365 | \n",
557 | " 3 | \n",
558 | " 14 | \n",
559 | "
\n",
560 | " \n",
561 | " 2 | \n",
562 | " 1998 | \n",
563 | " 3163 | \n",
564 | " 3193 | \n",
565 | " 2 | \n",
566 | " 15 | \n",
567 | "
\n",
568 | " \n",
569 | " 3 | \n",
570 | " 1998 | \n",
571 | " 3198 | \n",
572 | " 3266 | \n",
573 | " 7 | \n",
574 | " 10 | \n",
575 | "
\n",
576 | " \n",
577 | " 4 | \n",
578 | " 1998 | \n",
579 | " 3203 | \n",
580 | " 3208 | \n",
581 | " 10 | \n",
582 | " 7 | \n",
583 | "
\n",
584 | " \n",
585 | "
\n",
586 | "
"
587 | ],
588 | "text/plain": [
589 | " Season WTeamID LTeamID WS LS\n",
590 | "0 1998 3104 3422 2 15\n",
591 | "1 1998 3112 3365 3 14\n",
592 | "2 1998 3163 3193 2 15\n",
593 | "3 1998 3198 3266 7 10\n",
594 | "4 1998 3203 3208 10 7"
595 | ]
596 | },
597 | "execution_count": 10,
598 | "metadata": {},
599 | "output_type": "execute_result"
600 | }
601 | ],
602 | "source": [
603 | "df2.head()"
604 | ]
605 | },
606 | {
607 | "cell_type": "code",
608 | "execution_count": 11,
609 | "metadata": {},
610 | "outputs": [],
611 | "source": [
612 | "X = df_prd.dff.values.reshape(-1,1)\n",
613 | "y = df_prd.rsl.values"
614 | ]
615 | },
616 | {
617 | "cell_type": "code",
618 | "execution_count": 12,
619 | "metadata": {},
620 | "outputs": [],
621 | "source": [
622 | "X_test = np.zeros(shape=(len(sub), 1))"
623 | ]
624 | },
625 | {
626 | "cell_type": "code",
627 | "execution_count": 14,
628 | "metadata": {},
629 | "outputs": [],
630 | "source": [
631 | "# get only seed diff\n",
632 | "for ind, row in sub.iterrows():\n",
633 | " yr, o, t = [int(x) for x in row.ID.split('_')] \n",
634 | " X_test[ind, 0] = t_ds[(t_ds.TeamID == o) & (t_ds.Season == yr)].seed_int.values[0] - t_ds[(t_ds.TeamID == t) & (t_ds.Season == yr)].seed_int.values[0]"
635 | ]
636 | },
637 | {
638 | "cell_type": "code",
639 | "execution_count": 16,
640 | "metadata": {},
641 | "outputs": [
642 | {
643 | "data": {
644 | "text/plain": [
645 | "(10080, 1)"
646 | ]
647 | },
648 | "execution_count": 16,
649 | "metadata": {},
650 | "output_type": "execute_result"
651 | }
652 | ],
653 | "source": [
654 | "X_test.shape"
655 | ]
656 | },
657 | {
658 | "cell_type": "code",
659 | "execution_count": 17,
660 | "metadata": {},
661 | "outputs": [],
662 | "source": [
663 | "X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.1, random_state=0)"
664 | ]
665 | },
666 | {
667 | "cell_type": "code",
668 | "execution_count": 19,
669 | "metadata": {},
670 | "outputs": [
671 | {
672 | "name": "stdout",
673 | "output_type": "stream",
674 | "text": [
675 | "Train until valid scores didn't improve in 50 rounds.\n",
676 | "[200]\tvalid_0's binary_logloss: 0.438127\n",
677 | "[400]\tvalid_0's binary_logloss: 0.417324\n",
678 | "[600]\tvalid_0's binary_logloss: 0.414788\n",
679 | "[800]\tvalid_0's binary_logloss: 0.414452\n",
680 | "[1000]\tvalid_0's binary_logloss: 0.414407\n"
681 | ]
682 | }
683 | ],
684 | "source": [
685 | "params = {\"objective\": \"binary\",\n",
686 | " \"boosting_type\": \"gbdt\",\n",
687 | " \"learning_rate\": 0.01,\n",
688 | " \"num_leaves\": 31,\n",
689 | " \"min_data_in_leaf\": 10,\n",
690 | " \"min_child_samples\": 10,\n",
691 | " \"metric\": \"binary_logloss\"\n",
692 | " }\n",
693 | "\n",
694 | "dtrain = lgb.Dataset(X_train, y_train)\n",
695 | "dvalid = lgb.Dataset(X_val, y_val, reference=dtrain)\n",
696 | "\n",
697 | "bst = lgb.train(params, dtrain, 1000, valid_sets=dvalid, verbose_eval=200, early_stopping_rounds=50)"
698 | ]
699 | },
700 | {
701 | "cell_type": "code",
702 | "execution_count": 21,
703 | "metadata": {},
704 | "outputs": [
705 | {
706 | "data": {
707 | "text/plain": [
708 | "array([7.69219126e-01, 2.40751952e-01, 5.00000000e-01, 2.26211837e-05,\n",
709 | " 4.57015430e-01, 1.52557423e-01, 2.30780874e-01, 2.26211837e-05,\n",
710 | " 6.91420302e-01, 1.52557423e-01])"
711 | ]
712 | },
713 | "execution_count": 21,
714 | "metadata": {},
715 | "output_type": "execute_result"
716 | }
717 | ],
718 | "source": [
719 | "test_pred = bst.predict(X_test, num_iteration=bst.best_iteration)\n",
720 | "\n",
721 | "test_pred[:10]"
722 | ]
723 | },
724 | {
725 | "cell_type": "code",
726 | "execution_count": null,
727 | "metadata": {},
728 | "outputs": [],
729 | "source": []
730 | }
731 | ],
732 | "metadata": {
733 | "kernelspec": {
734 | "display_name": "Python 3",
735 | "language": "python",
736 | "name": "python3"
737 | },
738 | "language_info": {
739 | "codemirror_mode": {
740 | "name": "ipython",
741 | "version": 3
742 | },
743 | "file_extension": ".py",
744 | "mimetype": "text/x-python",
745 | "name": "python",
746 | "nbconvert_exporter": "python",
747 | "pygments_lexer": "ipython3",
748 | "version": "3.6.5"
749 | }
750 | },
751 | "nbformat": 4,
752 | "nbformat_minor": 2
753 | }
754 |
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Kaggle-NCAA/womens-machine-learning-competition-2019/.DS_Store
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles.zip
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles/WCities.csv:
--------------------------------------------------------------------------------
1 | CityID,City,State
2 | 4001,Abilene,TX
3 | 4002,Akron,OH
4 | 4003,Albany,NY
5 | 4004,Albuquerque,NM
6 | 4005,Allentown,PA
7 | 4006,Ames,IA
8 | 4007,Amherst,MA
9 | 4008,Anaheim,CA
10 | 4009,Anchorage,AK
11 | 4010,Ann Arbor,MI
12 | 4011,Annapolis,MD
13 | 4012,Arlington,TX
14 | 4013,Asheville,NC
15 | 4014,Athens,GA
16 | 4015,Athens,OH
17 | 4016,Atlanta,GA
18 | 4017,Atlantic City,NJ
19 | 4018,Auburn,AL
20 | 4019,Auburn Hills,MI
21 | 4020,Austin,TX
22 | 4021,Bakersfield,CA
23 | 4022,Baltimore,MD
24 | 4023,Bangor,ME
25 | 4024,Baton Rouge,LA
26 | 4025,Bayamon,PR
27 | 4026,Beaumont,TX
28 | 4027,Berkeley,CA
29 | 4028,Bethlehem,PA
30 | 4029,Billings,MT
31 | 4030,Birmingham,AL
32 | 4031,Blacksburg,VA
33 | 4032,Bloomington,IN
34 | 4033,Boca Raton,FL
35 | 4034,Boiling Springs,NC
36 | 4035,Boise,ID
37 | 4036,Boone,NC
38 | 4037,Borinquen,PR
39 | 4038,Bossier City,LA
40 | 4039,Boston,MA
41 | 4040,Boulder,CO
42 | 4041,Bowling Green,KY
43 | 4042,Bowling Green,OH
44 | 4043,Bozeman,MT
45 | 4044,Bridgeport,CT
46 | 4045,Bronx,NY
47 | 4046,Brookings,SD
48 | 4047,Brooklyn,NY
49 | 4048,Brooklyn Heights,NY
50 | 4049,Buffalo,NY
51 | 4050,Buies Creek,NC
52 | 4051,Burlington,VT
53 | 4052,Cambridge,MA
54 | 4053,Cancun,MX
55 | 4054,Cape Girardeau,MO
56 | 4055,Carbondale,IL
57 | 4056,Casper,WY
58 | 4057,Cedar City,UT
59 | 4058,Cedar Falls,IA
60 | 4059,Cedar Park,TX
61 | 4060,Champaign,IL
62 | 4061,Chapel Hill,NC
63 | 4062,Charleston,IL
64 | 4063,Charleston,NC
65 | 4064,Charleston,SC
66 | 4065,Charleston,WV
67 | 4066,Charlotte,NC
68 | 4067,Charlottesville,VA
69 | 4068,Chattanooga,TN
70 | 4069,Cheney,WA
71 | 4070,Chestnut Hill,MA
72 | 4071,Chicago,IL
73 | 4072,Cincinnati,OH
74 | 4073,Clarksville,TN
75 | 4074,Clemson,SC
76 | 4075,Cleveland,OH
77 | 4076,Clinton,SC
78 | 4077,College Park,MD
79 | 4078,College Station,TX
80 | 4079,Columbia,SC
81 | 4080,Columbus,OH
82 | 4081,Conway,AR
83 | 4082,Conway,SC
84 | 4083,Cookeville,TN
85 | 4084,Coral Gables,FL
86 | 4085,Corpus Christi,TX
87 | 4086,Corvallis,OR
88 | 4087,Cullowhee,NC
89 | 4088,Dallas,TX
90 | 4089,Davidson,NC
91 | 4090,Davis,CA
92 | 4091,Dayton,OH
93 | 4092,Daytona Beach,FL
94 | 4093,DeKalb,IL
95 | 4094,DeLand,FL
96 | 4095,Denton,TX
97 | 4096,Denver,CO
98 | 4097,Des Moines,IA
99 | 4098,Detroit,MI
100 | 4099,Dover,DE
101 | 4100,Duluth,GA
102 | 4101,Durham,NC
103 | 4102,Durham,NH
104 | 4103,East Lansing,MI
105 | 4104,East Rutherford,NJ
106 | 4105,Easton,PA
107 | 4106,Edinburg,TX
108 | 4107,Edwardsville,IL
109 | 4108,El Paso,TX
110 | 4109,Elon,NC
111 | 4110,Emmitsburg,MD
112 | 4111,Estero,FL
113 | 4112,Eugene,OR
114 | 4113,Evanston,IL
115 | 4114,Evansville,IN
116 | 4115,Fairfax,VA
117 | 4116,Fairfield,CT
118 | 4117,Fargo,ND
119 | 4118,Farmville,VA
120 | 4119,Fayetteville,AR
121 | 4120,Flagstaff,AZ
122 | 4121,Fort Collins,CO
123 | 4122,Fort Hood,TX
124 | 4123,Fort Myers,FL
125 | 4124,Fort Wayne,IN
126 | 4125,Fort Worth,TX
127 | 4126,Fresno,CA
128 | 4127,Fullerton,CA
129 | 4128,Gainesville,FL
130 | 4129,Garland,TX
131 | 4130,Glendale,AZ
132 | 4131,Glens Falls,NY
133 | 4132,Grambling,LA
134 | 4133,Grand Forks,ND
135 | 4134,Greeley,CO
136 | 4135,Green Bay,WI
137 | 4136,Greensboro,NC
138 | 4137,Greenville,MS
139 | 4138,Greenville,NC
140 | 4139,Greenville,SC
141 | 4140,Greenwood,MS
142 | 4141,Gulfport,MS
143 | 4142,Hackensack,NJ
144 | 4143,Hamden,CT
145 | 4144,Hamilton,NY
146 | 4145,Hammond,LA
147 | 4146,Hampton,VA
148 | 4147,Hanover,NH
149 | 4148,Harrisonburg,VA
150 | 4149,Hartford,CT
151 | 4150,Hattiesburg,MS
152 | 4151,Hempstead,NY
153 | 4152,High Point,NC
154 | 4153,Highland Heights,KY
155 | 4154,Hoffman Estates,IL
156 | 4155,Honolulu,HI
157 | 4156,Hot Springs,AR
158 | 4157,Houston,TX
159 | 4158,Huntington,WV
160 | 4159,Huntsville,AL
161 | 4160,Huntsville,TX
162 | 4161,Indianapolis,IN
163 | 4162,Iowa City,IA
164 | 4163,Irvine,CA
165 | 4164,Ithaca,NY
166 | 4165,Itta Bena,MS
167 | 4166,Jackson,MS
168 | 4167,Jacksonville,AL
169 | 4168,Jacksonville,FL
170 | 4169,Jersey City,NJ
171 | 4170,Johnson City,TN
172 | 4171,Johnstown,PA
173 | 4172,Jonesboro,AR
174 | 4173,Kalamazoo,MI
175 | 4174,Kansas City,MO
176 | 4175,Katy,TX
177 | 4176,Kennesaw,GA
178 | 4177,Kent,OH
179 | 4178,Kent,WA
180 | 4179,Kingston,RI
181 | 4180,Knoxville,TN
182 | 4181,Lafayette,LA
183 | 4182,Lahaina,HI
184 | 4183,Lake Buena Vista,FL
185 | 4184,Lake Charles,LA
186 | 4185,Lakeland,FL
187 | 4186,Laramie,WY
188 | 4187,Las Cruces,NM
189 | 4188,Las Vegas,NV
190 | 4189,Lawrence,KS
191 | 4190,Lawrenceville,NJ
192 | 4191,Lewisburg,PA
193 | 4192,Lewiston,NY
194 | 4193,Lexington,KY
195 | 4194,Lexington,VA
196 | 4195,Lincoln,NE
197 | 4196,Little Rock,AR
198 | 4197,Logan,UT
199 | 4198,Long Beach,CA
200 | 4199,Loretto,PA
201 | 4200,Lorman,MS
202 | 4201,Los Angeles,CA
203 | 4202,Loudonville,NY
204 | 4203,Louisville,KY
205 | 4204,Lowell,MA
206 | 4205,Lubbock,TX
207 | 4206,Lynchburg,VA
208 | 4207,Macomb,IL
209 | 4208,Macon,GA
210 | 4209,Madison,WI
211 | 4210,Malibu,CA
212 | 4211,Manhattan,KS
213 | 4212,Martin,TN
214 | 4213,Memphis,TN
215 | 4214,Miami,FL
216 | 4215,Milwaukee,WI
217 | 4216,Minneapolis,MN
218 | 4217,Missoula,MT
219 | 4218,Mobile,AL
220 | 4219,Monroe,LA
221 | 4220,Montgomery,AL
222 | 4221,Moon Township,PA
223 | 4222,Moraga,CA
224 | 4223,Morehead,KY
225 | 4224,Morgantown,WV
226 | 4225,Moscow,ID
227 | 4226,Mt. Pleasant,MI
228 | 4227,Muncie,IN
229 | 4228,Murfreesboro,TN
230 | 4229,Murray,KY
231 | 4230,Myrtle Beach,SC
232 | 4231,Nacogdoches,TX
233 | 4232,Nampa,ID
234 | 4233,Nashville,TN
235 | 4234,Natchitoches,LA
236 | 4235,New Britain,CT
237 | 4236,New Haven,CT
238 | 4237,New Orleans,LA
239 | 4238,New Rochelle,NY
240 | 4239,New York,NY
241 | 4240,Newark,DE
242 | 4241,Newark,NJ
243 | 4242,Niagara Falls,NY
244 | 4243,Niceville,FL
245 | 4244,Norfolk,VA
246 | 4245,Normal,AL
247 | 4246,Normal,IL
248 | 4247,Norman,OK
249 | 4248,North Little Rock,AR
250 | 4249,Northridge,CA
251 | 4250,Notre Dame,IN
252 | 4251,Oakland,CA
253 | 4252,Oakland City,IN
254 | 4253,Ogden,UT
255 | 4254,Oklahoma City,OK
256 | 4255,Omaha,NE
257 | 4256,Orangeburg,SC
258 | 4257,Orem,UT
259 | 4258,Orlando,FL
260 | 4259,Orono,ME
261 | 4260,Oxford,MS
262 | 4261,Oxford,OH
263 | 4262,Paradise,NV
264 | 4263,Paradise Island,BA
265 | 4264,Pearl Harbor-Hickam,HI
266 | 4265,Peoria,IL
267 | 4266,Philadelphia,PA
268 | 4267,Phoenix,AZ
269 | 4268,Pine Bluff,AR
270 | 4269,Piscataway,NJ
271 | 4270,Pittsburgh,PA
272 | 4271,Playa del Carmen,MX
273 | 4272,Pocatello,ID
274 | 4273,Portland,ME
275 | 4274,Portland,OR
276 | 4275,Poughkeepsie,NY
277 | 4276,Prairie View,TX
278 | 4277,Presbyterian,SC
279 | 4278,Prescott Valley,AZ
280 | 4279,Princess Anne,MD
281 | 4280,Princeton,NJ
282 | 4281,Providence,RI
283 | 4282,Provo,UT
284 | 4283,Puerto Vallarta,MX
285 | 4284,Pullman,WA
286 | 4285,Queens,NY
287 | 4286,Radford,VA
288 | 4287,Raleigh,NC
289 | 4288,Ramstein-Miesenbach,GY
290 | 4289,Rapid City,SD
291 | 4290,Reno,NV
292 | 4291,Richmond,KY
293 | 4292,Richmond,VA
294 | 4293,Riverdale,NY
295 | 4294,Riverside,CA
296 | 4295,Rochester,MI
297 | 4296,Rochester,NY
298 | 4297,Rock Hill,SC
299 | 4298,Rosemont,IL
300 | 4299,Ruston,LA
301 | 4300,Sacramento,CA
302 | 4301,Salt Lake City,UT
303 | 4302,San Antonio,TX
304 | 4303,San Diego,CA
305 | 4304,San Francisco,CA
306 | 4305,San Jose,CA
307 | 4307,San Juan,PR
308 | 4308,San Luis Obispo,CA
309 | 4309,San Marcos,TX
310 | 4310,Santa Barbara,CA
311 | 4311,Santa Clara,CA
312 | 4312,Savannah,GA
313 | 4313,Seattle,WA
314 | 4314,Shanghai,CH
315 | 4315,Shreveport,LA
316 | 4316,Sioux Falls,SD
317 | 4317,Smithfield,RI
318 | 4318,South Bend,IN
319 | 4319,South Korea,SK
320 | 4320,South Orange,NJ
321 | 4321,South Padre Island,TX
322 | 4322,Southaven,MS
323 | 4323,Spartanburg,SC
324 | 4324,Spokane,WA
325 | 4325,Springfield,IL
326 | 4326,Springfield,MA
327 | 4327,Springfield,MO
328 | 4328,St. Bonaventure,NY
329 | 4329,St. Louis,MO
330 | 4330,St. Thomas,VI
331 | 4331,Stanford,CA
332 | 4332,Starkville,MS
333 | 4333,Staten Island,NY
334 | 4334,Statesboro,GA
335 | 4335,Stillwater,OK
336 | 4336,Stockton,CA
337 | 4337,Stony Brook,NY
338 | 4338,Storrs,CT
339 | 4339,Sunrise,FL
340 | 4340,Syracuse,NY
341 | 4341,Tallahassee,FL
342 | 4342,Tampa,FL
343 | 4343,Tempe,AZ
344 | 4344,Terre Haute,IN
345 | 4345,Thibodaux,LA
346 | 4346,Toledo,OH
347 | 4347,Tortola,VI
348 | 4348,Towson,MD
349 | 4349,Troy,AL
350 | 4350,Tucson,AZ
351 | 4351,Tulsa,OK
352 | 4352,Tupelo,MS
353 | 4353,Tuscaloosa,AL
354 | 4354,Uncasville,CT
355 | 4355,University Park,PA
356 | 4356,USAF Academy,CO
357 | 4357,Valparaiso,IN
358 | 4358,Vancouver,BC
359 | 4359,Vermillion,SD
360 | 4360,Vestal,NY
361 | 4361,Villanova,PA
362 | 4362,Waco,TX
363 | 4363,Washington,DC
364 | 4364,West Hartford,CT
365 | 4365,West Lafayette,IN
366 | 4366,West Long Branch,NJ
367 | 4367,West Point,NY
368 | 4368,Wichita,KS
369 | 4369,Williamsburg,VA
370 | 4370,Wilmington,NC
371 | 4371,Winston-Salem,NC
372 | 4372,Worcester,MA
373 | 4373,Youngstown,OH
374 | 4374,Ypsilanti,MI
375 | 4376,Biloxi,MS
376 | 4382,Freeport,BA
377 | 4383,Guaynabo,PR
378 | 4385,Laie,HI
379 | 4387,Moline,IL
380 | 4389,New York City,NY
381 | 4390,North Charleston,SC
382 | 4391,Ocala,FL
383 | 4392,Panama City,FL
384 | 4396,St. Charles,MO
385 | 4399,Trujillo Alto,PR
386 | 4402,Upper Marlboro,MD
387 | 4403,Wailuku,HI
388 | 4404,Winter Park,FL
389 | 4406,Garden City,NY
390 | 4407,Great Falls,MT
391 | 4408,Reading,PA
392 | 4409,Uniondale,NY
393 | 4410,Belfast,IR
394 | 4412,Montego Bay,JA
395 | 4413,Nassau,BA
396 | 4414,Frisco,TX
397 | 4415,Grand Cayman,CI
398 | 4416,Miami Shores,FL
399 | 4417,Midland,MI
400 | 4421,Alexandria,LA
401 | 4422,Bimini,BA
402 | 4423,Bloomington,IL
403 | 4424,Carolina,PR
404 | 4425,Clarksdale,MS
405 | 4426,Cupertino,CA
406 | 4427,Davie,FL
407 | 4428,East Peoria,IL
408 | 4429,Fort Lauderdale,FL
409 | 4430,Fort Mill,SC
410 | 4431,Grand Rapids,MI
411 | 4432,Hilo,HI
412 | 4433,Houma,LA
413 | 4434,Jennings,LA
414 | 4435,Kennewick,WA
415 | 4436,Livonia,MI
416 | 4437,Lucaya,BA
417 | 4438,Melbourne,FL
418 | 4439,Moorehead,MS
419 | 4440,Mt. Pleasant,SC
420 | 4441,Niagara University,NY
421 | 4442,Toronto,ON
422 | 4443,West Palm Beach,FL
423 | 4444,Florence,AL
424 | 4445,Georgetown,KY
425 | 4446,Seaside,CA
426 | 4447,Kissimmee,FL
427 | 4448,Oakland,MI
428 | 4449,St. Petersburg,FL
429 | 4450,Berea,KY
430 | 4451,Chatham,ON
431 | 4452,Kahului,HI
432 | 4453,La Crosse,WI
433 | 4454,Maui,HI
434 | 4455,West Orange,NJ
435 | 4456,White Plains,NY
436 | 4457,Winter Garden,FL
437 |
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles/WNCAATourneySlots.csv:
--------------------------------------------------------------------------------
1 | Slot,StrongSeed,WeakSeed
2 | R1W1,W01,W16
3 | R1W2,W02,W15
4 | R1W3,W03,W14
5 | R1W4,W04,W13
6 | R1W5,W05,W12
7 | R1W6,W06,W11
8 | R1W7,W07,W10
9 | R1W8,W08,W09
10 | R1X1,X01,X16
11 | R1X2,X02,X15
12 | R1X3,X03,X14
13 | R1X4,X04,X13
14 | R1X5,X05,X12
15 | R1X6,X06,X11
16 | R1X7,X07,X10
17 | R1X8,X08,X09
18 | R1Y1,Y01,Y16
19 | R1Y2,Y02,Y15
20 | R1Y3,Y03,Y14
21 | R1Y4,Y04,Y13
22 | R1Y5,Y05,Y12
23 | R1Y6,Y06,Y11
24 | R1Y7,Y07,Y10
25 | R1Y8,Y08,Y09
26 | R1Z1,Z01,Z16
27 | R1Z2,Z02,Z15
28 | R1Z3,Z03,Z14
29 | R1Z4,Z04,Z13
30 | R1Z5,Z05,Z12
31 | R1Z6,Z06,Z11
32 | R1Z7,Z07,Z10
33 | R1Z8,Z08,Z09
34 | R2W1,R1W1,R1W8
35 | R2W2,R1W2,R1W7
36 | R2W3,R1W3,R1W6
37 | R2W4,R1W4,R1W5
38 | R2X1,R1X1,R1X8
39 | R2X2,R1X2,R1X7
40 | R2X3,R1X3,R1X6
41 | R2X4,R1X4,R1X5
42 | R2Y1,R1Y1,R1Y8
43 | R2Y2,R1Y2,R1Y7
44 | R2Y3,R1Y3,R1Y6
45 | R2Y4,R1Y4,R1Y5
46 | R2Z1,R1Z1,R1Z8
47 | R2Z2,R1Z2,R1Z7
48 | R2Z3,R1Z3,R1Z6
49 | R2Z4,R1Z4,R1Z5
50 | R3W1,R2W1,R2W4
51 | R3W2,R2W2,R2W3
52 | R3X1,R2X1,R2X4
53 | R3X2,R2X2,R2X3
54 | R3Y1,R2Y1,R2Y4
55 | R3Y2,R2Y2,R2Y3
56 | R3Z1,R2Z1,R2Z4
57 | R3Z2,R2Z2,R2Z3
58 | R4W1,R3W1,R3W2
59 | R4X1,R3X1,R3X2
60 | R4Y1,R3Y1,R3Y2
61 | R4Z1,R3Z1,R3Z2
62 | R5WX,R4W1,R4X1
63 | R5YZ,R4Y1,R4Z1
64 | R6CH,R5WX,R5YZ
65 |
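The slot table above encodes the bracket recursively: each round-one slot pairs two seeds (for example W01 against W16), every later slot references the winners of two earlier slots, and the chain terminates at the championship slot R6CH. A minimal sketch of how that table could be walked to produce a champion, assuming a seed-to-team mapping taken from the seeds file and a caller-supplied winner_of rule (the function and variable names here are illustrative, not taken from the repository's notebooks):

import csv

def load_slots(path="WNCAATourneySlots.csv"):
    """Read the slot table into {Slot: (StrongSeed, WeakSeed)} pairs."""
    with open(path) as f:
        return {row["Slot"]: (row["StrongSeed"], row["WeakSeed"])
                for row in csv.DictReader(f)}

def resolve(slot_or_seed, slots, seed_to_team, winner_of):
    """Recursively resolve a slot (or a raw seed such as 'W01') to a TeamID."""
    if slot_or_seed in seed_to_team:       # base case: a seed label like 'W01'
        return seed_to_team[slot_or_seed]
    strong, weak = slots[slot_or_seed]     # later rounds reference earlier slots
    return winner_of(resolve(strong, slots, seed_to_team, winner_of),
                     resolve(weak, slots, seed_to_team, winner_of))

# winner_of(team_a, team_b) can be a lookup into actual tourney results or a
# model's predicted probabilities; the champion is then resolve("R6CH", ...).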
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles/WSeasons.csv:
--------------------------------------------------------------------------------
1 | Season,DayZero,RegionW,RegionX,RegionY,RegionZ
2 | 1998,10/27/1997,East,Midwest,Mideast,West
3 | 1999,10/26/1998,East,Mideast,Midwest,West
4 | 2000,11/1/1999,East,Midwest,Mideast,West
5 | 2001,10/30/2000,East,Midwest,Mideast,West
6 | 2002,10/29/2001,East,West,Mideast,Midwest
7 | 2003,11/4/2002,East,West,Mideast,Midwest
8 | 2004,11/3/2003,East,Mideast,Midwest,West
9 | 2005,11/1/2004,Chattanooga,Tempe,KansasCity,Philadelphia
10 | 2006,10/31/2005,Albuquerque,Cleveland,Bridgeport,San Antonio
11 | 2007,10/30/2006,Dallas,Dayton,Fresno,Greensboro
12 | 2008,11/5/2007,Greensboro,Spokane,NewOrleans,OklahomaCity
13 | 2009,11/3/2008,Berkeley,Trenton,OklahomaCity,Raleigh
14 | 2010,11/2/2009,Dayton,Memphis,KansasCity,Sacramento
15 | 2011,11/1/2010,Dallas,Spokane,Dayton,Philadelphia
16 | 2012,10/31/2011,DesMoines,Fresno,Kingston,Raleigh
17 | 2013,11/5/2012,Bridgeport,Norfolk,OklahomaCity,Spokane
18 | 2014,11/4/2013,Lincoln,Stanford,Louisville,NotreDame
19 | 2015,11/3/2014,Albany,Spokane,Greensboro,OklahomaCity
20 | 2016,11/2/2015,Bridgeport,Dallas,Lexington,SiouxFalls
21 | 2017,10/31/2016,Bridgeport,OklahomaCity,Lexington,Stockton
22 | 2018,10/30/2017,Albany,Spokane,KansasCity,Lexington
23 | 2019,11/5/2018,Albany,Chicago,Greensboro,Portland
24 |
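WSeasons.csv pins each season to a DayZero date and names the four regions mapped onto the generic W/X/Y/Z labels used by the seed and slot files. Assuming the companion results files follow the usual Kaggle convention of giving each game a DayNum counted in days from DayZero (that convention is not visible in this file itself), a game date can be recovered as in this sketch (function names are illustrative):

import csv
from datetime import datetime, timedelta

def load_day_zero(path="WSeasons.csv"):
    """Map each Season to its DayZero as a datetime."""
    with open(path) as f:
        return {int(row["Season"]): datetime.strptime(row["DayZero"], "%m/%d/%Y")
                for row in csv.DictReader(f)}

def game_date(season, day_num, day_zero):
    """Convert a (Season, DayNum) pair into a calendar date."""
    return day_zero[season] + timedelta(days=int(day_num))

# e.g. game_date(2018, 134, load_day_zero()) gives the date 134 days after
# the 2018 season's DayZero of 10/30/2017.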
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles/WTeamSpellings.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles/WTeamSpellings.csv
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/Stage2WDataFiles/WTeams.csv:
--------------------------------------------------------------------------------
1 | TeamID,TeamName
2 | 3101,Abilene Chr
3 | 3102,Air Force
4 | 3103,Akron
5 | 3104,Alabama
6 | 3105,Alabama A&M
7 | 3106,Alabama St
8 | 3107,Albany NY
9 | 3108,Alcorn St
10 | 3109,Alliant Intl
11 | 3110,American Univ
12 | 3111,Appalachian St
13 | 3112,Arizona
14 | 3113,Arizona St
15 | 3114,Ark Little Rock
16 | 3115,Ark Pine Bluff
17 | 3116,Arkansas
18 | 3117,Arkansas St
19 | 3118,Armstrong St
20 | 3119,Army
21 | 3120,Auburn
22 | 3121,Augusta
23 | 3122,Austin Peay
24 | 3123,Ball St
25 | 3124,Baylor
26 | 3125,Belmont
27 | 3126,Bethune-Cookman
28 | 3127,Binghamton
29 | 3128,Birmingham So
30 | 3129,Boise St
31 | 3130,Boston College
32 | 3131,Boston Univ
33 | 3132,Bowling Green
34 | 3133,Bradley
35 | 3134,Brooklyn
36 | 3135,Brown
37 | 3136,Bryant
38 | 3137,Bucknell
39 | 3138,Buffalo
40 | 3139,Butler
41 | 3140,BYU
42 | 3141,C Michigan
43 | 3142,Cal Poly SLO
44 | 3143,California
45 | 3144,Campbell
46 | 3145,Canisius
47 | 3146,Cent Arkansas
48 | 3147,Centenary
49 | 3148,Central Conn
50 | 3149,Charleston So
51 | 3150,Charlotte
52 | 3151,Chattanooga
53 | 3152,Chicago St
54 | 3153,Cincinnati
55 | 3154,Citadel
56 | 3155,Clemson
57 | 3156,Cleveland St
58 | 3157,Coastal Car
59 | 3158,Col Charleston
60 | 3159,Colgate
61 | 3160,Colorado
62 | 3161,Colorado St
63 | 3162,Columbia
64 | 3163,Connecticut
65 | 3164,Coppin St
66 | 3165,Cornell
67 | 3166,Creighton
68 | 3167,CS Bakersfield
69 | 3168,CS Fullerton
70 | 3169,CS Northridge
71 | 3170,CS Sacramento
72 | 3171,Dartmouth
73 | 3172,Davidson
74 | 3173,Dayton
75 | 3174,Delaware
76 | 3175,Delaware St
77 | 3176,Denver
78 | 3177,DePaul
79 | 3178,Detroit
80 | 3179,Drake
81 | 3180,Drexel
82 | 3181,Duke
83 | 3182,Duquesne
84 | 3183,E Illinois
85 | 3184,E Kentucky
86 | 3185,E Michigan
87 | 3186,E Washington
88 | 3187,East Carolina
89 | 3188,Edwardsville
90 | 3189,Elon
91 | 3190,ETSU
92 | 3191,Evansville
93 | 3192,F Dickinson
94 | 3193,Fairfield
95 | 3194,FL Atlantic
96 | 3195,FL Gulf Coast
97 | 3196,Florida
98 | 3197,Florida A&M
99 | 3198,Florida Intl
100 | 3199,Florida St
101 | 3200,Fordham
102 | 3201,Fresno St
103 | 3202,Furman
104 | 3203,G Washington
105 | 3204,Ga Southern
106 | 3205,Gardner Webb
107 | 3206,George Mason
108 | 3207,Georgetown
109 | 3208,Georgia
110 | 3209,Georgia St
111 | 3210,Georgia Tech
112 | 3211,Gonzaga
113 | 3212,Grambling
114 | 3213,Grand Canyon
115 | 3214,Hampton
116 | 3215,Hardin-Simmons
117 | 3216,Hartford
118 | 3217,Harvard
119 | 3218,Hawaii
120 | 3219,High Point
121 | 3220,Hofstra
122 | 3221,Holy Cross
123 | 3222,Houston
124 | 3223,Houston Bap
125 | 3224,Howard
126 | 3225,Idaho
127 | 3226,Idaho St
128 | 3227,IL Chicago
129 | 3228,Illinois
130 | 3229,Illinois St
131 | 3230,Incarnate Word
132 | 3231,Indiana
133 | 3232,Indiana St
134 | 3233,Iona
135 | 3234,Iowa
136 | 3235,Iowa St
137 | 3236,IPFW
138 | 3237,IUPUI
139 | 3238,Jackson St
140 | 3239,Jacksonville
141 | 3240,Jacksonville St
142 | 3241,James Madison
143 | 3242,Kansas
144 | 3243,Kansas St
145 | 3244,Kennesaw
146 | 3245,Kent
147 | 3246,Kentucky
148 | 3247,La Salle
149 | 3248,Lafayette
150 | 3249,Lamar
151 | 3250,Lehigh
152 | 3251,Liberty
153 | 3252,Lipscomb
154 | 3253,Long Beach St
155 | 3254,Long Island
156 | 3255,Longwood
157 | 3256,Louisiana Tech
158 | 3257,Louisville
159 | 3258,Loy Marymount
160 | 3259,Loyola MD
161 | 3260,Loyola-Chicago
162 | 3261,LSU
163 | 3262,MA Lowell
164 | 3263,Maine
165 | 3264,Manhattan
166 | 3265,Marist
167 | 3266,Marquette
168 | 3267,Marshall
169 | 3268,Maryland
170 | 3269,Massachusetts
171 | 3270,McNeese St
172 | 3271,MD E Shore
173 | 3272,Memphis
174 | 3273,Mercer
175 | 3274,Miami FL
176 | 3275,Miami OH
177 | 3276,Michigan
178 | 3277,Michigan St
179 | 3278,Minnesota
180 | 3279,Mississippi
181 | 3280,Mississippi St
182 | 3281,Missouri
183 | 3282,Missouri KC
184 | 3283,Missouri St
185 | 3284,Monmouth NJ
186 | 3285,Montana
187 | 3286,Montana St
188 | 3287,Morehead St
189 | 3288,Morgan St
190 | 3289,Morris Brown
191 | 3290,MS Valley St
192 | 3291,Mt St Mary's
193 | 3292,MTSU
194 | 3293,Murray St
195 | 3294,N Colorado
196 | 3295,N Dakota St
197 | 3296,N Illinois
198 | 3297,N Kentucky
199 | 3298,Navy
200 | 3299,NC A&T
201 | 3300,NC Central
202 | 3301,NC State
203 | 3302,NE Illinois
204 | 3303,NE Omaha
205 | 3304,Nebraska
206 | 3305,Nevada
207 | 3306,New Hampshire
208 | 3307,New Mexico
209 | 3308,New Mexico St
210 | 3309,New Orleans
211 | 3310,Niagara
212 | 3311,Nicholls St
213 | 3312,NJIT
214 | 3313,Norfolk St
215 | 3314,North Carolina
216 | 3315,North Dakota
217 | 3316,North Florida
218 | 3317,North Texas
219 | 3318,Northeastern
220 | 3319,Northern Arizona
221 | 3320,Northern Iowa
222 | 3321,Northwestern
223 | 3322,Northwestern LA
224 | 3323,Notre Dame
225 | 3324,Oakland
226 | 3325,Ohio
227 | 3326,Ohio St
228 | 3327,Okla City
229 | 3328,Oklahoma
230 | 3329,Oklahoma St
231 | 3330,Old Dominion
232 | 3331,Oral Roberts
233 | 3332,Oregon
234 | 3333,Oregon St
235 | 3334,Pacific
236 | 3335,Penn
237 | 3336,Penn St
238 | 3337,Pepperdine
239 | 3338,Pittsburgh
240 | 3339,Portland
241 | 3340,Portland St
242 | 3341,Prairie View
243 | 3342,Presbyterian
244 | 3343,Princeton
245 | 3344,Providence
246 | 3345,Purdue
247 | 3346,Quinnipiac
248 | 3347,Radford
249 | 3348,Rhode Island
250 | 3349,Rice
251 | 3350,Richmond
252 | 3351,Rider
253 | 3352,Robert Morris
254 | 3353,Rutgers
255 | 3354,S Carolina St
256 | 3355,S Dakota St
257 | 3356,S Illinois
258 | 3357,Sacred Heart
259 | 3358,Sam Houston St
260 | 3359,Samford
261 | 3360,San Diego
262 | 3361,San Diego St
263 | 3362,San Francisco
264 | 3363,San Jose St
265 | 3364,Santa Barbara
266 | 3365,Santa Clara
267 | 3366,Savannah St
268 | 3367,SC Upstate
269 | 3368,SE Louisiana
270 | 3369,SE Missouri St
271 | 3370,Seattle
272 | 3371,Seton Hall
273 | 3372,SF Austin
274 | 3373,Siena
275 | 3374,SMU
276 | 3375,South Alabama
277 | 3376,South Carolina
278 | 3377,South Dakota
279 | 3378,South Florida
280 | 3379,Southern Miss
281 | 3380,Southern Univ
282 | 3381,Southern Utah
283 | 3382,St Bonaventure
284 | 3383,St Francis NY
285 | 3384,St Francis PA
286 | 3385,St John's
287 | 3386,St Joseph's PA
288 | 3387,St Louis
289 | 3388,St Mary's CA
290 | 3389,St Peter's
291 | 3390,Stanford
292 | 3391,Stetson
293 | 3392,Stony Brook
294 | 3393,Syracuse
295 | 3394,TAM C. Christi
296 | 3395,TCU
297 | 3396,Temple
298 | 3397,Tennessee
299 | 3398,Tennessee St
300 | 3399,Tennessee Tech
301 | 3400,Texas
302 | 3401,Texas A&M
303 | 3402,Texas St
304 | 3403,Texas Tech
305 | 3404,TN Martin
306 | 3405,Toledo
307 | 3406,Towson
308 | 3407,Troy
309 | 3408,Tulane
310 | 3409,Tulsa
311 | 3410,UTRGV
312 | 3411,TX Southern
313 | 3412,UAB
314 | 3413,UC Davis
315 | 3414,UC Irvine
316 | 3415,UC Riverside
317 | 3416,UCF
318 | 3417,UCLA
319 | 3418,ULL
320 | 3419,ULM
321 | 3420,UMBC
322 | 3421,UNC Asheville
323 | 3422,UNC Greensboro
324 | 3423,UNC Wilmington
325 | 3424,UNLV
326 | 3425,USC
327 | 3426,UT Arlington
328 | 3427,UT San Antonio
329 | 3428,Utah
330 | 3429,Utah St
331 | 3430,Utah Valley
332 | 3431,UTEP
333 | 3432,Utica
334 | 3433,VA Commonwealth
335 | 3434,Valparaiso
336 | 3435,Vanderbilt
337 | 3436,Vermont
338 | 3437,Villanova
339 | 3438,Virginia
340 | 3439,Virginia Tech
341 | 3440,VMI
342 | 3441,W Carolina
343 | 3442,W Illinois
344 | 3443,WKU
345 | 3444,W Michigan
346 | 3445,W Salem St
347 | 3446,W Texas A&M
348 | 3447,Wagner
349 | 3448,Wake Forest
350 | 3449,Washington
351 | 3450,Washington St
352 | 3451,Weber St
353 | 3452,West Virginia
354 | 3453,WI Green Bay
355 | 3454,WI Milwaukee
356 | 3455,Wichita St
357 | 3456,William & Mary
358 | 3457,Winthrop
359 | 3458,Wisconsin
360 | 3459,Wofford
361 | 3460,Wright St
362 | 3461,Wyoming
363 | 3462,Xavier
364 | 3463,Yale
365 | 3464,Youngstown St
366 | 3465,Cal Baptist
367 | 3466,North Alabama
368 |
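WTeams.csv is the lookup that turns the numeric TeamIDs used throughout the other files back into readable names. A minimal sketch of that lookup (the function name is illustrative):

import csv

def load_team_names(path="WTeams.csv"):
    """Map TeamID -> TeamName so predictions can be printed readably."""
    with open(path) as f:
        return {int(row["TeamID"]): row["TeamName"] for row in csv.DictReader(f)}

# e.g. load_team_names()[3163] -> 'Connecticut'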
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles.zip
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WCities.csv:
--------------------------------------------------------------------------------
1 | CityID,City,State
2 | 4001,Abilene,TX
3 | 4002,Akron,OH
4 | 4003,Albany,NY
5 | 4004,Albuquerque,NM
6 | 4005,Allentown,PA
7 | 4006,Ames,IA
8 | 4007,Amherst,MA
9 | 4008,Anaheim,CA
10 | 4009,Anchorage,AK
11 | 4010,Ann Arbor,MI
12 | 4011,Annapolis,MD
13 | 4012,Arlington,TX
14 | 4013,Asheville,NC
15 | 4014,Athens,GA
16 | 4015,Athens,OH
17 | 4016,Atlanta,GA
18 | 4017,Atlantic City,NJ
19 | 4018,Auburn,AL
20 | 4019,Auburn Hills,MI
21 | 4020,Austin,TX
22 | 4021,Bakersfield,CA
23 | 4022,Baltimore,MD
24 | 4023,Bangor,ME
25 | 4024,Baton Rouge,LA
26 | 4025,Bayamon,PR
27 | 4026,Beaumont,TX
28 | 4027,Berkeley,CA
29 | 4028,Bethlehem,PA
30 | 4029,Billings,MT
31 | 4030,Birmingham,AL
32 | 4031,Blacksburg,VA
33 | 4032,Bloomington,IN
34 | 4033,Boca Raton,FL
35 | 4034,Boiling Springs,NC
36 | 4035,Boise,ID
37 | 4036,Boone,NC
38 | 4037,Borinquen,PR
39 | 4038,Bossier City,LA
40 | 4039,Boston,MA
41 | 4040,Boulder,CO
42 | 4041,Bowling Green,KY
43 | 4042,Bowling Green,OH
44 | 4043,Bozeman,MT
45 | 4044,Bridgeport,CT
46 | 4045,Bronx,NY
47 | 4046,Brookings,SD
48 | 4047,Brooklyn,NY
49 | 4048,Brooklyn Heights,NY
50 | 4049,Buffalo,NY
51 | 4050,Buies Creek,NC
52 | 4051,Burlington,VT
53 | 4052,Cambridge,MA
54 | 4053,Cancun,MX
55 | 4054,Cape Girardeau,MO
56 | 4055,Carbondale,IL
57 | 4056,Casper,WY
58 | 4057,Cedar City,UT
59 | 4058,Cedar Falls,IA
60 | 4059,Cedar Park,TX
61 | 4060,Champaign,IL
62 | 4061,Chapel Hill,NC
63 | 4062,Charleston,IL
64 | 4063,Charleston,NC
65 | 4064,Charleston,SC
66 | 4065,Charleston,WV
67 | 4066,Charlotte,NC
68 | 4067,Charlottesville,VA
69 | 4068,Chattanooga,TN
70 | 4069,Cheney,WA
71 | 4070,Chestnut Hill,MA
72 | 4071,Chicago,IL
73 | 4072,Cincinnati,OH
74 | 4073,Clarksville,TN
75 | 4074,Clemson,SC
76 | 4075,Cleveland,OH
77 | 4076,Clinton,SC
78 | 4077,College Park,MD
79 | 4078,College Station,TX
80 | 4079,Columbia,SC
81 | 4080,Columbus,OH
82 | 4081,Conway,AR
83 | 4082,Conway,SC
84 | 4083,Cookeville,TN
85 | 4084,Coral Gables,FL
86 | 4085,Corpus Christi,TX
87 | 4086,Corvallis,OR
88 | 4087,Cullowhee,NC
89 | 4088,Dallas,TX
90 | 4089,Davidson,NC
91 | 4090,Davis,CA
92 | 4091,Dayton,OH
93 | 4092,Daytona Beach,FL
94 | 4093,DeKalb,IL
95 | 4094,DeLand,FL
96 | 4095,Denton,TX
97 | 4096,Denver,CO
98 | 4097,Des Moines,IA
99 | 4098,Detroit,MI
100 | 4099,Dover,DE
101 | 4100,Duluth,GA
102 | 4101,Durham,NC
103 | 4102,Durham,NH
104 | 4103,East Lansing,MI
105 | 4104,East Rutherford,NJ
106 | 4105,Easton,PA
107 | 4106,Edinburg,TX
108 | 4107,Edwardsville,IL
109 | 4108,El Paso,TX
110 | 4109,Elon,NC
111 | 4110,Emmitsburg,MD
112 | 4111,Estero,FL
113 | 4112,Eugene,OR
114 | 4113,Evanston,IL
115 | 4114,Evansville,IN
116 | 4115,Fairfax,VA
117 | 4116,Fairfield,CT
118 | 4117,Fargo,ND
119 | 4118,Farmville,VA
120 | 4119,Fayetteville,AR
121 | 4120,Flagstaff,AZ
122 | 4121,Fort Collins,CO
123 | 4122,Fort Hood,TX
124 | 4123,Fort Myers,FL
125 | 4124,Fort Wayne,IN
126 | 4125,Fort Worth,TX
127 | 4126,Fresno,CA
128 | 4127,Fullerton,CA
129 | 4128,Gainesville,FL
130 | 4129,Garland,TX
131 | 4130,Glendale,AZ
132 | 4131,Glens Falls,NY
133 | 4132,Grambling,LA
134 | 4133,Grand Forks,ND
135 | 4134,Greeley,CO
136 | 4135,Green Bay,WI
137 | 4136,Greensboro,NC
138 | 4137,Greenville,MS
139 | 4138,Greenville,NC
140 | 4139,Greenville,SC
141 | 4140,Greenwood,MS
142 | 4141,Gulfport,MS
143 | 4142,Hackensack,NJ
144 | 4143,Hamden,CT
145 | 4144,Hamilton,NY
146 | 4145,Hammond,LA
147 | 4146,Hampton,VA
148 | 4147,Hanover,NH
149 | 4148,Harrisonburg,VA
150 | 4149,Hartford,CT
151 | 4150,Hattiesburg,MS
152 | 4151,Hempstead,NY
153 | 4152,High Point,NC
154 | 4153,Highland Heights,KY
155 | 4154,Hoffman Estates,IL
156 | 4155,Honolulu,HI
157 | 4156,Hot Springs,AR
158 | 4157,Houston,TX
159 | 4158,Huntington,WV
160 | 4159,Huntsville,AL
161 | 4160,Huntsville,TX
162 | 4161,Indianapolis,IN
163 | 4162,Iowa City,IA
164 | 4163,Irvine,CA
165 | 4164,Ithaca,NY
166 | 4165,Itta Bena,MS
167 | 4166,Jackson,MS
168 | 4167,Jacksonville,AL
169 | 4168,Jacksonville,FL
170 | 4169,Jersey City,NJ
171 | 4170,Johnson City,TN
172 | 4171,Johnstown,PA
173 | 4172,Jonesboro,AR
174 | 4173,Kalamazoo,MI
175 | 4174,Kansas City,MO
176 | 4175,Katy,TX
177 | 4176,Kennesaw,GA
178 | 4177,Kent,OH
179 | 4178,Kent,WA
180 | 4179,Kingston,RI
181 | 4180,Knoxville,TN
182 | 4181,Lafayette,LA
183 | 4182,Lahaina,HI
184 | 4183,Lake Buena Vista,FL
185 | 4184,Lake Charles,LA
186 | 4185,Lakeland,FL
187 | 4186,Laramie,WY
188 | 4187,Las Cruces,NM
189 | 4188,Las Vegas,NV
190 | 4189,Lawrence,KS
191 | 4190,Lawrenceville,NJ
192 | 4191,Lewisburg,PA
193 | 4192,Lewiston,NY
194 | 4193,Lexington,KY
195 | 4194,Lexington,VA
196 | 4195,Lincoln,NE
197 | 4196,Little Rock,AR
198 | 4197,Logan,UT
199 | 4198,Long Beach,CA
200 | 4199,Loretto,PA
201 | 4200,Lorman,MS
202 | 4201,Los Angeles,CA
203 | 4202,Loudonville,NY
204 | 4203,Louisville,KY
205 | 4204,Lowell,MA
206 | 4205,Lubbock,TX
207 | 4206,Lynchburg,VA
208 | 4207,Macomb,IL
209 | 4208,Macon,GA
210 | 4209,Madison,WI
211 | 4210,Malibu,CA
212 | 4211,Manhattan,KS
213 | 4212,Martin,TN
214 | 4213,Memphis,TN
215 | 4214,Miami,FL
216 | 4215,Milwaukee,WI
217 | 4216,Minneapolis,MN
218 | 4217,Missoula,MT
219 | 4218,Mobile,AL
220 | 4219,Monroe,LA
221 | 4220,Montgomery,AL
222 | 4221,Moon Township,PA
223 | 4222,Moraga,CA
224 | 4223,Morehead,KY
225 | 4224,Morgantown,WV
226 | 4225,Moscow,ID
227 | 4226,Mt. Pleasant,MI
228 | 4227,Muncie,IN
229 | 4228,Murfreesboro,TN
230 | 4229,Murray,KY
231 | 4230,Myrtle Beach,SC
232 | 4231,Nacogdoches,TX
233 | 4232,Nampa,ID
234 | 4233,Nashville,TN
235 | 4234,Natchitoches,LA
236 | 4235,New Britain,CT
237 | 4236,New Haven,CT
238 | 4237,New Orleans,LA
239 | 4238,New Rochelle,NY
240 | 4239,New York,NY
241 | 4240,Newark,DE
242 | 4241,Newark,NJ
243 | 4242,Niagara Falls,NY
244 | 4243,Niceville,FL
245 | 4244,Norfolk,VA
246 | 4245,Normal,AL
247 | 4246,Normal,IL
248 | 4247,Norman,OK
249 | 4248,North Little Rock,AR
250 | 4249,Northridge,CA
251 | 4250,Notre Dame,IN
252 | 4251,Oakland,CA
253 | 4252,Oakland City,IN
254 | 4253,Ogden,UT
255 | 4254,Oklahoma City,OK
256 | 4255,Omaha,NE
257 | 4256,Orangeburg,SC
258 | 4257,Orem,UT
259 | 4258,Orlando,FL
260 | 4259,Orono,ME
261 | 4260,Oxford,MS
262 | 4261,Oxford,OH
263 | 4262,Paradise,NV
264 | 4263,Paradise Island,BA
265 | 4264,Pearl Harbor-Hickam,HI
266 | 4265,Peoria,IL
267 | 4266,Philadelphia,PA
268 | 4267,Phoenix,AZ
269 | 4268,Pine Bluff,AR
270 | 4269,Piscataway,NJ
271 | 4270,Pittsburgh,PA
272 | 4271,Playa del Carmen,MX
273 | 4272,Pocatello,ID
274 | 4273,Portland,ME
275 | 4274,Portland,OR
276 | 4275,Poughkeepsie,NY
277 | 4276,Prairie View,TX
278 | 4277,Presbyterian,SC
279 | 4278,Prescott Valley,AZ
280 | 4279,Princess Anne,MD
281 | 4280,Princeton,NJ
282 | 4281,Providence,RI
283 | 4282,Provo,UT
284 | 4283,Puerto Vallarta,MX
285 | 4284,Pullman,WA
286 | 4285,Queens,NY
287 | 4286,Radford,VA
288 | 4287,Raleigh,NC
289 | 4288,Ramstein-Miesenbach,GY
290 | 4289,Rapid City,SD
291 | 4290,Reno,NV
292 | 4291,Richmond,KY
293 | 4292,Richmond,VA
294 | 4293,Riverdale,NY
295 | 4294,Riverside,CA
296 | 4295,Rochester,MI
297 | 4296,Rochester,NY
298 | 4297,Rock Hill,SC
299 | 4298,Rosemont,IL
300 | 4299,Ruston,LA
301 | 4300,Sacramento,CA
302 | 4301,Salt Lake City,UT
303 | 4302,San Antonio,TX
304 | 4303,San Diego,CA
305 | 4304,San Francisco,CA
306 | 4305,San Jose,CA
307 | 4307,San Juan,PR
308 | 4308,San Luis Obispo,CA
309 | 4309,San Marcos,TX
310 | 4310,Santa Barbara,CA
311 | 4311,Santa Clara,CA
312 | 4312,Savannah,GA
313 | 4313,Seattle,WA
314 | 4314,Shanghai,CH
315 | 4315,Shreveport,LA
316 | 4316,Sioux Falls,SD
317 | 4317,Smithfield,RI
318 | 4318,South Bend,IN
319 | 4319,South Korea,SK
320 | 4320,South Orange,NJ
321 | 4321,South Padre Island,TX
322 | 4322,Southaven,MS
323 | 4323,Spartanburg,SC
324 | 4324,Spokane,WA
325 | 4325,Springfield,IL
326 | 4326,Springfield,MA
327 | 4327,Springfield,MO
328 | 4328,St. Bonaventure,NY
329 | 4329,St. Louis,MO
330 | 4330,St. Thomas,VI
331 | 4331,Stanford,CA
332 | 4332,Starkville,MS
333 | 4333,Staten Island,NY
334 | 4334,Statesboro,GA
335 | 4335,Stillwater,OK
336 | 4336,Stockton,CA
337 | 4337,Stony Brook,NY
338 | 4338,Storrs,CT
339 | 4339,Sunrise,FL
340 | 4340,Syracuse,NY
341 | 4341,Tallahassee,FL
342 | 4342,Tampa,FL
343 | 4343,Tempe,AZ
344 | 4344,Terre Haute,IN
345 | 4345,Thibodaux,LA
346 | 4346,Toledo,OH
347 | 4347,Tortola,VI
348 | 4348,Towson,MD
349 | 4349,Troy,AL
350 | 4350,Tucson,AZ
351 | 4351,Tulsa,OK
352 | 4352,Tupelo,MS
353 | 4353,Tuscaloosa,AL
354 | 4354,Uncasville,CT
355 | 4355,University Park,PA
356 | 4356,USAF Academy,CO
357 | 4357,Valparaiso,IN
358 | 4358,Vancouver,BC
359 | 4359,Vermillion,SD
360 | 4360,Vestal,NY
361 | 4361,Villanova,PA
362 | 4362,Waco,TX
363 | 4363,Washington,DC
364 | 4364,West Hartford,CT
365 | 4365,West Lafayette,IN
366 | 4366,West Long Branch,NJ
367 | 4367,West Point,NY
368 | 4368,Wichita,KS
369 | 4369,Williamsburg,VA
370 | 4370,Wilmington,NC
371 | 4371,Winston-Salem,NC
372 | 4372,Worcester,MA
373 | 4373,Youngstown,OH
374 | 4374,Ypsilanti,MI
375 | 4376,Biloxi,MS
376 | 4382,Freeport,BA
377 | 4383,Guaynabo,PR
378 | 4385,Laie,HI
379 | 4387,Moline,IL
380 | 4389,New York City,NY
381 | 4390,North Charleston,SC
382 | 4391,Ocala,FL
383 | 4392,Panama City,FL
384 | 4396,St. Charles,MO
385 | 4399,Trujillo Alto,PR
386 | 4402,Upper Marlboro,MD
387 | 4403,Wailuku,HI
388 | 4404,Winter Park,FL
389 | 4406,Garden City,NY
390 | 4407,Great Falls,MT
391 | 4408,Reading,PA
392 | 4409,Uniondale,NY
393 | 4410,Belfast,IR
394 | 4412,Montego Bay,JA
395 | 4413,Nassau,BA
396 | 4414,Frisco,TX
397 | 4415,Grand Cayman,CI
398 | 4416,Miami Shores,FL
399 | 4417,Midland,MI
400 | 4421,Alexandria,LA
401 | 4422,Bimini,BA
402 | 4423,Bloomington,IL
403 | 4424,Carolina,PR
404 | 4425,Clarksdale,MS
405 | 4426,Cupertino,CA
406 | 4427,Davie,FL
407 | 4428,East Peoria,IL
408 | 4429,Fort Lauderdale,FL
409 | 4430,Fort Mill,SC
410 | 4431,Grand Rapids,MI
411 | 4432,Hilo,HI
412 | 4433,Houma,LA
413 | 4434,Jennings,LA
414 | 4435,Kennewick,WA
415 | 4436,Livonia,MI
416 | 4437,Lucaya,BA
417 | 4438,Melbourne,FL
418 | 4439,Moorehead,MS
419 | 4440,Mt. Pleasant,SC
420 | 4441,Niagara University,NY
421 | 4442,Toronto,ON
422 | 4443,West Palm Beach,FL
423 |
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WNCAATourneySeeds.csv:
--------------------------------------------------------------------------------
1 | Season,Seed,TeamID
2 | 1998,W01,3330
3 | 1998,W02,3163
4 | 1998,W03,3112
5 | 1998,W04,3301
6 | 1998,W05,3272
7 | 1998,W06,3438
8 | 1998,W07,3208
9 | 1998,W08,3307
10 | 1998,W09,3304
11 | 1998,W10,3203
12 | 1998,W11,3374
13 | 1998,W12,3464
14 | 1998,W13,3263
15 | 1998,W14,3365
16 | 1998,W15,3193
17 | 1998,W16,3384
18 | 1998,X01,3403
19 | 1998,X02,3104
20 | 1998,X03,3256
21 | 1998,X04,3345
22 | 1998,X05,3179
23 | 1998,X06,3155
24 | 1998,X07,3417
25 | 1998,X08,3283
26 | 1998,X09,3323
27 | 1998,X10,3276
28 | 1998,X11,3274
29 | 1998,X12,3161
30 | 1998,X13,3449
31 | 1998,X14,3221
32 | 1998,X15,3422
33 | 1998,X16,3212
34 | 1998,Y01,3397
35 | 1998,Y02,3314
36 | 1998,Y03,3228
37 | 1998,Y04,3235
38 | 1998,Y05,3353
39 | 1998,Y06,3435
40 | 1998,Y07,3198
41 | 1998,Y08,3443
42 | 1998,Y09,3372
43 | 1998,Y10,3266
44 | 1998,Y11,3364
45 | 1998,Y12,3332
46 | 1998,Y13,3245
47 | 1998,Y14,3453
48 | 1998,Y15,3224
49 | 1998,Y16,3251
50 | 1998,Z01,3390
51 | 1998,Z02,3181
52 | 1998,Z03,3196
53 | 1998,Z04,3234
54 | 1998,Z05,3242
55 | 1998,Z06,3458
56 | 1998,Z07,3428
57 | 1998,Z08,3218
58 | 1998,Z09,3116
59 | 1998,Z10,3257
60 | 1998,Z11,3439
61 | 1998,Z12,3408
62 | 1998,Z13,3269
63 | 1998,Z14,3285
64 | 1998,Z15,3292
65 | 1998,Z16,3217
66 | 1999,W01,3397
67 | 1999,W02,3330
68 | 1999,W03,3181
69 | 1999,W04,3439
70 | 1999,W05,3120
71 | 1999,W06,3408
72 | 1999,W07,3390
73 | 1999,W08,3130
74 | 1999,W09,3326
75 | 1999,W10,3263
76 | 1999,W11,3386
77 | 1999,W12,3400
78 | 1999,W13,3389
79 | 1999,W14,3221
80 | 1999,W15,3399
81 | 1999,W16,3111
82 | 1999,X01,3163
83 | 1999,X02,3155
84 | 1999,X03,3208
85 | 1999,X04,3235
86 | 1999,X05,3332
87 | 1999,X06,3405
88 | 1999,X07,3228
89 | 1999,X08,3462
90 | 1999,X09,3198
91 | 1999,X10,3257
92 | 1999,X11,3374
93 | 1999,X12,3153
94 | 1999,X13,3365
95 | 1999,X14,3251
96 | 1999,X15,3197
97 | 1999,X16,3384
98 | 1999,Y01,3345
99 | 1999,Y02,3403
100 | 1999,Y03,3353
101 | 1999,Y04,3314
102 | 1999,Y05,3104
103 | 1999,Y06,3112
104 | 1999,Y07,3280
105 | 1999,Y08,3266
106 | 1999,Y09,3242
107 | 1999,Y10,3301
108 | 1999,Y11,3196
109 | 1999,Y12,3212
110 | 1999,Y13,3318
111 | 1999,Y14,3171
112 | 1999,Y15,3372
113 | 1999,Y16,3331
114 | 1999,Z01,3256
115 | 1999,Z02,3161
116 | 1999,Z03,3417
117 | 1999,Z04,3261
118 | 1999,Z05,3323
119 | 1999,Z06,3246
120 | 1999,Z07,3283
121 | 1999,Z08,3336
122 | 1999,Z09,3438
123 | 1999,Z10,3364
124 | 1999,Z11,3304
125 | 1999,Z12,3388
126 | 1999,Z13,3191
127 | 1999,Z14,3453
128 | 1999,Z15,3169
129 | 1999,Z16,3416
130 | 2000,W01,3163
131 | 2000,W02,3181
132 | 2000,W03,3261
133 | 2000,W04,3345
134 | 2000,W05,3328
135 | 2000,W06,3462
136 | 2000,W07,3266
137 | 2000,W08,3179
138 | 2000,W09,3155
139 | 2000,W10,3443
140 | 2000,W11,3372
141 | 2000,W12,3140
142 | 2000,W13,3171
143 | 2000,W14,3251
144 | 2000,W15,3144
145 | 2000,W16,3214
146 | 2000,X01,3256
147 | 2000,X02,3336
148 | 2000,X03,3235
149 | 2000,X04,3330
150 | 2000,X05,3301
151 | 2000,X06,3228
152 | 2000,X07,3120
153 | 2000,X08,3242
154 | 2000,X09,3435
155 | 2000,X10,3283
156 | 2000,X11,3428
157 | 2000,X12,3374
158 | 2000,X13,3453
159 | 2000,X14,3384
160 | 2000,X15,3464
161 | 2000,X16,3108
162 | 2000,Y01,3397
163 | 2000,Y02,3323
164 | 2000,Y03,3403
165 | 2000,Y04,3438
166 | 2000,Y05,3130
167 | 2000,Y06,3408
168 | 2000,Y07,3203
169 | 2000,Y08,3112
170 | 2000,Y09,3245
171 | 2000,Y10,3417
172 | 2000,Y11,3436
173 | 2000,Y12,3304
174 | 2000,Y13,3337
175 | 2000,Y14,3399
176 | 2000,Y15,3360
177 | 2000,Y16,3202
178 | 2000,Z01,3208
179 | 2000,Z02,3353
180 | 2000,Z03,3280
181 | 2000,Z04,3364
182 | 2000,Z05,3314
183 | 2000,Z06,3332
184 | 2000,Z07,3400
185 | 2000,Z08,3276
186 | 2000,Z09,3390
187 | 2000,Z10,3386
188 | 2000,Z11,3412
189 | 2000,Z12,3263
190 | 2000,Z13,3349
191 | 2000,Z14,3389
192 | 2000,Z15,3221
193 | 2000,Z16,3285
194 | 2001,W01,3163
195 | 2001,W02,3208
196 | 2001,W03,3256
197 | 2001,W04,3301
198 | 2001,W05,3437
199 | 2001,W06,3336
200 | 2001,W07,3458
201 | 2001,W08,3268
202 | 2001,W09,3161
203 | 2001,W10,3281
204 | 2001,W11,3395
205 | 2001,W12,3179
206 | 2001,W13,3174
207 | 2001,W14,3209
208 | 2001,W15,3251
209 | 2001,W16,3254
210 | 2001,X01,3323
211 | 2001,X02,3235
212 | 2001,X03,3435
213 | 2001,X04,3234
214 | 2001,X05,3428
215 | 2001,X06,3160
216 | 2001,X07,3199
217 | 2001,X08,3276
218 | 2001,X09,3438
219 | 2001,X10,3408
220 | 2001,X11,3373
221 | 2001,X12,3193
222 | 2001,X13,3332
223 | 2001,X14,3226
224 | 2001,X15,3224
225 | 2001,X16,3108
226 | 2001,Y01,3397
227 | 2001,Y02,3403
228 | 2001,Y03,3345
229 | 2001,Y04,3462
230 | 2001,Y05,3155
231 | 2001,Y06,3261
232 | 2001,Y07,3439
233 | 2001,Y08,3400
234 | 2001,Y09,3388
235 | 2001,Y10,3176
236 | 2001,Y11,3113
237 | 2001,Y12,3151
238 | 2001,Y13,3257
239 | 2001,Y14,3364
240 | 2001,Y15,3335
241 | 2001,Y16,3122
242 | 2001,Z01,3181
243 | 2001,Z02,3328
244 | 2001,Z03,3196
245 | 2001,Z04,3353
246 | 2001,Z05,3283
247 | 2001,Z06,3449
248 | 2001,Z07,3203
249 | 2001,Z08,3124
250 | 2001,Z09,3116
251 | 2001,Z10,3390
252 | 2001,Z11,3330
253 | 2001,Z12,3405
254 | 2001,Z13,3372
255 | 2001,Z14,3221
256 | 2001,Z15,3331
257 | 2001,Z16,3454
258 | 2002,W01,3181
259 | 2002,W02,3124
260 | 2002,W03,3376
261 | 2002,W04,3400
262 | 2002,W05,3256
263 | 2002,W06,3153
264 | 2002,W07,3179
265 | 2002,W08,3395
266 | 2002,W09,3231
267 | 2002,W10,3393
268 | 2002,W11,3389
269 | 2002,W12,3364
270 | 2002,W13,3453
271 | 2002,W14,3251
272 | 2002,W15,3137
273 | 2002,W16,3313
274 | 2002,X01,3328
275 | 2002,X02,3390
276 | 2002,X03,3160
277 | 2002,X04,3403
278 | 2002,X05,3130
279 | 2002,X06,3261
280 | 2002,X07,3161
281 | 2002,X08,3337
282 | 2002,X09,3437
283 | 2002,X10,3408
284 | 2002,X11,3365
285 | 2002,X12,3280
286 | 2002,X13,3372
287 | 2002,X14,3380
288 | 2002,X15,3451
289 | 2002,X16,3216
290 | 2002,Y01,3163
291 | 2002,Y02,3345
292 | 2002,Y03,3243
293 | 2002,Y04,3336
294 | 2002,Y05,3198
295 | 2002,Y06,3116
296 | 2002,Y07,3330
297 | 2002,Y08,3438
298 | 2002,Y09,3234
299 | 2002,Y10,3208
300 | 2002,Y11,3155
301 | 2002,Y12,3166
302 | 2002,Y13,3151
303 | 2002,Y14,3245
304 | 2002,Y15,3122
305 | 2002,Y16,3384
306 | 2002,Z01,3435
307 | 2002,Z02,3397
308 | 2002,Z03,3235
309 | 2002,Z04,3314
310 | 2002,Z05,3278
311 | 2002,Z06,3196
312 | 2002,Z07,3323
313 | 2002,Z08,3458
314 | 2002,Z09,3113
315 | 2002,Z10,3307
316 | 2002,Z11,3140
317 | 2002,Z12,3424
318 | 2002,Z13,3217
319 | 2002,Z14,3396
320 | 2002,Z15,3209
321 | 2002,Z16,3324
322 | 2003,W01,3163
323 | 2003,W02,3345
324 | 2003,W03,3243
325 | 2003,W04,3435
326 | 2003,W05,3130
327 | 2003,W06,3112
328 | 2003,W07,3439
329 | 2003,W08,3277
330 | 2003,W09,3395
331 | 2003,W10,3210
332 | 2003,W11,3323
333 | 2003,W12,3330
334 | 2003,W13,3251
335 | 2003,W14,3217
336 | 2003,W15,3434
337 | 2003,W16,3131
338 | 2003,X01,3261
339 | 2003,X02,3400
340 | 2003,X03,3390
341 | 2003,X04,3326
342 | 2003,X05,3256
343 | 2003,X06,3278
344 | 2003,X07,3116
345 | 2003,X08,3453
346 | 2003,X09,3449
347 | 2003,X10,3153
348 | 2003,X11,3408
349 | 2003,X12,3337
350 | 2003,X13,3451
351 | 2003,X14,3444
352 | 2003,X15,3214
353 | 2003,X16,3402
354 | 2003,Y01,3397
355 | 2003,Y02,3437
356 | 2003,Y03,3314
357 | 2003,Y04,3336
358 | 2003,Y05,3376
359 | 2003,Y06,3160
360 | 2003,Y07,3203
361 | 2003,Y08,3438
362 | 2003,Y09,3228
363 | 2003,Y10,3328
364 | 2003,Y11,3140
365 | 2003,Y12,3151
366 | 2003,Y13,3221
367 | 2003,Y14,3122
368 | 2003,Y15,3384
369 | 2003,Y16,3106
370 | 2003,Z01,3181
371 | 2003,Z02,3403
372 | 2003,Z03,3280
373 | 2003,Z04,3353
374 | 2003,Z05,3208
375 | 2003,Z06,3307
376 | 2003,Z07,3364
377 | 2003,Z08,3428
378 | 2003,Z09,3177
379 | 2003,Z10,3462
380 | 2003,Z11,3274
381 | 2003,Z12,3150
382 | 2003,Z13,3443
383 | 2003,Z14,3264
384 | 2003,Z15,3283
385 | 2003,Z16,3209
386 | 2004,W01,3336
387 | 2004,W02,3163
388 | 2004,W03,3222
389 | 2004,W04,3314
390 | 2004,W05,3323
391 | 2004,W06,3160
392 | 2004,W07,3120
393 | 2004,W08,3439
394 | 2004,W09,3234
395 | 2004,W10,3301
396 | 2004,W11,3364
397 | 2004,W12,3283
398 | 2004,W13,3292
399 | 2004,W14,3453
400 | 2004,W15,3335
401 | 2004,W16,3214
402 | 2004,X01,3181
403 | 2004,X02,3243
404 | 2004,X03,3130
405 | 2004,X04,3403
406 | 2004,X05,3256
407 | 2004,X06,3326
408 | 2004,X07,3278
409 | 2004,X08,3330
410 | 2004,X09,3266
411 | 2004,X10,3417
412 | 2004,X11,3452
413 | 2004,X12,3285
414 | 2004,X13,3263
415 | 2004,X14,3185
416 | 2004,X15,3434
417 | 2004,X16,3322
418 | 2004,Y01,3397
419 | 2004,Y02,3435
420 | 2004,Y03,3328
421 | 2004,Y04,3124
422 | 2004,Y05,3196
423 | 2004,Y06,3390
424 | 2004,Y07,3353
425 | 2004,Y08,3203
426 | 2004,Y09,3177
427 | 2004,Y10,3151
428 | 2004,Y11,3281
429 | 2004,Y12,3307
430 | 2004,Y13,3258
431 | 2004,Y14,3265
432 | 2004,Y15,3252
433 | 2004,Y16,3159
434 | 2004,Z01,3400
435 | 2004,Z02,3345
436 | 2004,Z03,3208
437 | 2004,Z04,3261
438 | 2004,Z05,3274
439 | 2004,Z06,3395
440 | 2004,Z07,3437
441 | 2004,Z08,3277
442 | 2004,Z09,3112
443 | 2004,Z10,3279
444 | 2004,Z11,3396
445 | 2004,Z12,3268
446 | 2004,Z13,3122
447 | 2004,Z14,3251
448 | 2004,Z15,3384
449 | 2004,Z16,3380
450 | 2005,W01,3261
451 | 2005,W02,3181
452 | 2005,W03,3400
453 | 2005,W04,3336
454 | 2005,W05,3177
455 | 2005,W06,3208
456 | 2005,W07,3130
457 | 2005,W08,3328
458 | 2005,W09,3112
459 | 2005,W10,3222
460 | 2005,W11,3349
461 | 2005,W12,3439
462 | 2005,W13,3251
463 | 2005,W14,3331
464 | 2005,W15,3145
465 | 2005,W16,3391
466 | 2005,X01,3314
467 | 2005,X02,3124
468 | 2005,X03,3278
469 | 2005,X04,3323
470 | 2005,X05,3113
471 | 2005,X06,3438
472 | 2005,X07,3395
473 | 2005,X08,3279
474 | 2005,X09,3203
475 | 2005,X10,3332
476 | 2005,X11,3330
477 | 2005,X12,3184
478 | 2005,X13,3364
479 | 2005,X14,3384
480 | 2005,X15,3229
481 | 2005,X16,3164
482 | 2005,Y01,3277
483 | 2005,Y02,3390
484 | 2005,Y03,3163
485 | 2005,Y04,3243
486 | 2005,Y05,3435
487 | 2005,Y06,3199
488 | 2005,Y07,3235
489 | 2005,Y08,3425
490 | 2005,Y09,3257
491 | 2005,Y10,3428
492 | 2005,Y11,3350
493 | 2005,Y12,3285
494 | 2005,Y13,3132
495 | 2005,Y14,3171
496 | 2005,Y15,3365
497 | 2005,Y16,3108
498 | 2005,Z01,3397
499 | 2005,Z02,3326
500 | 2005,Z03,3353
501 | 2005,Z04,3403
502 | 2005,Z05,3301
503 | 2005,Z06,3396
504 | 2005,Z07,3268
505 | 2005,Z08,3307
506 | 2005,Z09,3345
507 | 2005,Z10,3453
508 | 2005,Z11,3256
509 | 2005,Z12,3292
510 | 2005,Z13,3426
511 | 2005,Z14,3216
512 | 2005,Z15,3221
513 | 2005,Z16,3441
514 | 2006,W01,3326
515 | 2006,W02,3268
516 | 2006,W03,3124
517 | 2006,W04,3113
518 | 2006,W05,3428
519 | 2006,W06,3196
520 | 2006,W07,3385
521 | 2006,W08,3130
522 | 2006,W09,3323
523 | 2006,W10,3143
524 | 2006,W11,3307
525 | 2006,W12,3292
526 | 2006,W13,3372
527 | 2006,W14,3319
528 | 2006,W15,3357
529 | 2006,W16,3324
530 | 2006,X01,3314
531 | 2006,X02,3397
532 | 2006,X03,3353
533 | 2006,X04,3345
534 | 2006,X05,3417
535 | 2006,X06,3401
536 | 2006,X07,3203
537 | 2006,X08,3435
538 | 2006,X09,3257
539 | 2006,X10,3330
540 | 2006,X11,3395
541 | 2006,X12,3132
542 | 2006,X13,3283
543 | 2006,X14,3171
544 | 2006,X15,3119
545 | 2006,X16,3415
546 | 2006,Y01,3181
547 | 2006,Y02,3163
548 | 2006,Y03,3208
549 | 2006,Y04,3277
550 | 2006,Y05,3246
551 | 2006,Y06,3396
552 | 2006,Y07,3439
553 | 2006,Y08,3425
554 | 2006,Y09,3378
555 | 2006,Y10,3281
556 | 2006,Y11,3216
557 | 2006,Y12,3151
558 | 2006,Y13,3454
559 | 2006,Y14,3265
560 | 2006,Y15,3164
561 | 2006,Y16,3380
562 | 2006,Z01,3261
563 | 2006,Z02,3328
564 | 2006,Z03,3390
565 | 2006,Z04,3177
566 | 2006,Z05,3301
567 | 2006,Z06,3199
568 | 2006,Z07,3140
569 | 2006,Z08,3278
570 | 2006,Z09,3449
571 | 2006,Z10,3234
572 | 2006,Z11,3256
573 | 2006,Z12,3409
574 | 2006,Z13,3251
575 | 2006,Z14,3369
576 | 2006,Z15,3337
577 | 2006,Z16,3194
578 | 2007,W01,3314
579 | 2007,W02,3345
580 | 2007,W03,3208
581 | 2007,W04,3401
582 | 2007,W05,3203
583 | 2007,W06,3235
584 | 2007,W07,3210
585 | 2007,W08,3143
586 | 2007,W09,3323
587 | 2007,W10,3177
588 | 2007,W11,3449
589 | 2007,W12,3129
590 | 2007,W13,3426
591 | 2007,W14,3125
592 | 2007,W15,3331
593 | 2007,W16,3341
594 | 2007,X01,3397
595 | 2007,X02,3268
596 | 2007,X03,3328
597 | 2007,X04,3326
598 | 2007,X05,3292
599 | 2007,X06,3266
600 | 2007,X07,3279
601 | 2007,X08,3338
602 | 2007,X09,3241
603 | 2007,X10,3395
604 | 2007,X11,3418
605 | 2007,X12,3211
606 | 2007,X13,3265
607 | 2007,X14,3369
608 | 2007,X15,3217
609 | 2007,X16,3179
610 | 2007,Y01,3163
611 | 2007,Y02,3390
612 | 2007,Y03,3261
613 | 2007,Y04,3301
614 | 2007,Y05,3124
615 | 2007,Y06,3462
616 | 2007,Y07,3330
617 | 2007,Y08,3307
618 | 2007,Y09,3453
619 | 2007,Y10,3199
620 | 2007,Y11,3452
621 | 2007,Y12,3151
622 | 2007,Y13,3352
623 | 2007,Y14,3421
624 | 2007,Y15,3226
625 | 2007,Y16,3420
626 | 2007,Z01,3181
627 | 2007,Z02,3435
628 | 2007,Z03,3113
629 | 2007,Z04,3353
630 | 2007,Z05,3277
631 | 2007,Z06,3257
632 | 2007,Z07,3132
633 | 2007,Z08,3396
634 | 2007,Z09,3304
635 | 2007,Z10,3329
636 | 2007,Z11,3140
637 | 2007,Z12,3174
638 | 2007,Z13,3187
639 | 2007,Z14,3415
640 | 2007,Z15,3175
641 | 2007,Z16,3221
642 | 2008,W01,3163
643 | 2008,W02,3353
644 | 2008,W03,3143
645 | 2008,W04,3438
646 | 2008,W05,3330
647 | 2008,W06,3203
648 | 2008,W07,3235
649 | 2008,W08,3400
650 | 2008,W09,3278
651 | 2008,W10,3210
652 | 2008,W11,3120
653 | 2008,W12,3251
654 | 2008,W13,3364
655 | 2008,W14,3360
656 | 2008,W15,3352
657 | 2008,W16,3165
658 | 2008,X01,3268
659 | 2008,X02,3390
660 | 2008,X03,3124
661 | 2008,X04,3435
662 | 2008,X05,3452
663 | 2008,X06,3338
664 | 2008,X07,3431
665 | 2008,X08,3304
666 | 2008,X09,3462
667 | 2008,X10,3443
668 | 2008,X11,3461
669 | 2008,X12,3307
670 | 2008,X13,3285
671 | 2008,X14,3201
672 | 2008,X15,3156
673 | 2008,X16,3164
674 | 2008,Y01,3314
675 | 2008,Y02,3261
676 | 2008,Y03,3329
677 | 2008,Y04,3257
678 | 2008,Y05,3243
679 | 2008,Y06,3326
680 | 2008,Y07,3265
681 | 2008,Y08,3208
682 | 2008,Y09,3234
683 | 2008,Y10,3177
684 | 2008,Y11,3199
685 | 2008,Y12,3151
686 | 2008,Y13,3275
687 | 2008,Y14,3190
688 | 2008,Y15,3238
689 | 2008,Y16,3137
690 | 2008,Z01,3397
691 | 2008,Z02,3401
692 | 2008,Z03,3181
693 | 2008,Z04,3328
694 | 2008,Z05,3323
695 | 2008,Z06,3113
696 | 2008,Z07,3393
697 | 2008,Z08,3428
698 | 2008,Z09,3345
699 | 2008,Z10,3216
700 | 2008,Z11,3396
701 | 2008,Z12,3374
702 | 2008,Z13,3229
703 | 2008,Z14,3293
704 | 2008,Z15,3427
705 | 2008,Z16,3331
706 | 2009,W01,3181
707 | 2009,W02,3390
708 | 2009,W03,3326
709 | 2009,W04,3235
710 | 2009,W05,3397
711 | 2009,W06,3400
712 | 2009,W07,3177
713 | 2009,W08,3292
714 | 2009,W09,3277
715 | 2009,W10,3361
716 | 2009,W11,3280
717 | 2009,W12,3123
718 | 2009,W13,3190
719 | 2009,W14,3357
720 | 2009,W15,3364
721 | 2009,W16,3122
722 | 2009,X01,3163
723 | 2009,X02,3401
724 | 2009,X03,3199
725 | 2009,X04,3143
726 | 2009,X05,3438
727 | 2009,X06,3113
728 | 2009,X07,3323
729 | 2009,X08,3196
730 | 2009,X09,3396
731 | 2009,X10,3278
732 | 2009,X11,3208
733 | 2009,X12,3265
734 | 2009,X13,3201
735 | 2009,X14,3299
736 | 2009,X15,3191
737 | 2009,X16,3436
738 | 2009,Y01,3328
739 | 2009,Y02,3120
740 | 2009,Y03,3314
741 | 2009,Y04,3338
742 | 2009,Y05,3462
743 | 2009,Y06,3345
744 | 2009,Y07,3353
745 | 2009,Y08,3234
746 | 2009,Y09,3210
747 | 2009,Y10,3433
748 | 2009,Y11,3150
749 | 2009,Y12,3211
750 | 2009,Y13,3285
751 | 2009,Y14,3416
752 | 2009,Y15,3250
753 | 2009,Y16,3341
754 | 2009,Z01,3268
755 | 2009,Z02,3124
756 | 2009,Z03,3257
757 | 2009,Z04,3435
758 | 2009,Z05,3243
759 | 2009,Z06,3261
760 | 2009,Z07,3355
761 | 2009,Z08,3437
762 | 2009,Z09,3428
763 | 2009,Z10,3395
764 | 2009,Z11,3453
765 | 2009,Z12,3180
766 | 2009,Z13,3441
767 | 2009,Z14,3251
768 | 2009,Z15,3427
769 | 2009,Z16,3171
770 | 2010,W01,3163
771 | 2010,W02,3326
772 | 2010,W03,3199
773 | 2010,W04,3235
774 | 2010,W05,3438
775 | 2010,W06,3385
776 | 2010,W07,3280
777 | 2010,W08,3396
778 | 2010,W09,3241
779 | 2010,W10,3292
780 | 2010,W11,3343
781 | 2010,W12,3453
782 | 2010,W13,3250
783 | 2010,W14,3256
784 | 2010,W15,3384
785 | 2010,W16,3380
786 | 2010,X01,3397
787 | 2010,X02,3181
788 | 2010,X03,3452
789 | 2010,X04,3124
790 | 2010,X05,3207
791 | 2010,X06,3400
792 | 2010,X07,3261
793 | 2010,X08,3173
794 | 2010,X09,3395
795 | 2010,X10,3216
796 | 2010,X11,3361
797 | 2010,X12,3265
798 | 2010,X13,3201
799 | 2010,X14,3249
800 | 2010,X15,3214
801 | 2010,X16,3122
802 | 2010,Y01,3304
803 | 2010,Y02,3323
804 | 2010,Y03,3328
805 | 2010,Y04,3246
806 | 2010,Y05,3277
807 | 2010,Y06,3210
808 | 2010,Y07,3458
809 | 2010,Y08,3417
810 | 2010,Y09,3301
811 | 2010,Y10,3436
812 | 2010,Y11,3114
813 | 2010,Y12,3132
814 | 2010,Y13,3251
815 | 2010,Y14,3355
816 | 2010,Y15,3156
817 | 2010,Y16,3320
818 | 2010,Z01,3390
819 | 2010,Z02,3401
820 | 2010,Z03,3462
821 | 2010,Z04,3329
822 | 2010,Z05,3208
823 | 2010,Z06,3435
824 | 2010,Z07,3211
825 | 2010,Z08,3234
826 | 2010,Z09,3353
827 | 2010,Z10,3314
828 | 2010,Z11,3177
829 | 2010,Z12,3408
830 | 2010,Z13,3151
831 | 2010,Z14,3190
832 | 2010,Z15,3340
833 | 2010,Z16,3415
834 | 2011,W01,3124
835 | 2011,W02,3401
836 | 2011,W03,3199
837 | 2011,W04,3277
838 | 2011,W05,3453
839 | 2011,W06,3208
840 | 2011,W07,3353
841 | 2011,W08,3222
842 | 2011,W09,3452
843 | 2011,W10,3256
844 | 2011,W11,3292
845 | 2011,W12,3114
846 | 2011,W13,3320
847 | 2011,W14,3359
848 | 2011,W15,3270
849 | 2011,W16,3341
850 | 2011,X01,3390
851 | 2011,X02,3462
852 | 2011,X03,3417
853 | 2011,X04,3246
854 | 2011,X05,3314
855 | 2011,X06,3234
856 | 2011,X07,3257
857 | 2011,X08,3403
858 | 2011,X09,3385
859 | 2011,X10,3435
860 | 2011,X11,3211
861 | 2011,X12,3201
862 | 2011,X13,3214
863 | 2011,X14,3285
864 | 2011,X15,3355
865 | 2011,X16,3413
866 | 2011,Y01,3397
867 | 2011,Y02,3323
868 | 2011,Y03,3274
869 | 2011,Y04,3326
870 | 2011,Y05,3210
871 | 2011,Y06,3328
872 | 2011,Y07,3113
873 | 2011,Y08,3266
874 | 2011,Y09,3400
875 | 2011,Y10,3396
876 | 2011,Y11,3241
877 | 2011,Y12,3132
878 | 2011,Y13,3416
879 | 2011,Y14,3205
880 | 2011,Y15,3428
881 | 2011,Y16,3391
882 | 2011,Z01,3163
883 | 2011,Z02,3181
884 | 2011,Z03,3177
885 | 2011,Z04,3268
886 | 2011,Z05,3207
887 | 2011,Z06,3336
888 | 2011,Z07,3235
889 | 2011,Z08,3243
890 | 2011,Z09,3345
891 | 2011,Z10,3265
892 | 2011,Z11,3173
893 | 2011,Z12,3343
894 | 2011,Z13,3384
895 | 2011,Z14,3298
896 | 2011,Z15,3404
897 | 2011,Z16,3216
898 | 2012,W01,3124
899 | 2012,W02,3397
900 | 2012,W03,3174
901 | 2012,W04,3210
902 | 2012,W05,3207
903 | 2012,W06,3304
904 | 2012,W07,3177
905 | 2012,W08,3326
906 | 2012,W09,3196
907 | 2012,W10,3140
908 | 2012,W11,3242
909 | 2012,W12,3201
910 | 2012,W13,3357
911 | 2012,W14,3114
912 | 2012,W15,3404
913 | 2012,W16,3364
914 | 2012,X01,3390
915 | 2012,X02,3181
916 | 2012,X03,3385
917 | 2012,X04,3345
918 | 2012,X05,3376
919 | 2012,X06,3328
920 | 2012,X07,3435
921 | 2012,X08,3452
922 | 2012,X09,3400
923 | 2012,X10,3292
924 | 2012,X11,3276
925 | 2012,X12,3185
926 | 2012,X13,3355
927 | 2012,X14,3166
928 | 2012,X15,3359
929 | 2012,X16,3214
930 | 2012,Y01,3163
931 | 2012,Y02,3246
932 | 2012,Y03,3274
933 | 2012,Y04,3336
934 | 2012,Y05,3261
935 | 2012,Y06,3353
936 | 2012,Y07,3453
937 | 2012,Y08,3243
938 | 2012,Y09,3343
939 | 2012,Y10,3235
940 | 2012,Y11,3211
941 | 2012,Y12,3361
942 | 2012,Y13,3431
943 | 2012,Y14,3226
944 | 2012,Y15,3270
945 | 2012,Y16,3341
946 | 2012,Z01,3323
947 | 2012,Z02,3268
948 | 2012,Z03,3401
949 | 2012,Z04,3208
950 | 2012,Z05,3382
951 | 2012,Z06,3116
952 | 2012,Z07,3257
953 | 2012,Z08,3143
954 | 2012,Z09,3234
955 | 2012,Z10,3277
956 | 2012,Z11,3173
957 | 2012,Z12,3195
958 | 2012,Z13,3265
959 | 2012,Z14,3107
960 | 2012,Z15,3298
961 | 2012,Z16,3251
962 | 2013,W01,3163
963 | 2013,W02,3246
964 | 2013,W03,3314
965 | 2013,W04,3268
966 | 2013,W05,3277
967 | 2013,W06,3174
968 | 2013,W07,3173
969 | 2013,W08,3435
970 | 2013,W09,3386
971 | 2013,W10,3385
972 | 2013,W11,3452
973 | 2013,W12,3265
974 | 2013,W13,3346
975 | 2013,W14,3107
976 | 2013,W15,3298
977 | 2013,W16,3225
978 | 2013,X01,3323
979 | 2013,X02,3181
980 | 2013,X03,3401
981 | 2013,X04,3376
982 | 2013,X05,3160
983 | 2013,X06,3304
984 | 2013,X07,3329
985 | 2013,X08,3274
986 | 2013,X09,3234
987 | 2013,X10,3177
988 | 2013,X11,3151
989 | 2013,X12,3242
990 | 2013,X13,3355
991 | 2013,X14,3455
992 | 2013,X15,3214
993 | 2013,X16,3404
994 | 2013,Y01,3124
995 | 2013,Y02,3397
996 | 2013,Y03,3417
997 | 2013,Y04,3345
998 | 2013,Y05,3257
999 | 2013,Y06,3328
1000 | 2013,Y07,3393
1001 | 2013,Y08,3199
1002 | 2013,Y09,3343
1003 | 2013,Y10,3166
1004 | 2013,Y11,3141
1005 | 2013,Y12,3292
1006 | 2013,Y13,3251
1007 | 2013,Y14,3391
1008 | 2013,Y15,3331
1009 | 2013,Y16,3341
1010 | 2013,Z01,3390
1011 | 2013,Z02,3143
1012 | 2013,Z03,3336
1013 | 2013,Z04,3208
1014 | 2013,Z05,3235
1015 | 2013,Z06,3261
1016 | 2013,Z07,3403
1017 | 2013,Z08,3276
1018 | 2013,Z09,3437
1019 | 2013,Z10,3378
1020 | 2013,Z11,3453
1021 | 2013,Z12,3211
1022 | 2013,Z13,3285
1023 | 2013,Z14,3142
1024 | 2013,Z15,3201
1025 | 2013,Z16,3409
1026 | 2014,W01,3163
1027 | 2014,W02,3181
1028 | 2014,W03,3401
1029 | 2014,W04,3304
1030 | 2014,W05,3301
1031 | 2014,W06,3211
1032 | 2014,W07,3177
1033 | 2014,W08,3208
1034 | 2014,W09,3386
1035 | 2014,W10,3328
1036 | 2014,W11,3241
1037 | 2014,W12,3140
1038 | 2014,W13,3201
1039 | 2014,W14,3315
1040 | 2014,W15,3457
1041 | 2014,W16,3341
1042 | 2014,X01,3376
1043 | 2014,X02,3390
1044 | 2014,X03,3336
1045 | 2014,X04,3314
1046 | 2014,X05,3277
1047 | 2014,X06,3173
1048 | 2014,X07,3235
1049 | 2014,X08,3292
1050 | 2014,X09,3333
1051 | 2014,X10,3199
1052 | 2014,X11,3196
1053 | 2014,X12,3214
1054 | 2014,X13,3404
1055 | 2014,X14,3455
1056 | 2014,X15,3377
1057 | 2014,X16,3169
1058 | 2014,Y01,3397
1059 | 2014,Y02,3452
1060 | 2014,Y03,3257
1061 | 2014,Y04,3268
1062 | 2014,Y05,3400
1063 | 2014,Y06,3234
1064 | 2014,Y07,3261
1065 | 2014,Y08,3385
1066 | 2014,Y09,3425
1067 | 2014,Y10,3210
1068 | 2014,Y11,3265
1069 | 2014,Y12,3335
1070 | 2014,Y13,3119
1071 | 2014,Y14,3225
1072 | 2014,Y15,3107
1073 | 2014,Y16,3322
1074 | 2014,Z01,3323
1075 | 2014,Z02,3124
1076 | 2014,Z03,3246
1077 | 2014,Z04,3345
1078 | 2014,Z05,3329
1079 | 2014,Z06,3393
1080 | 2014,Z07,3143
1081 | 2014,Z08,3435
1082 | 2014,Z09,3113
1083 | 2014,Z10,3200
1084 | 2014,Z11,3151
1085 | 2014,Z12,3195
1086 | 2014,Z13,3103
1087 | 2014,Z14,3460
1088 | 2014,Z15,3443
1089 | 2014,Z16,3352
1090 | 2015,W01,3163
1091 | 2015,W02,3246
1092 | 2015,W03,3257
1093 | 2015,W04,3143
1094 | 2015,W05,3400
1095 | 2015,W06,3378
1096 | 2015,W07,3173
1097 | 2015,W08,3353
1098 | 2015,W09,3371
1099 | 2015,W10,3235
1100 | 2015,W11,3261
1101 | 2015,W12,3443
1102 | 2015,W13,3455
1103 | 2015,W14,3140
1104 | 2015,W15,3398
1105 | 2015,W16,3383
1106 | 2015,X01,3268
1107 | 2015,X02,3397
1108 | 2015,X03,3333
1109 | 2015,X04,3181
1110 | 2015,X05,3280
1111 | 2015,X06,3203
1112 | 2015,X07,3151
1113 | 2015,X08,3343
1114 | 2015,X09,3453
1115 | 2015,X10,3338
1116 | 2015,X11,3211
1117 | 2015,X12,3408
1118 | 2015,X13,3107
1119 | 2015,X14,3355
1120 | 2015,X15,3129
1121 | 2015,X16,3308
1122 | 2015,Y01,3376
1123 | 2015,Y02,3199
1124 | 2015,Y03,3113
1125 | 2015,Y04,3314
1126 | 2015,Y05,3326
1127 | 2015,Y06,3401
1128 | 2015,Y07,3195
1129 | 2015,Y08,3393
1130 | 2015,Y09,3304
1131 | 2015,Y10,3329
1132 | 2015,Y11,3114
1133 | 2015,Y12,3241
1134 | 2015,Y13,3251
1135 | 2015,Y14,3325
1136 | 2015,Y15,3106
1137 | 2015,Y16,3366
1138 | 2015,Z01,3323
1139 | 2015,Z02,3124
1140 | 2015,Z03,3234
1141 | 2015,Z04,3390
1142 | 2015,Z05,3328
1143 | 2015,Z06,3449
1144 | 2015,Z07,3321
1145 | 2015,Z08,3278
1146 | 2015,Z09,3177
1147 | 2015,Z10,3116
1148 | 2015,Z11,3274
1149 | 2015,Z12,3346
1150 | 2015,Z13,3169
1151 | 2015,Z14,3110
1152 | 2015,Z15,3322
1153 | 2015,Z16,3285
1154 | 2016,W01,3163
1155 | 2016,W02,3400
1156 | 2016,W03,3417
1157 | 2016,W04,3277
1158 | 2016,W05,3280
1159 | 2016,W06,3378
1160 | 2016,W07,3140
1161 | 2016,W08,3371
1162 | 2016,W09,3182
1163 | 2016,W10,3281
1164 | 2016,W11,3161
1165 | 2016,W12,3151
1166 | 2016,W13,3125
1167 | 2016,W14,3218
1168 | 2016,W15,3106
1169 | 2016,W16,3352
1170 | 2016,X01,3124
1171 | 2016,X02,3333
1172 | 2016,X03,3257
1173 | 2016,X04,3401
1174 | 2016,X05,3199
1175 | 2016,X06,3177
1176 | 2016,X07,3329
1177 | 2016,X08,3385
1178 | 2016,X09,3120
1179 | 2016,X10,3382
1180 | 2016,X11,3241
1181 | 2016,X12,3292
1182 | 2016,X13,3283
1183 | 2016,X14,3146
1184 | 2016,X15,3407
1185 | 2016,X16,3225
1186 | 2016,Y01,3323
1187 | 2016,Y02,3268
1188 | 2016,Y03,3246
1189 | 2016,Y04,3390
1190 | 2016,Y05,3274
1191 | 2016,Y06,3328
1192 | 2016,Y07,3449
1193 | 2016,Y08,3208
1194 | 2016,Y09,3231
1195 | 2016,Y10,3335
1196 | 2016,Y11,3345
1197 | 2016,Y12,3355
1198 | 2016,Y13,3362
1199 | 2016,Y14,3421
1200 | 2016,Y15,3233
1201 | 2016,Y16,3299
1202 | 2016,Z01,3376
1203 | 2016,Z02,3113
1204 | 2016,Z03,3326
1205 | 2016,Z04,3393
1206 | 2016,Z05,3196
1207 | 2016,Z06,3452
1208 | 2016,Z07,3397
1209 | 2016,Z08,3203
1210 | 2016,Z09,3243
1211 | 2016,Z10,3453
1212 | 2016,Z11,3343
1213 | 2016,Z12,3107
1214 | 2016,Z13,3119
1215 | 2016,Z14,3138
1216 | 2016,Z15,3308
1217 | 2016,Z16,3239
1218 | 2017,W01,3163
1219 | 2017,W02,3181
1220 | 2017,W03,3268
1221 | 2017,W04,3417
1222 | 2017,W05,3401
1223 | 2017,W06,3452
1224 | 2017,W07,3396
1225 | 2017,W08,3393
1226 | 2017,W09,3235
1227 | 2017,W10,3332
1228 | 2017,W11,3189
1229 | 2017,W12,3335
1230 | 2017,W13,3129
1231 | 2017,W14,3137
1232 | 2017,W15,3214
1233 | 2017,W16,3107
1234 | 2017,X01,3124
1235 | 2017,X02,3280
1236 | 2017,X03,3449
1237 | 2017,X04,3257
1238 | 2017,X05,3397
1239 | 2017,X06,3328
1240 | 2017,X07,3177
1241 | 2017,X08,3261
1242 | 2017,X09,3143
1243 | 2017,X10,3320
1244 | 2017,X11,3211
1245 | 2017,X12,3173
1246 | 2017,X13,3151
1247 | 2017,X14,3286
1248 | 2017,X15,3407
1249 | 2017,X16,3411
1250 | 2017,Y01,3323
1251 | 2017,Y02,3390
1252 | 2017,Y03,3400
1253 | 2017,Y04,3246
1254 | 2017,Y05,3326
1255 | 2017,Y06,3301
1256 | 2017,Y07,3243
1257 | 2017,Y08,3453
1258 | 2017,Y09,3345
1259 | 2017,Y10,3179
1260 | 2017,Y11,3120
1261 | 2017,Y12,3443
1262 | 2017,Y13,3125
1263 | 2017,Y14,3146
1264 | 2017,Y15,3308
1265 | 2017,Y16,3352
1266 | 2017,Z01,3376
1267 | 2017,Z02,3333
1268 | 2017,Z03,3199
1269 | 2017,Z04,3274
1270 | 2017,Z05,3266
1271 | 2017,Z06,3281
1272 | 2017,Z07,3166
1273 | 2017,Z08,3113
1274 | 2017,Z09,3277
1275 | 2017,Z10,3405
1276 | 2017,Z11,3378
1277 | 2017,Z12,3346
1278 | 2017,Z13,3195
1279 | 2017,Z14,3442
1280 | 2017,Z15,3253
1281 | 2017,Z16,3421
1282 | 2018,W01,3163
1283 | 2018,W02,3376
1284 | 2018,W03,3199
1285 | 2018,W04,3208
1286 | 2018,W05,3181
1287 | 2018,W06,3378
1288 | 2018,W07,3143
1289 | 2018,W08,3274
1290 | 2018,W09,3346
1291 | 2018,W10,3438
1292 | 2018,W11,3138
1293 | 2018,W12,3125
1294 | 2018,W13,3273
1295 | 2018,W14,3114
1296 | 2018,W15,3299
1297 | 2018,W16,3384
1298 | 2018,X01,3323
1299 | 2018,X02,3332
1300 | 2018,X03,3326
1301 | 2018,X04,3401
1302 | 2018,X05,3177
1303 | 2018,X06,3261
1304 | 2018,X07,3453
1305 | 2018,X08,3355
1306 | 2018,X09,3437
1307 | 2018,X10,3278
1308 | 2018,X11,3141
1309 | 2018,X12,3328
1310 | 2018,X13,3179
1311 | 2018,X14,3203
1312 | 2018,X15,3370
1313 | 2018,X16,3169
1314 | 2018,Y01,3280
1315 | 2018,Y02,3400
1316 | 2018,Y03,3417
1317 | 2018,Y04,3301
1318 | 2018,Y05,3268
1319 | 2018,Y06,3234
1320 | 2018,Y07,3113
1321 | 2018,Y08,3393
1322 | 2018,Y09,3329
1323 | 2018,Y10,3304
1324 | 2018,Y11,3166
1325 | 2018,Y12,3343
1326 | 2018,Y13,3189
1327 | 2018,Y14,3110
1328 | 2018,Y15,3263
1329 | 2018,Y16,3311
1330 | 2018,Z01,3257
1331 | 2018,Z02,3124
1332 | 2018,Z03,3397
1333 | 2018,Z04,3390
1334 | 2018,Z05,3281
1335 | 2018,Z06,3333
1336 | 2018,Z07,3276
1337 | 2018,Z08,3266
1338 | 2018,Z09,3173
1339 | 2018,Z10,3294
1340 | 2018,Z11,3443
1341 | 2018,Z12,3195
1342 | 2018,Z13,3211
1343 | 2018,Z14,3251
1344 | 2018,Z15,3212
1345 | 2018,Z16,3129
1346 |
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WNCAATourneySlots.csv:
--------------------------------------------------------------------------------
1 | Slot,StrongSeed,WeakSeed
2 | R1W1,W01,W16
3 | R1W2,W02,W15
4 | R1W3,W03,W14
5 | R1W4,W04,W13
6 | R1W5,W05,W12
7 | R1W6,W06,W11
8 | R1W7,W07,W10
9 | R1W8,W08,W09
10 | R1X1,X01,X16
11 | R1X2,X02,X15
12 | R1X3,X03,X14
13 | R1X4,X04,X13
14 | R1X5,X05,X12
15 | R1X6,X06,X11
16 | R1X7,X07,X10
17 | R1X8,X08,X09
18 | R1Y1,Y01,Y16
19 | R1Y2,Y02,Y15
20 | R1Y3,Y03,Y14
21 | R1Y4,Y04,Y13
22 | R1Y5,Y05,Y12
23 | R1Y6,Y06,Y11
24 | R1Y7,Y07,Y10
25 | R1Y8,Y08,Y09
26 | R1Z1,Z01,Z16
27 | R1Z2,Z02,Z15
28 | R1Z3,Z03,Z14
29 | R1Z4,Z04,Z13
30 | R1Z5,Z05,Z12
31 | R1Z6,Z06,Z11
32 | R1Z7,Z07,Z10
33 | R1Z8,Z08,Z09
34 | R2W1,R1W1,R1W8
35 | R2W2,R1W2,R1W7
36 | R2W3,R1W3,R1W6
37 | R2W4,R1W4,R1W5
38 | R2X1,R1X1,R1X8
39 | R2X2,R1X2,R1X7
40 | R2X3,R1X3,R1X6
41 | R2X4,R1X4,R1X5
42 | R2Y1,R1Y1,R1Y8
43 | R2Y2,R1Y2,R1Y7
44 | R2Y3,R1Y3,R1Y6
45 | R2Y4,R1Y4,R1Y5
46 | R2Z1,R1Z1,R1Z8
47 | R2Z2,R1Z2,R1Z7
48 | R2Z3,R1Z3,R1Z6
49 | R2Z4,R1Z4,R1Z5
50 | R3W1,R2W1,R2W4
51 | R3W2,R2W2,R2W3
52 | R3X1,R2X1,R2X4
53 | R3X2,R2X2,R2X3
54 | R3Y1,R2Y1,R2Y4
55 | R3Y2,R2Y2,R2Y3
56 | R3Z1,R2Z1,R2Z4
57 | R3Z2,R2Z2,R2Z3
58 | R4W1,R3W1,R3W2
59 | R4X1,R3X1,R3X2
60 | R4Y1,R3Y1,R3Y2
61 | R4Z1,R3Z1,R3Z2
62 | R5WX,R4W1,R4X1
63 | R5YZ,R4Y1,R4Z1
64 | R6CH,R5WX,R5YZ
65 |
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WSeasons.csv:
--------------------------------------------------------------------------------
1 | Season,DayZero,RegionW,RegionX,RegionY,RegionZ
2 | 1998,10/27/1997,East,Midwest,Mideast,West
3 | 1999,10/26/1998,East,Mideast,Midwest,West
4 | 2000,11/1/1999,East,Midwest,Mideast,West
5 | 2001,10/30/2000,East,Midwest,Mideast,West
6 | 2002,10/29/2001,East,West,Mideast,Midwest
7 | 2003,11/4/2002,East,West,Mideast,Midwest
8 | 2004,11/3/2003,East,Mideast,Midwest,West
9 | 2005,11/1/2004,Chattanooga,Tempe,KansasCity,Philadelphia
10 | 2006,10/31/2005,Albuquerque,Cleveland,Bridgeport,San Antonio
11 | 2007,10/30/2006,Dallas,Dayton,Fresno,Greensboro
12 | 2008,11/5/2007,Greensboro,Spokane,NewOrleans,OklahomaCity
13 | 2009,11/3/2008,Berkeley,Trenton,OklahomaCity,Raleigh
14 | 2010,11/2/2009,Dayton,Memphis,KansasCity,Sacramento
15 | 2011,11/1/2010,Dallas,Spokane,Dayton,Philadelphia
16 | 2012,10/31/2011,DesMoines,Fresno,Kingston,Raleigh
17 | 2013,11/5/2012,Bridgeport,Norfolk,OklahomaCity,Spokane
18 | 2014,11/4/2013,Lincoln,Stanford,Louisville,NotreDame
19 | 2015,11/3/2014,Albany,Spokane,Greensboro,OklahomaCity
20 | 2016,11/2/2015,Bridgeport,Dallas,Lexington,SiouxFalls
21 | 2017,10/31/2016,Bridgeport,OklahomaCity,Lexington,Stockton
22 | 2018,10/30/2017,Albany,Spokane,KansasCity,Lexington
23 | 2019,11/5/2018,TBD1,TBD2,TBD3,TBD4
24 |
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WTeamSpellings.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WTeamSpellings.csv
--------------------------------------------------------------------------------
/Kaggle-NCAA/womens-machine-learning-competition-2019/WDataFiles/WTeams.csv:
--------------------------------------------------------------------------------
1 | TeamID,TeamName
2 | 3101,Abilene Chr
3 | 3102,Air Force
4 | 3103,Akron
5 | 3104,Alabama
6 | 3105,Alabama A&M
7 | 3106,Alabama St
8 | 3107,Albany NY
9 | 3108,Alcorn St
10 | 3109,Alliant Intl
11 | 3110,American Univ
12 | 3111,Appalachian St
13 | 3112,Arizona
14 | 3113,Arizona St
15 | 3114,Ark Little Rock
16 | 3115,Ark Pine Bluff
17 | 3116,Arkansas
18 | 3117,Arkansas St
19 | 3118,Armstrong St
20 | 3119,Army
21 | 3120,Auburn
22 | 3121,Augusta
23 | 3122,Austin Peay
24 | 3123,Ball St
25 | 3124,Baylor
26 | 3125,Belmont
27 | 3126,Bethune-Cookman
28 | 3127,Binghamton
29 | 3128,Birmingham So
30 | 3129,Boise St
31 | 3130,Boston College
32 | 3131,Boston Univ
33 | 3132,Bowling Green
34 | 3133,Bradley
35 | 3134,Brooklyn
36 | 3135,Brown
37 | 3136,Bryant
38 | 3137,Bucknell
39 | 3138,Buffalo
40 | 3139,Butler
41 | 3140,BYU
42 | 3141,C Michigan
43 | 3142,Cal Poly SLO
44 | 3143,California
45 | 3144,Campbell
46 | 3145,Canisius
47 | 3146,Cent Arkansas
48 | 3147,Centenary
49 | 3148,Central Conn
50 | 3149,Charleston So
51 | 3150,Charlotte
52 | 3151,Chattanooga
53 | 3152,Chicago St
54 | 3153,Cincinnati
55 | 3154,Citadel
56 | 3155,Clemson
57 | 3156,Cleveland St
58 | 3157,Coastal Car
59 | 3158,Col Charleston
60 | 3159,Colgate
61 | 3160,Colorado
62 | 3161,Colorado St
63 | 3162,Columbia
64 | 3163,Connecticut
65 | 3164,Coppin St
66 | 3165,Cornell
67 | 3166,Creighton
68 | 3167,CS Bakersfield
69 | 3168,CS Fullerton
70 | 3169,CS Northridge
71 | 3170,CS Sacramento
72 | 3171,Dartmouth
73 | 3172,Davidson
74 | 3173,Dayton
75 | 3174,Delaware
76 | 3175,Delaware St
77 | 3176,Denver
78 | 3177,DePaul
79 | 3178,Detroit
80 | 3179,Drake
81 | 3180,Drexel
82 | 3181,Duke
83 | 3182,Duquesne
84 | 3183,E Illinois
85 | 3184,E Kentucky
86 | 3185,E Michigan
87 | 3186,E Washington
88 | 3187,East Carolina
89 | 3188,Edwardsville
90 | 3189,Elon
91 | 3190,ETSU
92 | 3191,Evansville
93 | 3192,F Dickinson
94 | 3193,Fairfield
95 | 3194,FL Atlantic
96 | 3195,FL Gulf Coast
97 | 3196,Florida
98 | 3197,Florida A&M
99 | 3198,Florida Intl
100 | 3199,Florida St
101 | 3200,Fordham
102 | 3201,Fresno St
103 | 3202,Furman
104 | 3203,G Washington
105 | 3204,Ga Southern
106 | 3205,Gardner Webb
107 | 3206,George Mason
108 | 3207,Georgetown
109 | 3208,Georgia
110 | 3209,Georgia St
111 | 3210,Georgia Tech
112 | 3211,Gonzaga
113 | 3212,Grambling
114 | 3213,Grand Canyon
115 | 3214,Hampton
116 | 3215,Hardin-Simmons
117 | 3216,Hartford
118 | 3217,Harvard
119 | 3218,Hawaii
120 | 3219,High Point
121 | 3220,Hofstra
122 | 3221,Holy Cross
123 | 3222,Houston
124 | 3223,Houston Bap
125 | 3224,Howard
126 | 3225,Idaho
127 | 3226,Idaho St
128 | 3227,IL Chicago
129 | 3228,Illinois
130 | 3229,Illinois St
131 | 3230,Incarnate Word
132 | 3231,Indiana
133 | 3232,Indiana St
134 | 3233,Iona
135 | 3234,Iowa
136 | 3235,Iowa St
137 | 3236,IPFW
138 | 3237,IUPUI
139 | 3238,Jackson St
140 | 3239,Jacksonville
141 | 3240,Jacksonville St
142 | 3241,James Madison
143 | 3242,Kansas
144 | 3243,Kansas St
145 | 3244,Kennesaw
146 | 3245,Kent
147 | 3246,Kentucky
148 | 3247,La Salle
149 | 3248,Lafayette
150 | 3249,Lamar
151 | 3250,Lehigh
152 | 3251,Liberty
153 | 3252,Lipscomb
154 | 3253,Long Beach St
155 | 3254,Long Island
156 | 3255,Longwood
157 | 3256,Louisiana Tech
158 | 3257,Louisville
159 | 3258,Loy Marymount
160 | 3259,Loyola MD
161 | 3260,Loyola-Chicago
162 | 3261,LSU
163 | 3262,MA Lowell
164 | 3263,Maine
165 | 3264,Manhattan
166 | 3265,Marist
167 | 3266,Marquette
168 | 3267,Marshall
169 | 3268,Maryland
170 | 3269,Massachusetts
171 | 3270,McNeese St
172 | 3271,MD E Shore
173 | 3272,Memphis
174 | 3273,Mercer
175 | 3274,Miami FL
176 | 3275,Miami OH
177 | 3276,Michigan
178 | 3277,Michigan St
179 | 3278,Minnesota
180 | 3279,Mississippi
181 | 3280,Mississippi St
182 | 3281,Missouri
183 | 3282,Missouri KC
184 | 3283,Missouri St
185 | 3284,Monmouth NJ
186 | 3285,Montana
187 | 3286,Montana St
188 | 3287,Morehead St
189 | 3288,Morgan St
190 | 3289,Morris Brown
191 | 3290,MS Valley St
192 | 3291,Mt St Mary's
193 | 3292,MTSU
194 | 3293,Murray St
195 | 3294,N Colorado
196 | 3295,N Dakota St
197 | 3296,N Illinois
198 | 3297,N Kentucky
199 | 3298,Navy
200 | 3299,NC A&T
201 | 3300,NC Central
202 | 3301,NC State
203 | 3302,NE Illinois
204 | 3303,NE Omaha
205 | 3304,Nebraska
206 | 3305,Nevada
207 | 3306,New Hampshire
208 | 3307,New Mexico
209 | 3308,New Mexico St
210 | 3309,New Orleans
211 | 3310,Niagara
212 | 3311,Nicholls St
213 | 3312,NJIT
214 | 3313,Norfolk St
215 | 3314,North Carolina
216 | 3315,North Dakota
217 | 3316,North Florida
218 | 3317,North Texas
219 | 3318,Northeastern
220 | 3319,Northern Arizona
221 | 3320,Northern Iowa
222 | 3321,Northwestern
223 | 3322,Northwestern LA
224 | 3323,Notre Dame
225 | 3324,Oakland
226 | 3325,Ohio
227 | 3326,Ohio St
228 | 3327,Okla City
229 | 3328,Oklahoma
230 | 3329,Oklahoma St
231 | 3330,Old Dominion
232 | 3331,Oral Roberts
233 | 3332,Oregon
234 | 3333,Oregon St
235 | 3334,Pacific
236 | 3335,Penn
237 | 3336,Penn St
238 | 3337,Pepperdine
239 | 3338,Pittsburgh
240 | 3339,Portland
241 | 3340,Portland St
242 | 3341,Prairie View
243 | 3342,Presbyterian
244 | 3343,Princeton
245 | 3344,Providence
246 | 3345,Purdue
247 | 3346,Quinnipiac
248 | 3347,Radford
249 | 3348,Rhode Island
250 | 3349,Rice
251 | 3350,Richmond
252 | 3351,Rider
253 | 3352,Robert Morris
254 | 3353,Rutgers
255 | 3354,S Carolina St
256 | 3355,S Dakota St
257 | 3356,S Illinois
258 | 3357,Sacred Heart
259 | 3358,Sam Houston St
260 | 3359,Samford
261 | 3360,San Diego
262 | 3361,San Diego St
263 | 3362,San Francisco
264 | 3363,San Jose St
265 | 3364,Santa Barbara
266 | 3365,Santa Clara
267 | 3366,Savannah St
268 | 3367,SC Upstate
269 | 3368,SE Louisiana
270 | 3369,SE Missouri St
271 | 3370,Seattle
272 | 3371,Seton Hall
273 | 3372,SF Austin
274 | 3373,Siena
275 | 3374,SMU
276 | 3375,South Alabama
277 | 3376,South Carolina
278 | 3377,South Dakota
279 | 3378,South Florida
280 | 3379,Southern Miss
281 | 3380,Southern Univ
282 | 3381,Southern Utah
283 | 3382,St Bonaventure
284 | 3383,St Francis NY
285 | 3384,St Francis PA
286 | 3385,St John's
287 | 3386,St Joseph's PA
288 | 3387,St Louis
289 | 3388,St Mary's CA
290 | 3389,St Peter's
291 | 3390,Stanford
292 | 3391,Stetson
293 | 3392,Stony Brook
294 | 3393,Syracuse
295 | 3394,TAM C. Christi
296 | 3395,TCU
297 | 3396,Temple
298 | 3397,Tennessee
299 | 3398,Tennessee St
300 | 3399,Tennessee Tech
301 | 3400,Texas
302 | 3401,Texas A&M
303 | 3402,Texas St
304 | 3403,Texas Tech
305 | 3404,TN Martin
306 | 3405,Toledo
307 | 3406,Towson
308 | 3407,Troy
309 | 3408,Tulane
310 | 3409,Tulsa
311 | 3410,UTRGV
312 | 3411,TX Southern
313 | 3412,UAB
314 | 3413,UC Davis
315 | 3414,UC Irvine
316 | 3415,UC Riverside
317 | 3416,UCF
318 | 3417,UCLA
319 | 3418,ULL
320 | 3419,ULM
321 | 3420,UMBC
322 | 3421,UNC Asheville
323 | 3422,UNC Greensboro
324 | 3423,UNC Wilmington
325 | 3424,UNLV
326 | 3425,USC
327 | 3426,UT Arlington
328 | 3427,UT San Antonio
329 | 3428,Utah
330 | 3429,Utah St
331 | 3430,Utah Valley
332 | 3431,UTEP
333 | 3432,Utica
334 | 3433,VA Commonwealth
335 | 3434,Valparaiso
336 | 3435,Vanderbilt
337 | 3436,Vermont
338 | 3437,Villanova
339 | 3438,Virginia
340 | 3439,Virginia Tech
341 | 3440,VMI
342 | 3441,W Carolina
343 | 3442,W Illinois
344 | 3443,WKU
345 | 3444,W Michigan
346 | 3445,W Salem St
347 | 3446,W Texas A&M
348 | 3447,Wagner
349 | 3448,Wake Forest
350 | 3449,Washington
351 | 3450,Washington St
352 | 3451,Weber St
353 | 3452,West Virginia
354 | 3453,WI Green Bay
355 | 3454,WI Milwaukee
356 | 3455,Wichita St
357 | 3456,William & Mary
358 | 3457,Winthrop
359 | 3458,Wisconsin
360 | 3459,Wofford
361 | 3460,Wright St
362 | 3461,Wyoming
363 | 3462,Xavier
364 | 3463,Yale
365 | 3464,Youngstown St
366 | 3465,Cal Baptist
367 | 3466,North Alabama
368 |
--------------------------------------------------------------------------------
/Localisation/Basics.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "[ref](https://classroom.udacity.com/courses/cs373)"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import numpy as np"
17 | ]
18 | },
19 | {
20 | "cell_type": "code",
21 | "execution_count": 25,
22 | "metadata": {},
23 | "outputs": [],
24 | "source": [
25 | "p = [0.2, 0.2, 0.2, 0.2, 0.2]\n",
26 | "world = ['green', 'red', 'red', 'green', 'green']\n",
27 | "Z = 'red'\n",
28 | "pHit = 0.6\n",
29 | "pMiss = 0.2"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "Bayes Rule\n",
37 | "---\n",
38 | "$$ P(A|B) = \\frac{P(A)P(B|A)}{P(B)} \\hspace{1cm}(1) $$\n",
39 | "\n",
40 | "$$ P(X_2|red) = \\frac{P(X_2)P(red|X_2)}{P(red)} \\hspace{1cm}(2) $$\n",
41 | "\n",
42 | "$$ P(red) = \\sum_{i=0}^4P(X_i)P(red|X_i) \\hspace{1cm}(3) $$"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 13,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "def sense(p, Z):\n",
52 | " prob = np.array(p)\n",
53 | " measure = np.array([pHit if i == Z else pMiss for i in world])\n",
54 | " \n",
55 | " combine_prob = prob*measure\n",
56 | " \n",
57 | " norm_prob = combine_prob/sum(combine_prob)\n",
58 | " return norm_prob"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": 11,
64 | "metadata": {},
65 | "outputs": [
66 | {
67 | "data": {
68 | "text/plain": [
69 | "array([0.11111111, 0.33333333, 0.33333333, 0.11111111, 0.11111111])"
70 | ]
71 | },
72 | "execution_count": 11,
73 | "metadata": {},
74 | "output_type": "execute_result"
75 | }
76 | ],
77 | "source": [
78 | "sense(p, Z)"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": 26,
84 | "metadata": {},
85 | "outputs": [
86 | {
87 | "name": "stdout",
88 | "output_type": "stream",
89 | "text": [
90 | "sensing [red] -> posterior distribution [0.11111111 0.33333333 0.33333333 0.11111111 0.11111111]\n",
91 | "sensing [green] -> posterior distribution [0.2 0.2 0.2 0.2 0.2]\n"
92 | ]
93 | }
94 | ],
95 | "source": [
96 | "measurements = ['red', 'green']\n",
97 | "\n",
98 | "for i in measurements:\n",
99 | " p = sense(p, i)\n",
100 | " print(\"sensing [{}] -> prior distribution {}\".format(i, p))"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": 34,
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "def move(p, U):\n",
110 | " \"\"\"\n",
111 | " p: probability distribution \n",
112 | " U: number of steps moving\n",
113 | " \"\"\"\n",
114 | " n = len(p)\n",
115 | " U = U % n\n",
116 | " \n",
117 | " return p[-U:] + p[:(n-U)]"
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": 37,
123 | "metadata": {
124 | "scrolled": true
125 | },
126 | "outputs": [
127 | {
128 | "data": {
129 | "text/plain": [
130 | "[0.33333333, 0.33333333, 0.11111111, 0.11111111, 0.11111111]"
131 | ]
132 | },
133 | "execution_count": 37,
134 | "metadata": {},
135 | "output_type": "execute_result"
136 | }
137 | ],
138 | "source": [
139 | "p = [0.11111111, 0.33333333, 0.33333333, 0.11111111, 0.11111111]\n",
140 | "\n",
141 | "move(p, -1)"
142 | ]
143 | },
144 | {
145 | "cell_type": "markdown",
146 | "metadata": {},
147 | "source": [
148 | "### Non-accurate Move"
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "$$ P(X_j) = \\sum_{i}P(X_i)P(X_j|X_i) \\hspace{1cm}(4) $$"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": 39,
161 | "metadata": {},
162 | "outputs": [],
163 | "source": [
164 | "pExact = 0.8\n",
165 | "pOvershoot = 0.1\n",
166 | "pUndershoot = 0.1"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": 44,
172 | "metadata": {},
173 | "outputs": [],
174 | "source": [
175 | "def move(p, U):\n",
176 | " \"\"\"\n",
177 | " movement inaccurate\n",
178 | " p: probability distribution \n",
179 | " U: number of steps moving\n",
180 | " \"\"\"\n",
181 | " p = np.array(p)\n",
182 | " move_prob = np.array([pOvershoot, pExact, pUndershoot])\n",
183 | " n = len(p)\n",
184 | " U = U % n\n",
185 | " \n",
186 | " q = []\n",
187 | " for i in range(n):\n",
188 | " steps = [i-U-1, i-U, i-U+1]\n",
189 | " q_prob = np.dot(p[steps], move_prob)\n",
190 | " q.append(q_prob)\n",
191 | " return q"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 46,
197 | "metadata": {},
198 | "outputs": [
199 | {
200 | "data": {
201 | "text/plain": [
202 | "[0.0, 0.1, 0.8, 0.1, 0.0]"
203 | ]
204 | },
205 | "execution_count": 46,
206 | "metadata": {},
207 | "output_type": "execute_result"
208 | }
209 | ],
210 | "source": [
211 | "p = [0, 1, 0, 0, 0]\n",
212 | "move(p, 1)"
213 | ]
214 | },
215 | {
216 | "cell_type": "code",
217 | "execution_count": 48,
218 | "metadata": {},
219 | "outputs": [
220 | {
221 | "name": "stdout",
222 | "output_type": "stream",
223 | "text": [
224 | "[0.20000004294005816, 0.20000013895694374, 0.20000004294005816, 0.19999988758147377, 0.19999988758147375]\n"
225 | ]
226 | }
227 | ],
228 | "source": [
229 | "# when move infinite number of times, the distribution would go normal\n",
230 | "p = [0, 1, 0, 0, 0]\n",
231 | "n = 100\n",
232 | "U = 2\n",
233 | "\n",
234 | "for _ in range(n):\n",
235 | " p = move(p, U)\n",
236 | "\n",
237 | "print(p)"
238 | ]
239 | },
240 | {
241 | "cell_type": "markdown",
242 | "metadata": {},
243 | "source": [
244 | "Sense & Move Cycle\n",
245 | "---\n",
246 | "In effect, this is `Bayes Probability` + `Total Probability`\n",
247 | "
"
248 | ]
249 | },
250 | {
251 | "cell_type": "code",
252 | "execution_count": 52,
253 | "metadata": {},
254 | "outputs": [],
255 | "source": [
256 | "p = [0.2, 0.2, 0.2, 0.2, 0.2]\n",
257 | "world = ['green', 'red', 'red', 'green', 'green']\n",
258 | "measurements = ['red', 'green']\n",
259 | "motions = [1, 1]\n",
260 | "pHit = 0.6\n",
261 | "pMiss = 0.2\n",
262 | "pExact = 0.8\n",
263 | "pOvershoot = 0.1\n",
264 | "pUndershoot = 0.1"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": 49,
270 | "metadata": {},
271 | "outputs": [],
272 | "source": [
273 | "def move(p, U):\n",
274 | " \"\"\"\n",
275 | " movement inaccurate\n",
276 | " p: probability distribution \n",
277 | " U: number of steps moving\n",
278 | " \"\"\"\n",
279 | " p = np.array(p)\n",
280 | " move_prob = np.array([pOvershoot, pExact, pUndershoot])\n",
281 | " n = len(p)\n",
282 | " U = U % n\n",
283 | " \n",
284 | " q = []\n",
285 | " for i in range(n):\n",
286 | " steps = [i-U-1, i-U, i-U+1]\n",
287 | " q_prob = np.dot(p[steps], move_prob)\n",
288 | " q.append(q_prob)\n",
289 | " return q"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": 50,
295 | "metadata": {},
296 | "outputs": [],
297 | "source": [
298 | "def sense(p, Z):\n",
299 | " prob = np.array(p)\n",
300 | " measure = np.array([pHit if i == Z else pMiss for i in world])\n",
301 | " \n",
302 | " combine_prob = prob*measure\n",
303 | " \n",
304 | " norm_prob = combine_prob/sum(combine_prob)\n",
305 | " return norm_prob"
306 | ]
307 | },
308 | {
309 | "cell_type": "code",
310 | "execution_count": 53,
311 | "metadata": {},
312 | "outputs": [
313 | {
314 | "name": "stdout",
315 | "output_type": "stream",
316 | "text": [
317 | "[0.21157894736842103, 0.1515789473684211, 0.08105263157894739, 0.16842105263157897, 0.3873684210526316]\n"
318 | ]
319 | }
320 | ],
321 | "source": [
322 | "# first sense red\n",
323 | "p = sense(p, \"red\")\n",
324 | "# move right\n",
325 | "p = move(p, 1)\n",
326 | "# second sense green\n",
327 | "p = sense(p, \"green\")\n",
328 | "# move right again\n",
329 | "p = move(p, 1)\n",
330 | "\n",
331 | "print(p)"
332 | ]
333 | },
334 | {
335 | "cell_type": "code",
336 | "execution_count": 54,
337 | "metadata": {},
338 | "outputs": [
339 | {
340 | "name": "stdout",
341 | "output_type": "stream",
342 | "text": [
343 | "[0.07882352941176471, 0.07529411764705884, 0.22470588235294123, 0.4329411764705882, 0.18823529411764706]\n"
344 | ]
345 | }
346 | ],
347 | "source": [
348 | "measurements = [\"red\", \"red\"]\n",
349 | "motions = [1, 1]\n",
350 | "p = [0.2, 0.2, 0.2, 0.2, 0.2]\n",
351 | "\n",
352 | "for i in range(len(measurements)):\n",
353 | " # sense -> move\n",
354 | " p = sense(p, measurements[i])\n",
355 | " p = move(p, motions[i])\n",
356 | "print(p)"
357 | ]
358 | },
359 | {
360 | "cell_type": "code",
361 | "execution_count": null,
362 | "metadata": {},
363 | "outputs": [],
364 | "source": []
365 | }
366 | ],
367 | "metadata": {
368 | "kernelspec": {
369 | "display_name": "Python 3",
370 | "language": "python",
371 | "name": "python3"
372 | },
373 | "language_info": {
374 | "codemirror_mode": {
375 | "name": "ipython",
376 | "version": 3
377 | },
378 | "file_extension": ".py",
379 | "mimetype": "text/x-python",
380 | "name": "python",
381 | "nbconvert_exporter": "python",
382 | "pygments_lexer": "ipython3",
383 | "version": "3.6.5"
384 | }
385 | },
386 | "nbformat": 4,
387 | "nbformat_minor": 2
388 | }
389 |
--------------------------------------------------------------------------------
/Localisation/basics.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def move(p, U):
5 | """
6 | movement inaccurate
7 | p: probability distribution
8 | U: number of steps moving
9 | """
10 | p = np.array(p)
11 | move_prob = np.array([pOvershoot, pExact, pUndershoot])
12 | n = len(p)
13 | U = U % n
14 |
15 | q = []
16 | for i in range(n):
17 | steps = [i - U - 1, i - U, i - U + 1]
18 | q_prob = np.dot(p[steps], move_prob)
19 | q.append(q_prob)
20 | return q
21 |
22 |
23 | def sense(p, Z):
24 | prob = np.array(p)
25 | measure = np.array([pHit if i == Z else pMiss for i in world])
26 |
27 | combine_prob = prob * measure
28 |
29 | norm_prob = combine_prob / sum(combine_prob)
30 | return norm_prob
31 |
32 |
33 | if __name__ == "__main__":
34 | p = [0.2, 0.2, 0.2, 0.2, 0.2]
35 | world = ['green', 'red', 'red', 'green', 'green']
36 | measurements = ['red', 'green']
37 | motions = [1, 1]
38 | pHit = 0.6
39 | pMiss = 0.2
40 | pExact = 0.8
41 | pOvershoot = 0.1
42 | pUndershoot = 0.1
43 |
44 | for i in range(len(measurements)):
45 | # sense -> move
46 | p = sense(p, measurements[i])
47 | p = move(p, motions[i])
48 | print(p)
49 |
--------------------------------------------------------------------------------
/Localisation/kalman-filter.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "Guassian\n",
8 | "---\n",
9 | "Uni-model\n",
10 | "\n",
11 | "$$ f(x) = \\frac{1}{\\sqrt{2\\pi\\sigma^2}}\\exp[-\\frac{(x-\\mu)^2}{2\\sigma^2}] $$"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 1,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import numpy as np"
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "Bayes Rule\n",
28 | "---\n",
29 | "Measurements is gaining information\n",
30 | "
\n",
31 | "\n",
32 | "### Multiplication of Guassian(Measurement Update Procedure)\n",
33 | "---\n",
34 | "$$ N_1(\\mu, \\sigma^2)$$\n",
35 | "$$ N_2(\\gamma, r^2) $$\n",
36 | "\n",
37 | "$$ \\mu_{new} = \\frac{1}{\\sigma^2 + r^2}(r^2\\mu + \\sigma^2\\gamma)$$\n",
38 | "\n",
39 | "$$ \\sigma_{new} = 1/[{\\frac{1}{\\sigma^2} + \\frac{1}{\\gamma^2}}] $$\n",
40 | "\n",
41 | "Motion Adds Uncertainty\n",
42 | "---\n",
43 | "With initial distribution $N(\\mu, \\sigma^2)$, and move $U$ with uncertainty $\\gamma^2$, the distribution after movement is $N(\\mu+U, \\sigma^2+\\gamma^2)$\n"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 21,
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "# update mean and variance given 2 gaussian distribution\n",
53 | "def update(mean1, var1, mean2, var2):\n",
54 | " new_mean = (var1*mean2 + var2*mean1)/(var1 + var2)\n",
55 | " new_var = 1./(1./var1 + 1./var2)\n",
56 | " return [new_mean, new_var]\n",
57 | "\n",
58 | "# motion\n",
59 | "def predict(mean1, var1, mean2, var2):\n",
60 | " new_mean = mean1 + mean2\n",
61 | " new_var = var1 + var2\n",
62 | " return [new_mean, new_var]"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": 23,
68 | "metadata": {},
69 | "outputs": [
70 | {
71 | "name": "stdout",
72 | "output_type": "stream",
73 | "text": [
74 | "[12.4, 1.6]\n",
75 | "[23.0, 10.0]\n"
76 | ]
77 | }
78 | ],
79 | "source": [
80 | "print(update(10., 8., 13., 2.))\n",
81 | "print(predict(10., 8., 13., 2.))"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": 45,
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "def kalman_filter_1d(mu, sig, measurements, motion, measurement_sig, motion_sig):\n",
91 | " n = len(measurements)\n",
92 | " for i in range(n):\n",
93 | " measure = measurements[i]\n",
94 | " move = motion[i]\n",
95 | " \n",
96 | " mu, sig = update(mu, sig, measure, measurement_sig)\n",
97 | " print(\"estimate after measurement [{}, {}]\".format(mu, sig))\n",
98 | " mu, sig = predict(mu, sig, move, motion_sig)\n",
99 | " print(\"estimate after movement [{}, {}]\".format(mu, sig))\n",
100 | " return [mu, sig]"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": 46,
106 | "metadata": {},
107 | "outputs": [
108 | {
109 | "name": "stdout",
110 | "output_type": "stream",
111 | "text": [
112 | "estimate after measurement [4.998000799680128, 3.9984006397441023]\n",
113 | "estimate after movement [5.998000799680128, 5.998400639744102]\n",
114 | "estimate after measurement [5.999200191953932, 2.399744061425258]\n",
115 | "estimate after movement [6.999200191953932, 4.399744061425258]\n",
116 | "estimate after measurement [6.999619127420922, 2.0951800575117594]\n",
117 | "estimate after movement [8.999619127420921, 4.09518005751176]\n",
118 | "estimate after measurement [8.999811802788143, 2.0235152416216957]\n",
119 | "estimate after movement [9.999811802788143, 4.023515241621696]\n",
120 | "estimate after measurement [9.999906177177365, 2.0058615808441944]\n",
121 | "estimate after movement [10.999906177177365, 4.005861580844194]\n"
122 | ]
123 | },
124 | {
125 | "data": {
126 | "text/plain": [
127 | "[10.999906177177365, 4.005861580844194]"
128 | ]
129 | },
130 | "execution_count": 46,
131 | "metadata": {},
132 | "output_type": "execute_result"
133 | }
134 | ],
135 | "source": [
136 | "measurements = [5., 6., 7., 9., 10.]\n",
137 | "motion = [1., 1., 2., 1., 1.]\n",
138 | "measurement_sig = 4.\n",
139 | "motion_sig = 2.\n",
140 | "mu = 0.\n",
141 | "sig = 10000.\n",
142 | "\n",
143 | "kalman_filter_1d(mu, sig, measurements, motion, measurement_sig, motion_sig)"
144 | ]
145 | },
146 | {
147 | "cell_type": "markdown",
148 | "metadata": {},
149 | "source": [
150 | "## Multi-Dimensional Kalman Filter\n",
151 | "---\n",
152 | "- **F**: state transition matrix\n",
153 | "- **P**: covariance matrix\n",
154 | "- $\\mu$: external motion vector\n",
155 | "\n",
156 | "### Prediction\n",
157 | "---\n",
158 | "$$ x' = Fx + \\mu $$\n",
159 | "$$ P' = FPF^{T} $$\n",
160 | "\n",
161 | "In other words, the new best estimate is a prediction made from previous best estimate, plus a correction for known external influences.\n",
162 | "\n",
163 | "And the new uncertainty is predicted from the old uncertainty, with some additional uncertainty from the environment.\n",
164 | "\n",
165 | "### Measurement Update\n",
166 | "---\n",
167 | "\n",
168 | "- **Z**: measurement\n",
169 | "- **H**: measurement transition function\n",
170 | "- **R**: measurement noise\n",
171 | "\n",
172 | "We have two distributions: The predicted measurement with $(\\mu_0, \\sigma_0) = (Hx, HPH^{T})$, and the observed measurement with $(\\mu_1, \\sigma_1) = (Z, R)$\n",
173 | "\n",
174 | "Combining together, we get posterior distribution:\n",
175 | "\n",
176 | "$$ y = Z - Hx $$\n",
177 | "$$ S = HPH^{T} + R $$\n",
178 | "$$ K = PH^{T}S^{-1} $$\n",
179 | "\n",
180 | "\n",
181 | "$$ x' = x + Ky $$\n",
182 | "$$ P' = (I - KH)P $$\n",
183 | "\n",
184 | "the sensors operate on a state and produce a set of readings."
185 | ]
186 | },
187 | {
188 | "cell_type": "code",
189 | "execution_count": 26,
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 | "measurements = [1, 2, 3]\n",
194 | "\n",
195 | "x = np.array([[0.], [0.]]) # initial state (location and velocity)\n",
196 | "P = np.array([[1000., 0.], [0., 1000.]]) # initial uncertainty, covariance\n",
197 | "u = np.array([[0.], [0.]]) # external motion\n",
198 | "F = np.array([[1., 1.], [0, 1.]]) # next state function, state transition\n",
199 | "H = np.array([[1., 0.]]) # measurement function, measurement transition\n",
200 | "R = np.array([[1.]]) # measurement uncertainty\n",
201 | "I = np.array([[1., 0.], [0., 1.]]) # identity matrix"
202 | ]
203 | },
204 | {
205 | "cell_type": "code",
206 | "execution_count": 49,
207 | "metadata": {},
208 | "outputs": [],
209 | "source": [
210 | "# input: initial state x and covariance P\n",
211 | "def kalman_filter(x, P):\n",
212 | " for i in range(len(measurements)):\n",
213 | " # measurement\n",
214 | " Z = measurements[i] # current sense\n",
215 | " y = Z - np.dot(H, x) # err between actual observation and expected observation\n",
216 | " S = np.dot(np.dot(H, P), np.transpose(H)) + R\n",
217 | " K = np.dot(np.dot(P, np.transpose(H)), np.linalg.inv(S))\n",
218 | " \n",
219 | " # posterier mu and sigma\n",
220 | " x = x + np.dot(K, y)\n",
221 | " P = np.dot((I - np.dot(K, H)), P)\n",
222 | " \n",
223 | " # predict\n",
224 | " x = np.dot(F, x) + u\n",
225 | " P = np.dot(np.dot(F, P), np.transpose(F))\n",
226 | " return x, P"
227 | ]
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 16,
232 | "metadata": {},
233 | "outputs": [
234 | {
235 | "name": "stdout",
236 | "output_type": "stream",
237 | "text": [
238 | "x_new \n",
239 | " [[3.99966644]\n",
240 | " [0.99999983]]\n",
241 | "P_new \n",
242 | " [[2.33189042 0.99916761]\n",
243 | " [0.99916761 0.49950058]]\n"
244 | ]
245 | }
246 | ],
247 | "source": [
248 | "x_new, P_new = kalman_filter(x, P)\n",
249 | "print(\"x_new \\n\", x_new)\n",
250 | "print(\"P_new \\n\", P_new)"
251 | ]
252 | },
253 | {
254 | "cell_type": "markdown",
255 | "metadata": {},
256 | "source": [
257 | "### 2D State\n",
258 | "---\n",
259 | "initial state: $(x, y, x', y')$ --> `(loc1, loc2, vel1, vel2)`"
260 | ]
261 | },
262 | {
263 | "cell_type": "code",
264 | "execution_count": 50,
265 | "metadata": {},
266 | "outputs": [],
267 | "source": [
268 | "measurements = np.array([[[5.], [10.]], [[6.], [8.]], [[7.], [6.]], [[8.], [4.]], [[9.], [2.]], [[10.], [0.]]])\n",
269 | "initial_xy = np.array([4., 12.])\n",
270 | "\n",
271 | "dt = 0.1\n",
272 | "\n",
273 | "x = np.array([[initial_xy[0]], [initial_xy[1]], [0.], [0.]]) # initial state (location and velocity)\n",
274 | "u = np.array([[0.], [0.], [0.], [0.]]) # external motion\n",
275 | "\n",
276 | "P = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1000, 0], [0, 0, 0, 1000]]) # initial uncertainty: 0 for positions x and y, 1000 for the two velocities\n",
277 | "F = np.array([[1, 0, 0.1, 0], [0, 1, 0, 0.1], [0, 0, 1, 0], [0, 0, 0, 1]]) # next state function: generalize the 2d version to 4d\n",
278 | "H = np.array([[1, 0, 0, 0], [0, 1, 0, 0]]) # measurement function: reflect the fact that we observe x and y but not the two velocities\n",
279 | "R = np.array([[0.1, 0], [0, 0.1]]) # measurement uncertainty: use 2x2 matrix with 0.1 as main diagonal\n",
280 | "I = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) # 4d identity matrix"
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": 51,
286 | "metadata": {},
287 | "outputs": [
288 | {
289 | "name": "stdout",
290 | "output_type": "stream",
291 | "text": [
292 | "x_new \n",
293 | " [[ 11.63497546]\n",
294 | " [ -3.26995092]\n",
295 | " [ 12.7249591 ]\n",
296 | " [-25.4499182 ]]\n",
297 | "P_new \n",
298 | " [[0.06544265 0. 0.10907108 0. ]\n",
299 | " [0. 0.06544265 0. 0.10907108]\n",
300 | " [0.10907108 0. 0.18178513 0. ]\n",
301 | " [0. 0.10907108 0. 0.18178513]]\n"
302 | ]
303 | }
304 | ],
305 | "source": [
306 | "x_new, P_new = kalman_filter(x, P)\n",
307 | "print(\"x_new \\n\", x_new)\n",
308 | "print(\"P_new \\n\", P_new)"
309 | ]
310 | },
311 | {
312 | "cell_type": "code",
313 | "execution_count": null,
314 | "metadata": {},
315 | "outputs": [],
316 | "source": []
317 | }
318 | ],
319 | "metadata": {
320 | "kernelspec": {
321 | "display_name": "Python 3",
322 | "language": "python",
323 | "name": "python3"
324 | },
325 | "language_info": {
326 | "codemirror_mode": {
327 | "name": "ipython",
328 | "version": 3
329 | },
330 | "file_extension": ".py",
331 | "mimetype": "text/x-python",
332 | "name": "python",
333 | "nbconvert_exporter": "python",
334 | "pygments_lexer": "ipython3",
335 | "version": "3.6.5"
336 | }
337 | },
338 | "nbformat": 4,
339 | "nbformat_minor": 2
340 | }
341 |
--------------------------------------------------------------------------------
/Localisation/localization.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "Grid World Localization\n",
8 | "---\n",
9 | "Consider a 2 dimensional world, the robot can move only left, right, up, or down. It cannot move diagonally. Also, for this assignment, the robot will never overshoot its destination square; it will either make the movement or it will remain stationary.\n",
10 | "\n",
11 | "**Motions:**\n",
12 | "- [0, 0]: no movement\n",
13 | "- [0, 1]: move right\n",
14 | "- [1, 0]: move down\n",
15 | "- [-1, 0]: move up\n",
16 | "..."
17 | ]
18 | },
19 | {
20 | "cell_type": "code",
21 | "execution_count": 1,
22 | "metadata": {},
23 | "outputs": [],
24 | "source": [
25 | "import numpy as np"
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": 2,
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "def init_p():\n",
35 | " nrow, ncol = colors.shape\n",
36 | " p = np.zeros([nrow, ncol])\n",
37 | " for i in range(nrow):\n",
38 | " for j in range(ncol):\n",
39 | " p[i, j] = 1/(nrow*ncol)\n",
40 | " return p"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": 3,
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "# sense\n",
50 | "\n",
51 | "def sense(p, Z):\n",
52 | " nrow, ncol = colors.shape\n",
53 | " sense_prob = np.zeros([nrow, ncol])\n",
54 | " for i in range(nrow):\n",
55 | " for j in range(ncol):\n",
56 | " sense_prob[i, j] = sensor_right if colors[i, j] == Z else 1 - sensor_right\n",
57 | " q = p * sense_prob\n",
58 | " # normalization\n",
59 | " q = q/np.sum(q)\n",
60 | " return q"
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "execution_count": 4,
66 | "metadata": {},
67 | "outputs": [],
68 | "source": [
69 | "# move\n",
70 | "\n",
71 | "def move(p, U):\n",
72 | " nrow, ncol = p.shape\n",
73 | " q = np.zeros([nrow, ncol])\n",
74 | " for i in range(nrow):\n",
75 | " for j in range(ncol):\n",
76 | " q[i, j] = p_move*p[i-U[0], j-U[1]] + (1-p_move)*p[i, j]\n",
77 | " return q"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": 5,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "# combine\n",
87 | "\n",
88 | "def localize(colors, measurements, motions, sensor_right, p_move):\n",
89 | " assert len(measurements) == len(motions)\n",
90 | " \n",
91 | " p = init_p()\n",
92 | " for i in range(len(measurements)):\n",
93 | " measure = measurements[i]\n",
94 | " motion = motions[i]\n",
95 | " \n",
96 | " p = sense(p, measure)\n",
97 | " p = move(p, motion)\n",
98 | " return p"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": 6,
104 | "metadata": {},
105 | "outputs": [
106 | {
107 | "name": "stdout",
108 | "output_type": "stream",
109 | "text": [
110 | "[[0. 0. 0.]\n",
111 | " [0. 1. 0.]\n",
112 | " [0. 0. 0.]]\n"
113 | ]
114 | }
115 | ],
116 | "source": [
117 | "# test\n",
118 | "\n",
119 | "colors = np.array(\n",
120 | " [['G', 'G', 'G'],\n",
121 | " ['G', 'R', 'G'],\n",
122 | " ['G', 'G', 'G']])\n",
123 | "\n",
124 | "measurements = ['R']\n",
125 | "motions = [[0, 0]]\n",
126 | "sensor_right = 1.0\n",
127 | "p_move = 1.0 # probability motion executed correctly\n",
128 | "\n",
129 | "p = localize(colors, measurements, motions, sensor_right, p_move)\n",
130 | "\n",
131 | "print(p)"
132 | ]
133 | },
134 | {
135 | "cell_type": "code",
136 | "execution_count": 7,
137 | "metadata": {},
138 | "outputs": [
139 | {
140 | "name": "stdout",
141 | "output_type": "stream",
142 | "text": [
143 | "[[0.06666667 0.06666667 0.06666667]\n",
144 | " [0.06666667 0.26666667 0.26666667]\n",
145 | " [0.06666667 0.06666667 0.06666667]]\n"
146 | ]
147 | }
148 | ],
149 | "source": [
150 | "colors = np.array(\n",
151 | " [['G', 'G', 'G'],\n",
152 | " ['G', 'R', 'R'],\n",
153 | " ['G', 'G', 'G']])\n",
154 | "\n",
155 | "measurements = ['R']\n",
156 | "motions = [[0, 0]]\n",
157 | "sensor_right = 0.8\n",
158 | "p_move = 1.0 # probability motion executed correctly\n",
159 | "\n",
160 | "p = localize(colors, measurements, motions, sensor_right, p_move)\n",
161 | "\n",
162 | "print(p)"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": 8,
168 | "metadata": {},
169 | "outputs": [
170 | {
171 | "name": "stdout",
172 | "output_type": "stream",
173 | "text": [
174 | "[[0.02564103 0.02564103 0.02564103]\n",
175 | " [0.21794872 0.21794872 0.41025641]\n",
176 | " [0.02564103 0.02564103 0.02564103]]\n"
177 | ]
178 | }
179 | ],
180 | "source": [
181 | "colors = np.array(\n",
182 | " [['G', 'G', 'G'],\n",
183 | " ['G', 'R', 'R'],\n",
184 | " ['G', 'G', 'G']])\n",
185 | "\n",
186 | "measurements = ['R', 'R']\n",
187 | "motions = [[0, 0], [0, 1]]\n",
188 | "sensor_right = 0.8\n",
189 | "p_move = 0.5\n",
190 | "\n",
191 | "p = localize(colors, measurements, motions, sensor_right, p_move)\n",
192 | "\n",
193 | "print(p)"
194 | ]
195 | },
196 | {
197 | "cell_type": "code",
198 | "execution_count": null,
199 | "metadata": {},
200 | "outputs": [],
201 | "source": []
202 | }
203 | ],
204 | "metadata": {
205 | "kernelspec": {
206 | "display_name": "Python 3",
207 | "language": "python",
208 | "name": "python3"
209 | },
210 | "language_info": {
211 | "codemirror_mode": {
212 | "name": "ipython",
213 | "version": 3
214 | },
215 | "file_extension": ".py",
216 | "mimetype": "text/x-python",
217 | "name": "python",
218 | "nbconvert_exporter": "python",
219 | "pygments_lexer": "ipython3",
220 | "version": "3.6.5"
221 | }
222 | },
223 | "nbformat": 4,
224 | "nbformat_minor": 2
225 | }
226 |
--------------------------------------------------------------------------------
/Localisation/particle-filters.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "[ref](https://classroom.udacity.com/courses/cs373/lessons/48704330/concepts/484805920923)"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 51,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "from math import *\n",
17 | "import random\n",
18 | "import numpy as np"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 2,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "landmarks = [[20.0, 20.0], [80.0, 80.0], [20.0, 80.0], [80.0, 20.0]]\n",
28 | "world_size = 100.0\n",
29 | "\n",
30 | "\n",
31 | "class robot:\n",
32 | " def __init__(self):\n",
33 | " self.x = random.random() * world_size # initialise with random\n",
34 | " self.y = random.random() * world_size\n",
35 | " self.orientation = random.random() * 2.0 * pi\n",
36 | " \n",
37 | " self.forward_noise = 0.0\n",
38 | " self.turn_noise = 0.0\n",
39 | " self.sense_noise = 0.0\n",
40 | " \n",
41 | " def set(self, new_x, new_y, new_orientation):\n",
42 | " if new_x < 0 or new_x >= world_size:\n",
43 | " raise ValueError('X coordinate out of bound')\n",
44 | " if new_y < 0 or new_y >= world_size:\n",
45 | " raise ValueError('Y coordinate out of bound')\n",
46 | " if new_orientation < 0 or new_orientation >= 2 * pi:\n",
47 | " raise ValueError('Orientation must be in [0..2pi]')\n",
48 | " self.x = float(new_x)\n",
49 | " self.y = float(new_y)\n",
50 | " self.orientation = float(new_orientation)\n",
51 | " \n",
52 | " \n",
53 | " def set_noise(self, new_f_noise, new_t_noise, new_s_noise):\n",
54 | " # makes it possible to change the noise parameters\n",
55 | " # this is often useful in particle filters\n",
56 | " self.forward_noise = float(new_f_noise);\n",
57 | " self.turn_noise = float(new_t_noise);\n",
58 | " self.sense_noise = float(new_s_noise);\n",
59 | " \n",
60 | " \n",
61 | " def sense(self):\n",
62 | " Z = []\n",
63 | " for i in range(len(landmarks)):\n",
64 | " dist = sqrt((self.x - landmarks[i][0]) ** 2 + (self.y - landmarks[i][1]) ** 2)\n",
65 | " dist += random.gauss(0.0, self.sense_noise)\n",
66 | " Z.append(dist)\n",
67 | " return Z\n",
68 | " \n",
69 | " \n",
70 | " def move(self, turn, forward):\n",
71 | " if forward < 0:\n",
72 | " raise ValueError('Robot cant move backwards')\n",
73 | " \n",
74 | " # turn, and add randomness to the turning command\n",
75 | " orientation = self.orientation + float(turn) + random.gauss(0.0, self.turn_noise)\n",
76 | " orientation %= 2 * pi\n",
77 | " \n",
78 | " # move, and add randomness to the motion command\n",
79 | " dist = float(forward) + random.gauss(0.0, self.forward_noise)\n",
80 | " x = self.x + (cos(orientation) * dist)\n",
81 | " y = self.y + (sin(orientation) * dist)\n",
82 | " x %= world_size # cyclic truncate\n",
83 | " y %= world_size\n",
84 | " \n",
85 | " # set particle\n",
86 | " res = robot()\n",
87 | " res.set(x, y, orientation)\n",
88 | " res.set_noise(self.forward_noise, self.turn_noise, self.sense_noise)\n",
89 | " return res\n",
90 | " \n",
91 | " \n",
92 | " def Gaussian(self, mu, sigma, x):\n",
93 | " # calculates the probability of x for 1-dim Gaussian with mean mu and var. sigma\n",
94 | " return exp(- ((mu - x) ** 2) / (sigma ** 2) / 2.0) / sqrt(2.0 * pi * (sigma ** 2))\n",
95 | " \n",
96 | " \n",
97 | " def measurement_prob(self, measurement):\n",
98 | " # calculates how likely a measurement should be\n",
99 | " prob = 1.0\n",
100 | " for i in range(len(landmarks)):\n",
101 | " dist = sqrt((self.x - landmarks[i][0]) ** 2 + (self.y - landmarks[i][1]) ** 2)\n",
102 | " prob *= self.Gaussian(dist, self.sense_noise, measurement[i])\n",
103 | " return prob\n",
104 | " \n",
105 | " \n",
106 | " def __repr__(self):\n",
107 | " return '[x=%.6s y=%.6s orient=%.6s]' % (str(self.x), str(self.y), str(self.orientation))\n",
108 | "\n",
109 | "\n",
110 | "\n",
111 | "def eval(r, p):\n",
112 | " s = 0.0;\n",
113 | " for i in range(len(p)): # calculate mean error\n",
114 | " dx = (p[i].x - r.x + (world_size/2.0)) % world_size - (world_size/2.0)\n",
115 | " dy = (p[i].y - r.y + (world_size/2.0)) % world_size - (world_size/2.0)\n",
116 | " err = sqrt(dx * dx + dy * dy)\n",
117 | " s += err\n",
118 | " return s / float(len(p))"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 30,
124 | "metadata": {},
125 | "outputs": [
126 | {
127 | "name": "stdout",
128 | "output_type": "stream",
129 | "text": [
130 | "[33.06686926692568, 39.3402893507033, 37.23534432750467, 34.18137281438475]\n",
131 | "[39.212529957622685, 48.586856817416525, 41.54059854560854, 21.899392112217093]\n"
132 | ]
133 | }
134 | ],
135 | "source": [
136 | "forward_noise = 5.0 \n",
137 | "turn_noise = 0.1\n",
138 | "sense_noise = 5.0 # noise variance\n",
139 | "\n",
140 | "myrobot = robot()\n",
141 | "\n",
142 | "myrobot.set_noise(forward_noise, turn_noise, sense_noise)\n",
143 | "myrobot.set(30., 50., pi/2)\n",
144 | "myrobot = myrobot.move(-pi/2, 15.)\n",
145 | "\n",
146 | "print(myrobot.sense())\n",
147 | "\n",
148 | "myrobot = myrobot.move(-pi/2, 10.)\n",
149 | "\n",
150 | "print(myrobot.sense())"
151 | ]
152 | },
153 | {
154 | "cell_type": "markdown",
155 | "metadata": {},
156 | "source": [
157 | "## Particles\n",
158 | "---\n",
159 | "Initialise random robots in the world\n",
160 | "\n",
161 | "### Particle Filter Steps\n",
162 | "---\n",
163 | "1. measure the probability of each particle\n",
164 | "2. resample based on the probability weight\n",
165 | "3. repeatedly move to approach orientation"
166 | ]
167 | },
168 | {
169 | "cell_type": "code",
170 | "execution_count": 38,
171 | "metadata": {},
172 | "outputs": [
173 | {
174 | "data": {
175 | "text/plain": [
176 | "[[x=89.958 y=44.058 orient=0.5153],\n",
177 | " [x=67.425 y=71.905 orient=3.0930],\n",
178 | " [x=11.044 y=60.100 orient=2.3018],\n",
179 | " [x=21.359 y=43.892 orient=4.0847],\n",
180 | " [x=6.6223 y=41.454 orient=0.8310]]"
181 | ]
182 | },
183 | "execution_count": 38,
184 | "metadata": {},
185 | "output_type": "execute_result"
186 | }
187 | ],
188 | "source": [
189 | "N = 1000\n",
190 | "p = []\n",
191 | "\n",
192 | "for _ in range(N):\n",
193 | " x = robot()\n",
194 | " x.set_noise(0.05, 0.05, 5.0) # add noise\n",
195 | " p.append(x)\n",
196 | "\n",
197 | "p[:5]"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 67,
203 | "metadata": {},
204 | "outputs": [
205 | {
206 | "name": "stdout",
207 | "output_type": "stream",
208 | "text": [
209 | "[x=27.984 y=67.990 orient=4.0437]\n"
210 | ]
211 | }
212 | ],
213 | "source": [
214 | "# actual location\n",
215 | "myrobot = robot()\n",
216 | "myrobot = myrobot.move(0.1, 5.0)\n",
217 | "Z = myrobot.sense()\n",
218 | "print(myrobot)"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": 76,
224 | "metadata": {},
225 | "outputs": [
226 | {
227 | "name": "stdout",
228 | "output_type": "stream",
229 | "text": [
230 | "initial err 38.345686399304476\n",
231 | "round 0 err 3.745998779941494\n",
232 | "round 1 err 3.1602952926358405\n",
233 | "round 2 err 3.655242638130221\n",
234 | "round 3 err 5.1982818441233665\n",
235 | "round 4 err 5.663416540263246\n",
236 | "round 5 err 5.964230357642405\n",
237 | "round 6 err 6.598688724498126\n",
238 | "round 7 err 7.455490663989307\n",
239 | "round 8 err 9.468173946683207\n",
240 | "round 9 err 4.393607927352691\n"
241 | ]
242 | }
243 | ],
244 | "source": [
245 | "N = 1000\n",
246 | "T = 10\n",
247 | "\n",
248 | "# initialise randomly guessed particles\n",
249 | "p = []\n",
250 | "for i in range(N):\n",
251 | " x = robot()\n",
252 | " x.set_noise(0.05, 0.05, 5.0)\n",
253 | " p.append(x)\n",
254 | "\n",
255 | "init_err = eval(myrobot, p)\n",
256 | "print(\"initial err\", init_err)\n",
257 | "\n",
258 | "for rd in range(T):\n",
259 | " myrobot = myrobot.move(0.1, 5.0)\n",
260 | " Z = myrobot.sense()\n",
261 | " \n",
262 | " p2 = []\n",
263 | " for i in range(N):\n",
264 | " # turn 0.1 and move 5 meters\n",
265 | " p2.append(p[i].move(0.1, 5.0))\n",
266 | " p = p2\n",
267 | "\n",
268 | " # given the particle's location, how likely measure it as Z\n",
269 | " w = []\n",
270 | "\n",
271 | " for rob in p:\n",
272 | " prob = rob.measurement_prob(Z) # Z remains the same\n",
273 | " w.append(prob)\n",
274 | " \n",
275 | " # resampling particles based on prabability weights\n",
276 | " p3 = []\n",
277 | " index = int(random.random()*N)\n",
278 | " beta = 0\n",
279 | " mw = max(w)\n",
280 | "\n",
281 | " for i in range(N):\n",
282 | " beta += random.random() * 2 * mw\n",
283 | " while beta > w[index]:\n",
284 | " beta -= w[index]\n",
285 | " index = (index + 1)%N\n",
286 | " p3.append(p[index])\n",
287 | " p = p3\n",
288 | " \n",
289 | " # calculate err\n",
290 | " err = eval(myrobot, p)\n",
291 | " print(\"round {} err {}\".format(rd, err))"
292 | ]
293 | },
294 | {
295 | "cell_type": "code",
296 | "execution_count": null,
297 | "metadata": {},
298 | "outputs": [],
299 | "source": []
300 | }
301 | ],
302 | "metadata": {
303 | "kernelspec": {
304 | "display_name": "Python 3",
305 | "language": "python",
306 | "name": "python3"
307 | },
308 | "language_info": {
309 | "codemirror_mode": {
310 | "name": "ipython",
311 | "version": 3
312 | },
313 | "file_extension": ".py",
314 | "mimetype": "text/x-python",
315 | "name": "python",
316 | "nbconvert_exporter": "python",
317 | "pygments_lexer": "ipython3",
318 | "version": "3.6.5"
319 | }
320 | },
321 | "nbformat": 4,
322 | "nbformat_minor": 2
323 | }
324 |
--------------------------------------------------------------------------------
/Localisation/pics/bayes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Localisation/pics/bayes.png
--------------------------------------------------------------------------------
/Localisation/pics/sense-move.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/Localisation/pics/sense-move.png
--------------------------------------------------------------------------------
/Optimisation/adagrad.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def adagrad(inits, X, Y, lr=0.01, n_iter=10, epsilon=1e-6):
5 | n = len(X)
6 | a, b = inits
7 | grad_a, grad_b = lambda x, y: -2 * x * (y - (a * x + b)), lambda x, y: -2 * (y - (a * x + b))
8 | s_a, s_b = 0, 0
9 | a_list, b_list = [a], [b]
10 | a_lr_list, b_lr_list = [], []
11 | for _ in range(n_iter):
12 | for i in range(n):
13 | x_i, y_i = X[i], Y[i]
14 |
15 | s_a += (grad_a(x_i, y_i)) ** 2
16 | s_b += (grad_b(x_i, y_i)) ** 2
17 |
18 | lr_a = lr / np.sqrt(s_a + epsilon)
19 | lr_b = lr / np.sqrt(s_b + epsilon)
20 |
21 | a -= grad_a(x_i, y_i) * lr_a
22 | b -= grad_b(x_i, y_i) * lr_b
23 |
24 | a_lr_list.append(lr_a)
25 | b_lr_list.append(lr_b)
26 | a_list.append(a)
27 | b_list.append(b)
28 | return a_list, b_list, a_lr_list, b_lr_list
29 |
30 |
31 | def adagrad_batch(inits, X, Y, lr=0.01, n_iter=10, batch_size=50, epsilon=1e-6, shuffle=True):
32 | n = len(X)
33 | ind = list(range(n))
34 | a, b = inits
35 | grad_a, grad_b = lambda x, y: -2 * x * (y - (a * x + b)), lambda x, y: -2 * (y - (a * x + b))
36 | s_a, s_b = 0, 0
37 | a_list, b_list = [a], [b]
38 | a_lr_list, b_lr_list = [], []
39 | for _ in range(n_iter):
40 | if shuffle:
41 | np.random.shuffle(ind) # shuffle the index on every iteration
42 | batch_indices = [ind[i:(i + batch_size)] for i in range(0, len(ind), batch_size)]
43 | for indices in batch_indices:
44 | grad_sum_a = 0
45 | grad_sum_b = 0
46 | # each batch compute total gradient
47 | for j in indices:
48 | x_j, y_j = X[j], Y[j]
49 | grad_sum_a += grad_a(x_j, y_j)
50 | grad_sum_b += grad_b(x_j, y_j)
51 | # update on average gradient
52 | grad_avg_a, grad_avg_b = grad_sum_a / batch_size, grad_sum_b / batch_size
53 | s_a += grad_avg_a ** 2
54 | s_b += grad_avg_b ** 2
55 |
56 | lr_a = lr / np.sqrt(s_a + epsilon)
57 | lr_b = lr / np.sqrt(s_b + epsilon)
58 |
59 | a -= grad_avg_a * lr_a
60 | b -= grad_avg_b * lr_b
61 |
62 | a_lr_list.append(lr_a)
63 | b_lr_list.append(lr_b)
64 | a_list.append(a)
65 | b_list.append(b)
66 | return a_list, b_list, a_lr_list, b_lr_list
67 |
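68 | 
69 | if __name__ == "__main__":
70 |     # Minimal usage sketch: fit the synthetic line y = 2x + 3, mirroring the toy
71 |     # data used in sgd.py / vanilla-gd.py; lr and n_iter are illustrative choices.
72 |     x_range = np.linspace(-1, 1, 100)
73 |     y_value = np.array([2 * x + 3 for x in x_range])
74 |     a_list, b_list, a_lrs, b_lrs = adagrad([0.0, 0.0], x_range, y_value, lr=0.5, n_iter=50)
75 |     # Print the final slope/intercept estimates and the last per-parameter learning
76 |     # rates, which shrink as squared gradients accumulate.
77 |     print("adagrad: a={:.3f}, b={:.3f}".format(a_list[-1], b_list[-1]))
78 |     print("effective lr: a={:.4f}, b={:.4f}".format(a_lrs[-1], b_lrs[-1]))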
--------------------------------------------------------------------------------
/Optimisation/adam.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def adam(inits, X, Y, lr=0.01, n_iter=10, beta1=0.9, beta2=0.999, epsilon=1e-6):
5 | n = len(X)
6 | a, b = inits
7 | grad_a, grad_b = lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))
8 | v_a, v_b = 0, 0
9 | s_a, s_b = 0, 0
10 | a_list, b_list = [a], [b]
11 | t = 1
12 | for _ in range(n_iter):
13 | for i in range(n):
14 | x_i, y_i = X[i], Y[i]
15 | g_a, g_b = grad_a(x_i, y_i), grad_b(x_i, y_i)
16 | # compute the first moment
17 | v_a = beta1*v_a + (1-beta1)*g_a
18 | v_b = beta1*v_b + (1-beta1)*g_b
19 | # compute the second moment
20 | s_a = beta2*s_a + (1-beta2)*(g_a**2)
21 | s_b = beta2*s_b + (1-beta2)*(g_b**2)
22 |
23 | # normalisation
24 | v_a_norm, v_b_norm = v_a/(1 - np.power(beta1, t)), v_b/(1 - np.power(beta1, t))
25 | s_a_norm, s_b_norm = s_a/(1 - np.power(beta2, t)), s_b/(1 - np.power(beta2, t))
26 | t += 1
27 |
28 | # update gradient
29 | g_a_norm = lr * v_a_norm / (np.sqrt(s_a_norm) + epsilon)
30 | g_b_norm = lr * v_b_norm / (np.sqrt(s_b_norm) + epsilon)
31 |
32 | # update params
33 | a -= g_a_norm
34 | b -= g_b_norm
35 |
36 | a_list.append(a)
37 | b_list.append(b)
38 | return a_list, b_list
39 |
40 |
41 | def adam_matrix(inits, X, Y, lr=0.01, n_iter=10, beta1=0.9, beta2=0.999, epsilon=1e-6):
42 | n = len(X)
43 | a, b = inits
44 | grad_func = [lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))]
45 | v = np.array([0, 0])
46 | s = np.array([0, 0])
47 | a_list, b_list = [a], [b]
48 |     t = 1  # step count for bias correction, kept across epochs (as in adam above)
49 |     for _ in range(n_iter):
50 | for i in range(n):
51 | x_i, y_i = X[i], Y[i]
52 | grad = np.array([f(x_i, y_i) for f in grad_func])
53 | # compute the first moment
54 | v = beta1 * v + (1-beta1)*grad
55 | # compute the second moment
56 | s = beta2*s + (1-beta2)*(grad**2)
57 |
58 | # normalisation
59 | v_norm = v/(1 - np.power(beta1, t))
60 |             s_norm = s/(1 - np.power(beta2, t))  # bias-correct the second moment with beta2
61 | t += 1
62 |
63 | # update gradient
64 | grad_norm = lr*v_norm/(np.sqrt(s_norm) + epsilon)
65 | # update params
66 | a -= grad_norm[0]
67 | b -= grad_norm[1]
68 |
69 | a_list.append(a)
70 | b_list.append(b)
71 | return a_list, b_list
72 |
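73 | 
74 | if __name__ == "__main__":
75 |     # Minimal usage sketch: drive (a, b) towards (2, 3) on the toy line y = 2x + 3,
76 |     # the same synthetic data used by the other optimisers in this folder.
77 |     # The learning rate and iteration count below are illustrative choices.
78 |     x_range = np.linspace(-1, 1, 100)
79 |     y_value = np.array([2 * x + 3 for x in x_range])
80 |     a_list, b_list = adam([0.0, 0.0], x_range, y_value, lr=0.05, n_iter=50)
81 |     print("adam: a={:.3f}, b={:.3f}".format(a_list[-1], b_list[-1]))
82 |     a_list, b_list = adam_matrix([0.0, 0.0], x_range, y_value, lr=0.05, n_iter=50)
83 |     print("adam_matrix: a={:.3f}, b={:.3f}".format(a_list[-1], b_list[-1]))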
--------------------------------------------------------------------------------
/Optimisation/rmsprop.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def rmsprop(inits, X, Y, lr=0.01, n_iter=10, gamma=0.9, epsilon=1e-6):
5 | n = len(X)
6 | a, b = inits
7 | grad_a, grad_b = lambda x, y: -2 * x * (y - (a * x + b)), lambda x, y: -2 * (y - (a * x + b))
8 | s_a, s_b = 0, 0
9 | a_list, b_list = [a], [b]
10 | a_lr_list, b_lr_list = [], []
11 | for _ in range(n_iter):
12 | for i in range(n):
13 | x_i, y_i = X[i], Y[i]
14 |
15 | s_a = gamma * s_a + (1 - gamma) * (grad_a(x_i, y_i)) ** 2
16 | s_b = gamma * s_b + (1 - gamma) * (grad_b(x_i, y_i)) ** 2
17 |
18 | lr_a = lr / np.sqrt(s_a + epsilon)
19 | lr_b = lr / np.sqrt(s_b + epsilon)
20 |
21 | a -= grad_a(x_i, y_i) * lr_a
22 | b -= grad_b(x_i, y_i) * lr_b
23 |
24 | a_lr_list.append(lr_a)
25 | b_lr_list.append(lr_b)
26 | a_list.append(a)
27 | b_list.append(b)
28 | return a_list, b_list, a_lr_list, b_lr_list
29 |
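30 | 
31 | if __name__ == "__main__":
32 |     # Minimal usage sketch: fit the toy line y = 2x + 3 (same synthetic data as the
33 |     # other optimisers here); lr and n_iter below are illustrative choices.
34 |     x_range = np.linspace(-1, 1, 100)
35 |     y_value = np.array([2 * x + 3 for x in x_range])
36 |     a_list, b_list, a_lrs, b_lrs = rmsprop([0.0, 0.0], x_range, y_value, lr=0.05, n_iter=30)
37 |     # RMSProp keeps a decaying average of squared gradients, so the effective
38 |     # learning rates adapt instead of shrinking monotonically as in AdaGrad.
39 |     print("rmsprop: a={:.3f}, b={:.3f}".format(a_list[-1], b_list[-1]))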
--------------------------------------------------------------------------------
/Optimisation/sgd.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def fx2(x):
5 | return 2*x + 3
6 |
7 |
8 | x_range = np.linspace(-1, 1, 100)
9 | y_value = [fx2(x) for x in x_range]
10 |
11 |
12 | def sgd(inits, X, Y, lr=0.01, n_iter=10):
13 | n = len(X)
14 | ind = list(range(n))
15 | a, b = inits
16 | grad_a, grad_b = lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))
17 | a_list, b_list = [a], [b]
18 | for i in range(n_iter):
19 | np.random.shuffle(ind) # shuffle the index on every iteration
20 | for j in ind:
21 | x_j, y_j = X[j], Y[j]
22 | a -= lr*grad_a(x_j, y_j)
23 | b -= lr*grad_b(x_j, y_j)
24 | a_list.append(a)
25 | b_list.append(b)
26 | return a_list, b_list
27 |
28 |
29 | def sgd_batch(inits, X, Y, lr=0.01, n_iter=10, batch_size=50, shuffle=True):  # mini-batch SGD
30 | n = len(X)
31 | ind = list(range(n))
32 | a, b = inits
33 | grad_a, grad_b = lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))
34 | a_list, b_list = [a], [b]
35 | for i in range(n_iter):
36 | if shuffle:
37 | np.random.shuffle(ind) # shuffle the index on every iteration
38 | batch_indices = [ind[i:(i+batch_size)] for i in range(0, len(ind), batch_size)]
39 | for indices in batch_indices:
40 | grad_sum_a = 0
41 | grad_sum_b = 0
42 | for j in indices:
43 | x_j, y_j = X[j], Y[j]
44 | grad_sum_a += grad_a(x_j, y_j)
45 | grad_sum_b += grad_b(x_j, y_j)
46 | a -= lr*grad_sum_a/batch_size
47 | b -= lr*grad_sum_b/batch_size
48 | a_list.append(a)
49 | b_list.append(b)
50 | return a_list, b_list
51 |
52 |
53 | def sgd_mom(inits, X, Y, lr=0.01, n_iter=10, gamma=0.9):
54 | n = len(X)
55 | ind = list(range(n))
56 | a, b = inits
57 | grad_a, grad_b = lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))
58 | v_a, v_b = 0, 0
59 | a_list, b_list = [a], [b]
60 | for i in range(n_iter):
61 | np.random.shuffle(ind) # shuffle the index on every iteration
62 | for j in ind:
63 | x_j, y_j = X[j], Y[j]
64 | # update momentum
65 | v_a = gamma*v_a + lr*grad_a(x_j, y_j)
66 | v_b = gamma*v_b + lr*grad_b(x_j, y_j)
67 | # update params
68 | a -= v_a
69 | b -= v_b
70 | a_list.append(a)
71 | b_list.append(b)
72 | return a_list, b_list
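73 | 
74 | 
75 | # Illustrative comparison (added example, not in the original script): run the three
76 | # variants on the toy line y = 2x + 3 defined above and inspect the final estimates.
77 | a1, b1 = sgd([0.0, 0.0], x_range, y_value, lr=0.05, n_iter=30)
78 | a2, b2 = sgd_minibatch([0.0, 0.0], x_range, y_value, lr=0.1, n_iter=30, batch_size=20)
79 | a3, b3 = sgd_mom([0.0, 0.0], x_range, y_value, lr=0.01, n_iter=30)
80 | print(a1[-1], b1[-1])  # per-sample SGD: should approach a=2, b=3
81 | print(a2[-1], b2[-1])  # mini-batch SGD
82 | print(a3[-1], b3[-1])  # SGD with momentum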
--------------------------------------------------------------------------------
/Optimisation/vanilla-gd.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 |
4 |
5 | def fx(x):
6 | return x**2
7 |
8 |
9 | x_range = np.linspace(-1, 1, 100)
10 | y_value = [fx(x) for x in x_range]
11 |
12 |
13 | def gd(init_x, grad_fn, lr=0.01, n_iter=10):
14 | x = init_x
15 | x_list = [x]
16 | for i in range(n_iter):
17 | x -= lr*grad_fn(x)
18 | x_list.append(x)
19 | return x_list
20 |
21 |
22 | init_x = -1
23 | grad_fn = lambda x: 2*x
24 |
25 | x_list = gd(init_x, grad_fn, lr=0.02, n_iter=100)
26 |
27 |
28 | def fx2(x):
29 | return 2*x + 3
30 |
31 |
32 | x_range = np.linspace(-1, 1, 100)
33 | y_value = [fx2(x) for x in x_range]
34 |
35 |
36 | def gd2(inits, X, Y, lr=0.01, n_iter=10):
37 | n = len(X)
38 | a, b = inits
39 | grad_a, grad_b = lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))
40 | a_list, b_list = [a], [b]
41 | for i in range(n_iter):
42 | for j in range(n):
43 | x_j, y_j = X[j], Y[j]
44 | a -= lr*grad_a(x_j, y_j)
45 | b -= lr*grad_b(x_j, y_j)
46 | a_list.append(a)
47 | b_list.append(b)
48 | return a_list, b_list
49 |
50 |
51 | inits = [0, 0]
52 | a_list, b_list = gd2(inits, x_range, y_value, n_iter=10)
53 |
54 |
55 | def gd3(inits, X, Y, lr=0.01, n_iter=10):
56 | n = len(X)
57 | a, b = inits
58 | grad_a, grad_b = lambda x, y: -2*x*(y-(a*x+b)), lambda x, y: -2*(y-(a*x+b))
59 | a_list, b_list = [a], [b]
60 | for i in range(n_iter):
61 | grad_sum_a = 0
62 | grad_sum_b = 0
63 | for j in range(n):
64 | x_j, y_j = X[j], Y[j]
65 | grad_sum_a += grad_a(x_j, y_j)
66 | grad_sum_b += grad_b(x_j, y_j)
67 | a -= lr*grad_sum_a/n
68 | b -= lr*grad_sum_b/n
69 | a_list.append(a)
70 | b_list.append(b)
71 | return a_list, b_list
72 |
73 |
74 | def plot_gd(a_list, b_list):
75 | plt.figure(figsize=[8, 4])
76 | plt.plot(range(len(a_list)), a_list, label="a")
77 | plt.plot(range(len(b_list)), b_list, label="b")
78 | plt.xlabel("n_iteration")
79 | plt.legend()
80 |
81 |
82 | inits = [0, 0]
83 | a_list, b_list = gd3(inits, x_range, y_value, lr=0.1, n_iter=100)
84 |
85 | plot_gd(a_list, b_list)
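86 | 
87 | # Added for completeness (not in the original script): when run as a plain Python
88 | # script the figure is only displayed if show() is called; a notebook with an inline
89 | # backend renders it automatically.
90 | plt.show()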
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Machine-Learning-Models
2 | ---
3 | Implementations of relevant machine learning techniques and algorithms, organised as self-contained Jupyter notebooks and small Python scripts.
--------------------------------------------------------------------------------
/Search/A-star.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {
7 | "code_folding": []
8 | },
9 | "outputs": [],
10 | "source": [
11 | "grid = [[0, 1, 0, 0, 0, 0],\n",
12 | " [0, 1, 0, 0, 0, 0],\n",
13 | " [0, 1, 0, 0, 0, 0],\n",
14 | " [0, 1, 0, 0, 0, 0],\n",
15 | " [0, 0, 0, 0, 1, 0]]\n",
16 | "heuristic = [[9, 8, 7, 6, 5, 4],\n",
17 | " [8, 7, 6, 5, 4, 3],\n",
18 | " [7, 6, 5, 4, 3, 2],\n",
19 | " [6, 5, 4, 3, 2, 1],\n",
20 | " [5, 4, 3, 2, 1, 0]]\n",
21 | "\n",
22 | "init = [0, 0]\n",
23 | "goal = [len(grid)-1, len(grid[0])-1]\n",
24 | "cost = 1\n",
25 | "\n",
26 | "delta = [[-1, 0 ], # go up\n",
27 | " [ 0, -1], # go left\n",
28 | " [ 1, 0 ], # go down\n",
29 | " [ 0, 1 ]] # go right\n",
30 | "\n",
31 | "delta_name = ['^', '<', 'v', '>']"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 4,
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "def search(grid, init, goal, cost, heuristic):\n",
41 | " closed = [[0 for col in range(len(grid[0]))] for row in range(len(grid))]\n",
42 | " closed[init[0]][init[1]] = 1\n",
43 | "\n",
44 | " expand = [[-1 for col in range(len(grid[0]))] for row in range(len(grid))]\n",
45 | " action = [[-1 for col in range(len(grid[0]))] for row in range(len(grid))]\n",
46 | "\n",
47 | " x = init[0]\n",
48 | " y = init[1]\n",
49 | " g = 0\n",
50 | " h = heuristic[x][y]\n",
51 | " f = g + h\n",
52 | "\n",
53 | " open = [[f, g, x, y]]\n",
54 | "\n",
55 | " found = False # flag that is set when search is complete\n",
56 | "    resign = False # flag set if there is nothing left to expand (no path exists)\n",
57 | " count = 0\n",
58 | " \n",
59 | " while not found and not resign:\n",
60 | " if len(open) == 0:\n",
61 | " resign = True\n",
62 | " return \"Fail\"\n",
63 | " else:\n",
64 | " open.sort()\n",
65 | " open.reverse()\n",
66 | " next = open.pop()\n",
67 | " x = next[2]\n",
68 | " y = next[3]\n",
69 | " g = next[1]\n",
70 | " expand[x][y] = count\n",
71 | " count += 1\n",
72 | " \n",
73 | " if x == goal[0] and y == goal[1]:\n",
74 | " found = True\n",
75 | " else:\n",
76 | " for i in range(len(delta)):\n",
77 | " x2 = x + delta[i][0]\n",
78 | " y2 = y + delta[i][1]\n",
79 | " if x2 >= 0 and x2 < len(grid) and y2 >=0 and y2 < len(grid[0]):\n",
80 | " if closed[x2][y2] == 0 and grid[x2][y2] == 0:\n",
81 | " h2 = heuristic[x2][y2]\n",
82 | " g2 = g + cost\n",
83 | " f2 = g2 + h2\n",
84 | " open.append([f2, g2, x2, y2])\n",
85 | " closed[x2][y2] = 1\n",
86 | "\n",
87 | " return expand"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": 5,
93 | "metadata": {},
94 | "outputs": [
95 | {
96 | "data": {
97 | "text/plain": [
98 | "[[0, -1, -1, -1, -1, -1],\n",
99 | " [1, -1, -1, -1, -1, -1],\n",
100 | " [2, -1, -1, -1, -1, -1],\n",
101 | " [3, -1, 8, 9, 10, 11],\n",
102 | " [4, 5, 6, 7, -1, 12]]"
103 | ]
104 | },
105 | "execution_count": 5,
106 | "metadata": {},
107 | "output_type": "execute_result"
108 | }
109 | ],
110 | "source": [
111 | "search(grid, init, goal, cost, heuristic)"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 7,
117 | "metadata": {},
118 | "outputs": [
119 | {
120 | "data": {
121 | "text/plain": [
122 | "[[0, 1, 3, 5, 7, 9],\n",
123 | " [2, -1, -1, -1, -1, 10],\n",
124 | " [4, -1, -1, -1, -1, 11],\n",
125 | " [6, -1, -1, -1, -1, 12],\n",
126 | " [8, -1, -1, -1, -1, 13]]"
127 | ]
128 | },
129 | "execution_count": 7,
130 | "metadata": {},
131 | "output_type": "execute_result"
132 | }
133 | ],
134 | "source": [
135 | "grid = [[0, 0, 0, 0, 0, 0],\n",
136 | " [0, 1, 1, 1, 1, 0],\n",
137 | " [0, 1, 0, 0, 0, 0],\n",
138 | " [0, 1, 0, 0, 0, 0],\n",
139 | " [0, 1, 0, 0, 1, 0]]\n",
140 | "\n",
141 | "search(grid, init, goal, cost, heuristic)"
142 | ]
143 | },
144 | {
145 | "cell_type": "code",
146 | "execution_count": null,
147 | "metadata": {},
148 | "outputs": [],
149 | "source": []
150 | }
151 | ],
152 | "metadata": {
153 | "kernelspec": {
154 | "display_name": "Python 3",
155 | "language": "python",
156 | "name": "python3"
157 | },
158 | "language_info": {
159 | "codemirror_mode": {
160 | "name": "ipython",
161 | "version": 3
162 | },
163 | "file_extension": ".py",
164 | "mimetype": "text/x-python",
165 | "name": "python",
166 | "nbconvert_exporter": "python",
167 | "pygments_lexer": "ipython3",
168 | "version": "3.6.5"
169 | }
170 | },
171 | "nbformat": 4,
172 | "nbformat_minor": 2
173 | }
174 |
--------------------------------------------------------------------------------
/Search/dynamic-programming.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from copy import deepcopy"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 2,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "# If a cell is a wall or it is impossible to reach the goal from a cell,\n",
19 | "# assign that cell a value of 99.\n",
20 | "# ----------\n",
21 | "\n",
22 | "grid = [[0, 1, 0, 0, 0, 0],\n",
23 | " [0, 1, 0, 0, 0, 0],\n",
24 | " [0, 1, 0, 0, 0, 0],\n",
25 | " [0, 1, 0, 0, 0, 0],\n",
26 | " [0, 0, 0, 0, 1, 0]]\n",
27 | "goal = [len(grid)-1, len(grid[0])-1]\n",
28 | "cost = 1 # the cost associated with moving from a cell to an adjacent one\n",
29 | "\n",
30 | "delta = [[-1, 0 ], # go up\n",
31 | " [ 0, -1], # go left\n",
32 | " [ 1, 0 ], # go down\n",
33 | " [ 0, 1 ]] # go right\n",
34 | "\n",
35 | "delta_name = ['^', '<', 'v', '>']"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 16,
41 | "metadata": {},
42 | "outputs": [],
43 | "source": [
44 | "def compute_value(grid, goal, cost):\n",
45 | " level = 1\n",
46 | " value = [[99 for col in range(len(grid[0]))] for row in range(len(grid))]\n",
47 | " value[goal[0]][goal[1]] = level\n",
48 | " open = [[level, goal[0], goal[1]]]\n",
49 | "\n",
50 | " resign = False\n",
51 | " while not resign:\n",
52 | " if len(open) == 0:\n",
53 | " resign = True\n",
54 | " break\n",
55 | " else:\n",
56 | " open.sort()\n",
57 | " open.reverse()\n",
58 | " next = open.pop()\n",
59 | " level = next[0] + 1\n",
60 | " x = next[1]\n",
61 | " y = next[2]\n",
62 | " \n",
63 | " for i in range(len(delta)):\n",
64 | " x2 = x + delta[i][0]\n",
65 | " y2 = y + delta[i][1]\n",
66 | " if x2 >= 0 and x2 < len(grid) and y2 >=0 and y2 < len(grid[0]):\n",
67 | " if value[x2][y2] == 99 and grid[x2][y2] == 0:\n",
68 | " open.append([level, x2, y2])\n",
69 | " value[x2][y2] = level\n",
70 | " \n",
71 | " # get optimal policy by choosing path that reduces level score\n",
72 | " policy = [[\" \" for j in grid[0]] for i in grid]\n",
73 | " x, y = goal[0], goal[1]\n",
74 | " policy[x][y] = \"*\"\n",
75 | " \n",
76 | " for x in range(len(value)):\n",
77 | " for y in range(len(value[0])):\n",
78 | " cur_level = value[x][y]\n",
79 | " if cur_level == 99:\n",
80 | " continue\n",
81 | " else:\n",
82 | " for i in range(len(delta)):\n",
83 | " d = delta[i]\n",
84 | " x2, y2 = x+d[0], y+d[1]\n",
85 | " if x2 >= 0 and x2 < len(value) and y2 >=0 and y2 < len(value[0]):\n",
86 | "                        nxt_level = value[x2][y2]\n",
87 | "                        if nxt_level < cur_level:\n",
88 | " policy[x][y] = delta_name[i]\n",
89 | " break\n",
90 | " \n",
91 | "\n",
92 | " return policy"
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": 17,
98 | "metadata": {},
99 | "outputs": [
100 | {
101 | "data": {
102 | "text/plain": [
103 | "[['v', ' ', 'v', 'v', 'v', 'v'],\n",
104 | " ['v', ' ', 'v', 'v', 'v', 'v'],\n",
105 | " ['v', ' ', 'v', 'v', 'v', 'v'],\n",
106 | " ['v', ' ', '>', '>', '>', 'v'],\n",
107 | " ['>', '>', '^', '^', ' ', '*']]"
108 | ]
109 | },
110 | "execution_count": 17,
111 | "metadata": {},
112 | "output_type": "execute_result"
113 | }
114 | ],
115 | "source": [
116 | "compute_value(grid, goal, cost)"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 18,
122 | "metadata": {},
123 | "outputs": [
124 | {
125 | "data": {
126 | "text/plain": [
127 | "[['v', 'v', ' ', 'v', 'v', 'v'],\n",
128 | " ['v', 'v', ' ', 'v', 'v', 'v'],\n",
129 | " ['v', 'v', ' ', '>', '>', 'v'],\n",
130 | " ['>', '>', '>', '^', ' ', 'v'],\n",
131 | " ['^', '^', ' ', ' ', ' ', 'v'],\n",
132 | " ['^', '^', '<', '<', ' ', '*']]"
133 | ]
134 | },
135 | "execution_count": 18,
136 | "metadata": {},
137 | "output_type": "execute_result"
138 | }
139 | ],
140 | "source": [
141 | "grid = [[0, 0, 1, 0, 0, 0],\n",
142 | " [0, 0, 1, 0, 0, 0],\n",
143 | " [0, 0, 1, 0, 0, 0],\n",
144 | " [0, 0, 0, 0, 1, 0],\n",
145 | " [0, 0, 1, 1, 1, 0],\n",
146 | " [0, 0, 0, 0, 1, 0]]\n",
147 | "goal = [len(grid)-1, len(grid[0])-1]\n",
148 | "cost = 1 \n",
149 | "\n",
150 | "compute_value(grid, goal, cost)"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": null,
156 | "metadata": {},
157 | "outputs": [],
158 | "source": []
159 | }
160 | ],
161 | "metadata": {
162 | "kernelspec": {
163 | "display_name": "Python 3",
164 | "language": "python",
165 | "name": "python3"
166 | },
167 | "language_info": {
168 | "codemirror_mode": {
169 | "name": "ipython",
170 | "version": 3
171 | },
172 | "file_extension": ".py",
173 | "mimetype": "text/x-python",
174 | "name": "python",
175 | "nbconvert_exporter": "python",
176 | "pygments_lexer": "ipython3",
177 | "version": "3.6.5"
178 | }
179 | },
180 | "nbformat": 4,
181 | "nbformat_minor": 2
182 | }
183 |
--------------------------------------------------------------------------------
/Search/first_search.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from copy import deepcopy"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 2,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "grid = [[0, 0, 1, 0, 0, 0],\n",
19 | " [0, 0, 1, 0, 0, 0],\n",
20 | " [0, 0, 0, 0, 1, 0],\n",
21 | " [0, 0, 1, 1, 1, 0],\n",
22 | " [0, 0, 0, 0, 1, 0]]\n",
23 | "init = [0, 0]\n",
24 | "goal = [len(grid)-1, len(grid[0])-1]\n",
25 | "cost = 1\n",
26 | "\n",
27 | "delta = [[-1, 0], # go up\n",
28 | " [ 0,-1], # go left\n",
29 | " [ 1, 0], # go down\n",
30 | " [ 0, 1]] # go right\n",
31 | "\n",
32 | "delta_name = ['^', '<', 'v', '>']"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": 7,
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "def search(grid, init, goal, cost):\n",
42 | " close = deepcopy(grid)\n",
43 | "    close[init[0]][init[1]] = 1\n",
44 | " \n",
45 | " open_list = [[0, init[0], init[1]]]\n",
46 | " \n",
47 | " action = [[-1 for j in grid[0]] for i in grid]\n",
48 | " while 1:\n",
49 | " if len(open_list) == 0:\n",
50 | " return \"failed\"\n",
51 | " \n",
52 | " open_list.sort()\n",
53 | " open_list.reverse()\n",
54 | " cur_g, cur_x, cur_y = open_list.pop() # pop the one with the lowest g value\n",
55 | " \n",
56 | " if [cur_x, cur_y] == goal:\n",
57 | " print(\"goal found\", [cur_g, cur_x, cur_y])\n",
58 | " for i in action:\n",
59 | " print(i)\n",
60 | " break\n",
61 | " \n",
62 | " # expanding\n",
63 | " for d in delta:\n",
64 | " nxt_x, nxt_y = cur_x + d[0], cur_y + d[1]\n",
65 | " if (nxt_x >= 0 and nxt_x < len(grid)) and (nxt_y >= 0 and nxt_y < len(grid[0])):\n",
66 | " if close[nxt_x][nxt_y] == 0:\n",
67 | " nxt_g = cur_g + cost\n",
68 | " close[nxt_x][nxt_y] = 1\n",
69 | " open_list.append([nxt_g, nxt_x, nxt_y])\n",
70 | " \n",
71 | " action[nxt_x][nxt_y] = delta.index(d)\n",
72 | " \n",
73 | " policy = [[\" \" for j in grid[0]] for i in grid]\n",
74 | " x, y = goal[0], goal[1]\n",
75 | " policy[x][y] = \"*\"\n",
76 | " while x != init[0] or y != init[1]:\n",
77 | " # go reverse\n",
78 | " x2 = x - delta[action[x][y]][0] \n",
79 | " y2 = y - delta[action[x][y]][1]\n",
80 | " policy[x2][y2] = delta_name[action[x][y]]\n",
81 | " x = x2\n",
82 | " y = y2\n",
83 | " for i in policy:\n",
84 | " print(i)"
85 | ]
86 | },
87 | {
88 | "cell_type": "code",
89 | "execution_count": 8,
90 | "metadata": {},
91 | "outputs": [
92 | {
93 | "name": "stdout",
94 | "output_type": "stream",
95 | "text": [
96 | "goal found [11, 4, 5]\n",
97 | "[-1, 3, -1, 0, 3, 3]\n",
98 | "[2, 2, -1, 0, 3, 3]\n",
99 | "[2, 2, 3, 3, -1, 2]\n",
100 | "[2, 2, -1, -1, -1, 2]\n",
101 | "[2, 2, 3, 3, -1, 2]\n",
102 | "['>', 'v', ' ', ' ', ' ', ' ']\n",
103 | "[' ', 'v', ' ', '>', '>', 'v']\n",
104 | "[' ', '>', '>', '^', ' ', 'v']\n",
105 | "[' ', ' ', ' ', ' ', ' ', 'v']\n",
106 | "[' ', ' ', ' ', ' ', ' ', '*']\n"
107 | ]
108 | }
109 | ],
110 | "source": [
111 | "search(grid, init, goal, 1)"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": null,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": []
120 | }
121 | ],
122 | "metadata": {
123 | "kernelspec": {
124 | "display_name": "Python 3",
125 | "language": "python",
126 | "name": "python3"
127 | },
128 | "language_info": {
129 | "codemirror_mode": {
130 | "name": "ipython",
131 | "version": 3
132 | },
133 | "file_extension": ".py",
134 | "mimetype": "text/x-python",
135 | "name": "python",
136 | "nbconvert_exporter": "python",
137 | "pygments_lexer": "ipython3",
138 | "version": "3.6.5"
139 | }
140 | },
141 | "nbformat": 4,
142 | "nbformat_minor": 2
143 | }
144 |
--------------------------------------------------------------------------------
/UNet/model-tgs-salt.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/UNet/model-tgs-salt.h5
--------------------------------------------------------------------------------
/Uplift/blift-spark.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import os\n",
10 | "import pandas as pd\n",
11 | "\n",
12 | "from pyspark.sql import SparkSession\n",
13 | "from pyspark.ml.classification import GBTClassifier\n",
14 | "from pyspark.ml.regression import GBTRegressor\n",
15 | "from pyspark.ml.feature import VectorAssembler\n",
16 | "from pyspark.ml.feature import OneHotEncoder, StringIndexer\n",
17 | "\n",
18 | "from bytecausal.metrics import SparkPercentileGain, LocalAUUCScorer, LocalQiniScorer\n",
19 | "from bytecausal.models.inference.dml.spark import SparkDMLEstimator\n",
20 | "from pylift.generate_data import dgp"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 2,
26 | "metadata": {},
27 | "outputs": [
28 | {
29 | "data": {
101 | "text/plain": [
102 | " 0 1 2 Treatment Outcome\n",
103 | "0 0.276367 0.256155 0.279303 1.0 1.0\n",
104 | "1 0.715732 0.905237 0.169450 0.0 1.0\n",
105 | "2 0.724559 0.507140 0.977195 0.0 1.0\n",
106 | "3 0.220467 0.767321 0.153159 1.0 1.0\n",
107 | "4 0.884408 0.656192 0.311776 0.0 1.0"
108 | ]
109 | },
110 | "execution_count": 2,
111 | "metadata": {},
112 | "output_type": "execute_result"
113 | }
114 | ],
115 | "source": [
116 | "pd_data = dgp(N=10000, discrete_outcome=True)\n",
117 | "\n",
118 | "Y = 'Outcome'\n",
119 | "T = 'Treatment'\n",
120 | "pd_data.head()"
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": 3,
126 | "metadata": {},
127 | "outputs": [
128 | {
129 | "ename": "Exception",
130 | "evalue": "Java gateway process exited before sending its port number",
131 | "output_type": "error",
132 | "traceback": [
133 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
134 | "\u001b[0;31mException\u001b[0m Traceback (most recent call last)",
135 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mspark\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSparkSession\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuilder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetOrCreate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mspark_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mspark\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreateDataFrame\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpd_data\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mspark_data\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshow\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
136 | "\u001b[0;32m/usr/local/lib/python3.9/site-packages/pyspark/sql/session.py\u001b[0m in \u001b[0;36mgetOrCreate\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 226\u001b[0m \u001b[0msparkConf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 227\u001b[0m \u001b[0;31m# This SparkContext may be an existing one.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 228\u001b[0;31m \u001b[0msc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgetOrCreate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msparkConf\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 229\u001b[0m \u001b[0;31m# Do not update `SparkConf` for existing `SparkContext`, as it's shared\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 230\u001b[0m \u001b[0;31m# by all sessions.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
137 | "\u001b[0;32m/usr/local/lib/python3.9/site-packages/pyspark/context.py\u001b[0m in \u001b[0;36mgetOrCreate\u001b[0;34m(cls, conf)\u001b[0m\n\u001b[1;32m 382\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 383\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_active_spark_context\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 384\u001b[0;31m \u001b[0mSparkContext\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconf\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconf\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mSparkConf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 385\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_active_spark_context\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 386\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
138 | "\u001b[0;32m/usr/local/lib/python3.9/site-packages/pyspark/context.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)\u001b[0m\n\u001b[1;32m 142\u001b[0m \" is not allowed as it is a security risk.\")\n\u001b[1;32m 143\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 144\u001b[0;31m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ensure_initialized\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgateway\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgateway\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconf\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconf\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 145\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 146\u001b[0m self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,\n",
139 | "\u001b[0;32m/usr/local/lib/python3.9/site-packages/pyspark/context.py\u001b[0m in \u001b[0;36m_ensure_initialized\u001b[0;34m(cls, instance, gateway, conf)\u001b[0m\n\u001b[1;32m 329\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_lock\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 330\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_gateway\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 331\u001b[0;31m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_gateway\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgateway\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mlaunch_gateway\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconf\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 332\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_jvm\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSparkContext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_gateway\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjvm\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 333\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
140 | "\u001b[0;32m/usr/local/lib/python3.9/site-packages/pyspark/java_gateway.py\u001b[0m in \u001b[0;36mlaunch_gateway\u001b[0;34m(conf, popen_kwargs)\u001b[0m\n\u001b[1;32m 106\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 107\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0misfile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconn_info_file\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 108\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Java gateway process exited before sending its port number\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 109\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 110\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconn_info_file\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"rb\"\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0minfo\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
141 | "\u001b[0;31mException\u001b[0m: Java gateway process exited before sending its port number"
142 | ]
143 | }
144 | ],
145 | "source": [
146 | "spark = SparkSession.builder.getOrCreate()\n",
147 | "spark_data = spark.createDataFrame(pd_data)\n",
148 | "spark_data.show()"
149 | ]
150 | },
151 | {
152 | "cell_type": "code",
153 | "execution_count": null,
154 | "metadata": {},
155 | "outputs": [],
156 | "source": []
157 | }
158 | ],
159 | "metadata": {
160 | "kernelspec": {
161 | "display_name": "python-3.9",
162 | "language": "python",
163 | "name": "python-3.9"
164 | },
165 | "language_info": {
166 | "codemirror_mode": {
167 | "name": "ipython",
168 | "version": 3
169 | },
170 | "file_extension": ".py",
171 | "mimetype": "text/x-python",
172 | "name": "python",
173 | "nbconvert_exporter": "python",
174 | "pygments_lexer": "ipython3",
175 | "version": "3.9.5"
176 | }
177 | },
178 | "nbformat": 4,
179 | "nbformat_minor": 4
180 | }
181 |
--------------------------------------------------------------------------------
/quantile-regression/pinball_loss.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MJeremy2017/machine-learning-models/e8b81f3172d5e309e977fe51ac5c812b650ccd80/quantile-regression/pinball_loss.png
--------------------------------------------------------------------------------