├── .DS_Store
├── .gitattributes
├── .ipynb_checkpoints
├── Boston Dataset-checkpoint.ipynb
├── Diabetes-checkpoint.ipynb
├── Iris-checkpoint.ipynb
├── Linear Regression-checkpoint.ipynb
├── Modules-checkpoint.ipynb
├── Plotting-checkpoint.ipynb
├── Practice-checkpoint.ipynb
├── Python basics-checkpoint.ipynb
├── Titanic Dataset-checkpoint.ipynb
└── Untitled-checkpoint.ipynb
├── Module-01-Python-Basics
├── .DS_Store
├── Anaconda-Installation-Guide.pdf
├── Module-1-Class-Notes.pdf
├── Module-1-Handout.pdf
└── Module-1-Instructor-Notebook.ipynb
├── Module-02-Conditionals-Loops-and-Functions
├── .DS_Store
├── All-Prime-Numbers.py
├── Decimal-Binary.py
├── Even-Fibonacci-Numbers.py
├── Module-2-Class-Notes.pdf
├── Module-2-Handout.pdf
├── Number-Pattern.py
├── Reverse-Every-Word.py
├── Reversing-Series-Pattern.py
└── Trailing-Zeros.py
├── Module-03-Lists-and-Dictionaries
├── .DS_Store
├── Equilibium-Index.py
├── Largest-Unique-Substirng.py
├── Leaders-in-Array.py
├── Maximise-the-sum.py
├── Module-3-Class-Notes.pdf
├── Module-3-Handout.pdf
├── Module-3-Instructor-Notebook.ipynb
├── Reverse-String-Word-Wise.py
└── Selection-Sort.py
├── Module-04-2DLists-and-Numpy
├── .DS_Store
├── Largest-Row-or-Column.py
├── Module-4-Class-Notes.pdf
├── Module-4-Handout.pdf
├── Module-4-Numpy-Notebook.ipynb
└── Spiral-Print.py
├── Module-05-Pandas
├── .DS_Store
├── .ipynb_checkpoints
│ ├── Iris-Workbook-checkpoint.ipynb
│ └── Titanic-Workbook-checkpoint.ipynb
├── Iris-Workbook.ipynb
├── Module-5-Class-Notes.pdf
├── Module-5-Handout.pdf
├── Module-5-Instructor-Notebooks
│ ├── .DS_Store
│ ├── .ipynb_checkpoints
│ │ └── Module-5-Notebook-1-checkpoint.ipynb
│ ├── Module-5-Notebook-1.ipynb
│ └── Module-5-Notebook-2.ipynb
├── Titanic-Features-Description.png
├── Titanic-Workbook.ipynb
├── Titanic_cleaned.csv
└── titanic_train.csv
├── Module-06-Plotting-Graphs
├── .DS_Store
├── .ipynb_checkpoints
│ └── Plotting-graphs-Workbook-checkpoint.ipynb
├── Exploded Pie-graph.png
├── Module-6-Class-Notes.pdf
├── Module-6-Handout.pdf
├── Module-6-Instructor-Notebook.ipynb
├── Pie-graph.png
├── Plotting-graphs-Workbook.ipynb
└── comparative-plots.png
├── Module-07-Introduction-to-Machine-Learning
├── .DS_Store
├── .ipynb_checkpoints
│ ├── Boston-Workbook-checkpoint.ipynb
│ └── Diabetes-Workbook-checkpoint.ipynb
├── Boston-Workbook.ipynb
├── Boston.png
├── Diabetes-Workbook.ipynb
├── Diabetes.png
├── Module-7-Class-Notes-Practical.pdf
├── Module-7-Class-Notes-Theory.pdf
└── Module-7-Handout.pdf
├── Module-08-Linear-Regression
├── .DS_Store
├── .ipynb_checkpoints
│ ├── Diabetes-Linear-Regression-Workbook-checkpoint.ipynb
│ ├── LR-Dummy-Data-Workbook-checkpoint.ipynb
│ ├── Linear-Regression-Single-Feature-Workbook-checkpoint.ipynb
│ └── Linear-Regression-Two-Feature-Workbook-checkpoint.ipynb
├── Diabetes-Linear-Regression-Workbook.ipynb
├── Diabetes-Test.csv
├── Diabetes-Train.csv
├── Dummy-data-testing.png
├── Dummy-data-training.png
├── Instructor-Notebook-Linear-Regression-Dummy-Data
│ ├── data.csv
│ └── linear_regression_on_dummy.ipynb
├── LR-Dummy-Data-Workbook.ipynb
├── Linear-Regression-Single-Feature-Workbook.ipynb
├── Linear-Regression-Two-Feature-Workbook.ipynb
├── Module-8-Instructor-Notes.pdf
├── Module-8-Linear-Regression-2-Feature-Derivation.pdf
├── Module-8-Linear-Regression-Coding.pdf
├── Module-8-Linear-Regression-Theory.pdf
├── dummy_data.csv
├── pred.csv
├── pred2.csv
├── pred3.csv
├── pred4.csv
└── pred5.csv
├── Module-09-Multivariate-Regression-and-Gradient-Descent
├── .DS_Store
├── .ipynb_checkpoints
│ ├── Boston-Dummy-Feature-2-Degree-Workbook-checkpoint.ipynb
│ ├── Dummy-Feature-Workbook-checkpoint.ipynb
│ ├── Gradient-Descent-1-Feature-Workbook-checkpoint.ipynb
│ ├── Gradient-Descent-N-feature-Boston-Workbook-checkpoint.ipynb
│ └── Gradient-Descent-N-feature-Diabetes-Workbook-checkpoint.ipynb
├── 1-Feature-Gradient-Descent-Learning-Process.png
├── Boston-Dummy-Feature-2-Degree-Workbook.ipynb
├── Dummy-Feature-Workbook.ipynb
├── Gradient-Descent-1-Feature-Workbook.ipynb
├── Gradient-Descent-N-feature-Boston-Workbook.ipynb
├── Gradient-Descent-N-feature-Diabetes-Workbook.ipynb
├── Lecture-9-Multivariate-Regression-and-Gradient-Descent-Coding.pdf
├── Lecture-9-Multivariate-Regression-and-Gradient-Descent-Theory.pdf
├── Module-9-Multivariable-Regression-Gradient-Descent-Instructor-Notes.pdf
└── dummy_data.csv
├── Module-10-Project-Gradient-Descent
├── .DS_Store
├── .ipynb_checkpoints
│ └── Gradient-Descent-Boston-Workbook-checkpoint.ipynb
├── Boston-Gradient-Descent
│ ├── .DS_Store
│ ├── .ipynb_checkpoints
│ │ ├── Gradient-Descent-Boston-Dummy-Features-Heterogeneous-Workbook-checkpoint.ipynb
│ │ ├── Gradient-Descent-Boston-Dummy-Features-Homogeneous-Regularisation-Workbook-checkpoint.ipynb
│ │ ├── Gradient-Descent-Boston-Dummy-Features-Homogeneous-Workbook-checkpoint.ipynb
│ │ └── Gradient-Descent-Boston-Workbook-checkpoint.ipynb
│ ├── Boston-Testing-Data.csv
│ ├── Boston-Training-Data.csv
│ ├── Gradient-Descent-Boston-Dummy-Features-Heterogeneous-Workbook.ipynb
│ ├── Gradient-Descent-Boston-Dummy-Features-Homogeneous-Regularisation-Workbook.ipynb
│ ├── Gradient-Descent-Boston-Dummy-Features-Homogeneous-Workbook.ipynb
│ ├── Gradient-Descent-Boston-Workbook.ipynb
│ ├── pred.csv
│ ├── pred_dummy_features_feature_scaling_hetero.csv
│ ├── pred_dummy_features_homogeneous_feature_scaling.csv
│ ├── pred_dummy_features_homogeneous_feature_scaling_reg.csv
│ └── pred_feature_scaling.csv
└── Combined-Cycle-Power-Plant
│ ├── .DS_Store
│ ├── .ipynb_checkpoints
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Dummy-Features-Heterogeneous-Workbook-checkpoint.ipynb
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Dummy-Features-Homogeneous-Regularisation-Workbook-checkpoint.ipynb
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Dummy-Features-Homogeneous-Workbook-checkpoint.ipynb
│ └── Gradient-Descent-Combined-Cycle-Power-Plant-Workbook-checkpoint.ipynb
│ ├── Combined-Cycle-Power-Plant-Testing-Data.csv
│ ├── Combined-Cycle-Power-Plant-Training-Data.csv
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Dummy-Features-Heterogeneous-Workbook.ipynb
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Dummy-Features-Homogeneous-Regularisation-Workbook.ipynb
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Dummy-Features-Homogeneous-Workbook.ipynb
│ ├── Gradient-Descent-Combined-Cycle-Power-Plant-Workbook.ipynb
│ ├── README.md
│ ├── pred.csv
│ ├── pred_dummy_features_feature_scaling-2.csv
│ ├── pred_dummy_features_feature_scaling.csv
│ ├── pred_dummy_features_feature_scaling_hetero.csv
│ ├── pred_feature_scaling.csv
│ ├── pred_homogeneous_dummy_features_feature_scaling.csv
│ ├── pred_homogeneous_dummy_features_feature_scaling_reg.csv
│ └── pred_homogeneous_dummy_features_feature_scaling_reg_0.csv
├── Module-11-Logistic-Regression
├── .DS_Store
├── .ipynb_checkpoints
│ └── Logistic-Regression-Workbook-checkpoint.ipynb
├── Logistic-Regression-Workbook.ipynb
├── Module-11-Instructor-Notebook
│ ├── Logistic Regression.ipynb
│ ├── M1.png
│ ├── R1.png
│ ├── S1.png
│ ├── S2.png
│ ├── S3.png
│ ├── S4.png
│ ├── log.png
│ ├── mwalah.jpeg
│ ├── one.png
│ ├── three.png
│ └── two.png
├── Module-11-Logistic-Regression-Code.pdf
└── Module-11-Logistic-Regression-Theory.pdf
├── Module-12-Project-Logistic-Regression
├── .ipynb_checkpoints
│ ├── Titanic-Logistic-Regression-Homogeneous-Dummy-Features-Workbook-checkpoint.ipynb
│ └── Titanic-Logistic-Regression-Workbook-checkpoint.ipynb
├── Titanic-Logistic-Regression-Homogeneous-Dummy-Features-Workbook.ipynb
├── Titanic-Logistic-Regression-Workbook.ipynb
├── Titanic-Test-Data.csv
├── Titanic-Train-Data.csv
├── pred.csv
└── pred_homogeneous_dummy.csv
├── Module-13-Classification-Measures
├── .DS_Store
├── Module-12-Instructor-Notebook.zip
└── Module-13-Classification-Measures.pdf
├── Module-15-Decision-Trees-2
├── .ipynb_checkpoints
│ └── Visualising-Decision-Tree-Workbook-checkpoint.ipynb
├── Module-15-Instructor-Notebook.zip
├── Visualising-Decision-Tree-Workbook.ipynb
├── iris.pdf
├── iris.png
└── iris2.pdf
├── README.md
└── tensorflow
├── .ipynb_checkpoints
├── 13. MNIST-Tensorflow-checkpoint.ipynb
└── MNIST-TensorFlow-checkpoint.ipynb
├── 13. MNIST-Tensorflow.ipynb
├── MNIST-TensorFlow.ipynb
└── MNIST_data
├── t10k-images-idx3-ubyte.gz
├── t10k-labels-idx1-ubyte.gz
├── train-images-idx3-ubyte.gz
└── train-labels-idx1-ubyte.gz
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/.DS_Store
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * linguist-vendored
2 | *.py linguist-vendored=false
3 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Boston Dataset-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Diabetes-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Iris-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Linear Regression-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Modules-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Plotting-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Practice-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Python basics-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stdout",
10 | "output_type": "stream",
11 | "text": [
12 | "10\n"
13 | ]
14 | }
15 | ],
16 | "source": [
17 | "print(10)"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 2,
23 | "metadata": {},
24 | "outputs": [
25 | {
26 | "name": "stdout",
27 | "output_type": "stream",
28 | "text": [
29 | "10\n"
30 | ]
31 | }
32 | ],
33 | "source": [
34 | "a = 10\n",
35 | "print(a)"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 3,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "data": {
45 | "text/plain": [
46 | "10"
47 | ]
48 | },
49 | "execution_count": 3,
50 | "metadata": {},
51 | "output_type": "execute_result"
52 | }
53 | ],
54 | "source": [
55 | "a"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 4,
61 | "metadata": {},
62 | "outputs": [
63 | {
64 | "name": "stdout",
65 | "output_type": "stream",
66 | "text": [
67 | "hello world\n"
68 | ]
69 | }
70 | ],
71 | "source": [
72 | "b = \"hello world\"\n",
73 | "print(b)"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 6,
79 | "metadata": {},
80 | "outputs": [
81 | {
82 | "name": "stdout",
83 | "output_type": "stream",
84 | "text": [
85 | "10 21\n",
86 | "11\n"
87 | ]
88 | }
89 | ],
90 | "source": [
91 | "a = 10\n",
92 | "b = 11\n",
93 | "print(a,end = \" \")##used to specify how the printed data should end with\n",
94 | "print(a+b, end = \"\\n\\n\")\n",
95 | "print(b)"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": null,
101 | "metadata": {
102 | "collapsed": true
103 | },
104 | "outputs": [],
105 | "source": []
106 | }
107 | ],
108 | "metadata": {
109 | "kernelspec": {
110 | "display_name": "Python 3",
111 | "language": "python",
112 | "name": "python3"
113 | },
114 | "language_info": {
115 | "codemirror_mode": {
116 | "name": "ipython",
117 | "version": 3
118 | },
119 | "file_extension": ".py",
120 | "mimetype": "text/x-python",
121 | "name": "python",
122 | "nbconvert_exporter": "python",
123 | "pygments_lexer": "ipython3",
124 | "version": "3.6.3"
125 | }
126 | },
127 | "nbformat": 4,
128 | "nbformat_minor": 2
129 | }
130 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Titanic Dataset-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Untitled-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/Module-01-Python-Basics/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-01-Python-Basics/.DS_Store
--------------------------------------------------------------------------------
/Module-01-Python-Basics/Anaconda-Installation-Guide.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-01-Python-Basics/Anaconda-Installation-Guide.pdf
--------------------------------------------------------------------------------
/Module-01-Python-Basics/Module-1-Class-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-01-Python-Basics/Module-1-Class-Notes.pdf
--------------------------------------------------------------------------------
/Module-01-Python-Basics/Module-1-Handout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-01-Python-Basics/Module-1-Handout.pdf
--------------------------------------------------------------------------------
/Module-01-Python-Basics/Module-1-Instructor-Notebook.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "Anaconda and Jupyter Notebook"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "\n",
15 | "
To Create a new notebook in the window (you can see it’ll be saved in the main window as “notebook name”.ipynb). You may observe that there are cells in the notebook. Cells can be used for writing code and/or notes.
\n",
16 | "In order to write notes, you need to select Markdown, which is a language or editor for writing notes. Then, if you type in your cell ## “Hear name”, you’ll get a header with your header name.
\n",
17 | "Writing code:
\n",
18 | "Suppose you write a=10
\n",
19 | "In order to print it, write print(a), press Ctrl+Enter, and you can see the value of a printed right below the cell. Pressing Alt+Enter will print the value as well as create and move onto a new cell.
\n",
20 | "The main advantage of Jupyter lies in convenience: instead of creating Python files and running them separately, one can see the output right under the code itself. We can also insert text in between code for simplicity and better understanding.
\n"
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "Variables in Python\n"
28 | ]
29 | },
30 | {
31 | "cell_type": "markdown",
32 | "metadata": {},
33 | "source": [
34 | "Variables are anything that store some value in them and are subject to be changed. In Python, we don’t declare the datatype of variables while initializing them; we simply write their name and assign them values. There is no semicolon to be used to end lines unlike in C or C++.\n",
35 | "Jupyter always prints the variables after we initialize them, so there usually is no need to write print(variable) separately. But only the variable declared last is printed and not the ones before it, so the print() function is to be used for printing out all variables. \n",
36 | "To check the datatype of variables, simply write type(variable), which will output the datatype of the variable. \n",
37 | "Python has five standard data types – Numbers, String, List, Tuple and Dictionary. \n",
38 | "1.\tNumbers store numerical data and are of four types: int (signed integers), long (long integers), float (floating point real values) and complex (complex numbers).\n",
39 | "2.\tStrings in Python are identified as a contiguous set of characters represented in the quotation marks.\n",
40 | "3.\tLists are the Python equivalent of arrays, and are the most versatile of Python's data types.\n",
41 | "4.\tA tuple is another sequence data type that is similar to the list. A tuple consists of a number of values separated by commas. Unlike lists, however, tuples are enclosed within parentheses.\n",
42 | "5.\tPython's dictionaries are kind of hash table type, and consist of key-value pairs.\n",
43 | "It is to be kept in mind that the order of cells is not important; the order of execution is important. So you can run the second cell before the first (if they’re not connected, that is) – and it would not be a problem. Connected means that there is some dependency between the two cells – for instance, the first cell has declared some variables that are being used in the second cell. In that case, the first cell has to be executed first and only then the second can be called.\n",
44 | "You can perform operations on Python variables just as in all other languages. However, in the case of division: dividing two integers will return a float value which won’t be floored to an int (unlike in C++). In order to get the floor values only, you have to use a//b instead of a/b. a**2 is used for exponentiation. \n",
45 | "\n",
46 | "In case you are new to Python don't worry, we will be covering them one by one.\n"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": 4,
52 | "metadata": {},
53 | "outputs": [
54 | {
55 | "name": "stdout",
56 | "output_type": "stream",
57 | "text": [
58 | "10\n",
59 | "0.8333333333333334\n",
60 | "0\n"
61 | ]
62 | }
63 | ],
64 | "source": [
65 | "a = 10\n",
66 | "b=12\n",
67 | "\n",
68 | "# There is no need to declare datatype of variable during initialization.\n",
69 | "# No semicolon is to be used like in other languages.\n",
70 | "\n",
71 | "# Prints a\n",
72 | "print(a)\n",
73 | "\n",
74 | "# Get datatype of a\n",
75 | "type(a)\n",
76 | "\n",
77 | "# prints quotient of a divided by b\n",
78 | "print(a/b)\n",
79 | "\n",
80 | "# Prints floor value of quotient\n",
81 | "print(a//b)\n",
82 | "\n",
83 | "# It is allowed to assign the same value to multiple variables in one line.\n",
84 | "a = b = 2\n"
85 | ]
86 | },
87 | {
88 | "cell_type": "markdown",
89 | "metadata": {},
90 | "source": [
91 | "Strings and Input\n"
92 | ]
93 | },
94 | {
95 | "cell_type": "markdown",
96 | "metadata": {},
97 | "source": [
98 | "Strings are declared like all variables in Python. A string’s length can be printed using the len() function. Indexing starts from zero, and we can access elements of a string using indices. Strings are immutable; we cannot change them.
\n",
99 | "One thing to note that we can add strings to strings but cannot add strings to integers etc. And the character datatype doesn’t exist in Python at all; even a single alphabet or number will be a string datatype.
\n",
100 | "One of the best things about strings in Python is that we need not write out our entire string in one line; we can write it in different lines using pairs of triple quotes. These triple quotes can also contain line breaks and escape sequences.
\n",
101 | "Strings have many functionalities in python like conversion between cases, concatenation etc. Some of them are given below.
\n",
102 | "If you wish to convert your string to the upper case, the function upper() will return the string in upper case, and similarly for lower case. Note that this returns a new string with the changed case; this does not alter the original string. The function strip() will remove the white spaces at the beginning and end of our string. We can also check if a certain character or substring is present within a string.
\n",
103 | "If you wish to take input from a user and store it in a variable, use input(). This will take input in the form of a string, which can be converted to other datatypes (integer, for instance).
\n"
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "execution_count": 16,
109 | "metadata": {},
110 | "outputs": [
111 | {
112 | "name": "stdout",
113 | "output_type": "stream",
114 | "text": [
115 | "12\n",
116 | "e\n",
117 | "d\n",
118 | "HelloAnjali\n",
119 | "Hello Anjali\n",
120 | "False\n",
121 | "HELLO WORLD!\n",
122 | "Wonder\n"
123 | ]
124 | }
125 | ],
126 | "source": [
127 | "# Strings can be declared just like regular variables.\n",
128 | "myString = \"Hello World!\"\n",
129 | "\n",
130 | "# Print length of string\n",
131 | "print(len(myString))\n",
132 | "\n",
133 | "# Access string elements by array-like indexing\n",
134 | "print(myString[1])\n",
135 | "\n",
136 | "# Negative indices to access string elements\n",
137 | "print(myString[-2])\n",
138 | "\n",
139 | "# Concatenating two strings\n",
140 | "string1 = \"Hello\"\n",
141 | "string2 = \"Anjali\"\n",
142 | "\n",
143 | "print(string1 + string2)\n",
144 | "print(string1 + \" \" + string2)\n",
145 | "\n",
146 | "# Triple quotes for strings\n",
147 | "bigString = \"\"\"Hi. My name is Sam.\n",
148 | " I like to play chess.\"\"\"\n",
149 | "\n",
150 | "# Check if a character is present within a string\n",
151 | "print('x' in myString)\n",
152 | "\n",
153 | "# Convert string to upper-case\n",
154 | "myStringUpper = myString.upper()\n",
155 | "print(myStringUpper)\n",
156 | "\n",
157 | "# Replace an element of a string\n",
158 | "notMyString = \"Wonger\"\n",
159 | "isMyString = notMyString.replace('g','d')\n",
160 | "print(isMyString)\n",
161 | "\n",
162 | "# Take input from user and print it\n",
163 | "newString = input()\n",
164 | "print(newString)"
165 | ]
166 | },
167 | {
168 | "cell_type": "markdown",
169 | "metadata": {},
170 | "source": [
171 | "String Slicing\n"
172 | ]
173 | },
174 | {
175 | "cell_type": "markdown",
176 | "metadata": {},
177 | "source": [
178 | "Suppose you have a string, and wish to access its elements. You can do that by printing the specific index of your element, but what if you want to extract a chunk of more than one character with known position and size?
\n",
179 | "This is where slicing comes to the rescue. Slicing is something which works for both strings and arrays (lists), and is a useful way to access and operate on data.
\n",
180 | "What exactly is slicing? Slicing comes in handy if we wish to obtain a subset of a string. For instance, s[2:6] will return the elements numbered 2,3,4 and 5. It won’t return the element numbered 6 because the last index in the slice is not included and returned.
\n",
181 | "Slicing can be used similarly for negative indices and printing the entire string as well. Negative indices will print the elements from the last onwards instead of the first onwards.
\n",
182 | "There is one additional important concept related to string slicing: that of stride, or how many characters you want to move forward after each character is retrieved from the original string. The first retrieved character always corresponds to the index before the colon; but thereafter, the pointer moves forward however many characters you specify as your stride, and retrieves the character at that position. And so on, until the ending index is reached or exceeded.
\n",
183 | "You can specify a negative stride too. As you might expect, this indicates that you want Python to go backwards when retrieving characters.
\n"
184 | ]
185 | },
186 | {
187 | "cell_type": "code",
188 | "execution_count": 1,
189 | "metadata": {},
190 | "outputs": [
191 | {
192 | "name": "stdout",
193 | "output_type": "stream",
194 | "text": [
195 | "Cod\n",
196 | "ing\n",
197 | "Cdn\n"
198 | ]
199 | }
200 | ],
201 | "source": [
202 | "name = \"Coding\"\n",
203 | "\n",
204 | "# Print first three elements of string \n",
205 | "# Indexing starts at 0, so to print the first three \n",
206 | "# we write 0:3, because the last index element isn't\n",
207 | "# included and printed.\n",
208 | "print(name[0:3])\n",
209 | "\n",
210 | "# Print last three elements of string\n",
211 | "print(name[-3:])\n",
212 | "\n",
213 | "# Print alternate elements of string\n",
214 | "# The third slicing number defines the stride, that is\n",
215 | "# the amount of elements you want to skip while retrieving \n",
216 | "# the elements of the string.\n",
217 | "print(name[::2])"
218 | ]
219 | },
220 | {
221 | "cell_type": "markdown",
222 | "metadata": {},
223 | "source": [
224 | "Tuples\n"
225 | ]
226 | },
227 | {
228 | "cell_type": "markdown",
229 | "metadata": {},
230 | "source": [
231 | "Tuples are simply a way to store multiple elements in one place – but they’re not the same as arrays. The differences between tuples and lists are that tuples cannot be changed unlike lists and tuples use parentheses, whereas lists use square brackets.
\n",
232 | "They are initialized with their values in parentheses, and their individual elements can be accessed just as in strings. Like string indices, tuple indices start at 0, and they can be sliced, concatenated, and so on.
\n",
233 | "Tuples are handy in that we can have different types of data like floats and characters as elements of a single tuple, and slicing works on them just as with strings and arrays. They’re, however, immutable: they cannot be changed or updated.
\n",
234 | "You can also perform operations like addition, multiplication etc. on them, and tuples can be queried as well.
\n"
235 | ]
236 | },
237 | {
238 | "cell_type": "code",
239 | "execution_count": 27,
240 | "metadata": {},
241 | "outputs": [
242 | {
243 | "name": "stdout",
244 | "output_type": "stream",
245 | "text": [
246 | "2\n",
247 | "(2, 3)\n",
248 | "Hello\n",
249 | "(1, 2, 3, 5, 6, 7)\n",
250 | "True\n",
251 | "False\n"
252 | ]
253 | }
254 | ],
255 | "source": [
256 | "# Initialize a tuple\n",
257 | "myTuple = (1,2,3)\n",
258 | "\n",
259 | "# Print second element of tuple\n",
260 | "print(myTuple[1])\n",
261 | "\n",
262 | "# Print last two elements of tuple\n",
263 | "print(myTuple[-2:])\n",
264 | "\n",
265 | "# Mixed tuples: tuples can contain data of various types\n",
266 | "mixTuple = ('Hello',2,3.4)\n",
267 | "print(mixTuple[0])\n",
268 | "\n",
269 | "# Adding two tuples: this concatenates two tuples together\n",
270 | "tuple1 = (1,2,3)\n",
271 | "tuple2 = (5,6,7)\n",
272 | "print(tuple1 + tuple2)\n",
273 | "\n",
274 | "# Query a tuple\n",
275 | "print(2 in myTuple)\n",
276 | "print('x' in mixTuple)"
277 | ]
278 | }
279 | ],
280 | "metadata": {
281 | "kernelspec": {
282 | "display_name": "Python 3",
283 | "language": "python",
284 | "name": "python3"
285 | },
286 | "language_info": {
287 | "codemirror_mode": {
288 | "name": "ipython",
289 | "version": 3
290 | },
291 | "file_extension": ".py",
292 | "mimetype": "text/x-python",
293 | "name": "python",
294 | "nbconvert_exporter": "python",
295 | "pygments_lexer": "ipython3",
296 | "version": "3.6.3"
297 | }
298 | },
299 | "nbformat": 4,
300 | "nbformat_minor": 2
301 | }
302 |
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-02-Conditionals-Loops-and-Functions/.DS_Store
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/All-Prime-Numbers.py:
--------------------------------------------------------------------------------
1 | def prime(n):
2 | if n==2:
3 | return True
4 | for i in range(2,n-1):
5 | if n%i==0:
6 | return False
7 |
8 | return True
9 |
10 | n = int(input())
11 |
12 | for i in range(2,n+1):
13 | if prime(i):
14 | print(i)
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Decimal-Binary.py:
--------------------------------------------------------------------------------
1 | ## Read input as specified in the question.
2 | ## Print output as specified in the question.
3 |
4 | def reverse(s):
5 | result = ""
6 | for index in range(len(s)):
7 | result+=s[len(s)-index-1]
8 |
9 | return result
10 |
11 | result = ""
12 | n = int(input())
13 | if n==0:
14 | print(0)
15 | exit
16 |
17 | while n>0:
18 | if n%2==0:
19 | result+='0'
20 | else:
21 | result+='1'
22 | n = n//2
23 | print(reverse(result))
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Even-Fibonacci-Numbers.py:
--------------------------------------------------------------------------------
1 | sum = 0
2 | current = 1
3 | next = 1
4 | n = int(input())
5 |
6 | while current<=n:
7 | if current%2==0:
8 | sum+=current
9 | temp = current
10 | current = next
11 | next = next+temp
12 |
13 | print(sum)
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Module-2-Class-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-02-Conditionals-Loops-and-Functions/Module-2-Class-Notes.pdf
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Module-2-Handout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-02-Conditionals-Loops-and-Functions/Module-2-Handout.pdf
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Number-Pattern.py:
--------------------------------------------------------------------------------
def pattern_lines(n):
    """Rows of the number pattern for size n.

    Row i is: digits 1..i, a gap of 2*(n-i) spaces, then digits i..1,
    e.g. for n=3: "1    1", "12  21", "123321".
    """
    rows = []
    for i in range(1, n + 1):
        ascending = "".join(str(j) for j in range(1, i + 1))
        # Build the descending half digit-by-digit (string reversal would
        # break for multi-digit numbers when n >= 10).
        descending = "".join(str(j) for j in range(i, 0, -1))
        gap = " " * (2 * (n - i))
        rows.append(ascending + gap + descending)
    return rows


if __name__ == "__main__":
    n = int(input())
    print("\n".join(pattern_lines(n)))
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Reverse-Every-Word.py:
--------------------------------------------------------------------------------
def reverse_every_word(line):
    """Reverse each whitespace-separated word of `line`, keeping word order.

    Bug fix: the original decided whether to append a separating space with
    `index != len(i) - 1`, comparing the word index against the *current
    word's length* instead of the number of words, so output spacing was
    wrong for most inputs. Joining with a single space gives the intended
    output.
    """
    return " ".join(word[::-1] for word in line.split())


if __name__ == "__main__":
    s = input()
    print(reverse_every_word(s))
17 |
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Reversing-Series-Pattern.py:
--------------------------------------------------------------------------------
def series_rows(n):
    """Rows of the reversing-series pattern.

    Row i (1-based) holds the next i consecutive integers, continuing from
    where the previous row stopped; odd rows ascend, even rows descend.
    e.g. n=3 -> [[1], [3, 2], [4, 5, 6]].
    """
    rows = []
    end = 0  # largest value used so far
    for i in range(1, n + 1):
        values = list(range(end + 1, end + i + 1))
        end += i
        rows.append(values if i % 2 != 0 else values[::-1])
    return rows


if __name__ == "__main__":
    n = int(input())
    for row in series_rows(n):
        # Original printed every value with a trailing space, then a newline.
        for value in row:
            print(value, end=" ")
        print()
--------------------------------------------------------------------------------
/Module-02-Conditionals-Loops-and-Functions/Trailing-Zeros.py:
--------------------------------------------------------------------------------
def trailing_zeros(n):
    """Number of trailing zeros in n!.

    Uses Legendre's formula: the exponent of 5 in n! is
    n//5 + n//25 + n//125 + ... (factors of 2 are always more plentiful).
    """
    count = 0
    power = 5
    while power <= n:
        count += n // power
        power *= 5
    return count


if __name__ == "__main__":
    n = int(input())
    print(trailing_zeros(n))
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-03-Lists-and-Dictionaries/.DS_Store
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/Equilibium-Index.py:
--------------------------------------------------------------------------------
import sys

def equilibriumIndex(arr):
    """Return the first index i where sum(arr[:i]) == sum(arr[i+1:]),
    or -1 if no such index exists.

    Single pass with a running left sum instead of the original's two
    auxiliary prefix/suffix lists (O(1) extra space). Also fixes the
    unguarded empty-array case: the original returned 0 for [], which is
    not a valid index; -1 is returned instead.
    """
    total = sum(arr)
    left = 0
    for i, value in enumerate(arr):
        # Right-side sum is everything except the left part and arr[i].
        if left == total - left - value:
            return i
        left += value
    return -1

# Main
if __name__ == "__main__":
    n = int(input())
    if n == 0:
        print(-1)
        sys.exit()

    arr = [int(i) for i in input().strip().split()]
    print(equilibriumIndex(arr))
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/Largest-Unique-Substirng.py:
--------------------------------------------------------------------------------
def longest_unique_substring(s):
    """Longest substring of s containing no repeated characters
    (the first such substring on ties).

    Bug fix: the original O(n^2) scan only recorded a candidate when it hit
    a repeated character, so any all-unique window reaching the end of the
    string was never considered (e.g. "abc" produced ""). This sliding
    window considers every window and runs in O(n).
    """
    best_start, best_len = 0, 0
    last_seen = {}      # char -> most recent index where it appeared
    window_start = 0    # left edge of the current duplicate-free window
    for i, ch in enumerate(s):
        if ch in last_seen and last_seen[ch] >= window_start:
            # ch repeats inside the window: move the left edge past it.
            window_start = last_seen[ch] + 1
        last_seen[ch] = i
        if i - window_start + 1 > best_len:
            best_len = i - window_start + 1
            best_start = window_start
    return s[best_start:best_start + best_len]


if __name__ == "__main__":
    s = input()
    print(longest_unique_substring(s))
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/Leaders-in-Array.py:
--------------------------------------------------------------------------------
## Read input as specified in the question.
## Print output as specified in the question.

import sys

def leaders(arr):
    """Elements that are >= every element to their right, in original order.

    Scans from the right keeping the running maximum; the last element is
    always a leader. Returns [] for an empty list.
    """
    result = []
    current_max = None
    for value in reversed(arr):
        if current_max is None or value >= current_max:
            result.append(value)
            current_max = value
    result.reverse()
    return result


if __name__ == "__main__":
    n = int(input())
    if n == 0:
        sys.exit()
    arr = [int(x) for x in input().strip().split(" ")]
    # Idiomatic join instead of the original manual string assembly.
    print(" ".join(str(v) for v in leaders(arr)))
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/Maximise-the-sum.py:
--------------------------------------------------------------------------------
1 | n1 = int(input())
2 | arr1 = [int(x) for x in input().strip().split(" ")]
3 |
4 | n2 = int(input())
5 | arr2 = [int(x) for x in input().strip().split(" ")]
6 |
7 | result = 0
8 | sum1 = 0
9 | sum2 = 0
10 | i,j = 0,0
11 | while i arr2[j]:
16 | sum2+=arr2[j]
17 | j+=1
18 | else:
19 | result+=max(sum1, sum2)
20 | sum1, sum2 = 0,0
21 |
22 | while iLists-1"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "Lists are exactly the same as arrays. We need lists to store multiple data elements in one place. You may ask that the same thing is done by tuples - yes, but the problem with tuples is that they're immutable i.e. their values cannot be modified etc. This is where lists come in. A list is nothing but a collection of elements of various kinds.
\n",
15 | "The list type is a container that holds a number of other objects, in a given order. The list type implements the sequence protocol, and also allows you to add and remove objects from the sequence.
\n",
16 | "Lists are simply declared in square brackets (not round brackets, or that'll be a tuple declaration). Note that a list need not have all elements of the same datatype; we can store integers, boolean values, strings etc. in one array itself.
\n"
17 | ]
18 | },
19 | {
20 | "cell_type": "code",
21 | "execution_count": 6,
22 | "metadata": {
23 | "collapsed": true
24 | },
25 | "outputs": [],
26 | "source": [
27 | "# Declaring a list\n",
28 | "\n",
29 | "myList = [1,'hello!',3.4563]\n",
30 | "\n",
31 | "myList1 = list()\n"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "metadata": {},
37 | "source": [
38 | "How do we access elements of a list? It is done very easily using the same method of zero-indexing as in strings. For instance, if a = [1,2,3] is a list, then a[2] will return 3.
"
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": 7,
44 | "metadata": {},
45 | "outputs": [
46 | {
47 | "name": "stdout",
48 | "output_type": "stream",
49 | "text": [
50 | "hello!\n",
51 | "3.4563\n",
52 | "[1, 'hello!']\n"
53 | ]
54 | }
55 | ],
56 | "source": [
57 | "# Accessing list elements\n",
58 | "\n",
59 | "print(myList[1])\n",
60 | "print(myList[-1])\n",
61 | "print(myList[0:2])\n"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "The next thing that comes is taking input from the user and storing it into a list. We can do that by taking in elements one by one, but what if we input all the elements at once (separated by spaces)? This is where we need to use a few functions for help.
\n",
69 | "We'll use the strip() and split() functions to help us take input. When we take a list as input in the usual way, we get elements all at once with spaces in between. If we want them one by one (without including the spaces), we first strip() the spaces in the beginning and end so they aren't counted as elements, then split() the list on spaces, and finally store it in another list.
"
70 | ]
71 | },
72 | {
73 | "cell_type": "code",
74 | "execution_count": 1,
75 | "metadata": {},
76 | "outputs": [
77 | {
78 | "name": "stdout",
79 | "output_type": "stream",
80 | "text": [
81 | "1 2 3 4\n",
82 | "['1', '2', '3', '4']\n",
83 | "1 2 3 4 5\n",
84 | "[1, 2, 3, 4, 5]\n"
85 | ]
86 | }
87 | ],
88 | "source": [
89 | "# Taking input in a list\n",
90 | "\n",
91 | "myStr = input().strip()\n",
92 | "myList = myStr.split(\" \")\n",
93 | "print(myList)\n",
94 | "\n",
95 | "# The above can be done in a single line.\n",
96 | "\n",
97 | "myList = [int(x) for x in input().strip().split(\" \")]\n",
98 | "print(myList)"
99 | ]
100 | },
101 | {
102 | "cell_type": "markdown",
103 | "metadata": {},
104 | "source": [
105 | "Lists-2"
106 | ]
107 | },
108 | {
109 | "cell_type": "markdown",
110 | "metadata": {},
111 | "source": [
112 | "We next look at some further characteristics and functionalities of lists.
\n",
113 | "We can add elements to our list in three ways: by using the list.append(element) function, the list.insert(index,element) function and the list.extend(another_list) function. The advantage of using insert() is that we can insert the elements in exactly our position of choice. The extend function appends all the elements of a list to another.
"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": 16,
119 | "metadata": {},
120 | "outputs": [
121 | {
122 | "name": "stdout",
123 | "output_type": "stream",
124 | "text": [
125 | "[1, 'hello', 2, 'world', 3]\n"
126 | ]
127 | }
128 | ],
129 | "source": [
130 | "# Adding elements to list\n",
131 | "myList = [1,2,'world']\n",
132 | "emptyList = list()\n",
133 | "\n",
134 | "myList.append(3)\n",
135 | "\n",
136 | "myList.insert(1,'hello')\n",
137 | "\n",
138 | "emptyList.extend(myList)\n",
139 | "print(emptyList)\n",
140 | "\n"
141 | ]
142 | },
143 | {
144 | "cell_type": "markdown",
145 | "metadata": {},
146 | "source": [
147 | "We can delete elements from a list in three ways: by using the list.pop() function, the list.remove() function and del. If we don't specify an index in pop() then the last element will be deleted, while specifying an index will delete the element at that index in the list. The remove() function takes as input the element value you want to delete, not the index. Del can be used to remove multiple elements using slicing.
"
148 | ]
149 | },
150 | {
151 | "cell_type": "code",
152 | "execution_count": 17,
153 | "metadata": {},
154 | "outputs": [
155 | {
156 | "name": "stdout",
157 | "output_type": "stream",
158 | "text": [
159 | "[2, 'world', 5]\n"
160 | ]
161 | }
162 | ],
163 | "source": [
164 | "# Deleting elements from list\n",
165 | "\n",
166 | "myList.pop();\n",
167 | "\n",
168 | "myList.append(4);\n",
169 | "myList.append(5);\n",
170 | "\n",
171 | "myList.pop(0);\n",
172 | "\n",
173 | "myList.remove(4);\n",
174 | "\n",
175 | "del myList[0:1]\n",
176 | "\n",
177 | "print(myList)"
178 | ]
179 | },
180 | {
181 | "cell_type": "markdown",
182 | "metadata": {},
183 | "source": [
184 | "Note that we cannot add strings, integers etc. to the list by arithmetic operators. Only lists can be added to other lists using the '+' operator.
\n",
185 | "There are some other functions we can perform on lists. The sort() function sorts the list in increasing order, while the count(x) function returns the number of occurrences of the value x in the list (to get the number of elements in a list, use len()).
"
186 | ]
187 | },
188 | {
189 | "cell_type": "code",
190 | "execution_count": 22,
191 | "metadata": {},
192 | "outputs": [
193 | {
194 | "name": "stdout",
195 | "output_type": "stream",
196 | "text": [
197 | "[1, 2, 3, 4]\n"
198 | ]
199 | }
200 | ],
201 | "source": [
202 | "#myList = myList + 2 # Wrong!\n",
203 | "myList = myList + [1,2] # Correct\n",
204 | "\n",
205 | "newList = [3,2,1,4]\n",
206 | "newList.sort()\n",
207 | "print(newList)\n"
208 | ]
209 | },
210 | {
211 | "cell_type": "markdown",
212 | "metadata": {},
213 | "source": [
214 | "Bubble Sort"
215 | ]
216 | },
217 | {
218 | "cell_type": "markdown",
219 | "metadata": {},
220 | "source": [
221 | "Bubble sort is a popular and basic sorting algorithm that pushes the largest element to the end in every iteration. It compares each element to the adjacent one, and swaps if the adjacent one is smaller than the element. Given below is a simple code for Bubble Sort.
"
222 | ]
223 | },
224 | {
225 | "cell_type": "code",
226 | "execution_count": 23,
227 | "metadata": {},
228 | "outputs": [
229 | {
230 | "name": "stdout",
231 | "output_type": "stream",
232 | "text": [
233 | "[14, 21, 27, 41, 43, 45, 46, 57, 70]\n"
234 | ]
235 | }
236 | ],
237 | "source": [
238 | "def bubbleSort(nlist):\n",
239 | " for passnum in range(len(nlist)-1,0,-1):\n",
240 | " for i in range(passnum):\n",
241 | " if nlist[i]>nlist[i+1]:\n",
242 | " temp = nlist[i]\n",
243 | " nlist[i] = nlist[i+1]\n",
244 | " nlist[i+1] = temp\n",
245 | "\n",
246 | "nlist = [14,46,43,27,57,41,45,21,70]\n",
247 | "bubbleSort(nlist)\n",
248 | "print(nlist)"
249 | ]
250 | },
251 | {
252 | "cell_type": "markdown",
253 | "metadata": {},
254 | "source": [
255 | "Dictionaries"
256 | ]
257 | },
258 | {
259 | "cell_type": "markdown",
260 | "metadata": {},
261 | "source": [
262 | "Dictionaries are the same as hash maps in other languages. They are a collection of key-value pairs: that is, values are mapped to keys. The keys can be of any nature - integers, strings, tuples etc. (But lists cannot be used as keys.)
\n",
263 | "Like lists are represented with square brackets, dictionaries are represented by curly brackets. The keys of a dictionary are immutable, and cannot be changed; the values, however, can be changed. If we set a key with a certain value, it will update the value in the dictionary if the key exists, or it will create a new key-value pair if the key does not already exist.
\n",
264 | "Each key is separated from its value by a colon (:), the items are separated by commas, and the whole thing is enclosed in curly braces. An empty dictionary without any items is written with just two curly braces, like this: {}.
"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": 3,
270 | "metadata": {},
271 | "outputs": [
272 | {
273 | "name": "stdout",
274 | "output_type": "stream",
275 | "text": [
276 | "{2: 6, 'hello': 4}\n"
277 | ]
278 | }
279 | ],
280 | "source": [
281 | "# Initializing a dictionary\n",
282 | "\n",
283 | "myDict = {}\n",
284 | "\n",
285 | "myDict[2]=6\n",
286 | "myDict[\"hello\"]=4\n",
287 | "\n",
288 | "print(myDict)\n"
289 | ]
290 | },
291 | {
292 | "cell_type": "markdown",
293 | "metadata": {},
294 | "source": [
295 | "Keys are unique within a dictionary while values may not be. The values of a dictionary can be of any type, but the keys must be of an immutable data type such as strings, numbers, or tuples.
\n",
296 | "Some operators on dictionaries are as follows:\n",
297 | "\n",
298 | " \n",
299 | " Operators | \n",
300 | " Explanation | \n",
301 | "
\n",
302 | " \n",
303 | " len(d) | \n",
304 | " returns the number of stored entries, i.e. the number of (key,value) pairs. | \n",
305 | "
\n",
306 | " \n",
307 | " del d[k] | \n",
308 | " deletes the key k together with his value | \n",
309 | "
\n",
310 | " \n",
311 | " k in d | \n",
312 | " True, if a key k exists in the dictionary d | \n",
313 | "
\n",
314 | "\n",
315 | " k not in d | \n",
316 | " True, if a key k doesn't exist in the dictionary d | \n",
317 | "
\n",
318 | "\n",
319 | "
\n",
320 | "We can insert values into a dictionary by simply writing a new key-value pair, delete by using del, and iterate using the usual iteration methods. We can also query dictionaries, access their keys and values etc.
"
321 | ]
322 | },
323 | {
324 | "cell_type": "code",
325 | "execution_count": 4,
326 | "metadata": {},
327 | "outputs": [
328 | {
329 | "name": "stdout",
330 | "output_type": "stream",
331 | "text": [
332 | "2\n",
333 | "hello\n",
334 | "{2: 6}\n"
335 | ]
336 | }
337 | ],
338 | "source": [
339 | "# iterating over a dictionary\n",
340 | "for i in myDict:\n",
341 | " print(i)\n",
342 | "\n",
343 | "# deleting elements from a dictionary\n",
344 | "del myDict['hello']\n",
345 | "print(myDict)"
346 | ]
347 | }
348 | ],
349 | "metadata": {
350 | "kernelspec": {
351 | "display_name": "Python 3",
352 | "language": "python",
353 | "name": "python3"
354 | },
355 | "language_info": {
356 | "codemirror_mode": {
357 | "name": "ipython",
358 | "version": 3
359 | },
360 | "file_extension": ".py",
361 | "mimetype": "text/x-python",
362 | "name": "python",
363 | "nbconvert_exporter": "python",
364 | "pygments_lexer": "ipython3",
365 | "version": "3.6.3"
366 | }
367 | },
368 | "nbformat": 4,
369 | "nbformat_minor": 2
370 | }
371 |
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/Reverse-String-Word-Wise.py:
--------------------------------------------------------------------------------
def reverse_word_wise(s):
    """Words of s in reverse order, joined by single spaces.

    Splits on single spaces after stripping (matching the original), so
    runs of spaces produce empty "words" exactly as before.
    """
    return " ".join(reversed(s.strip().split(" ")))


if __name__ == "__main__":
    # Idiomatic replacement for the original manual reverse-and-concatenate
    # loop; the output is byte-identical.
    s = input()
    print(reverse_word_wise(s))
13 |
--------------------------------------------------------------------------------
/Module-03-Lists-and-Dictionaries/Selection-Sort.py:
--------------------------------------------------------------------------------
1 | from sys import stdin
2 |
3 | def selectionSort(arr, n) :
4 | #Your code goes here
5 |
6 | for i in range(n):
7 | mn_index = i
8 | for j in range(i+1,n):
9 | if arr[j] 0 :
39 |
40 | arr, n = takeInput()
41 | selectionSort(arr, n)
42 | printList(arr, n)
43 |
44 | t-= 1
--------------------------------------------------------------------------------
/Module-04-2DLists-and-Numpy/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-04-2DLists-and-Numpy/.DS_Store
--------------------------------------------------------------------------------
/Module-04-2DLists-and-Numpy/Largest-Row-or-Column.py:
--------------------------------------------------------------------------------
1 | '''
2 | In order to print two or more integers in a line separated by a single
3 | space then you may consider printing it with the statement,
4 |
5 | print(str(num1) + " " + str(num2))
6 |
7 | '''
8 |
9 | from sys import stdin
10 |
11 | def findLargest(arr, nRows, mCols):
12 | #Your code goes here
13 |
14 | maxRowSum = -2147483648
15 | maxRowIndex = 0
16 | maxColSum = -2147483648
17 | maxColIndex = 0
18 |
19 | for i in range(nRows):
20 | sum = 0
21 | for j in range(mCols):
22 | sum+=arr[i][j]
23 |
24 | if i == 0:
25 | maxRowSum = sum
26 | maxRowIndex = i
27 |
28 | if sum > maxRowSum:
29 | maxRowSum = sum
30 | maxRowIndex = i
31 |
32 | for i in range(mCols):
33 | sum = 0
34 | for j in range(nRows):
35 | sum+=arr[j][i]
36 | if i == 0:
37 | maxColSum = sum
38 | maxColIndex = i
39 |
40 | if sum > maxColSum:
41 | maxColSum = sum
42 | maxColIndex = i
43 |
44 | if maxRowSum >= maxColSum:
45 | print("row",maxRowIndex,maxRowSum)
46 | else:
47 | print("column",maxColIndex,maxColSum)
48 |
49 | #Taking Input Using Fast I/O
50 | def take2DInput() :
51 | li = stdin.readline().rstrip().split(" ")
52 | nRows = int(li[0])
53 | mCols = int(li[1])
54 |
55 | if nRows == 0 :
56 | return list(), 0, 0
57 |
58 | mat = [list(map(int, input().strip().split(" "))) for row in range(nRows)]
59 | return mat, nRows, mCols
60 |
61 |
62 | #main
63 | t = int(stdin.readline().rstrip())
64 |
65 | while t > 0 :
66 |
67 | mat, nRows, mCols = take2DInput()
68 | findLargest(mat, nRows, mCols)
69 |
70 | t -= 1
--------------------------------------------------------------------------------
/Module-04-2DLists-and-Numpy/Module-4-Class-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-04-2DLists-and-Numpy/Module-4-Class-Notes.pdf
--------------------------------------------------------------------------------
/Module-04-2DLists-and-Numpy/Module-4-Handout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-04-2DLists-and-Numpy/Module-4-Handout.pdf
--------------------------------------------------------------------------------
/Module-04-2DLists-and-Numpy/Module-4-Numpy-Notebook.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Module-4-Numpy.ipynb",
7 | "provenance": []
8 | },
9 | "kernelspec": {
10 | "name": "python3",
11 | "display_name": "Python 3"
12 | }
13 | },
14 | "cells": [
15 | {
16 | "cell_type": "code",
17 | "metadata": {
18 | "id": "hlwHmsokOe0U",
19 | "colab_type": "code",
20 | "colab": {}
21 | },
22 | "source": [
23 | "import numpy as np"
24 | ],
25 | "execution_count": 2,
26 | "outputs": []
27 | },
28 | {
29 | "cell_type": "code",
30 | "metadata": {
31 | "id": "8sCCCn7DOiJU",
32 | "colab_type": "code",
33 | "colab": {
34 | "base_uri": "https://localhost:8080/",
35 | "height": 34
36 | },
37 | "outputId": "a5b74604-b0b9-4f2a-c539-91b86d8955ca"
38 | },
39 | "source": [
40 | "l1 = [1,2,3]\n",
41 | "np.array(l1)"
42 | ],
43 | "execution_count": 4,
44 | "outputs": [
45 | {
46 | "output_type": "execute_result",
47 | "data": {
48 | "text/plain": [
49 | "array([1, 2, 3])"
50 | ]
51 | },
52 | "metadata": {
53 | "tags": []
54 | },
55 | "execution_count": 4
56 | }
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "metadata": {
62 | "id": "KQ8bVeBvO3Sx",
63 | "colab_type": "code",
64 | "colab": {
65 | "base_uri": "https://localhost:8080/",
66 | "height": 34
67 | },
68 | "outputId": "d1b19075-8b5d-4aef-cdba-754e081407e7"
69 | },
70 | "source": [
71 | "#numpy converts all elements to same datatype\n",
72 | "l2 = [1,2,\"arr\"]\n",
73 | "np.array(l2)"
74 | ],
75 | "execution_count": 6,
76 | "outputs": [
77 | {
78 | "output_type": "execute_result",
79 | "data": {
80 | "text/plain": [
81 | "array(['1', '2', 'arr'], dtype=' 2, it is treated as a stack of matrices residing in the last two indexes and broadcast accordingly.\n",
217 | "\n",
218 | "For np.dot:\n",
219 | "\n",
220 | "For 2-D arrays it is equivalent to matrix multiplication, and for 1-D arrays to inner product of vectors (without complex conjugation). For N dimensions it is a sum product over the last axis of a and the second-to-last of b"
221 | ]
222 | },
223 | {
224 | "cell_type": "code",
225 | "metadata": {
226 | "id": "tbYof3eUPrXA",
227 | "colab_type": "code",
228 | "colab": {
229 | "base_uri": "https://localhost:8080/",
230 | "height": 153
231 | },
232 | "outputId": "d8aba983-66f1-4e6a-9af1-b48646507e68"
233 | },
234 | "source": [
235 | "#matrix multiplication\n",
236 | "c = np.array([[1,3],[1,2],[0,1]])\n",
237 | "r = np.array([[1,0,2],[0,1,2]])\n",
238 | "\n",
239 | "print(c.shape)\n",
240 | "print(r.shape)\n",
241 | "print(np.matmul(c,r))\n",
242 | "#print(c*r) -> error, since point-wise operation and dimensions don't match\n",
243 | "print(np.dot(c,r))"
244 | ],
245 | "execution_count": 28,
246 | "outputs": [
247 | {
248 | "output_type": "stream",
249 | "text": [
250 | "(3, 2)\n",
251 | "(2, 3)\n",
252 | "[[1 3 8]\n",
253 | " [1 2 6]\n",
254 | " [0 1 2]]\n",
255 | "[[1 3 8]\n",
256 | " [1 2 6]\n",
257 | " [0 1 2]]\n"
258 | ],
259 | "name": "stdout"
260 | }
261 | ]
262 | },
263 | {
264 | "cell_type": "code",
265 | "metadata": {
266 | "id": "AQR5GslPQpDq",
267 | "colab_type": "code",
268 | "colab": {}
269 | },
270 | "source": [
271 | ""
272 | ],
273 | "execution_count": null,
274 | "outputs": []
275 | }
276 | ]
277 | }
--------------------------------------------------------------------------------
/Module-04-2DLists-and-Numpy/Spiral-Print.py:
--------------------------------------------------------------------------------
1 | from sys import stdin
2 |
3 | def spiralPrint(arr, nRows, mCols):
4 | #Your code goes here
5 | p1 = [0,0]
6 | p2 = [0,mCols-1]
7 | p3 = [nRows-1, mCols-1]
8 | p4 = [nRows-1,0]
9 |
10 | while p1[0]<=mCols//2 and p1[1]<=nRows//2:
11 |
12 | for i in range(p1[1],p2[1]+1):
13 | print(arr[p1[0]][i],end=" ")
14 | # print(arr[p1[0]][i],end=" ")
15 |
16 | for i in range(p2[0]+1, p3[0]+1):
17 | print(arr[i][p2[1]],end=" ")
18 | # print(arr[i][p2[1]],end=" ")
19 |
20 | for i in range(p3[1]-1,p4[1]-1,-1):
21 | print(arr[p3[0]][i],end=" ")
22 | # print(arr[p3[0]][i],end=" ")
23 |
24 | for i in range(p4[0]-1,p1[0],-1):
25 | print(arr[i][p4[1]],end=" ")
26 | # print(arr[i][p4[1]],end=" ")
27 |
28 | p1[0],p1[1] = p1[0]+1, p1[1]+1
29 | p2[0],p2[1] = p2[0]+1, p2[1]-1
30 | p3[0],p3[1] = p3[0]-1, p3[1]-1
31 | p4[0],p4[1] = p4[0]-1, p4[1]+1
32 |
33 | #Taking Input Using Fast I/O
34 | def take2DInput() :
35 | li = stdin.readline().rstrip().split(" ")
36 | nRows = int(li[0])
37 | mCols = int(li[1])
38 |
39 | if nRows == 0 :
40 | return list(), 0, 0
41 |
42 | mat = [list(map(int, input().strip().split(" "))) for row in range(nRows)]
43 | return mat, nRows, mCols
44 |
45 |
46 | #main
47 | t = int(stdin.readline().rstrip())
48 |
49 | while t > 0 :
50 |
51 | mat, nRows, mCols = take2DInput()
52 | spiralPrint(mat, nRows, mCols)
53 | print()
54 |
55 | t -= 1
--------------------------------------------------------------------------------
/Module-05-Pandas/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-05-Pandas/.DS_Store
--------------------------------------------------------------------------------
/Module-05-Pandas/Module-5-Class-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-05-Pandas/Module-5-Class-Notes.pdf
--------------------------------------------------------------------------------
/Module-05-Pandas/Module-5-Handout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-05-Pandas/Module-5-Handout.pdf
--------------------------------------------------------------------------------
/Module-05-Pandas/Module-5-Instructor-Notebooks/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-05-Pandas/Module-5-Instructor-Notebooks/.DS_Store
--------------------------------------------------------------------------------
/Module-05-Pandas/Titanic-Features-Description.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-05-Pandas/Titanic-Features-Description.png
--------------------------------------------------------------------------------
/Module-06-Plotting-Graphs/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-06-Plotting-Graphs/.DS_Store
--------------------------------------------------------------------------------
/Module-06-Plotting-Graphs/Exploded Pie-graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-06-Plotting-Graphs/Exploded Pie-graph.png
--------------------------------------------------------------------------------
/Module-06-Plotting-Graphs/Module-6-Class-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-06-Plotting-Graphs/Module-6-Class-Notes.pdf
--------------------------------------------------------------------------------
/Module-06-Plotting-Graphs/Module-6-Handout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-06-Plotting-Graphs/Module-6-Handout.pdf
--------------------------------------------------------------------------------
/Module-06-Plotting-Graphs/Pie-graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-06-Plotting-Graphs/Pie-graph.png
--------------------------------------------------------------------------------
/Module-06-Plotting-Graphs/comparative-plots.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-06-Plotting-Graphs/comparative-plots.png
--------------------------------------------------------------------------------
/Module-07-Introduction-to-Machine-Learning/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-07-Introduction-to-Machine-Learning/.DS_Store
--------------------------------------------------------------------------------
/Module-07-Introduction-to-Machine-Learning/Boston.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-07-Introduction-to-Machine-Learning/Boston.png
--------------------------------------------------------------------------------
/Module-07-Introduction-to-Machine-Learning/Diabetes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-07-Introduction-to-Machine-Learning/Diabetes.png
--------------------------------------------------------------------------------
/Module-07-Introduction-to-Machine-Learning/Module-7-Class-Notes-Practical.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-07-Introduction-to-Machine-Learning/Module-7-Class-Notes-Practical.pdf
--------------------------------------------------------------------------------
/Module-07-Introduction-to-Machine-Learning/Module-7-Class-Notes-Theory.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-07-Introduction-to-Machine-Learning/Module-7-Class-Notes-Theory.pdf
--------------------------------------------------------------------------------
/Module-07-Introduction-to-Machine-Learning/Module-7-Handout.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-07-Introduction-to-Machine-Learning/Module-7-Handout.pdf
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/.DS_Store
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/.ipynb_checkpoints/Linear-Regression-Single-Feature-Workbook-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/.ipynb_checkpoints/Linear-Regression-Two-Feature-Workbook-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import pandas as pd\n",
11 | "import sklearn.datasets"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 34,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "X, Y = sklearn.datasets.make_regression(n_samples=500, n_features=2, bias=1)"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 35,
26 | "metadata": {},
27 | "outputs": [
28 | {
29 | "data": {
30 | "text/plain": [
31 | "(500,)"
32 | ]
33 | },
34 | "execution_count": 35,
35 | "metadata": {},
36 | "output_type": "execute_result"
37 | }
38 | ],
39 | "source": [
40 | "X[:,0].shape"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": 36,
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "df = pd.DataFrame(X)"
50 | ]
51 | },
52 | {
53 | "cell_type": "code",
54 | "execution_count": 37,
55 | "metadata": {},
56 | "outputs": [
57 | {
58 | "data": {
59 | "text/html": [
60 | "\n",
61 | "\n",
74 | "
\n",
75 | " \n",
76 | " \n",
77 | " | \n",
78 | " 0 | \n",
79 | " 1 | \n",
80 | "
\n",
81 | " \n",
82 | " \n",
83 | " \n",
84 | " count | \n",
85 | " 500.000000 | \n",
86 | " 500.000000 | \n",
87 | "
\n",
88 | " \n",
89 | " mean | \n",
90 | " -0.027919 | \n",
91 | " 0.093867 | \n",
92 | "
\n",
93 | " \n",
94 | " std | \n",
95 | " 0.980243 | \n",
96 | " 1.059402 | \n",
97 | "
\n",
98 | " \n",
99 | " min | \n",
100 | " -2.372996 | \n",
101 | " -3.030447 | \n",
102 | "
\n",
103 | " \n",
104 | " 25% | \n",
105 | " -0.720534 | \n",
106 | " -0.579339 | \n",
107 | "
\n",
108 | " \n",
109 | " 50% | \n",
110 | " -0.049479 | \n",
111 | " 0.090556 | \n",
112 | "
\n",
113 | " \n",
114 | " 75% | \n",
115 | " 0.602104 | \n",
116 | " 0.853455 | \n",
117 | "
\n",
118 | " \n",
119 | " max | \n",
120 | " 3.268004 | \n",
121 | " 3.453502 | \n",
122 | "
\n",
123 | " \n",
124 | "
\n",
125 | "
"
126 | ],
127 | "text/plain": [
128 | " 0 1\n",
129 | "count 500.000000 500.000000\n",
130 | "mean -0.027919 0.093867\n",
131 | "std 0.980243 1.059402\n",
132 | "min -2.372996 -3.030447\n",
133 | "25% -0.720534 -0.579339\n",
134 | "50% -0.049479 0.090556\n",
135 | "75% 0.602104 0.853455\n",
136 | "max 3.268004 3.453502"
137 | ]
138 | },
139 | "execution_count": 37,
140 | "metadata": {},
141 | "output_type": "execute_result"
142 | }
143 | ],
144 | "source": [
145 | "df.describe()"
146 | ]
147 | },
148 | {
149 | "cell_type": "code",
150 | "execution_count": 38,
151 | "metadata": {},
152 | "outputs": [],
153 | "source": [
154 | "def fit(X,Y):\n",
155 | " x1 = X[:,0]\n",
156 | " x2 = X[:,1]\n",
157 | " alpha_num = (x2*x1).mean() + (x2.mean())*(x1.mean())\n",
158 | " alpha_den = (x2.mean())**2 + (x2**2).mean()\n",
159 | " alpha = alpha_num/alpha_den\n",
160 | " \n",
161 | " m1_num = (Y*x1).mean() + (Y.mean())*(x1.mean()) + alpha*((Y*x2).mean()) - alpha*((Y.mean())*x2.mean())\n",
162 | " m1_den = (x1**2).mean() + (x1.mean())**2 - alpha*((x1*x2).mean()) - alpha*(x1.mean())*(x2.mean())\n",
163 | " \n",
164 | " m1 = m1_num/m1_den\n",
165 | " \n",
166 | " m2_num = (Y*x2).mean() - m1*((x1*x2).mean()) +((Y.mean())*(x2.mean())) - m1*(x1.mean())*(x2.mean())\n",
167 | " m2_den = (x2.mean())**2 + (x2**2).mean()\n",
168 | " \n",
169 | " m2 = m2_num/m2_den\n",
170 | " \n",
171 | " c = Y.mean() - m1*(x1.mean()) - m2*(x2.mean())\n",
172 | " \n",
173 | " return m1,m2,c"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 39,
179 | "metadata": {},
180 | "outputs": [],
181 | "source": [
182 | "m1,m2,c = fit(X,Y)"
183 | ]
184 | },
185 | {
186 | "cell_type": "code",
187 | "execution_count": 40,
188 | "metadata": {},
189 | "outputs": [
190 | {
191 | "data": {
192 | "text/plain": [
193 | "(45.90929481464371, 40.81142388385525, 0.8697521309589047)"
194 | ]
195 | },
196 | "execution_count": 40,
197 | "metadata": {},
198 | "output_type": "execute_result"
199 | }
200 | ],
201 | "source": [
202 | "m1,m2,c"
203 | ]
204 | },
205 | {
206 | "cell_type": "code",
207 | "execution_count": 41,
208 | "metadata": {},
209 | "outputs": [],
210 | "source": [
211 | "from sklearn.linear_model import LinearRegression"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": 42,
217 | "metadata": {},
218 | "outputs": [
219 | {
220 | "data": {
221 | "text/plain": [
222 | "LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None,\n",
223 | " normalize=False)"
224 | ]
225 | },
226 | "execution_count": 42,
227 | "metadata": {},
228 | "output_type": "execute_result"
229 | }
230 | ],
231 | "source": [
232 | "alg = LinearRegression()\n",
233 | "alg.fit(X,Y)"
234 | ]
235 | },
236 | {
237 | "cell_type": "code",
238 | "execution_count": 43,
239 | "metadata": {},
240 | "outputs": [
241 | {
242 | "data": {
243 | "text/plain": [
244 | "array([50.96081575, 40.92630005])"
245 | ]
246 | },
247 | "execution_count": 43,
248 | "metadata": {},
249 | "output_type": "execute_result"
250 | }
251 | ],
252 | "source": [
253 | "alg.coef_"
254 | ]
255 | },
256 | {
257 | "cell_type": "code",
258 | "execution_count": 44,
259 | "metadata": {},
260 | "outputs": [
261 | {
262 | "data": {
263 | "text/plain": [
264 | "0.9999999999999996"
265 | ]
266 | },
267 | "execution_count": 44,
268 | "metadata": {},
269 | "output_type": "execute_result"
270 | }
271 | ],
272 | "source": [
273 | "alg.intercept_"
274 | ]
275 | },
276 | {
277 | "cell_type": "code",
278 | "execution_count": 45,
279 | "metadata": {},
280 | "outputs": [],
281 | "source": [
282 | "def predict(X,m1,m2,c):\n",
283 | " x1 = X[:,0]\n",
284 | " x2 = X[:,1]\n",
285 | " return x1*m1+x2*m2+c"
286 | ]
287 | },
288 | {
289 | "cell_type": "code",
290 | "execution_count": 46,
291 | "metadata": {},
292 | "outputs": [],
293 | "source": [
294 | "y_pred = predict(X,m1,m2,c)"
295 | ]
296 | },
297 | {
298 | "cell_type": "code",
299 | "execution_count": 47,
300 | "metadata": {},
301 | "outputs": [],
302 | "source": [
303 | "def score(Y_true,Y_pred):\n",
304 | " \n",
305 | " u = ((Y_true-Y_pred)**2).sum()\n",
306 | " v = ((Y_true-Y.mean())**2).sum()\n",
307 | " return 1-(u/v)"
308 | ]
309 | },
310 | {
311 | "cell_type": "code",
312 | "execution_count": 48,
313 | "metadata": {},
314 | "outputs": [
315 | {
316 | "data": {
317 | "text/plain": [
318 | "0.994081066192795"
319 | ]
320 | },
321 | "execution_count": 48,
322 | "metadata": {},
323 | "output_type": "execute_result"
324 | }
325 | ],
326 | "source": [
327 | "score(Y,y_pred)"
328 | ]
329 | },
330 | {
331 | "cell_type": "code",
332 | "execution_count": 49,
333 | "metadata": {},
334 | "outputs": [
335 | {
336 | "data": {
337 | "text/plain": [
338 | "1.0"
339 | ]
340 | },
341 | "execution_count": 49,
342 | "metadata": {},
343 | "output_type": "execute_result"
344 | }
345 | ],
346 | "source": [
347 | "alg.score(X,Y)"
348 | ]
349 | },
350 | {
351 | "cell_type": "code",
352 | "execution_count": 50,
353 | "metadata": {},
354 | "outputs": [],
355 | "source": [
356 | "def cost(Y_true,Y_pred):\n",
357 | " return ((Y_true-Y_pred)**2).mean()"
358 | ]
359 | },
360 | {
361 | "cell_type": "code",
362 | "execution_count": 51,
363 | "metadata": {},
364 | "outputs": [
365 | {
366 | "data": {
367 | "text/plain": [
368 | "24.418162570824098"
369 | ]
370 | },
371 | "execution_count": 51,
372 | "metadata": {},
373 | "output_type": "execute_result"
374 | }
375 | ],
376 | "source": [
377 | "cost(Y,y_pred)"
378 | ]
379 | }
380 | ],
381 | "metadata": {
382 | "kernelspec": {
383 | "display_name": "Python 3",
384 | "language": "python",
385 | "name": "python3"
386 | },
387 | "language_info": {
388 | "codemirror_mode": {
389 | "name": "ipython",
390 | "version": 3
391 | },
392 | "file_extension": ".py",
393 | "mimetype": "text/x-python",
394 | "name": "python",
395 | "nbconvert_exporter": "python",
396 | "pygments_lexer": "ipython3",
397 | "version": "3.6.8"
398 | }
399 | },
400 | "nbformat": 4,
401 | "nbformat_minor": 2
402 | }
403 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Dummy-data-testing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/Dummy-data-testing.png
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Dummy-data-training.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/Dummy-data-training.png
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Instructor-Notebook-Linear-Regression-Dummy-Data/data.csv:
--------------------------------------------------------------------------------
1 | 32.502345269453031,31.70700584656992
2 | 53.426804033275019,68.77759598163891
3 | 61.530358025636438,62.562382297945803
4 | 47.475639634786098,71.546632233567777
5 | 59.813207869512318,87.230925133687393
6 | 55.142188413943821,78.211518270799232
7 | 52.211796692214001,79.64197304980874
8 | 39.299566694317065,59.171489321869508
9 | 48.10504169176825,75.331242297063056
10 | 52.550014442733818,71.300879886850353
11 | 45.419730144973755,55.165677145959123
12 | 54.351634881228918,82.478846757497919
13 | 44.164049496773352,62.008923245725825
14 | 58.16847071685779,75.392870425994957
15 | 56.727208057096611,81.43619215887864
16 | 48.955888566093719,60.723602440673965
17 | 44.687196231480904,82.892503731453715
18 | 60.297326851333466,97.379896862166078
19 | 45.618643772955828,48.847153317355072
20 | 38.816817537445637,56.877213186268506
21 | 66.189816606752601,83.878564664602763
22 | 65.41605174513407,118.59121730252249
23 | 47.48120860786787,57.251819462268969
24 | 41.57564261748702,51.391744079832307
25 | 51.84518690563943,75.380651665312357
26 | 59.370822011089523,74.765564032151374
27 | 57.31000343834809,95.455052922574737
28 | 63.615561251453308,95.229366017555307
29 | 46.737619407976972,79.052406169565586
30 | 50.556760148547767,83.432071421323712
31 | 52.223996085553047,63.358790317497878
32 | 35.567830047746632,41.412885303700563
33 | 42.436476944055642,76.617341280074044
34 | 58.16454011019286,96.769566426108199
35 | 57.504447615341789,74.084130116602523
36 | 45.440530725319981,66.588144414228594
37 | 61.89622268029126,77.768482417793024
38 | 33.093831736163963,50.719588912312084
39 | 36.436009511386871,62.124570818071781
40 | 37.675654860850742,60.810246649902211
41 | 44.555608383275356,52.682983366387781
42 | 43.318282631865721,58.569824717692867
43 | 50.073145632289034,82.905981485070512
44 | 43.870612645218372,61.424709804339123
45 | 62.997480747553091,115.24415280079529
46 | 32.669043763467187,45.570588823376085
47 | 40.166899008703702,54.084054796223612
48 | 53.575077531673656,87.994452758110413
49 | 33.864214971778239,52.725494375900425
50 | 64.707138666121296,93.576118692658241
51 | 38.119824026822805,80.166275447370964
52 | 44.502538064645101,65.101711570560326
53 | 40.599538384552318,65.562301260400375
54 | 41.720676356341293,65.280886920822823
55 | 51.088634678336796,73.434641546324301
56 | 55.078095904923202,71.13972785861894
57 | 41.377726534895203,79.102829683549857
58 | 62.494697427269791,86.520538440347153
59 | 49.203887540826003,84.742697807826218
60 | 41.102685187349664,59.358850248624933
61 | 41.182016105169822,61.684037524833627
62 | 50.186389494880601,69.847604158249183
63 | 52.378446219236217,86.098291205774103
64 | 50.135485486286122,59.108839267699643
65 | 33.644706006191782,69.89968164362763
66 | 39.557901222906828,44.862490711164398
67 | 56.130388816875467,85.498067778840223
68 | 57.362052133238237,95.536686846467219
69 | 60.269214393997906,70.251934419771587
70 | 35.678093889410732,52.721734964774988
71 | 31.588116998132829,50.392670135079896
72 | 53.66093226167304,63.642398775657753
73 | 46.682228649471917,72.247251068662365
74 | 43.107820219102464,57.812512976181402
75 | 70.34607561504933,104.25710158543822
76 | 44.492855880854073,86.642020318822006
77 | 57.50453330326841,91.486778000110135
78 | 36.930076609191808,55.231660886212836
79 | 55.805733357942742,79.550436678507609
80 | 38.954769073377065,44.847124242467601
81 | 56.901214702247074,80.207523139682763
82 | 56.868900661384046,83.14274979204346
83 | 34.33312470421609,55.723489260543914
84 | 59.04974121466681,77.634182511677864
85 | 57.788223993230673,99.051414841748269
86 | 54.282328705967409,79.120646274680027
87 | 51.088719898979143,69.588897851118475
88 | 50.282836348230731,69.510503311494389
89 | 44.211741752090113,73.687564318317285
90 | 38.005488008060688,61.366904537240131
91 | 32.940479942618296,67.170655768995118
92 | 53.691639571070056,85.668203145001542
93 | 68.76573426962166,114.85387123391394
94 | 46.230966498310252,90.123572069967423
95 | 68.319360818255362,97.919821035242848
96 | 50.030174340312143,81.536990783015028
97 | 49.239765342753763,72.111832469615663
98 | 50.039575939875988,85.232007342325673
99 | 48.149858891028863,66.224957888054632
100 | 25.128484647772304,53.454394214850524
101 |
102 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Linear-Regression-Single-Feature-Workbook.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import pandas as pd\n",
11 | "import matplotlib.pyplot as plt"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 2,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "data = np.loadtxt(\"dummy_data.csv\",delimiter=\",\")"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 3,
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "X = data[:,0]\n",
30 | "Y = data[:,1]"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": 4,
36 | "metadata": {},
37 | "outputs": [
38 | {
39 | "data": {
40 | "text/plain": [
41 | "((100,), (100,))"
42 | ]
43 | },
44 | "execution_count": 4,
45 | "metadata": {},
46 | "output_type": "execute_result"
47 | }
48 | ],
49 | "source": [
50 | "X.shape,Y.shape"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 5,
56 | "metadata": {},
57 | "outputs": [],
58 | "source": [
59 | "from sklearn import model_selection"
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": 6,
65 | "metadata": {},
66 | "outputs": [],
67 | "source": [
68 | "X_train, X_val, Y_train, Y_val = model_selection.train_test_split(X,Y)"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": 13,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "def fit(X,Y):\n",
78 | " num = (Y*X).mean() - (Y.mean())*(X.mean())\n",
79 | " den = (X**2).mean() - (X.mean())**2\n",
80 | " \n",
81 | " m = num/den\n",
82 | " c = Y.mean() - m*(X.mean())\n",
83 | " return m,c"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": 15,
89 | "metadata": {},
90 | "outputs": [
91 | {
92 | "name": "stdout",
93 | "output_type": "stream",
94 | "text": [
95 | "1.3264851960581232 8.821135864835227\n"
96 | ]
97 | }
98 | ],
99 | "source": [
100 | "m,c = fit(X_train,Y_train)\n",
101 | "print(m,c)"
102 | ]
103 | },
104 | {
105 | "cell_type": "code",
106 | "execution_count": 16,
107 | "metadata": {},
108 | "outputs": [],
109 | "source": [
110 | "def predict(X,m,c):\n",
111 | " return m*X+c"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 18,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "y_pred = predict(X_val, m, c)"
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": 19,
126 | "metadata": {},
127 | "outputs": [
128 | {
129 | "data": {
130 | "text/plain": [
131 | "array([69.33359149, 96.62094772, 58.79733429, 64.16299542, 56.00133588,\n",
132 | " 60.31106969, 81.88141471, 61.29410622, 76.5895665 , 95.59456009,\n",
133 | " 93.2062361 , 78.52795208, 59.23485308, 72.63176152, 87.57565234,\n",
134 | " 84.06893757, 72.69121088, 68.09804012, 75.88392976, 84.29975481,\n",
135 | " 66.28219649, 84.25689071, 90.44024489, 57.80833578, 71.79686901])"
136 | ]
137 | },
138 | "execution_count": 19,
139 | "metadata": {},
140 | "output_type": "execute_result"
141 | }
142 | ],
143 | "source": [
144 | "y_pred"
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "execution_count": 21,
150 | "metadata": {},
151 | "outputs": [],
152 | "source": [
153 | "def score(Y_val,Y_pred):\n",
154 | " \n",
155 | " u = ((Y_val - Y_pred)**2).sum()\n",
156 | " v = ((Y_val - Y.mean())**2).sum()\n",
157 | " return 1-(u/v)"
158 | ]
159 | },
160 | {
161 | "cell_type": "code",
162 | "execution_count": 22,
163 | "metadata": {},
164 | "outputs": [
165 | {
166 | "data": {
167 | "text/plain": [
168 | "0.5287975397525602"
169 | ]
170 | },
171 | "execution_count": 22,
172 | "metadata": {},
173 | "output_type": "execute_result"
174 | }
175 | ],
176 | "source": [
177 | "score(Y_val,y_pred)"
178 | ]
179 | },
180 | {
181 | "cell_type": "code",
182 | "execution_count": 27,
183 | "metadata": {},
184 | "outputs": [],
185 | "source": [
186 | "def cost(Y_val, Y_pred):\n",
187 | " return ((Y_val-Y_pred)**2).mean()"
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": 28,
193 | "metadata": {},
194 | "outputs": [
195 | {
196 | "data": {
197 | "text/plain": [
198 | "127.7364122993684"
199 | ]
200 | },
201 | "execution_count": 28,
202 | "metadata": {},
203 | "output_type": "execute_result"
204 | }
205 | ],
206 | "source": [
207 | "cost(Y_val, y_pred)"
208 | ]
209 | },
210 | {
211 | "cell_type": "code",
212 | "execution_count": 29,
213 | "metadata": {},
214 | "outputs": [],
215 | "source": [
216 | "from sklearn.linear_model import LinearRegression"
217 | ]
218 | },
219 | {
220 | "cell_type": "code",
221 | "execution_count": 34,
222 | "metadata": {},
223 | "outputs": [],
224 | "source": [
225 | "X_train_, X_val_ = X_train.reshape(-1,1),X_val.reshape(-1,1)"
226 | ]
227 | },
228 | {
229 | "cell_type": "code",
230 | "execution_count": 35,
231 | "metadata": {},
232 | "outputs": [
233 | {
234 | "data": {
235 | "text/plain": [
236 | "((75, 1), (25, 1))"
237 | ]
238 | },
239 | "execution_count": 35,
240 | "metadata": {},
241 | "output_type": "execute_result"
242 | }
243 | ],
244 | "source": [
245 | "X_train_.shape,X_val_.shape"
246 | ]
247 | },
248 | {
249 | "cell_type": "code",
250 | "execution_count": 36,
251 | "metadata": {},
252 | "outputs": [
253 | {
254 | "data": {
255 | "text/plain": [
256 | "(75,)"
257 | ]
258 | },
259 | "execution_count": 36,
260 | "metadata": {},
261 | "output_type": "execute_result"
262 | }
263 | ],
264 | "source": [
265 | "Y_train.shape"
266 | ]
267 | },
268 | {
269 | "cell_type": "code",
270 | "execution_count": 37,
271 | "metadata": {},
272 | "outputs": [
273 | {
274 | "data": {
275 | "text/plain": [
276 | "LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None,\n",
277 | " normalize=False)"
278 | ]
279 | },
280 | "execution_count": 37,
281 | "metadata": {},
282 | "output_type": "execute_result"
283 | }
284 | ],
285 | "source": [
286 | "alg = LinearRegression()\n",
287 | "alg.fit(X_train_,Y_train)"
288 | ]
289 | },
290 | {
291 | "cell_type": "code",
292 | "execution_count": 39,
293 | "metadata": {},
294 | "outputs": [
295 | {
296 | "data": {
297 | "text/plain": [
298 | "0.5205934165296406"
299 | ]
300 | },
301 | "execution_count": 39,
302 | "metadata": {},
303 | "output_type": "execute_result"
304 | }
305 | ],
306 | "source": [
307 | "alg.score(X_val_,Y_val)"
308 | ]
309 | },
310 | {
311 | "cell_type": "code",
312 | "execution_count": 40,
313 | "metadata": {},
314 | "outputs": [
315 | {
316 | "data": {
317 | "text/plain": [
318 | "1.3264851960581117"
319 | ]
320 | },
321 | "execution_count": 40,
322 | "metadata": {},
323 | "output_type": "execute_result"
324 | }
325 | ],
326 | "source": [
327 | "alg.coef_[0]"
328 | ]
329 | },
330 | {
331 | "cell_type": "code",
332 | "execution_count": 41,
333 | "metadata": {},
334 | "outputs": [
335 | {
336 | "data": {
337 | "text/plain": [
338 | "8.821135864835782"
339 | ]
340 | },
341 | "execution_count": 41,
342 | "metadata": {},
343 | "output_type": "execute_result"
344 | }
345 | ],
346 | "source": [
347 | "alg.intercept_"
348 | ]
349 | },
350 | {
351 | "cell_type": "code",
352 | "execution_count": null,
353 | "metadata": {},
354 | "outputs": [],
355 | "source": []
356 | }
357 | ],
358 | "metadata": {
359 | "kernelspec": {
360 | "display_name": "Python 3",
361 | "language": "python",
362 | "name": "python3"
363 | },
364 | "language_info": {
365 | "codemirror_mode": {
366 | "name": "ipython",
367 | "version": 3
368 | },
369 | "file_extension": ".py",
370 | "mimetype": "text/x-python",
371 | "name": "python",
372 | "nbconvert_exporter": "python",
373 | "pygments_lexer": "ipython3",
374 | "version": "3.6.8"
375 | }
376 | },
377 | "nbformat": 4,
378 | "nbformat_minor": 2
379 | }
380 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Linear-Regression-Two-Feature-Workbook.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import pandas as pd\n",
11 | "import sklearn.datasets"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 34,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "X, Y = sklearn.datasets.make_regression(n_samples=500, n_features=2, bias=1)"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 35,
26 | "metadata": {},
27 | "outputs": [
28 | {
29 | "data": {
30 | "text/plain": [
31 | "(500,)"
32 | ]
33 | },
34 | "execution_count": 35,
35 | "metadata": {},
36 | "output_type": "execute_result"
37 | }
38 | ],
39 | "source": [
40 | "X[:,0].shape"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": 36,
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "df = pd.DataFrame(X)"
50 | ]
51 | },
52 | {
53 | "cell_type": "code",
54 | "execution_count": 37,
55 | "metadata": {},
56 | "outputs": [
57 | {
58 | "data": {
59 | "text/html": [
60 | "\n",
61 | "\n",
74 | "
\n",
75 | " \n",
76 | " \n",
77 | " | \n",
78 | " 0 | \n",
79 | " 1 | \n",
80 | "
\n",
81 | " \n",
82 | " \n",
83 | " \n",
84 | " count | \n",
85 | " 500.000000 | \n",
86 | " 500.000000 | \n",
87 | "
\n",
88 | " \n",
89 | " mean | \n",
90 | " -0.027919 | \n",
91 | " 0.093867 | \n",
92 | "
\n",
93 | " \n",
94 | " std | \n",
95 | " 0.980243 | \n",
96 | " 1.059402 | \n",
97 | "
\n",
98 | " \n",
99 | " min | \n",
100 | " -2.372996 | \n",
101 | " -3.030447 | \n",
102 | "
\n",
103 | " \n",
104 | " 25% | \n",
105 | " -0.720534 | \n",
106 | " -0.579339 | \n",
107 | "
\n",
108 | " \n",
109 | " 50% | \n",
110 | " -0.049479 | \n",
111 | " 0.090556 | \n",
112 | "
\n",
113 | " \n",
114 | " 75% | \n",
115 | " 0.602104 | \n",
116 | " 0.853455 | \n",
117 | "
\n",
118 | " \n",
119 | " max | \n",
120 | " 3.268004 | \n",
121 | " 3.453502 | \n",
122 | "
\n",
123 | " \n",
124 | "
\n",
125 | "
"
126 | ],
127 | "text/plain": [
128 | " 0 1\n",
129 | "count 500.000000 500.000000\n",
130 | "mean -0.027919 0.093867\n",
131 | "std 0.980243 1.059402\n",
132 | "min -2.372996 -3.030447\n",
133 | "25% -0.720534 -0.579339\n",
134 | "50% -0.049479 0.090556\n",
135 | "75% 0.602104 0.853455\n",
136 | "max 3.268004 3.453502"
137 | ]
138 | },
139 | "execution_count": 37,
140 | "metadata": {},
141 | "output_type": "execute_result"
142 | }
143 | ],
144 | "source": [
145 | "df.describe()"
146 | ]
147 | },
148 | {
149 | "cell_type": "code",
150 | "execution_count": 38,
151 | "metadata": {},
152 | "outputs": [],
153 | "source": [
154 | "def fit(X,Y):\n",
155 | " x1 = X[:,0]\n",
156 | " x2 = X[:,1]\n",
157 | " alpha_num = (x2*x1).mean() + (x2.mean())*(x1.mean())\n",
158 | " alpha_den = (x2.mean())**2 + (x2**2).mean()\n",
159 | " alpha = alpha_num/alpha_den\n",
160 | " \n",
161 | " m1_num = (Y*x1).mean() + (Y.mean())*(x1.mean()) + alpha*((Y*x2).mean()) - alpha*((Y.mean())*x2.mean())\n",
162 | " m1_den = (x1**2).mean() + (x1.mean())**2 - alpha*((x1*x2).mean()) - alpha*(x1.mean())*(x2.mean())\n",
163 | " \n",
164 | " m1 = m1_num/m1_den\n",
165 | " \n",
166 | " m2_num = (Y*x2).mean() - m1*((x1*x2).mean()) +((Y.mean())*(x2.mean())) - m1*(x1.mean())*(x2.mean())\n",
167 | " m2_den = (x2.mean())**2 + (x2**2).mean()\n",
168 | " \n",
169 | " m2 = m2_num/m2_den\n",
170 | " \n",
171 | " c = Y.mean() - m1*(x1.mean()) - m2*(x2.mean())\n",
172 | " \n",
173 | " return m1,m2,c"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 39,
179 | "metadata": {},
180 | "outputs": [],
181 | "source": [
182 | "m1,m2,c = fit(X,Y)"
183 | ]
184 | },
185 | {
186 | "cell_type": "code",
187 | "execution_count": 40,
188 | "metadata": {},
189 | "outputs": [
190 | {
191 | "data": {
192 | "text/plain": [
193 | "(45.90929481464371, 40.81142388385525, 0.8697521309589047)"
194 | ]
195 | },
196 | "execution_count": 40,
197 | "metadata": {},
198 | "output_type": "execute_result"
199 | }
200 | ],
201 | "source": [
202 | "m1,m2,c"
203 | ]
204 | },
205 | {
206 | "cell_type": "code",
207 | "execution_count": 41,
208 | "metadata": {},
209 | "outputs": [],
210 | "source": [
211 | "from sklearn.linear_model import LinearRegression"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": 42,
217 | "metadata": {},
218 | "outputs": [
219 | {
220 | "data": {
221 | "text/plain": [
222 | "LinearRegression(copy_X=True, fit_intercept=True, n_jobs=None,\n",
223 | " normalize=False)"
224 | ]
225 | },
226 | "execution_count": 42,
227 | "metadata": {},
228 | "output_type": "execute_result"
229 | }
230 | ],
231 | "source": [
232 | "alg = LinearRegression()\n",
233 | "alg.fit(X,Y)"
234 | ]
235 | },
236 | {
237 | "cell_type": "code",
238 | "execution_count": 43,
239 | "metadata": {},
240 | "outputs": [
241 | {
242 | "data": {
243 | "text/plain": [
244 | "array([50.96081575, 40.92630005])"
245 | ]
246 | },
247 | "execution_count": 43,
248 | "metadata": {},
249 | "output_type": "execute_result"
250 | }
251 | ],
252 | "source": [
253 | "alg.coef_"
254 | ]
255 | },
256 | {
257 | "cell_type": "code",
258 | "execution_count": 44,
259 | "metadata": {},
260 | "outputs": [
261 | {
262 | "data": {
263 | "text/plain": [
264 | "0.9999999999999996"
265 | ]
266 | },
267 | "execution_count": 44,
268 | "metadata": {},
269 | "output_type": "execute_result"
270 | }
271 | ],
272 | "source": [
273 | "alg.intercept_"
274 | ]
275 | },
276 | {
277 | "cell_type": "code",
278 | "execution_count": 45,
279 | "metadata": {},
280 | "outputs": [],
281 | "source": [
282 | "def predict(X,m1,m2,c):\n",
283 | " x1 = X[:,0]\n",
284 | " x2 = X[:,1]\n",
285 | " return x1*m1+x2*m2+c"
286 | ]
287 | },
288 | {
289 | "cell_type": "code",
290 | "execution_count": 46,
291 | "metadata": {},
292 | "outputs": [],
293 | "source": [
294 | "y_pred = predict(X,m1,m2,c)"
295 | ]
296 | },
297 | {
298 | "cell_type": "code",
299 | "execution_count": 47,
300 | "metadata": {},
301 | "outputs": [],
302 | "source": [
303 | "def score(Y_true,Y_pred):\n",
304 | " \n",
305 | " u = ((Y_true-Y_pred)**2).sum()\n",
306 | " v = ((Y_true-Y.mean())**2).sum()\n",
307 | " return 1-(u/v)"
308 | ]
309 | },
310 | {
311 | "cell_type": "code",
312 | "execution_count": 48,
313 | "metadata": {},
314 | "outputs": [
315 | {
316 | "data": {
317 | "text/plain": [
318 | "0.994081066192795"
319 | ]
320 | },
321 | "execution_count": 48,
322 | "metadata": {},
323 | "output_type": "execute_result"
324 | }
325 | ],
326 | "source": [
327 | "score(Y,y_pred)"
328 | ]
329 | },
330 | {
331 | "cell_type": "code",
332 | "execution_count": 49,
333 | "metadata": {},
334 | "outputs": [
335 | {
336 | "data": {
337 | "text/plain": [
338 | "1.0"
339 | ]
340 | },
341 | "execution_count": 49,
342 | "metadata": {},
343 | "output_type": "execute_result"
344 | }
345 | ],
346 | "source": [
347 | "alg.score(X,Y)"
348 | ]
349 | },
350 | {
351 | "cell_type": "code",
352 | "execution_count": 50,
353 | "metadata": {},
354 | "outputs": [],
355 | "source": [
356 | "def cost(Y_true,Y_pred):\n",
357 | " return ((Y_true-Y_pred)**2).mean()"
358 | ]
359 | },
360 | {
361 | "cell_type": "code",
362 | "execution_count": 51,
363 | "metadata": {},
364 | "outputs": [
365 | {
366 | "data": {
367 | "text/plain": [
368 | "24.418162570824098"
369 | ]
370 | },
371 | "execution_count": 51,
372 | "metadata": {},
373 | "output_type": "execute_result"
374 | }
375 | ],
376 | "source": [
377 | "cost(Y,y_pred)"
378 | ]
379 | }
380 | ],
381 | "metadata": {
382 | "kernelspec": {
383 | "display_name": "Python 3",
384 | "language": "python",
385 | "name": "python3"
386 | },
387 | "language_info": {
388 | "codemirror_mode": {
389 | "name": "ipython",
390 | "version": 3
391 | },
392 | "file_extension": ".py",
393 | "mimetype": "text/x-python",
394 | "name": "python",
395 | "nbconvert_exporter": "python",
396 | "pygments_lexer": "ipython3",
397 | "version": "3.6.8"
398 | }
399 | },
400 | "nbformat": 4,
401 | "nbformat_minor": 2
402 | }
403 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Module-8-Instructor-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/Module-8-Instructor-Notes.pdf
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Module-8-Linear-Regression-2-Feature-Derivation.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/Module-8-Linear-Regression-2-Feature-Derivation.pdf
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Module-8-Linear-Regression-Coding.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/Module-8-Linear-Regression-Coding.pdf
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/Module-8-Linear-Regression-Theory.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-08-Linear-Regression/Module-8-Linear-Regression-Theory.pdf
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/dummy_data.csv:
--------------------------------------------------------------------------------
1 | 32.502345269453031,31.70700584656992
2 | 53.426804033275019,68.77759598163891
3 | 61.530358025636438,62.562382297945803
4 | 47.475639634786098,71.546632233567777
5 | 59.813207869512318,87.230925133687393
6 | 55.142188413943821,78.211518270799232
7 | 52.211796692214001,79.64197304980874
8 | 39.299566694317065,59.171489321869508
9 | 48.10504169176825,75.331242297063056
10 | 52.550014442733818,71.300879886850353
11 | 45.419730144973755,55.165677145959123
12 | 54.351634881228918,82.478846757497919
13 | 44.164049496773352,62.008923245725825
14 | 58.16847071685779,75.392870425994957
15 | 56.727208057096611,81.43619215887864
16 | 48.955888566093719,60.723602440673965
17 | 44.687196231480904,82.892503731453715
18 | 60.297326851333466,97.379896862166078
19 | 45.618643772955828,48.847153317355072
20 | 38.816817537445637,56.877213186268506
21 | 66.189816606752601,83.878564664602763
22 | 65.41605174513407,118.59121730252249
23 | 47.48120860786787,57.251819462268969
24 | 41.57564261748702,51.391744079832307
25 | 51.84518690563943,75.380651665312357
26 | 59.370822011089523,74.765564032151374
27 | 57.31000343834809,95.455052922574737
28 | 63.615561251453308,95.229366017555307
29 | 46.737619407976972,79.052406169565586
30 | 50.556760148547767,83.432071421323712
31 | 52.223996085553047,63.358790317497878
32 | 35.567830047746632,41.412885303700563
33 | 42.436476944055642,76.617341280074044
34 | 58.16454011019286,96.769566426108199
35 | 57.504447615341789,74.084130116602523
36 | 45.440530725319981,66.588144414228594
37 | 61.89622268029126,77.768482417793024
38 | 33.093831736163963,50.719588912312084
39 | 36.436009511386871,62.124570818071781
40 | 37.675654860850742,60.810246649902211
41 | 44.555608383275356,52.682983366387781
42 | 43.318282631865721,58.569824717692867
43 | 50.073145632289034,82.905981485070512
44 | 43.870612645218372,61.424709804339123
45 | 62.997480747553091,115.24415280079529
46 | 32.669043763467187,45.570588823376085
47 | 40.166899008703702,54.084054796223612
48 | 53.575077531673656,87.994452758110413
49 | 33.864214971778239,52.725494375900425
50 | 64.707138666121296,93.576118692658241
51 | 38.119824026822805,80.166275447370964
52 | 44.502538064645101,65.101711570560326
53 | 40.599538384552318,65.562301260400375
54 | 41.720676356341293,65.280886920822823
55 | 51.088634678336796,73.434641546324301
56 | 55.078095904923202,71.13972785861894
57 | 41.377726534895203,79.102829683549857
58 | 62.494697427269791,86.520538440347153
59 | 49.203887540826003,84.742697807826218
60 | 41.102685187349664,59.358850248624933
61 | 41.182016105169822,61.684037524833627
62 | 50.186389494880601,69.847604158249183
63 | 52.378446219236217,86.098291205774103
64 | 50.135485486286122,59.108839267699643
65 | 33.644706006191782,69.89968164362763
66 | 39.557901222906828,44.862490711164398
67 | 56.130388816875467,85.498067778840223
68 | 57.362052133238237,95.536686846467219
69 | 60.269214393997906,70.251934419771587
70 | 35.678093889410732,52.721734964774988
71 | 31.588116998132829,50.392670135079896
72 | 53.66093226167304,63.642398775657753
73 | 46.682228649471917,72.247251068662365
74 | 43.107820219102464,57.812512976181402
75 | 70.34607561504933,104.25710158543822
76 | 44.492855880854073,86.642020318822006
77 | 57.50453330326841,91.486778000110135
78 | 36.930076609191808,55.231660886212836
79 | 55.805733357942742,79.550436678507609
80 | 38.954769073377065,44.847124242467601
81 | 56.901214702247074,80.207523139682763
82 | 56.868900661384046,83.14274979204346
83 | 34.33312470421609,55.723489260543914
84 | 59.04974121466681,77.634182511677864
85 | 57.788223993230673,99.051414841748269
86 | 54.282328705967409,79.120646274680027
87 | 51.088719898979143,69.588897851118475
88 | 50.282836348230731,69.510503311494389
89 | 44.211741752090113,73.687564318317285
90 | 38.005488008060688,61.366904537240131
91 | 32.940479942618296,67.170655768995118
92 | 53.691639571070056,85.668203145001542
93 | 68.76573426962166,114.85387123391394
94 | 46.230966498310252,90.123572069967423
95 | 68.319360818255362,97.919821035242848
96 | 50.030174340312143,81.536990783015028
97 | 49.239765342753763,72.111832469615663
98 | 50.039575939875988,85.232007342325673
99 | 48.149858891028863,66.224957888054632
100 | 25.128484647772304,53.454394214850524
101 |
102 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/pred.csv:
--------------------------------------------------------------------------------
1 | 105.01752
2 | 106.55391
3 | 184.05558
4 | 83.95993
5 | 53.07755
6 | 100.42741
7 | 152.13249
8 | 37.84418
9 | 111.97708
10 | 164.42674
11 | 137.77640
12 | 96.44149
13 | 138.35269
14 | 137.87540
15 | 162.44771
16 | 171.87823
17 | 109.20798
18 | 105.68729
19 | 93.35118
20 | 167.73058
21 | 171.29417
22 | 100.34851
23 | 252.54783
24 | 145.71034
25 | 216.57186
26 | 161.29918
27 | 213.98343
28 | 75.32330
29 | 190.28001
30 | 204.95917
31 | 220.80747
32 | 166.79811
33 | 112.62704
34 | 179.37984
35 | 77.20079
36 | 62.38093
37 | 108.85422
38 | 153.49546
39 | 154.37405
40 | 204.56079
41 | 112.91530
42 | 153.88803
43 | 85.58130
44 | 112.77262
45 | 149.05404
46 | 145.04429
47 | 87.73973
48 | 80.17274
49 | 132.63191
50 | 260.12812
51 | 212.82065
52 | 251.31958
53 | 166.28086
54 | 181.93927
55 | 168.61832
56 | 203.15082
57 | 217.51594
58 | 173.08719
59 | 175.04740
60 | 112.62256
61 | 276.95553
62 | 91.22778
63 | 294.98127
64 | 117.69053
65 | 80.13376
66 | 177.72194
67 | 147.91294
68 | 154.12167
69 | 44.88344
70 | 254.89593
71 | 206.80064
72 | 87.78971
73 | 228.07789
74 | 190.79471
75 | 185.93026
76 | 164.71938
77 | 188.61200
78 | 104.72266
79 | 196.66016
80 | 251.32202
81 | 125.39160
82 | 118.81767
83 | 151.36446
84 | 207.43900
85 | 150.65499
86 | 143.47562
87 | 154.04793
88 | 110.66636
89 | 133.21059
90 | 117.62724
91 | 200.27164
92 | 168.90490
93 | 126.25024
94 | 59.42000
95 | 232.58257
96 | 182.33701
97 | 185.42020
98 | 89.27382
99 | 190.17697
100 | 221.59087
101 | 106.89572
102 | 81.29316
103 | 126.87020
104 | 242.39750
105 | 152.96285
106 | 124.06605
107 | 252.62663
108 | 233.82727
109 | 189.08734
110 | 163.40102
111 | 163.49443
112 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/pred2.csv:
--------------------------------------------------------------------------------
1 | 105.01752
2 | 106.55391
3 | 184.05558
4 | 83.95993
5 | 53.07755
6 | 100.42741
7 | 152.13249
8 | 37.84418
9 | 111.97708
10 | 164.42674
11 | 137.77640
12 | 96.44149
13 | 138.35269
14 | 137.87540
15 | 162.44771
16 | 171.87823
17 | 109.20798
18 | 105.68729
19 | 93.35118
20 | 167.73058
21 | 171.29417
22 | 100.34851
23 | 252.54783
24 | 145.71034
25 | 216.57186
26 | 161.29918
27 | 213.98343
28 | 75.32330
29 | 190.28001
30 | 204.95917
31 | 220.80747
32 | 166.79811
33 | 112.62704
34 | 179.37984
35 | 77.20079
36 | 62.38093
37 | 108.85422
38 | 153.49546
39 | 154.37405
40 | 204.56079
41 | 112.91530
42 | 153.88803
43 | 85.58130
44 | 112.77262
45 | 149.05404
46 | 145.04429
47 | 87.73973
48 | 80.17274
49 | 132.63191
50 | 260.12812
51 | 212.82065
52 | 251.31958
53 | 166.28086
54 | 181.93927
55 | 168.61832
56 | 203.15082
57 | 217.51594
58 | 173.08719
59 | 175.04740
60 | 112.62256
61 | 276.95553
62 | 91.22778
63 | 294.98127
64 | 117.69053
65 | 80.13376
66 | 177.72194
67 | 147.91294
68 | 154.12167
69 | 44.88344
70 | 254.89593
71 | 206.80064
72 | 87.78971
73 | 228.07789
74 | 190.79471
75 | 185.93026
76 | 164.71938
77 | 188.61200
78 | 104.72266
79 | 196.66016
80 | 251.32202
81 | 125.39160
82 | 118.81767
83 | 151.36446
84 | 207.43900
85 | 150.65499
86 | 143.47562
87 | 154.04793
88 | 110.66636
89 | 133.21059
90 | 117.62724
91 | 200.27164
92 | 168.90490
93 | 126.25024
94 | 59.42000
95 | 232.58257
96 | 182.33701
97 | 185.42020
98 | 89.27382
99 | 190.17697
100 | 221.59087
101 | 106.89572
102 | 81.29316
103 | 126.87020
104 | 242.39750
105 | 152.96285
106 | 124.06605
107 | 252.62663
108 | 233.82727
109 | 189.08734
110 | 163.40102
111 | 163.49443
112 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/pred3.csv:
--------------------------------------------------------------------------------
1 | 99.10440
2 | 99.40017
3 | 177.29785
4 | 83.46361
5 | 52.45192
6 | 96.82596
7 | 143.78099
8 | 37.93761
9 | 110.14680
10 | 159.68690
11 | 128.81618
12 | 90.89368
13 | 134.62586
14 | 144.30391
15 | 171.35822
16 | 180.39143
17 | 110.01180
18 | 113.64893
19 | 100.09822
20 | 170.03480
21 | 179.17433
22 | 102.66196
23 | 262.20863
24 | 140.29691
25 | 227.16584
26 | 157.70851
27 | 220.53865
28 | 64.41144
29 | 194.89222
30 | 204.23028
31 | 227.08989
32 | 174.23840
33 | 111.28681
34 | 180.12707
35 | 65.71839
36 | 63.54505
37 | 112.26346
38 | 156.72097
39 | 148.39327
40 | 205.09272
41 | 112.66235
42 | 151.37078
43 | 86.11988
44 | 108.39721
45 | 152.50809
46 | 145.78958
47 | 81.95961
48 | 68.48161
49 | 132.30053
50 | 266.03877
51 | 215.37278
52 | 243.12619
53 | 174.71080
54 | 192.79887
55 | 175.53135
56 | 217.84002
57 | 229.17675
58 | 172.37789
59 | 174.72619
60 | 119.86971
61 | 288.05998
62 | 83.93085
63 | 299.85788
64 | 127.27470
65 | 73.88812
66 | 190.67286
67 | 147.96199
68 | 157.31267
69 | 33.25478
70 | 245.47441
71 | 212.91729
72 | 84.97664
73 | 226.53303
74 | 212.23403
75 | 185.55919
76 | 170.96500
77 | 192.25786
78 | 112.62650
79 | 206.15321
80 | 272.16640
81 | 125.56203
82 | 112.08439
83 | 157.74624
84 | 214.27431
85 | 161.90358
86 | 141.54357
87 | 159.64935
88 | 119.09559
89 | 137.92911
90 | 117.12829
91 | 205.56867
92 | 172.32327
93 | 120.80099
94 | 50.84250
95 | 253.35849
96 | 188.58882
97 | 187.83219
98 | 86.53686
99 | 177.49512
100 | 217.66551
101 | 97.78021
102 | 79.13093
103 | 134.47668
104 | 256.60953
105 | 159.13527
106 | 125.95533
107 | 244.70168
108 | 251.30347
109 | 195.24909
110 | 165.43305
111 | 157.35125
112 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/pred4.csv:
--------------------------------------------------------------------------------
1 | 106.13884
2 | 102.12632
3 | 172.48328
4 | 80.09330
5 | 52.84626
6 | 94.23587
7 | 145.40002
8 | 35.77235
9 | 113.02473
10 | 154.61198
11 | 130.68677
12 | 90.66660
13 | 133.69054
14 | 139.78835
15 | 157.45491
16 | 172.28110
17 | 104.74024
18 | 104.03139
19 | 98.64276
20 | 164.18484
21 | 163.94530
22 | 99.39713
23 | 252.81285
24 | 145.59717
25 | 209.27829
26 | 159.41437
27 | 205.00023
28 | 69.60211
29 | 182.01622
30 | 204.64341
31 | 219.28816
32 | 165.58413
33 | 116.17222
34 | 172.02290
35 | 76.01173
36 | 61.02727
37 | 112.70627
38 | 154.88268
39 | 151.75810
40 | 195.29102
41 | 116.37200
42 | 149.11312
43 | 84.83724
44 | 113.00760
45 | 141.77180
46 | 143.74075
47 | 79.92372
48 | 76.73036
49 | 124.76470
50 | 263.09805
51 | 213.53475
52 | 243.51234
53 | 163.08099
54 | 180.54709
55 | 157.10017
56 | 201.73950
57 | 218.57990
58 | 170.12816
59 | 173.72284
60 | 107.01053
61 | 275.70635
62 | 87.24421
63 | 290.34768
64 | 117.19232
65 | 70.98653
66 | 184.00342
67 | 143.75142
68 | 155.04492
69 | 37.58162
70 | 247.89664
71 | 208.36063
72 | 93.61414
73 | 213.75315
74 | 191.06092
75 | 176.53627
76 | 162.81190
77 | 188.82298
78 | 107.62632
79 | 199.09254
80 | 245.68062
81 | 121.49137
82 | 115.24503
83 | 144.38880
84 | 206.24268
85 | 155.60719
86 | 137.68583
87 | 153.50438
88 | 120.59259
89 | 134.34709
90 | 117.44477
91 | 203.32249
92 | 168.91113
93 | 121.76208
94 | 55.56696
95 | 234.66588
96 | 182.77261
97 | 186.69197
98 | 80.28876
99 | 185.31549
100 | 213.00499
101 | 107.57629
102 | 77.66608
103 | 128.24812
104 | 240.91429
105 | 157.64925
106 | 125.01447
107 | 245.07648
108 | 233.85804
109 | 188.56923
110 | 164.02843
111 | 163.98866
112 |
--------------------------------------------------------------------------------
/Module-08-Linear-Regression/pred5.csv:
--------------------------------------------------------------------------------
1 | 105.52975
2 | 105.80392
3 | 178.60857
4 | 79.38288
5 | 52.95869
6 | 98.87264
7 | 150.71755
8 | 34.86515
9 | 113.13536
10 | 161.50184
11 | 135.86156
12 | 94.71592
13 | 138.48094
14 | 141.37442
15 | 158.76863
16 | 171.65289
17 | 106.44836
18 | 103.92660
19 | 95.38694
20 | 167.40118
21 | 166.53426
22 | 101.53465
23 | 252.45036
24 | 147.02259
25 | 214.78907
26 | 161.27557
27 | 210.61315
28 | 71.78092
29 | 189.65032
30 | 206.61343
31 | 219.98643
32 | 168.80193
33 | 116.84846
34 | 178.74400
35 | 77.03247
36 | 59.54633
37 | 111.56738
38 | 156.95187
39 | 154.59591
40 | 198.94457
41 | 115.53884
42 | 153.46699
43 | 84.96180
44 | 113.70337
45 | 142.14156
46 | 147.31040
47 | 82.78072
48 | 77.89237
49 | 128.99006
50 | 261.58712
51 | 213.31188
52 | 243.98791
53 | 167.68132
54 | 183.69712
55 | 166.85927
56 | 202.11440
57 | 220.39236
58 | 172.40288
59 | 176.60898
60 | 109.04657
61 | 276.37790
62 | 90.99942
63 | 289.37221
64 | 119.56253
65 | 75.45688
66 | 180.78599
67 | 146.62093
68 | 156.42382
69 | 41.07904
70 | 247.90645
71 | 207.99121
72 | 90.09641
73 | 222.24170
74 | 189.86378
75 | 182.26954
76 | 164.39881
77 | 190.27067
78 | 105.44868
79 | 199.86136
80 | 245.79639
81 | 123.20281
82 | 119.41032
83 | 145.86620
84 | 206.19247
85 | 150.86282
86 | 142.23222
87 | 154.56875
88 | 115.29233
89 | 134.07810
90 | 118.42225
91 | 203.57676
92 | 170.66484
93 | 127.40980
94 | 57.13508
95 | 231.92227
96 | 185.20714
97 | 187.37495
98 | 83.55921
99 | 185.42586
100 | 217.34036
101 | 108.86715
102 | 80.98715
103 | 127.05524
104 | 242.00739
105 | 155.97234
106 | 125.67575
107 | 246.41236
108 | 234.96393
109 | 191.51298
110 | 168.33254
111 | 163.02965
112 |
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-09-Multivariate-Regression-and-Gradient-Descent/.DS_Store
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/.ipynb_checkpoints/Gradient-Descent-N-feature-Diabetes-Workbook-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import pandas as pd\n",
11 | "from sklearn import datasets"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": null,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": []
20 | }
21 | ],
22 | "metadata": {
23 | "kernelspec": {
24 | "display_name": "Python 3",
25 | "language": "python",
26 | "name": "python3"
27 | },
28 | "language_info": {
29 | "codemirror_mode": {
30 | "name": "ipython",
31 | "version": 3
32 | },
33 | "file_extension": ".py",
34 | "mimetype": "text/x-python",
35 | "name": "python",
36 | "nbconvert_exporter": "python",
37 | "pygments_lexer": "ipython3",
38 | "version": "3.6.8"
39 | }
40 | },
41 | "nbformat": 4,
42 | "nbformat_minor": 2
43 | }
44 |
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/1-Feature-Gradient-Descent-Learning-Process.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-09-Multivariate-Regression-and-Gradient-Descent/1-Feature-Gradient-Descent-Learning-Process.png
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/Lecture-9-Multivariate-Regression-and-Gradient-Descent-Coding.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-09-Multivariate-Regression-and-Gradient-Descent/Lecture-9-Multivariate-Regression-and-Gradient-Descent-Coding.pdf
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/Lecture-9-Multivariate-Regression-and-Gradient-Descent-Theory.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-09-Multivariate-Regression-and-Gradient-Descent/Lecture-9-Multivariate-Regression-and-Gradient-Descent-Theory.pdf
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/Module-9-Multivariable-Regression-Gradient-Descent-Instructor-Notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-09-Multivariate-Regression-and-Gradient-Descent/Module-9-Multivariable-Regression-Gradient-Descent-Instructor-Notes.pdf
--------------------------------------------------------------------------------
/Module-09-Multivariate-Regression-and-Gradient-Descent/dummy_data.csv:
--------------------------------------------------------------------------------
1 | 32.502345269453031,31.70700584656992
2 | 53.426804033275019,68.77759598163891
3 | 61.530358025636438,62.562382297945803
4 | 47.475639634786098,71.546632233567777
5 | 59.813207869512318,87.230925133687393
6 | 55.142188413943821,78.211518270799232
7 | 52.211796692214001,79.64197304980874
8 | 39.299566694317065,59.171489321869508
9 | 48.10504169176825,75.331242297063056
10 | 52.550014442733818,71.300879886850353
11 | 45.419730144973755,55.165677145959123
12 | 54.351634881228918,82.478846757497919
13 | 44.164049496773352,62.008923245725825
14 | 58.16847071685779,75.392870425994957
15 | 56.727208057096611,81.43619215887864
16 | 48.955888566093719,60.723602440673965
17 | 44.687196231480904,82.892503731453715
18 | 60.297326851333466,97.379896862166078
19 | 45.618643772955828,48.847153317355072
20 | 38.816817537445637,56.877213186268506
21 | 66.189816606752601,83.878564664602763
22 | 65.41605174513407,118.59121730252249
23 | 47.48120860786787,57.251819462268969
24 | 41.57564261748702,51.391744079832307
25 | 51.84518690563943,75.380651665312357
26 | 59.370822011089523,74.765564032151374
27 | 57.31000343834809,95.455052922574737
28 | 63.615561251453308,95.229366017555307
29 | 46.737619407976972,79.052406169565586
30 | 50.556760148547767,83.432071421323712
31 | 52.223996085553047,63.358790317497878
32 | 35.567830047746632,41.412885303700563
33 | 42.436476944055642,76.617341280074044
34 | 58.16454011019286,96.769566426108199
35 | 57.504447615341789,74.084130116602523
36 | 45.440530725319981,66.588144414228594
37 | 61.89622268029126,77.768482417793024
38 | 33.093831736163963,50.719588912312084
39 | 36.436009511386871,62.124570818071781
40 | 37.675654860850742,60.810246649902211
41 | 44.555608383275356,52.682983366387781
42 | 43.318282631865721,58.569824717692867
43 | 50.073145632289034,82.905981485070512
44 | 43.870612645218372,61.424709804339123
45 | 62.997480747553091,115.24415280079529
46 | 32.669043763467187,45.570588823376085
47 | 40.166899008703702,54.084054796223612
48 | 53.575077531673656,87.994452758110413
49 | 33.864214971778239,52.725494375900425
50 | 64.707138666121296,93.576118692658241
51 | 38.119824026822805,80.166275447370964
52 | 44.502538064645101,65.101711570560326
53 | 40.599538384552318,65.562301260400375
54 | 41.720676356341293,65.280886920822823
55 | 51.088634678336796,73.434641546324301
56 | 55.078095904923202,71.13972785861894
57 | 41.377726534895203,79.102829683549857
58 | 62.494697427269791,86.520538440347153
59 | 49.203887540826003,84.742697807826218
60 | 41.102685187349664,59.358850248624933
61 | 41.182016105169822,61.684037524833627
62 | 50.186389494880601,69.847604158249183
63 | 52.378446219236217,86.098291205774103
64 | 50.135485486286122,59.108839267699643
65 | 33.644706006191782,69.89968164362763
66 | 39.557901222906828,44.862490711164398
67 | 56.130388816875467,85.498067778840223
68 | 57.362052133238237,95.536686846467219
69 | 60.269214393997906,70.251934419771587
70 | 35.678093889410732,52.721734964774988
71 | 31.588116998132829,50.392670135079896
72 | 53.66093226167304,63.642398775657753
73 | 46.682228649471917,72.247251068662365
74 | 43.107820219102464,57.812512976181402
75 | 70.34607561504933,104.25710158543822
76 | 44.492855880854073,86.642020318822006
77 | 57.50453330326841,91.486778000110135
78 | 36.930076609191808,55.231660886212836
79 | 55.805733357942742,79.550436678507609
80 | 38.954769073377065,44.847124242467601
81 | 56.901214702247074,80.207523139682763
82 | 56.868900661384046,83.14274979204346
83 | 34.33312470421609,55.723489260543914
84 | 59.04974121466681,77.634182511677864
85 | 57.788223993230673,99.051414841748269
86 | 54.282328705967409,79.120646274680027
87 | 51.088719898979143,69.588897851118475
88 | 50.282836348230731,69.510503311494389
89 | 44.211741752090113,73.687564318317285
90 | 38.005488008060688,61.366904537240131
91 | 32.940479942618296,67.170655768995118
92 | 53.691639571070056,85.668203145001542
93 | 68.76573426962166,114.85387123391394
94 | 46.230966498310252,90.123572069967423
95 | 68.319360818255362,97.919821035242848
96 | 50.030174340312143,81.536990783015028
97 | 49.239765342753763,72.111832469615663
98 | 50.039575939875988,85.232007342325673
99 | 48.149858891028863,66.224957888054632
100 | 25.128484647772304,53.454394214850524
101 |
102 |
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-10-Project-Gradient-Descent/.DS_Store
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/.DS_Store
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/pred.csv:
--------------------------------------------------------------------------------
1 | 1.243328316734385020e+01
2 | 2.903367207669942474e+01
3 | 2.237164470248639248e+01
4 | 2.447778566303232495e+01
5 | 2.060167143566362569e+01
6 | 2.725332263279632627e+00
7 | 3.040000253294710220e+01
8 | 2.486119999484002818e+01
9 | 1.865725021214505830e+01
10 | 2.353985836804875831e+01
11 | 2.411396868326947285e+01
12 | 1.771143894574947808e+01
13 | 1.744000496177124049e+01
14 | 2.165356360784404899e+01
15 | 4.231137753323746153e+01
16 | 2.384974575891088477e+01
17 | 2.447573228839508630e+01
18 | 2.753872770923690183e+01
19 | 2.023606770827543500e+01
20 | 3.115155566148795785e+01
21 | 2.378238081908789070e+01
22 | 2.500979406539383376e+01
23 | 3.395768518384305423e+01
24 | 3.643515607296056658e+01
25 | 3.204098398401801262e+01
26 | 1.671322560067181229e+01
27 | 2.347176648444524361e+01
28 | 3.293828176921177686e+01
29 | 2.518069967932824937e+01
30 | 3.371008693302793091e+01
31 | 1.688580247524440381e+01
32 | 2.602760941961605567e+01
33 | 2.327040054131720836e+01
34 | 2.547758947810988062e+01
35 | 1.500946626244240356e+01
36 | 2.958574991086698702e+01
37 | 2.624821246391856988e+01
38 | 2.037245803799354960e+01
39 | 2.443681391400793856e+01
40 | 9.447069046393735903e+00
41 | 8.380965960459771935e+00
42 | 2.901392304929692045e+01
43 | 2.959085453828596712e+01
44 | 1.975757024378083671e+01
45 | 2.037196749982114241e+01
46 | 3.144425926052800691e+00
47 | 3.952420106062155014e+01
48 | 2.571741223562210621e+01
49 | 3.037729443928580508e+01
50 | 1.679453238547102245e+01
51 | 1.789088614299525659e+01
52 | 4.102574348303303253e+01
53 | 1.757238762365249585e+01
54 | 2.089662704954427497e+01
55 | 1.559837734052600311e+01
56 | 2.141394828907494130e+01
57 | 1.845436447241696598e+01
58 | 2.315576399920987427e+01
59 | 1.367245062980573600e+01
60 | 1.723573843647665527e+01
61 | 1.502710402452732730e+01
62 | 2.915131354454391754e+01
63 | 2.517166510911535582e+01
64 | 2.549749371494977623e+01
65 | 1.721186634228583756e+01
66 | 1.742936975423420520e+01
67 | 3.470372744359244876e+01
68 | 1.701340773312504950e+01
69 | 2.710724189332476897e+01
70 | 2.254695676860188769e+01
71 | 2.925337055127061348e+01
72 | 2.711018112720801199e+01
73 | 1.773402202966429542e+01
74 | 5.747047258391575042e+00
75 | 3.687653736880265853e+01
76 | 2.509193926864024604e+01
77 | 3.015053552585906971e+01
78 | 2.724080859564258361e+01
79 | 1.625218418119833075e+01
80 | 3.263542248249019906e+01
81 | 1.927351352718963895e+01
82 | 2.265416913695181123e+01
83 | 2.222935422917452541e+01
84 | 8.550848782101393653e+00
85 | 1.733054333883865894e+01
86 | 2.917591735368559114e+01
87 | 2.720836154567724918e+01
88 | 5.885768898216777956e+00
89 | 2.191465196885749478e+01
90 | 2.011549809749638484e+01
91 | 2.217673949706111003e+01
92 | 2.052700632603674791e+01
93 | 2.085181397297683148e+01
94 | 1.318132034688625431e+01
95 | 1.969404965541091101e+01
96 | 2.598666780330177062e+01
97 | 4.027067679776327935e+01
98 | 1.977706017227774282e+01
99 | 3.370256509766845454e+01
100 | 2.722074029081424840e+01
101 | 2.874517096358929535e+01
102 | 2.211543849728048983e+01
103 | 2.591351904673763329e+01
104 | 3.130678487243613262e+01
105 | 1.715297452363245156e+01
106 | 2.637137861560210084e+01
107 | 2.144657082579308849e+01
108 | 3.673924640775078387e+01
109 | 2.208272792026319564e+01
110 | 1.670906162783376914e+01
111 | 2.759458226798656710e+01
112 | -5.974769126904888594e-02
113 | 1.386188158028668305e+01
114 | 1.628772085823745641e+01
115 | 3.577134415043777693e+01
116 | 2.085214631437199273e+01
117 | 2.077286887743407462e+01
118 | 2.535136008312820977e+01
119 | 2.178782231581693907e+01
120 | 1.884720299482092543e+01
121 | 1.351489117797891737e+01
122 | 3.561958187626196803e+01
123 | 2.309139679354694508e+01
124 | 2.501812774951412521e+01
125 | 1.746493376694795074e+01
126 | 2.073710019167040741e+01
127 | 1.472993742214605994e+01
128 |
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/pred_dummy_features_feature_scaling_hetero.csv:
--------------------------------------------------------------------------------
1 | 1.793296065492189584e+01
2 | 2.972736923962101940e+01
3 | 1.824760806105136268e+01
4 | 2.181162016652825386e+01
5 | 2.493337120368668280e+01
6 | 9.831485053058782952e+00
7 | 3.025041048271266675e+01
8 | 2.569532657874631809e+01
9 | 1.771428626948123863e+01
10 | 2.263784648400421418e+01
11 | 2.461631926949340610e+01
12 | 1.568316989317437304e+01
13 | 2.044463362244121285e+01
14 | 1.904754796103014769e+01
15 | 4.662946097077017527e+01
16 | 2.074601655868405103e+01
17 | 2.248042588201683500e+01
18 | 2.627534319139164154e+01
19 | 1.833952674413732709e+01
20 | 2.904893701709565690e+01
21 | 2.031814977068705019e+01
22 | 2.173078077224760563e+01
23 | 3.794742682653443921e+01
24 | 3.605414513937009247e+01
25 | 3.307608786430560599e+01
26 | 2.047420086905876602e+01
27 | 2.151862776139232025e+01
28 | 3.080245625771787132e+01
29 | 1.804545399009446527e+01
30 | 2.862648717098731410e+01
31 | 1.609602899470611348e+01
32 | 2.559083840974027169e+01
33 | 2.146555558701381017e+01
34 | 2.340205863293756394e+01
35 | 8.904839327034462571e+00
36 | 2.747778848614367675e+01
37 | 2.466521079957400886e+01
38 | 2.046360058395926984e+01
39 | 2.184862667671619363e+01
40 | 7.656958230697370027e+00
41 | 1.865942115051663563e+01
42 | 2.591556732220848858e+01
43 | 2.489546271442224068e+01
44 | 2.113495750772421999e+01
45 | 2.017056477741609299e+01
46 | 1.280181323374952207e+01
47 | 4.502360274730050804e+01
48 | 2.232525319811302822e+01
49 | 2.969336621640527696e+01
50 | 1.019869868228261467e+01
51 | 1.681066027502389204e+01
52 | 4.541531734747056248e+01
53 | 1.060265762870361250e+01
54 | 2.192973020606981294e+01
55 | 1.408031566445448846e+01
56 | 2.078249483114009166e+01
57 | 1.701905799972112732e+01
58 | 2.410534844336962479e+01
59 | 1.457102085746392994e+01
60 | 1.235025372554790835e+01
61 | 8.589697182104133688e+00
62 | 2.664175088150626891e+01
63 | 2.484615094538754931e+01
64 | 2.558500240622173294e+01
65 | 1.780263716307546318e+01
66 | 1.107389025898522306e+01
67 | 3.482830513033915310e+01
68 | 1.354441988277153008e+01
69 | 2.214722836537160333e+01
70 | 2.156939468086805434e+01
71 | 2.900084816325804482e+01
72 | 2.568317695037602988e+01
73 | 1.798826061318860781e+01
74 | 8.985231597619485200e+00
75 | 3.624065408974561819e+01
76 | 2.554709924040914970e+01
77 | 2.802037002316114211e+01
78 | 2.551552399187651687e+01
79 | 1.173162939331201926e+01
80 | 3.253557456325271602e+01
81 | 1.963893577263236168e+01
82 | 2.123339393995292568e+01
83 | 1.912599765014932629e+01
84 | 2.202761371088466191e+01
85 | 1.624015575464908778e+01
86 | 3.129930520913883640e+01
87 | 2.527754254920047217e+01
88 | 9.775866700941808318e+00
89 | 2.305340914903528926e+01
90 | 1.961995754823064786e+01
91 | 1.957593068752739640e+01
92 | 1.820733367213101772e+01
93 | 1.451271540674672167e+01
94 | 1.026683435531253608e+01
95 | 1.878931047892961814e+01
96 | 3.155902355985549690e+01
97 | 4.635875123261544672e+01
98 | 2.074770273749853189e+01
99 | 3.550371670686430292e+01
100 | 2.117030683116313128e+01
101 | 2.248181040318086588e+01
102 | 2.020866464796872819e+01
103 | 2.448473516517388404e+01
104 | 3.180605637654783280e+01
105 | 1.443362374433836948e+01
106 | 2.555886585651266074e+01
107 | 1.927091219896461638e+01
108 | 4.332479649146010559e+01
109 | 2.033092621948958723e+01
110 | 1.600534019591087542e+01
111 | 2.393464467668759710e+01
112 | 1.487855676603908250e+01
113 | 1.539252211891232669e+01
114 | 2.448299507807554676e+01
115 | 4.246858133477509512e+01
116 | 1.584618676325195352e+01
117 | 1.848110035081295166e+01
118 | 2.567727558578875247e+01
119 | 1.983650265689013281e+01
120 | 1.610814923557532552e+01
121 | 1.272157821577545000e+01
122 | 3.586659642706226236e+01
123 | 1.946649709534971606e+01
124 | 2.085037846238235559e+01
125 | 2.228586136963819087e+01
126 | 2.226327645684883905e+01
127 | 8.954592245430644581e+00
128 |
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/pred_dummy_features_homogeneous_feature_scaling.csv:
--------------------------------------------------------------------------------
1 | 1.296505551332548301e+01
2 | 2.868730943369244102e+01
3 | 2.200628620776147670e+01
4 | 2.375491192561572262e+01
5 | 1.982901138538239749e+01
6 | 1.434921724720587832e+01
7 | 2.838254726956833807e+01
8 | 2.338541289934634193e+01
9 | 1.857652412699012245e+01
10 | 2.349530791869289459e+01
11 | 2.454739821900186314e+01
12 | 1.662286711404628647e+01
13 | 1.815497416211185921e+01
14 | 1.910674124319510980e+01
15 | 4.851941798616616097e+01
16 | 2.180238675296678608e+01
17 | 2.517627906368706192e+01
18 | 2.622025127343627915e+01
19 | 1.714125930943920295e+01
20 | 3.127802992166557061e+01
21 | 2.098493933294078317e+01
22 | 2.396834754050163596e+01
23 | 3.553779881969205690e+01
24 | 3.539733676175471544e+01
25 | 3.356126910357492932e+01
26 | 1.770108062035806995e+01
27 | 2.334424444344335114e+01
28 | 3.203058875197966415e+01
29 | 2.290489261980533087e+01
30 | 3.291913775671554276e+01
31 | 1.558183768079791420e+01
32 | 2.630338181034446521e+01
33 | 2.208435681518334448e+01
34 | 2.317149296190358854e+01
35 | 1.188820108541294474e+01
36 | 2.880787771168904499e+01
37 | 2.512514883657155451e+01
38 | 1.976953355353211350e+01
39 | 2.253294502022776413e+01
40 | 9.367924662860035667e+00
41 | 1.623573324476847191e+01
42 | 2.684133698491978848e+01
43 | 3.064053521265516622e+01
44 | 1.932597578175440844e+01
45 | 1.761552233612694351e+01
46 | 1.282841779616458311e+01
47 | 4.720363072438436092e+01
48 | 2.299798446114363415e+01
49 | 3.189552912744863278e+01
50 | 1.377655293023704530e+01
51 | 1.531271791286087947e+01
52 | 4.195309504609220852e+01
53 | 1.449537306177546725e+01
54 | 1.973413530811514960e+01
55 | 1.446460844855964289e+01
56 | 2.143945281961594063e+01
57 | 1.547911548765081768e+01
58 | 2.264753553025910193e+01
59 | 1.413049295611871692e+01
60 | 1.439077690170075030e+01
61 | 8.840123804959969078e+00
62 | 2.858475813843902102e+01
63 | 2.368456570662595695e+01
64 | 2.553506842018916601e+01
65 | 1.635415513160404188e+01
66 | 1.587341962470327239e+01
67 | 3.450233854808739409e+01
68 | 1.536133878222102922e+01
69 | 2.478313662001163920e+01
70 | 2.157427473179097888e+01
71 | 2.939145003578810389e+01
72 | 2.611706140115007457e+01
73 | 1.544085812038487049e+01
74 | 1.060660443407693165e+01
75 | 3.655397664264864943e+01
76 | 2.364336508516651847e+01
77 | 2.703626253281837677e+01
78 | 2.605630841450481938e+01
79 | 1.372716158175088808e+01
80 | 3.299839183881704940e+01
81 | 1.698433133719756682e+01
82 | 2.227515341374818547e+01
83 | 2.180992315777784896e+01
84 | 1.328666343538793626e+01
85 | 1.495474048915617082e+01
86 | 3.192685540753303997e+01
87 | 2.401010703279650116e+01
88 | 1.258854562084556861e+01
89 | 2.160117305737919580e+01
90 | 1.790055963977787101e+01
91 | 2.213363792005556974e+01
92 | 1.812667303405365971e+01
93 | 1.812958617677806572e+01
94 | 1.373254148190570767e+01
95 | 2.001468287785229805e+01
96 | 2.355933423325439335e+01
97 | 4.289252579179056113e+01
98 | 1.974532702280316698e+01
99 | 3.351655590925726358e+01
100 | 2.576913089642223298e+01
101 | 2.976296753139419593e+01
102 | 1.927703079375406858e+01
103 | 2.280687622677953996e+01
104 | 3.202773499023859927e+01
105 | 1.415730233953283879e+01
106 | 2.582410804154571693e+01
107 | 2.113151783250016180e+01
108 | 3.772948847300476416e+01
109 | 2.372922330320984585e+01
110 | 1.364152331575203192e+01
111 | 2.553354713734636405e+01
112 | 1.782560390478084145e+01
113 | 1.353133602620575715e+01
114 | 2.012658851028220397e+01
115 | 3.911982790417330591e+01
116 | 2.104375089335943372e+01
117 | 1.922065646377743775e+01
118 | 2.355807178809153868e+01
119 | 2.207527971827638780e+01
120 | 1.758831131398036973e+01
121 | 1.307734877033964693e+01
122 | 3.645008412583328550e+01
123 | 2.222703893724196789e+01
124 | 2.342219036951424371e+01
125 | 2.168736492011626638e+01
126 | 2.064041458157748821e+01
127 | 1.342534757633911191e+01
128 |
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/pred_dummy_features_homogeneous_feature_scaling_reg.csv:
--------------------------------------------------------------------------------
1 | 1.353705283262795334e+01
2 | 3.009861835569741473e+01
3 | 2.122120625635101732e+01
4 | 2.470149240784737543e+01
5 | 1.997318164584719824e+01
6 | 1.328577726350725463e+01
7 | 2.640462683605017347e+01
8 | 2.376833855646452065e+01
9 | 1.786073298292081546e+01
10 | 2.379866347173193475e+01
11 | 2.593650558915652482e+01
12 | 1.611260979380440972e+01
13 | 2.030670402055707768e+01
14 | 1.848393703923270692e+01
15 | 4.902511657896854302e+01
16 | 2.075773484007858727e+01
17 | 2.320275855222658379e+01
18 | 2.593342127811626341e+01
19 | 1.664382649693629901e+01
20 | 3.141466581309106587e+01
21 | 2.018313898991385713e+01
22 | 2.227153183831144645e+01
23 | 3.584259965474504384e+01
24 | 3.541031826993413034e+01
25 | 3.316063638072574093e+01
26 | 1.761511954680678116e+01
27 | 2.347714010934733153e+01
28 | 3.117020134385914787e+01
29 | 2.309277381108509175e+01
30 | 3.129354796768988223e+01
31 | 1.500887509673160558e+01
32 | 2.444935923593703819e+01
33 | 2.120182666014440187e+01
34 | 2.316324744232773014e+01
35 | 1.282678804264301320e+01
36 | 2.876342729251098618e+01
37 | 2.467058957890708726e+01
38 | 1.949525006351170475e+01
39 | 2.302767301988058790e+01
40 | 9.276281624376920831e+00
41 | 1.787019330987676824e+01
42 | 2.463591089956412716e+01
43 | 3.071919102807571988e+01
44 | 1.911931503903078777e+01
45 | 1.655928703329803042e+01
46 | 1.127619058623255199e+01
47 | 4.580524936728397734e+01
48 | 2.243470546414952693e+01
49 | 3.351302991469928827e+01
50 | 1.331089592885386175e+01
51 | 1.485711750570431988e+01
52 | 4.271827029307907253e+01
53 | 1.429050230129364785e+01
54 | 1.931119515307537426e+01
55 | 1.456195124070756464e+01
56 | 2.246988335513303170e+01
57 | 1.518494347200471495e+01
58 | 2.284417204070231122e+01
59 | 1.293975548547958532e+01
60 | 1.397586537557684849e+01
61 | 8.331812869071598016e+00
62 | 2.867226143208050004e+01
63 | 2.256554635384740948e+01
64 | 2.645664592901470513e+01
65 | 1.647626130578198556e+01
66 | 1.643497183012662077e+01
67 | 3.338266777406531105e+01
68 | 1.436737722602886791e+01
69 | 2.457978841687473093e+01
70 | 2.209144161194421940e+01
71 | 2.841825530982745818e+01
72 | 2.443483041082670582e+01
73 | 1.548800352159843641e+01
74 | 1.100620481440336107e+01
75 | 3.738388322156276189e+01
76 | 2.415917866801714098e+01
77 | 2.615628786002159956e+01
78 | 2.555422037862317097e+01
79 | 1.482467000270995072e+01
80 | 3.296577021556009868e+01
81 | 1.578793868297189640e+01
82 | 2.268895822345911739e+01
83 | 2.237344921177914969e+01
84 | 1.409580921284675803e+01
85 | 1.244636059629821645e+01
86 | 3.511600626876436593e+01
87 | 2.322135113341165180e+01
88 | 1.190390643138277049e+01
89 | 2.138189194745150701e+01
90 | 1.776941775797792644e+01
91 | 2.099114134853333624e+01
92 | 1.720808971578843227e+01
93 | 1.823147546554432097e+01
94 | 1.541610935164052343e+01
95 | 2.007950493951670268e+01
96 | 2.414620802335518945e+01
97 | 4.414127225151712963e+01
98 | 2.025554380084042094e+01
99 | 3.409410042462977941e+01
100 | 2.553190057322588657e+01
101 | 2.871747088203487053e+01
102 | 1.970134597320813441e+01
103 | 2.182369458747007585e+01
104 | 3.303299003574834103e+01
105 | 1.379135147654822546e+01
106 | 2.565306196180092613e+01
107 | 2.115260598644100654e+01
108 | 3.821465349795737865e+01
109 | 2.384284984381939410e+01
110 | 1.218972365613835507e+01
111 | 2.452709631268407975e+01
112 | 1.485002883503165094e+01
113 | 1.299070991867748326e+01
114 | 1.948913651562294902e+01
115 | 4.100507105057705814e+01
116 | 2.075208976822799656e+01
117 | 1.851728035276443052e+01
118 | 2.376732336751467045e+01
119 | 2.187963448031909053e+01
120 | 1.798101738992221144e+01
121 | 1.370593448835135675e+01
122 | 3.675837188064304684e+01
123 | 2.174142687159095289e+01
124 | 2.392213471149220538e+01
125 | 2.009932077780510440e+01
126 | 2.086223472630624087e+01
127 | 1.398800729970849943e+01
128 |
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Boston-Gradient-Descent/pred_feature_scaling.csv:
--------------------------------------------------------------------------------
1 | 1.243328344378047312e+01
2 | 2.903367317629623301e+01
3 | 2.237164454596212337e+01
4 | 2.447778655359709532e+01
5 | 2.060166977226341345e+01
6 | 2.725331753901730991e+00
7 | 3.040000409196840536e+01
8 | 2.486120041936419511e+01
9 | 1.865724977312630983e+01
10 | 2.353985837499822154e+01
11 | 2.411396871794023156e+01
12 | 1.771143853810546176e+01
13 | 1.744000298347799927e+01
14 | 2.165356277880125901e+01
15 | 4.231137693701630553e+01
16 | 2.384974492796795076e+01
17 | 2.447573232453114400e+01
18 | 2.753872655480581244e+01
19 | 2.023606694195363431e+01
20 | 3.115155453556772969e+01
21 | 2.378237904562362814e+01
22 | 2.500979442953921250e+01
23 | 3.395768541086538050e+01
24 | 3.643515649267922640e+01
25 | 3.204098328840191101e+01
26 | 1.671322632224065785e+01
27 | 2.347176611115320100e+01
28 | 3.293828008740312896e+01
29 | 2.518070080112763520e+01
30 | 3.371008685256688864e+01
31 | 1.688580202474184944e+01
32 | 2.602760837261646643e+01
33 | 2.327040024835148202e+01
34 | 2.547758967742603531e+01
35 | 1.500946630872053689e+01
36 | 2.958574940030883127e+01
37 | 2.624821245251621704e+01
38 | 2.037245654278343210e+01
39 | 2.443681497631943955e+01
40 | 9.447068938148525774e+00
41 | 8.380966537392783522e+00
42 | 2.901392344752948205e+01
43 | 2.959085403202033149e+01
44 | 1.975756970049048533e+01
45 | 2.037196790444205163e+01
46 | 3.144426250122833721e+00
47 | 3.952420117980864944e+01
48 | 2.571741222210889433e+01
49 | 3.037729627352073436e+01
50 | 1.679453263936903085e+01
51 | 1.789088652364641874e+01
52 | 4.102574532936452556e+01
53 | 1.757238787443839811e+01
54 | 2.089662584120571509e+01
55 | 1.559837696007843277e+01
56 | 2.141394911628820452e+01
57 | 1.845436449711874971e+01
58 | 2.315576390500533677e+01
59 | 1.367245022019544010e+01
60 | 1.723573881963315912e+01
61 | 1.502710364969216172e+01
62 | 2.915131352706562495e+01
63 | 2.517166386636557007e+01
64 | 2.549749375494834780e+01
65 | 1.721186687090570189e+01
66 | 1.742936995444784287e+01
67 | 3.470372762672874956e+01
68 | 1.701340802986351974e+01
69 | 2.710724289172678780e+01
70 | 2.254695733217258891e+01
71 | 2.925336918155507959e+01
72 | 2.711018136278622848e+01
73 | 1.773402191676487760e+01
74 | 5.747047580041208192e+00
75 | 3.687653876478943715e+01
76 | 2.509193904625114868e+01
77 | 3.015053650865833035e+01
78 | 2.724080949260382667e+01
79 | 1.625218439924606884e+01
80 | 3.263542160843072537e+01
81 | 1.927351390429527811e+01
82 | 2.265416762257336103e+01
83 | 2.222935077412652660e+01
84 | 8.550849558165371178e+00
85 | 1.733054361970870261e+01
86 | 2.917591713027006151e+01
87 | 2.720836198150773200e+01
88 | 5.885769320319401032e+00
89 | 2.191465625252541116e+01
90 | 2.011549850099247649e+01
91 | 2.217673962519968001e+01
92 | 2.052700516068410153e+01
93 | 2.085181227345765009e+01
94 | 1.318132049257054916e+01
95 | 1.969404919129292253e+01
96 | 2.598666769509629049e+01
97 | 4.027067741878931884e+01
98 | 1.977706446739054869e+01
99 | 3.370256573484251561e+01
100 | 2.722074149048627234e+01
101 | 2.874517137247048026e+01
102 | 2.211543886647293178e+01
103 | 2.591351959666897997e+01
104 | 3.130678413589818376e+01
105 | 1.715297458588404922e+01
106 | 2.637137845974449490e+01
107 | 2.144656977003788967e+01
108 | 3.673924503599971558e+01
109 | 2.208272915499570033e+01
110 | 1.670906176174669611e+01
111 | 2.759458133222485898e+01
112 | -5.974722065408855087e-02
113 | 1.386188045558680493e+01
114 | 1.628772152283231023e+01
115 | 3.577134424030066384e+01
116 | 2.085214459369679219e+01
117 | 2.077286833233261731e+01
118 | 2.535136054315859155e+01
119 | 2.178782121140940831e+01
120 | 1.884720303392126084e+01
121 | 1.351489079725841336e+01
122 | 3.561958301739088029e+01
123 | 2.309139596918324955e+01
124 | 2.501812898332392265e+01
125 | 1.746493170882561685e+01
126 | 2.073709865389793094e+01
127 | 1.472993727205184555e+01
128 |
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Combined-Cycle-Power-Plant/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-10-Project-Gradient-Descent/Combined-Cycle-Power-Plant/.DS_Store
--------------------------------------------------------------------------------
/Module-10-Project-Gradient-Descent/Combined-Cycle-Power-Plant/README.md:
--------------------------------------------------------------------------------
1 |
2 | The dataset contains 9568 data points collected from a Combined Cycle Power Plant over 6 years (2006-2011), when the power plant was set to work with full load. Features consist of hourly average ambient variables Temperature (T), Ambient Pressure (AP), Relative Humidity (RH) and Exhaust Vacuum (V) to predict the net hourly electrical energy output (EP) of the plant.
3 | A combined cycle power plant (CCPP) is composed of gas turbines (GT), steam turbines (ST) and heat recovery steam generators. In a CCPP, the electricity is generated by gas and steam turbines, which are combined in one cycle, and is transferred from one turbine to another. While the Vacuum is collected from and has effect on the Steam Turbine, the other three ambient variables affect the GT performance.
4 | For comparability with our baseline studies, and to allow 5x2-fold statistical tests to be carried out, we provide the data shuffled five times. For each shuffling, 2-fold CV is carried out and the resulting 10 measurements are used for statistical testing.
5 |
6 | Attribute Information:
7 |
8 | Features consist of hourly average ambient variables
9 | - Temperature (T) in the range 1.81°C and 37.11°C,
10 | - Ambient Pressure (AP) in the range 992.89-1033.30 millibar,
11 | - Relative Humidity (RH) in the range 25.56% to 100.16%
12 | - Exhaust Vacuum (V) in the range 25.36-81.56 cm Hg
13 | - Net hourly electrical energy output (EP) 420.26-495.76 MW
14 | The averages are taken from various sensors located around the plant that record the ambient variables every second. The variables are given without normalization.
15 |
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/.DS_Store
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/M1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/M1.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/R1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/R1.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S1.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S2.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S3.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/S4.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/log.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/log.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/mwalah.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/mwalah.jpeg
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/one.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/one.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/three.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/three.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/two.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Instructor-Notebook/two.png
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Logistic-Regression-Code.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Logistic-Regression-Code.pdf
--------------------------------------------------------------------------------
/Module-11-Logistic-Regression/Module-11-Logistic-Regression-Theory.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-11-Logistic-Regression/Module-11-Logistic-Regression-Theory.pdf
--------------------------------------------------------------------------------
/Module-12-Project-Logistic-Regression/.ipynb_checkpoints/Titanic-Logistic-Regression-Homogeneous-Dummy-Features-Workbook-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/Module-12-Project-Logistic-Regression/Titanic-Test-Data.csv:
--------------------------------------------------------------------------------
1 | Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
2 | 2,"Davies, Master. John Morgan Jr",male,8.0,1,1,C.A. 33112,36.75,,S
3 | 1,"Leader, Dr. Alice (Farnham)",female,49.0,0,0,17465,25.9292,D17,S
4 | 3,"Kilgannon, Mr. Thomas J",male,,0,0,36865,7.7375,,Q
5 | 2,"Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy)",female,24.0,2,1,243847,27.0,,S
6 | 1,"McGough, Mr. James Robert",male,36.0,0,0,PC 17473,26.2875,E25,S
7 | 1,"Artagaveytia, Mr. Ramon",male,71.0,0,0,PC 17609,49.5042,,C
8 | 3,"de Pelsmaeker, Mr. Alfons",male,16.0,0,0,345778,9.5,,S
9 | 3,"Scanlan, Mr. James",male,,0,0,36209,7.725,,Q
10 | 3,"Pettersson, Miss. Ellen Natalia",female,18.0,0,0,347087,7.775,,S
11 | 3,"Smiljanic, Mr. Mile",male,,0,0,315037,8.6625,,S
12 | 1,"Andrews, Miss. Kornelia Theodosia",female,63.0,1,0,13502,77.9583,D7,S
13 | 3,"Sdycoff, Mr. Todor",male,,0,0,349222,7.8958,,S
14 | 1,"Hays, Miss. Margaret Bechstein",female,24.0,0,0,11767,83.1583,C54,C
15 | 3,"Heininen, Miss. Wendla Maria",female,23.0,0,0,STON/O2. 3101290,7.925,,S
16 | 1,"Goldschmidt, Mr. George B",male,71.0,0,0,PC 17754,34.6542,A5,C
17 | 2,"Giles, Mr. Frederick Edward",male,21.0,1,0,28134,11.5,,S
18 | 3,"Kalvik, Mr. Johannes Halvorsen",male,21.0,0,0,8475,8.4333,,S
19 | 3,"Johnson, Master. Harold Theodor",male,4.0,1,1,347742,11.1333,,S
20 | 3,"Karlsson, Mr. Nils August",male,22.0,0,0,350060,7.5208,,S
21 | 3,"Hegarty, Miss. Hanora ""Nora""",female,18.0,0,0,365226,6.75,,Q
22 | 3,"Moran, Miss. Bertha",female,,1,0,371110,24.15,,Q
23 | 3,"Dooley, Mr. Patrick",male,32.0,0,0,370376,7.75,,Q
24 | 1,"Bonnell, Miss. Elizabeth",female,58.0,0,0,113783,26.55,C103,S
25 | 2,"Herman, Miss. Alice",female,24.0,1,2,220845,65.0,,S
26 | 3,"Keefe, Mr. Arthur",male,,0,0,323592,7.25,,S
27 | 1,"Williams-Lambert, Mr. Fletcher Fellows",male,,0,0,113510,35.0,C128,S
28 | 2,"Banfield, Mr. Frederick James",male,28.0,0,0,C.A./SOTON 34068,10.5,,S
29 | 1,"Holverson, Mr. Alexander Oskar",male,42.0,1,0,113789,52.0,,S
30 | 3,"Baclini, Miss. Eugenie",female,0.75,2,1,2666,19.2583,,C
31 | 2,"Ridsdale, Miss. Lucy",female,50.0,0,0,W./C. 14258,10.5,,S
32 | 2,"Berriman, Mr. William John",male,23.0,0,0,28425,13.0,,S
33 | 3,"Sage, Mr. Douglas Bullen",male,,8,2,CA. 2343,69.55,,S
34 | 3,"Bengtsson, Mr. John Viktor",male,26.0,0,0,347068,7.775,,S
35 | 2,"Hunt, Mr. George Henry",male,33.0,0,0,SCO/W 1585,12.275,,S
36 | 3,"McNamee, Mr. Neal",male,24.0,1,0,376566,16.1,,S
37 | 1,"Rothschild, Mrs. Martin (Elizabeth L. Barrett)",female,54.0,1,0,PC 17603,59.4,,C
38 | 3,"Hirvonen, Miss. Hildur E",female,2.0,0,1,3101298,12.2875,,S
39 | 2,"Gaskell, Mr. Alfred",male,16.0,0,0,239865,26.0,,S
40 | 3,"Olsson, Mr. Nils Johan Goransson",male,28.0,0,0,347464,7.8542,,S
41 | 1,"Colley, Mr. Edward Pomeroy",male,47.0,0,0,5727,25.5875,E58,S
42 | 3,"Kelly, Miss. Mary",female,,0,0,14312,7.75,,Q
43 | 3,"Vovk, Mr. Janko",male,22.0,0,0,349252,7.8958,,S
44 | 2,"Pain, Dr. Alfred",male,23.0,0,0,244278,10.5,,S
45 | 3,"Carlsson, Mr. August Sigfrid",male,28.0,0,0,350042,7.7958,,S
46 | 3,"Dahlberg, Miss. Gerda Ulrika",female,22.0,0,0,7552,10.5167,,S
47 | 1,"Dodge, Master. Washington",male,4.0,0,2,33638,81.8583,A34,S
48 | 3,"Johnston, Mr. Andrew G",male,,1,2,W./C. 6607,23.45,,S
49 | 3,"O'Dwyer, Miss. Ellen ""Nellie""",female,,0,0,330959,7.8792,,Q
50 | 2,"Nasser, Mr. Nicholas",male,32.5,1,0,237736,30.0708,,C
51 | 3,"Hanna, Mr. Mansour",male,23.5,0,0,2693,7.2292,,C
52 | 3,"Daly, Mr. Eugene Patrick",male,29.0,0,0,382651,7.75,,Q
53 | 3,"Devaney, Miss. Margaret Delia",female,19.0,0,0,330958,7.8792,,Q
54 | 2,"Ball, Mrs. (Ada E Hall)",female,36.0,0,0,28551,13.0,D,S
55 | 2,"Laroche, Miss. Simonne Marie Anne Andree",female,3.0,1,2,SC/Paris 2123,41.5792,,C
56 | 3,"Coleff, Mr. Peju",male,36.0,0,0,349210,7.4958,,S
57 | 3,"Goldsmith, Mr. Frank John",male,33.0,1,1,363291,20.525,,S
58 | 3,"Thomas, Master. Assad Alexander",male,0.42,0,1,2625,8.5167,,C
59 | 3,"Morley, Mr. William",male,34.0,0,0,364506,8.05,,S
60 | 2,"Chapman, Mr. Charles Henry",male,52.0,0,0,248731,13.5,,S
61 | 3,"Badt, Mr. Mohamed",male,40.0,0,0,2623,7.225,,C
62 | 3,"Calic, Mr. Jovo",male,17.0,0,0,315093,8.6625,,S
63 | 1,"Simonius-Blumer, Col. Oberst Alfons",male,56.0,0,0,13213,35.5,A26,C
64 | 1,"Wick, Mrs. George Dennick (Mary Hitchcock)",female,45.0,1,1,36928,164.8667,,S
65 | 3,"O'Sullivan, Miss. Bridget Mary",female,,0,0,330909,7.6292,,Q
66 | 2,"Nye, Mrs. (Elizabeth Ramell)",female,29.0,0,0,C.A. 29395,10.5,F33,S
67 | 3,"van Billiard, Mr. Austin Blyler",male,40.5,0,2,A/5. 851,14.5,,S
68 | 3,"Farrell, Mr. James",male,40.5,0,0,367232,7.75,,Q
69 | 1,"Brewe, Dr. Arthur Jackson",male,,0,0,112379,39.6,,C
70 | 1,"Bowerman, Miss. Elsie Edith",female,22.0,0,1,113505,55.0,E33,S
71 | 1,"Minahan, Dr. William Edward",male,44.0,2,0,19928,90.0,C78,Q
72 | 3,"Coelho, Mr. Domingos Fernandeo",male,20.0,0,0,SOTON/O.Q. 3101307,7.05,,S
73 | 3,"Zimmerman, Mr. Leo",male,29.0,0,0,315082,7.875,,S
74 | 1,"Clifford, Mr. George Quincy",male,,0,0,110465,52.0,A14,S
75 | 3,"Larsson, Mr. August Viktor",male,29.0,0,0,7545,9.4833,,S
76 | 3,"Gronnestad, Mr. Daniel Danielsen",male,32.0,0,0,8471,8.3625,,S
77 | 1,"Baxter, Mrs. James (Helene DeLaudeniere Chaput)",female,50.0,0,1,PC 17558,247.5208,B58 B60,C
78 | 3,"Baclini, Miss. Marie Catherine",female,5.0,2,1,2666,19.2583,,C
79 | 2,"Mallet, Mr. Albert",male,31.0,1,1,S.C./PARIS 2079,37.0042,,C
80 | 1,"Chaffee, Mr. Herbert Fuller",male,46.0,1,0,W.E.P. 5734,61.175,E31,S
81 | 3,"Green, Mr. George Henry",male,51.0,0,0,21440,8.05,,S
82 | 1,"Smart, Mr. John Montgomery",male,56.0,0,0,113792,26.55,,S
83 | 1,"Cardeza, Mr. Thomas Drake Martinez",male,36.0,0,1,PC 17755,512.3292,B51 B53 B55,C
84 | 3,"Gustafsson, Mr. Anders Vilhelm",male,37.0,2,0,3101276,7.925,,S
85 | 1,"Sutton, Mr. Frederick",male,61.0,0,0,36963,32.3208,D50,S
86 | 3,"Olsen, Mr. Henry Margido",male,28.0,0,0,C 4001,22.525,,S
87 | 1,"Baxter, Mr. Quigg Edmond",male,24.0,0,1,PC 17558,247.5208,B58 B60,C
88 | 1,"White, Mr. Richard Frasar",male,21.0,0,1,35281,77.2875,D26,S
89 | 3,"Palsson, Miss. Torborg Danira",female,8.0,3,1,349909,21.075,,S
90 | 3,"Johansson, Mr. Erik",male,22.0,0,0,350052,7.7958,,S
91 | 3,"Laleff, Mr. Kristo",male,,0,0,349217,7.8958,,S
92 | 3,"Harknett, Miss. Alice Phoebe",female,,0,0,W./C. 6609,7.55,,S
93 | 3,"Meo, Mr. Alfonzo",male,55.5,0,0,A.5. 11206,8.05,,S
94 | 3,"Stranden, Mr. Juho",male,31.0,0,0,STON/O 2. 3101288,7.925,,S
95 | 3,"Lindahl, Miss. Agda Thorilda Viktoria",female,25.0,0,0,347071,7.775,,S
96 | 3,"Sadlier, Mr. Matthew",male,,0,0,367655,7.7292,,Q
97 | 1,"Stead, Mr. William Thomas",male,62.0,0,0,113514,26.55,C87,S
98 | 3,"Ekstrom, Mr. Johan",male,45.0,0,0,347061,6.975,,S
99 | 1,"Davidson, Mr. Thornton",male,31.0,1,0,F.C. 12750,52.0,B71,S
100 | 3,"Skoog, Master. Karl Thorsten",male,10.0,3,2,347088,27.9,,S
101 | 1,"Allison, Miss. Helen Loraine",female,2.0,1,2,113781,151.55,C22 C26,S
102 | 3,"Henry, Miss. Delia",female,,0,0,382649,7.75,,Q
103 | 3,"van Melkebeke, Mr. Philemon",male,,0,0,345777,9.5,,S
104 | 1,"Bissette, Miss. Amelia",female,35.0,0,0,PC 17760,135.6333,C99,S
105 | 3,"Goodwin, Master. William Frederick",male,11.0,5,2,CA 2144,46.9,,S
106 | 2,"Phillips, Miss. Kate Florence (""Mrs Kate Louise Phillips Marshall"")",female,19.0,0,0,250655,26.0,,S
107 | 1,"Ryerson, Miss. Emily Borie",female,18.0,2,2,PC 17608,262.375,B57 B59 B63 B66,C
108 | 2,"Carter, Rev. Ernest Courtenay",male,54.0,1,0,244252,26.0,,S
109 | 2,"Kirkland, Rev. Charles Leonard",male,57.0,0,0,219533,12.35,,Q
110 | 3,"Sage, Mr. George John Jr",male,,8,2,CA. 2343,69.55,,S
111 | 3,"Barton, Mr. David John",male,22.0,0,0,324669,8.05,,S
112 | 3,"Myhrman, Mr. Pehr Fabian Oliver Malkolm",male,18.0,0,0,347078,7.75,,S
113 | 1,"Carter, Mrs. William Ernest (Lucile Polk)",female,36.0,1,2,113760,120.0,B96 B98,S
114 | 2,"Quick, Mrs. Frederick Charles (Jane Richards)",female,33.0,0,2,26360,26.0,,S
115 | 1,"Harrison, Mr. William",male,40.0,0,0,112059,0.0,B94,S
116 | 3,"Lobb, Mrs. William Arthur (Cordelia K Stanlick)",female,26.0,1,0,A/5. 3336,16.1,,S
117 | 2,"Collyer, Mr. Harvey",male,31.0,1,1,C.A. 31921,26.25,,S
118 | 2,"Hodges, Mr. Henry Price",male,50.0,0,0,250643,13.0,,S
119 | 3,"O'Driscoll, Miss. Bridget",female,,0,0,14311,7.75,,Q
120 | 3,"Lindell, Mr. Edvard Bengtsson",male,36.0,1,0,349910,15.55,,S
121 | 3,"Plotcharsky, Mr. Vasil",male,,0,0,349227,7.8958,,S
122 | 3,"Braund, Mr. Owen Harris",male,22.0,1,0,A/5 21171,7.25,,S
123 | 3,"Staneff, Mr. Ivan",male,,0,0,349208,7.8958,,S
124 | 3,"Johnson, Mr. Malkolm Joackim",male,33.0,0,0,347062,7.775,,S
125 | 3,"Odahl, Mr. Nils Martin",male,23.0,0,0,7267,9.225,,S
126 | 1,"Hoyt, Mr. William Fisher",male,,0,0,PC 17600,30.6958,,C
127 | 3,"Rice, Master. Arthur",male,4.0,4,1,382652,29.125,,Q
128 | 3,"Rice, Master. Eric",male,7.0,4,1,382652,29.125,,Q
129 | 1,"Wright, Mr. George",male,62.0,0,0,113807,26.55,,S
130 | 3,"Goodwin, Mr. Charles Edward",male,14.0,5,2,CA 2144,46.9,,S
131 | 1,"Woolner, Mr. Hugh",male,,0,0,19947,35.5,C52,S
132 | 3,"Cook, Mr. Jacob",male,43.0,0,0,A/5 3536,8.05,,S
133 | 3,"Oreskovic, Mr. Luka",male,20.0,0,0,315094,8.6625,,S
134 | 3,"Elias, Mr. Joseph Jr",male,17.0,1,1,2690,7.2292,,C
135 | 3,"Johnson, Mr. William Cahoone Jr",male,19.0,0,0,LINE,0.0,,S
136 | 2,"Fynney, Mr. Joseph J",male,35.0,0,0,239865,26.0,,S
137 | 3,"Olsen, Mr. Ole Martin",male,,0,0,Fa 265302,7.3125,,S
138 | 3,"Murphy, Miss. Margaret Jane",female,,1,0,367230,15.5,,Q
139 | 1,"Hawksford, Mr. Walter James",male,,0,0,16988,30.0,D45,S
140 | 3,"Sjoblom, Miss. Anna Sofia",female,18.0,0,0,3101265,7.4958,,S
141 | 1,"Smith, Mr. James Clinch",male,56.0,0,0,17764,30.6958,A7,C
142 | 3,"Boulos, Mr. Hanna",male,,0,0,2664,7.225,,C
143 | 3,"Alexander, Mr. William",male,26.0,0,0,3474,7.8875,,S
144 | 1,"Chambers, Mr. Norman Campbell",male,27.0,1,0,113806,53.1,E8,S
145 | 3,"Wiklund, Mr. Jakob Alfred",male,18.0,1,0,3101267,6.4958,,S
146 | 3,"Charters, Mr. David",male,21.0,0,0,A/5. 13032,7.7333,,Q
147 | 3,"Lievens, Mr. Rene Aime",male,24.0,0,0,345781,9.5,,S
148 | 3,"Beavan, Mr. William Thomas",male,19.0,0,0,323951,8.05,,S
149 | 2,"Slemen, Mr. Richard James",male,35.0,0,0,28206,10.5,,S
150 | 3,"McDermott, Miss. Brigdet Delia",female,,0,0,330932,7.7875,,Q
151 | 1,"Potter, Mrs. Thomas Jr (Lily Alexenia Wilson)",female,56.0,0,1,11767,83.1583,C50,C
152 | 1,"Ward, Miss. Anna",female,35.0,0,0,PC 17755,512.3292,,C
153 | 2,"Pengelly, Mr. Frederick William",male,19.0,0,0,28665,10.5,,S
154 | 3,"Andersson, Miss. Ingeborg Constanzia",female,9.0,4,2,347082,31.275,,S
155 | 3,"Svensson, Mr. Olof",male,24.0,0,0,350035,7.7958,,S
156 | 3,"Emanuel, Miss. Virginia Ethel",female,5.0,0,0,364516,12.475,,S
157 | 3,"Novel, Mr. Mansouer",male,28.5,0,0,2697,7.2292,,C
158 | 1,"Taylor, Mr. Elmer Zebley",male,48.0,1,0,19996,52.0,C126,S
159 | 2,"Funk, Miss. Annie Clemmer",female,38.0,0,0,237671,13.0,,S
160 | 3,"Crease, Mr. Ernest James",male,19.0,0,0,S.P. 3464,8.1583,,S
161 | 3,"Dean, Master. Bertram Vere",male,1.0,1,2,C.A. 2315,20.575,,S
162 | 3,"Maisner, Mr. Simon",male,,0,0,A/S 2816,8.05,,S
163 | 2,"Doling, Miss. Elsie",female,18.0,0,1,231919,23.0,,S
164 | 2,"Parrish, Mrs. (Lutie Davis)",female,50.0,0,1,230433,26.0,,S
165 | 3,"Kelly, Miss. Anna Katherine ""Annie Kate""",female,,0,0,9234,7.75,,Q
166 | 1,"Graham, Miss. Margaret Edith",female,19.0,0,0,112053,30.0,B42,S
167 | 3,"Hansen, Mr. Henry Damsgaard",male,21.0,0,0,350029,7.8542,,S
168 | 1,"Harrington, Mr. Charles H",male,,0,0,113796,42.4,,S
169 | 3,"Moss, Mr. Albert Johan",male,,0,0,312991,7.775,,S
170 | 2,"Carter, Mrs. Ernest Courtenay (Lilian Hughes)",female,44.0,1,0,244252,26.0,,S
171 | 3,"Bourke, Miss. Mary",female,,0,2,364848,7.75,,Q
172 | 2,"Trout, Mrs. William H (Jessie L)",female,28.0,0,0,240929,12.65,,S
173 | 3,"Vander Planke, Miss. Augusta Maria",female,18.0,2,0,345764,18.0,,S
174 | 3,"Honkanen, Miss. Eliina",female,27.0,0,0,STON/O2. 3101283,7.925,,S
175 | 2,"Richard, Mr. Emile",male,23.0,0,0,SC/PARIS 2133,15.0458,,C
176 | 1,"Fortune, Miss. Alice Elizabeth",female,24.0,3,2,19950,263.0,C23 C25 C27,S
177 | 3,"Maenpaa, Mr. Matti Alexanteri",male,22.0,0,0,STON/O 2. 3101275,7.125,,S
178 | 3,"Hansen, Mr. Henrik Juul",male,26.0,1,0,350025,7.8542,,S
179 | 3,"Culumovic, Mr. Jeso",male,17.0,0,0,315090,8.6625,,S
180 | 3,"O'Brien, Mr. Thomas",male,,1,0,370365,15.5,,Q
181 | 2,"Garside, Miss. Ethel",female,34.0,0,0,243880,13.0,,S
182 | 3,"Andersson, Mr. August Edvard (""Wennerstrom"")",male,27.0,0,0,350043,7.7958,,S
183 | 3,"Elias, Mr. Dibo",male,,0,0,2674,7.225,,C
184 | 3,"Goodwin, Master. Harold Victor",male,9.0,5,2,CA 2144,46.9,,S
185 | 3,"Zabour, Miss. Thamine",female,,1,0,2665,14.4542,,C
186 | 3,"Yasbeck, Mrs. Antoni (Selini Alexander)",female,15.0,1,0,2659,14.4542,,C
187 | 3,"Nirva, Mr. Iisakki Antino Aijo",male,41.0,0,0,SOTON/O2 3101272,7.125,,S
188 | 3,"Osman, Mrs. Mara",female,31.0,0,0,349244,8.6833,,S
189 | 1,"Frolicher-Stehli, Mr. Maxmillian",male,60.0,1,1,13567,79.2,B41,C
190 | 1,"Perreault, Miss. Anne",female,30.0,0,0,12749,93.5,B73,S
191 | 3,"Tikkanen, Mr. Juho",male,32.0,0,0,STON/O 2. 3101293,7.925,,S
192 | 3,"Gustafsson, Mr. Alfred Ossian",male,20.0,0,0,7534,9.8458,,S
193 | 3,"Abbott, Mr. Rossmore Edward",male,16.0,1,1,C.A. 2673,20.25,,S
194 | 1,"Warren, Mrs. Frank Manley (Anna Sophia Atkinson)",female,60.0,1,0,110813,75.25,D37,C
195 | 3,"Moutal, Mr. Rahamin Haim",male,,0,0,374746,8.05,,S
196 | 3,"Rekic, Mr. Tido",male,38.0,0,0,349249,7.8958,,S
197 | 3,"Heikkinen, Miss. Laina",female,26.0,0,0,STON/O2. 3101282,7.925,,S
198 | 1,"Hippach, Miss. Jean Gertrude",female,16.0,0,1,111361,57.9792,B18,C
199 | 3,"Laitinen, Miss. Kristina Sofia",female,37.0,0,0,4135,9.5875,,S
200 | 2,"Clarke, Mrs. Charles V (Ada Maria Winfield)",female,28.0,1,0,2003,26.0,,S
201 | 1,"Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards)",female,33.0,0,0,110152,86.5,B77,S
202 | 3,"Tobin, Mr. Roger",male,,0,0,383121,7.75,F38,Q
203 | 1,"Stahelin-Maeglin, Dr. Max",male,32.0,0,0,13214,30.5,B50,C
204 | 3,"Perkin, Mr. John Henry",male,22.0,0,0,A/5 21174,7.25,,S
205 | 1,"Gee, Mr. Arthur H",male,47.0,0,0,111320,38.5,E63,S
206 | 3,"McMahon, Mr. Martin",male,,0,0,370372,7.75,,Q
207 | 3,"Hagland, Mr. Konrad Mathias Reiersen",male,,1,0,65304,19.9667,,S
208 | 3,"Chip, Mr. Chang",male,32.0,0,0,1601,56.4958,,S
209 | 3,"Bostandyeff, Mr. Guentcho",male,26.0,0,0,349224,7.8958,,S
210 | 1,"Stone, Mrs. George Nelson (Martha Evelyn)",female,62.0,0,0,113572,80.0,B28,
211 | 3,"Skoog, Miss. Mabel",female,9.0,3,2,347088,27.9,,S
212 | 3,"Samaan, Mr. Youssef",male,,2,0,2662,21.6792,,C
213 | 3,"Ohman, Miss. Velin",female,22.0,0,0,347085,7.775,,S
214 | 2,"Sobey, Mr. Samuel James Hayden",male,25.0,0,0,C.A. 29178,13.0,,S
215 | 1,"Molson, Mr. Harry Markland",male,55.0,0,0,113787,30.5,C30,S
216 | 3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14.0,0,0,350406,7.8542,,S
217 | 2,"Keane, Miss. Nora A",female,,0,0,226593,12.35,E101,Q
218 | 3,"Pasic, Mr. Jakob",male,21.0,0,0,315097,8.6625,,S
219 | 3,"Moore, Mr. Leonard Charles",male,,0,0,A4. 54510,8.05,,S
220 | 3,"Lindqvist, Mr. Eino William",male,20.0,1,0,STON/O 2. 3101285,7.925,,S
221 | 1,"Butt, Major. Archibald Willingham",male,45.0,0,0,113050,26.55,B38,S
222 | 1,"Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo)",female,17.0,1,0,PC 17758,108.9,C65,C
223 | 3,"Holm, Mr. John Fredrik Alexander",male,43.0,0,0,C 7075,6.45,,S
224 | 2,"Navratil, Mr. Michel (""Louis M Hoffman"")",male,36.5,0,2,230080,26.0,F2,S
225 |
--------------------------------------------------------------------------------
/Module-12-Project-Logistic-Regression/pred.csv:
--------------------------------------------------------------------------------
1 | 0.000000000000000000e+00
2 | 1.000000000000000000e+00
3 | 0.000000000000000000e+00
4 | 1.000000000000000000e+00
5 | 1.000000000000000000e+00
6 | 0.000000000000000000e+00
7 | 0.000000000000000000e+00
8 | 0.000000000000000000e+00
9 | 1.000000000000000000e+00
10 | 0.000000000000000000e+00
11 | 1.000000000000000000e+00
12 | 0.000000000000000000e+00
13 | 1.000000000000000000e+00
14 | 1.000000000000000000e+00
15 | 0.000000000000000000e+00
16 | 0.000000000000000000e+00
17 | 0.000000000000000000e+00
18 | 0.000000000000000000e+00
19 | 0.000000000000000000e+00
20 | 1.000000000000000000e+00
21 | 1.000000000000000000e+00
22 | 0.000000000000000000e+00
23 | 1.000000000000000000e+00
24 | 1.000000000000000000e+00
25 | 0.000000000000000000e+00
26 | 1.000000000000000000e+00
27 | 0.000000000000000000e+00
28 | 0.000000000000000000e+00
29 | 1.000000000000000000e+00
30 | 1.000000000000000000e+00
31 | 0.000000000000000000e+00
32 | 0.000000000000000000e+00
33 | 0.000000000000000000e+00
34 | 0.000000000000000000e+00
35 | 0.000000000000000000e+00
36 | 1.000000000000000000e+00
37 | 1.000000000000000000e+00
38 | 0.000000000000000000e+00
39 | 0.000000000000000000e+00
40 | 0.000000000000000000e+00
41 | 1.000000000000000000e+00
42 | 0.000000000000000000e+00
43 | 0.000000000000000000e+00
44 | 0.000000000000000000e+00
45 | 1.000000000000000000e+00
46 | 1.000000000000000000e+00
47 | 0.000000000000000000e+00
48 | 1.000000000000000000e+00
49 | 0.000000000000000000e+00
50 | 0.000000000000000000e+00
51 | 0.000000000000000000e+00
52 | 1.000000000000000000e+00
53 | 1.000000000000000000e+00
54 | 1.000000000000000000e+00
55 | 0.000000000000000000e+00
56 | 0.000000000000000000e+00
57 | 0.000000000000000000e+00
58 | 0.000000000000000000e+00
59 | 0.000000000000000000e+00
60 | 0.000000000000000000e+00
61 | 0.000000000000000000e+00
62 | 0.000000000000000000e+00
63 | 1.000000000000000000e+00
64 | 1.000000000000000000e+00
65 | 1.000000000000000000e+00
66 | 0.000000000000000000e+00
67 | 0.000000000000000000e+00
68 | 1.000000000000000000e+00
69 | 1.000000000000000000e+00
70 | 0.000000000000000000e+00
71 | 0.000000000000000000e+00
72 | 0.000000000000000000e+00
73 | 1.000000000000000000e+00
74 | 0.000000000000000000e+00
75 | 0.000000000000000000e+00
76 | 1.000000000000000000e+00
77 | 1.000000000000000000e+00
78 | 0.000000000000000000e+00
79 | 0.000000000000000000e+00
80 | 0.000000000000000000e+00
81 | 0.000000000000000000e+00
82 | 1.000000000000000000e+00
83 | 0.000000000000000000e+00
84 | 0.000000000000000000e+00
85 | 0.000000000000000000e+00
86 | 1.000000000000000000e+00
87 | 1.000000000000000000e+00
88 | 1.000000000000000000e+00
89 | 0.000000000000000000e+00
90 | 0.000000000000000000e+00
91 | 1.000000000000000000e+00
92 | 0.000000000000000000e+00
93 | 0.000000000000000000e+00
94 | 1.000000000000000000e+00
95 | 0.000000000000000000e+00
96 | 0.000000000000000000e+00
97 | 0.000000000000000000e+00
98 | 0.000000000000000000e+00
99 | 0.000000000000000000e+00
100 | 1.000000000000000000e+00
101 | 1.000000000000000000e+00
102 | 0.000000000000000000e+00
103 | 1.000000000000000000e+00
104 | 0.000000000000000000e+00
105 | 1.000000000000000000e+00
106 | 1.000000000000000000e+00
107 | 0.000000000000000000e+00
108 | 0.000000000000000000e+00
109 | 0.000000000000000000e+00
110 | 0.000000000000000000e+00
111 | 0.000000000000000000e+00
112 | 1.000000000000000000e+00
113 | 1.000000000000000000e+00
114 | 0.000000000000000000e+00
115 | 1.000000000000000000e+00
116 | 0.000000000000000000e+00
117 | 0.000000000000000000e+00
118 | 1.000000000000000000e+00
119 | 0.000000000000000000e+00
120 | 0.000000000000000000e+00
121 | 0.000000000000000000e+00
122 | 0.000000000000000000e+00
123 | 0.000000000000000000e+00
124 | 0.000000000000000000e+00
125 | 1.000000000000000000e+00
126 | 0.000000000000000000e+00
127 | 0.000000000000000000e+00
128 | 0.000000000000000000e+00
129 | 0.000000000000000000e+00
130 | 1.000000000000000000e+00
131 | 0.000000000000000000e+00
132 | 0.000000000000000000e+00
133 | 0.000000000000000000e+00
134 | 0.000000000000000000e+00
135 | 0.000000000000000000e+00
136 | 0.000000000000000000e+00
137 | 1.000000000000000000e+00
138 | 1.000000000000000000e+00
139 | 1.000000000000000000e+00
140 | 0.000000000000000000e+00
141 | 0.000000000000000000e+00
142 | 0.000000000000000000e+00
143 | 1.000000000000000000e+00
144 | 0.000000000000000000e+00
145 | 0.000000000000000000e+00
146 | 0.000000000000000000e+00
147 | 0.000000000000000000e+00
148 | 0.000000000000000000e+00
149 | 1.000000000000000000e+00
150 | 1.000000000000000000e+00
151 | 1.000000000000000000e+00
152 | 0.000000000000000000e+00
153 | 0.000000000000000000e+00
154 | 0.000000000000000000e+00
155 | 1.000000000000000000e+00
156 | 0.000000000000000000e+00
157 | 0.000000000000000000e+00
158 | 1.000000000000000000e+00
159 | 0.000000000000000000e+00
160 | 0.000000000000000000e+00
161 | 0.000000000000000000e+00
162 | 1.000000000000000000e+00
163 | 1.000000000000000000e+00
164 | 1.000000000000000000e+00
165 | 1.000000000000000000e+00
166 | 0.000000000000000000e+00
167 | 0.000000000000000000e+00
168 | 0.000000000000000000e+00
169 | 1.000000000000000000e+00
170 | 1.000000000000000000e+00
171 | 1.000000000000000000e+00
172 | 1.000000000000000000e+00
173 | 1.000000000000000000e+00
174 | 0.000000000000000000e+00
175 | 1.000000000000000000e+00
176 | 0.000000000000000000e+00
177 | 0.000000000000000000e+00
178 | 0.000000000000000000e+00
179 | 0.000000000000000000e+00
180 | 1.000000000000000000e+00
181 | 0.000000000000000000e+00
182 | 0.000000000000000000e+00
183 | 0.000000000000000000e+00
184 | 1.000000000000000000e+00
185 | 1.000000000000000000e+00
186 | 0.000000000000000000e+00
187 | 1.000000000000000000e+00
188 | 0.000000000000000000e+00
189 | 1.000000000000000000e+00
190 | 0.000000000000000000e+00
191 | 0.000000000000000000e+00
192 | 0.000000000000000000e+00
193 | 1.000000000000000000e+00
194 | 0.000000000000000000e+00
195 | 0.000000000000000000e+00
196 | 1.000000000000000000e+00
197 | 1.000000000000000000e+00
198 | 1.000000000000000000e+00
199 | 1.000000000000000000e+00
200 | 1.000000000000000000e+00
201 | 0.000000000000000000e+00
202 | 1.000000000000000000e+00
203 | 0.000000000000000000e+00
204 | 0.000000000000000000e+00
205 | 0.000000000000000000e+00
206 | 0.000000000000000000e+00
207 | 0.000000000000000000e+00
208 | 0.000000000000000000e+00
209 | 1.000000000000000000e+00
210 | 1.000000000000000000e+00
211 | 0.000000000000000000e+00
212 | 1.000000000000000000e+00
213 | 0.000000000000000000e+00
214 | 0.000000000000000000e+00
215 | 1.000000000000000000e+00
216 | 1.000000000000000000e+00
217 | 0.000000000000000000e+00
218 | 0.000000000000000000e+00
219 | 0.000000000000000000e+00
220 | 0.000000000000000000e+00
221 | 1.000000000000000000e+00
222 | 0.000000000000000000e+00
223 | 0.000000000000000000e+00
224 |
--------------------------------------------------------------------------------
/Module-12-Project-Logistic-Regression/pred_homogeneous_dummy.csv:
--------------------------------------------------------------------------------
1 | 0.000000000000000000e+00
2 | 1.000000000000000000e+00
3 | 0.000000000000000000e+00
4 | 1.000000000000000000e+00
5 | 0.000000000000000000e+00
6 | 0.000000000000000000e+00
7 | 0.000000000000000000e+00
8 | 0.000000000000000000e+00
9 | 1.000000000000000000e+00
10 | 0.000000000000000000e+00
11 | 1.000000000000000000e+00
12 | 0.000000000000000000e+00
13 | 1.000000000000000000e+00
14 | 1.000000000000000000e+00
15 | 1.000000000000000000e+00
16 | 0.000000000000000000e+00
17 | 0.000000000000000000e+00
18 | 0.000000000000000000e+00
19 | 0.000000000000000000e+00
20 | 1.000000000000000000e+00
21 | 1.000000000000000000e+00
22 | 0.000000000000000000e+00
23 | 1.000000000000000000e+00
24 | 1.000000000000000000e+00
25 | 0.000000000000000000e+00
26 | 1.000000000000000000e+00
27 | 0.000000000000000000e+00
28 | 0.000000000000000000e+00
29 | 1.000000000000000000e+00
30 | 1.000000000000000000e+00
31 | 0.000000000000000000e+00
32 | 0.000000000000000000e+00
33 | 0.000000000000000000e+00
34 | 0.000000000000000000e+00
35 | 0.000000000000000000e+00
36 | 1.000000000000000000e+00
37 | 1.000000000000000000e+00
38 | 0.000000000000000000e+00
39 | 0.000000000000000000e+00
40 | 0.000000000000000000e+00
41 | 1.000000000000000000e+00
42 | 0.000000000000000000e+00
43 | 0.000000000000000000e+00
44 | 0.000000000000000000e+00
45 | 1.000000000000000000e+00
46 | 1.000000000000000000e+00
47 | 0.000000000000000000e+00
48 | 1.000000000000000000e+00
49 | 0.000000000000000000e+00
50 | 0.000000000000000000e+00
51 | 0.000000000000000000e+00
52 | 1.000000000000000000e+00
53 | 1.000000000000000000e+00
54 | 1.000000000000000000e+00
55 | 0.000000000000000000e+00
56 | 0.000000000000000000e+00
57 | 0.000000000000000000e+00
58 | 0.000000000000000000e+00
59 | 0.000000000000000000e+00
60 | 0.000000000000000000e+00
61 | 0.000000000000000000e+00
62 | 0.000000000000000000e+00
63 | 1.000000000000000000e+00
64 | 1.000000000000000000e+00
65 | 1.000000000000000000e+00
66 | 0.000000000000000000e+00
67 | 0.000000000000000000e+00
68 | 0.000000000000000000e+00
69 | 1.000000000000000000e+00
70 | 0.000000000000000000e+00
71 | 0.000000000000000000e+00
72 | 0.000000000000000000e+00
73 | 1.000000000000000000e+00
74 | 0.000000000000000000e+00
75 | 0.000000000000000000e+00
76 | 1.000000000000000000e+00
77 | 1.000000000000000000e+00
78 | 0.000000000000000000e+00
79 | 0.000000000000000000e+00
80 | 0.000000000000000000e+00
81 | 0.000000000000000000e+00
82 | 1.000000000000000000e+00
83 | 0.000000000000000000e+00
84 | 0.000000000000000000e+00
85 | 0.000000000000000000e+00
86 | 1.000000000000000000e+00
87 | 1.000000000000000000e+00
88 | 1.000000000000000000e+00
89 | 0.000000000000000000e+00
90 | 0.000000000000000000e+00
91 | 1.000000000000000000e+00
92 | 0.000000000000000000e+00
93 | 0.000000000000000000e+00
94 | 1.000000000000000000e+00
95 | 0.000000000000000000e+00
96 | 0.000000000000000000e+00
97 | 0.000000000000000000e+00
98 | 0.000000000000000000e+00
99 | 0.000000000000000000e+00
100 | 1.000000000000000000e+00
101 | 1.000000000000000000e+00
102 | 0.000000000000000000e+00
103 | 1.000000000000000000e+00
104 | 0.000000000000000000e+00
105 | 1.000000000000000000e+00
106 | 1.000000000000000000e+00
107 | 0.000000000000000000e+00
108 | 0.000000000000000000e+00
109 | 0.000000000000000000e+00
110 | 0.000000000000000000e+00
111 | 0.000000000000000000e+00
112 | 1.000000000000000000e+00
113 | 1.000000000000000000e+00
114 | 0.000000000000000000e+00
115 | 1.000000000000000000e+00
116 | 0.000000000000000000e+00
117 | 0.000000000000000000e+00
118 | 1.000000000000000000e+00
119 | 0.000000000000000000e+00
120 | 0.000000000000000000e+00
121 | 0.000000000000000000e+00
122 | 0.000000000000000000e+00
123 | 0.000000000000000000e+00
124 | 0.000000000000000000e+00
125 | 0.000000000000000000e+00
126 | 0.000000000000000000e+00
127 | 0.000000000000000000e+00
128 | 0.000000000000000000e+00
129 | 0.000000000000000000e+00
130 | 1.000000000000000000e+00
131 | 0.000000000000000000e+00
132 | 0.000000000000000000e+00
133 | 0.000000000000000000e+00
134 | 0.000000000000000000e+00
135 | 0.000000000000000000e+00
136 | 0.000000000000000000e+00
137 | 1.000000000000000000e+00
138 | 1.000000000000000000e+00
139 | 1.000000000000000000e+00
140 | 0.000000000000000000e+00
141 | 0.000000000000000000e+00
142 | 0.000000000000000000e+00
143 | 0.000000000000000000e+00
144 | 0.000000000000000000e+00
145 | 0.000000000000000000e+00
146 | 0.000000000000000000e+00
147 | 0.000000000000000000e+00
148 | 0.000000000000000000e+00
149 | 1.000000000000000000e+00
150 | 1.000000000000000000e+00
151 | 1.000000000000000000e+00
152 | 0.000000000000000000e+00
153 | 0.000000000000000000e+00
154 | 0.000000000000000000e+00
155 | 1.000000000000000000e+00
156 | 0.000000000000000000e+00
157 | 0.000000000000000000e+00
158 | 1.000000000000000000e+00
159 | 0.000000000000000000e+00
160 | 0.000000000000000000e+00
161 | 0.000000000000000000e+00
162 | 1.000000000000000000e+00
163 | 1.000000000000000000e+00
164 | 1.000000000000000000e+00
165 | 1.000000000000000000e+00
166 | 0.000000000000000000e+00
167 | 0.000000000000000000e+00
168 | 0.000000000000000000e+00
169 | 1.000000000000000000e+00
170 | 1.000000000000000000e+00
171 | 1.000000000000000000e+00
172 | 1.000000000000000000e+00
173 | 1.000000000000000000e+00
174 | 0.000000000000000000e+00
175 | 1.000000000000000000e+00
176 | 0.000000000000000000e+00
177 | 0.000000000000000000e+00
178 | 0.000000000000000000e+00
179 | 0.000000000000000000e+00
180 | 1.000000000000000000e+00
181 | 0.000000000000000000e+00
182 | 0.000000000000000000e+00
183 | 0.000000000000000000e+00
184 | 1.000000000000000000e+00
185 | 1.000000000000000000e+00
186 | 0.000000000000000000e+00
187 | 1.000000000000000000e+00
188 | 0.000000000000000000e+00
189 | 1.000000000000000000e+00
190 | 0.000000000000000000e+00
191 | 0.000000000000000000e+00
192 | 0.000000000000000000e+00
193 | 1.000000000000000000e+00
194 | 0.000000000000000000e+00
195 | 0.000000000000000000e+00
196 | 1.000000000000000000e+00
197 | 1.000000000000000000e+00
198 | 1.000000000000000000e+00
199 | 1.000000000000000000e+00
200 | 1.000000000000000000e+00
201 | 0.000000000000000000e+00
202 | 1.000000000000000000e+00
203 | 0.000000000000000000e+00
204 | 0.000000000000000000e+00
205 | 0.000000000000000000e+00
206 | 0.000000000000000000e+00
207 | 0.000000000000000000e+00
208 | 0.000000000000000000e+00
209 | 1.000000000000000000e+00
210 | 1.000000000000000000e+00
211 | 0.000000000000000000e+00
212 | 1.000000000000000000e+00
213 | 0.000000000000000000e+00
214 | 0.000000000000000000e+00
215 | 1.000000000000000000e+00
216 | 1.000000000000000000e+00
217 | 0.000000000000000000e+00
218 | 0.000000000000000000e+00
219 | 0.000000000000000000e+00
220 | 0.000000000000000000e+00
221 | 1.000000000000000000e+00
222 | 0.000000000000000000e+00
223 | 0.000000000000000000e+00
224 |
--------------------------------------------------------------------------------
/Module-13-Classification-Measures/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-13-Classification-Measures/.DS_Store
--------------------------------------------------------------------------------
/Module-13-Classification-Measures/Module-12-Instructor-Notebook.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-13-Classification-Measures/Module-12-Instructor-Notebook.zip
--------------------------------------------------------------------------------
/Module-13-Classification-Measures/Module-13-Classification-Measures.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-13-Classification-Measures/Module-13-Classification-Measures.pdf
--------------------------------------------------------------------------------
/Module-15-Decision-Trees-2/Module-15-Instructor-Notebook.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-15-Decision-Trees-2/Module-15-Instructor-Notebook.zip
--------------------------------------------------------------------------------
/Module-15-Decision-Trees-2/iris.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-15-Decision-Trees-2/iris.pdf
--------------------------------------------------------------------------------
/Module-15-Decision-Trees-2/iris.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-15-Decision-Trees-2/iris.png
--------------------------------------------------------------------------------
/Module-15-Decision-Trees-2/iris2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/Module-15-Decision-Trees-2/iris2.pdf
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Machine Learning
2 |
3 | -  `Complete`
4 |
5 | |S.no| Lecture | Status| Notes|
6 | |----|---------|-------| ---- |
7 | |1|Python Basics||[Here](/Module-01-Python-Basics/Module-1-Class-Notes.pdf)|
8 | |2|Conditionals, Loops and Functions||[Here](/Module-02-Conditionals-Loops-and-Functions/Module-2-Class-Notes.pdf)|
9 | |3|Lists and Dictionaries||[Here](/Module-03-Lists-and-Dictionaries/Module-3-Class-Notes.pdf)|
10 | |4|2D Lists and Numpy||[Here](/Module-04-2DLists-and-Numpy/Module-4-Class-Notes.pdf)|
11 | |5|Pandas||[Here](/Module-05-Pandas/Module-5-Class-Notes.pdf)|
12 | |6|Plotting Graphs||[Here](/Module-06-Plotting-Graphs/Module-6-Class-Notes.pdf)|
13 | |7|Introduction to Machine Learning||[Here](/Module-07-Introduction-to-Machine-Learning/Module-7-Class-Notes-Theory.pdf)|
14 | |8|Linear Regression||[Here](/Module-08-Linear-Regression/Module-8-Linear-Regression-Theory.pdf)|
15 | |9|Multi-variable Regression and Gradient Descent||[Here](/Module-09-Multivariate-Regression-and-Gradient-Descent/Lecture-9-Multivariate-Regression-and-Gradient-Descent-Theory.pdf)|
16 | |10|Project - Gradient Descent|||
17 | |11|Logistic Regression||[Here](/Module-11-Logistic-Regression/Module-11-Logistic-Regression-Theory.pdf)|
18 | |12|Project - Logistic Regression|||
19 | |13|Classification Measures||[Here](/Module-13-Classification-Measures/Module-13-Classification-Measures.pdf)|
20 | |14|Decision Trees - 1|||
21 | |15|Decision Trees - 2|||
22 | |16|Project - Decision Tree Implementation|||
23 | |17|Feature Scaling|||
24 | |18|Random Forests|||
25 | |19|Naive Bayes|||
26 | |20|Project - Text Classification|||
27 | |21|K Nearest Neighbours|||
28 | |22|Support Vector Machines|||
29 | |23|Principal Component Analysis|||
30 | |24|Principal Component Analysis - 2|||
31 | |25|Project - CIFAR10|||
32 | |26|Natural Language Processing - 1|||
33 | |27|Natural Language Processing - 2|||
34 | |28|Project - Twitter Sentiment Analysis|||
35 | |29|Git|||
36 | |30|Neural Networks - 1|||
37 | |31|Neural Networks - 2|||
38 | |32|Tensorflow|||
39 | |33|Keras|||
40 | |34|Convolutional Neural Networks - 1|||
41 | |35|Convolutional Neural Networks - 2|||
42 | |36|Recurrent Neural Networks|||
43 | |37|LSTM|||
44 | |38|Unsupervised Learning - 1|||
45 | |39|Unsupervised Learning - 2|||
46 |
47 | ## Additional References
48 |
49 | 1. [Visualising Decision Trees Using scikit-learn and Graphviz](https://towardsdatascience.com/visualizing-decision-trees-with-python-scikit-learn-graphviz-matplotlib-1c50b4aa68dc)
50 |
--------------------------------------------------------------------------------
/tensorflow/.ipynb_checkpoints/13. MNIST-Tensorflow-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 22,
6 | "metadata": {
7 | "collapsed": true
8 | },
9 | "outputs": [],
10 | "source": [
11 | "import tensorflow as tf"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 23,
17 | "metadata": {
18 | "collapsed": true
19 | },
20 | "outputs": [],
21 | "source": [
22 | "from tensorflow.examples.tutorials.mnist import input_data"
23 | ]
24 | },
25 | {
26 | "cell_type": "code",
27 | "execution_count": 24,
28 | "metadata": {},
29 | "outputs": [
30 | {
31 | "name": "stdout",
32 | "output_type": "stream",
33 | "text": [
34 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n",
35 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n",
36 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n",
37 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n"
38 | ]
39 | }
40 | ],
41 | "source": [
42 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 25,
48 | "metadata": {},
49 | "outputs": [
50 | {
51 | "data": {
52 | "text/plain": [
53 | "Datasets(train=, validation=, test=)"
54 | ]
55 | },
56 | "execution_count": 25,
57 | "metadata": {},
58 | "output_type": "execute_result"
59 | }
60 | ],
61 | "source": [
62 | "mnist"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": 26,
68 | "metadata": {},
69 | "outputs": [
70 | {
71 | "data": {
72 | "text/plain": [
73 | "((55000, 784), (55000, 10))"
74 | ]
75 | },
76 | "execution_count": 26,
77 | "metadata": {},
78 | "output_type": "execute_result"
79 | }
80 | ],
81 | "source": [
82 | "mnist.train.images.shape, mnist.train.labels.shape"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": 27,
88 | "metadata": {},
89 | "outputs": [
90 | {
91 | "data": {
92 | "text/plain": [
93 | "array([ 0., 0., 0., 0., 0., 0., 0., 1., 0., 0.])"
94 | ]
95 | },
96 | "execution_count": 27,
97 | "metadata": {},
98 | "output_type": "execute_result"
99 | }
100 | ],
101 | "source": [
102 | "mnist.train.labels[0]"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 28,
108 | "metadata": {},
109 | "outputs": [
110 | {
111 | "data": {
112 | "text/plain": [
113 | "((10000, 784), (10000, 10))"
114 | ]
115 | },
116 | "execution_count": 28,
117 | "metadata": {},
118 | "output_type": "execute_result"
119 | }
120 | ],
121 | "source": [
122 | "mnist.test.images.shape, mnist.test.labels.shape"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": 29,
128 | "metadata": {},
129 | "outputs": [
130 | {
131 | "data": {
132 | "text/plain": [
133 | "(5000, 784)"
134 | ]
135 | },
136 | "execution_count": 29,
137 | "metadata": {},
138 | "output_type": "execute_result"
139 | }
140 | ],
141 | "source": [
142 | "mnist.validation.images.shape"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": 30,
148 | "metadata": {
149 | "collapsed": true
150 | },
151 | "outputs": [],
152 | "source": [
153 | "import numpy as np\n",
154 | "from matplotlib import pyplot as plt"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 31,
160 | "metadata": {
161 | "scrolled": true
162 | },
163 | "outputs": [
164 | {
165 | "data": {
166 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAADfdJREFUeJzt3XuMXGUZx/Hfw3bbSkuxTS+UdrWl\nVEJDtODaGlGDEhQMScFApQipBF0VSTRqlDQm4JVqRK3XWKChJNwFpIEGJQ1QuVhZKlKwIA0W6MWW\nUtILSml3H//YU7O2e96ZzpyZM93n+0mamTnPOXOeTPvrmZn3zHnN3QUgniPKbgBAOQg/EBThB4Ii\n/EBQhB8IivADQRF+ICjCDwRF+IGghjRzZ0NtmA/XiGbuEgjlTb2ht3yPVbNuXeE3szMlLZLUJuk6\nd1+YWn+4Rmi2nV7PLgEkrPIVVa9b89t+M2uT9CtJZ0maIWmemc2o9fkANFc9n/lnSVrn7i+6+1uS\nbpU0p5i2ADRaPeGfJOmVfo83ZMv+j5l1mVm3mXXv1Z46dgegSPWEf6AvFQ76fbC7L3b3TnfvbNew\nOnYHoEj1hH+DpI5+jydL2lRfOwCapZ7wPyFpuplNNbOhki6QtKyYtgA0Ws1Dfe6+z8wul/QH9Q31\nLXH3ZwvrDEBD1TXO7+7LJS0vqBcATcTpvUBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCE\nHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8E1dQpujGwTd/4QLI+6qP/\nStZ3PHhMbq19V3rf43/9WHoFDFoc+YGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gqLrG+c1svaRdknok\n7XP3ziKaGmyGTMwfh5ek8z79ULJ+5bi/p3fw7vzSjt7/JDf94edmJ+u/e35msj5lkSXr9vjfknWU\np4iTfD7i7tsKeB4ATcTbfiCoesPvkv5oZk+aWVcRDQFojnrf9p/q7pvMbLykB8zsOXdf2X+F7D+F\nLkkariPr3B2AotR15Hf3TdntVkl3S5o1wDqL3b3T3TvbNaye3QEoUM3hN7MRZnbU/vuSPibpmaIa\nA9BY9bztnyDpbjPb/zw3u/v9hXQFoOFqDr+7vyjpPQX2Mmj1TB6XrB8//ImG7fvoI96WrP9gwtN1\n1R+d1Zusf+e4U5J1lIehPiAowg8ERfiBoAg/EBThB4Ii/EBQ5u5N29koG+Oz7fSm7e9w0XbC8cn6\nKwuHJuv7/vr23FrPkem/32En7EjWV3Zel6yPbkufsp36SfFHv/vV5LZjf/t4so6DrfIV2unb07+z\nznDkB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgmKK7BfQ8vy5ZP/bcJjUygA9d8fVk/f7LfpSsTx4y\nMrc29aIXktvu+m2yjDpx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoBjnR9KxD7+RrD9ySUeyfsFR\nrxfZDgrEkR8IivADQRF+ICjCDwRF+IGgCD8QFOEHgqo4zm9mSySdLWmru5+ULRsj6TZJUyStlzTX\n3RnQLUnquv8n3vxictvLx65M1kcc8WiyPr5tRLKO1lXNkf8GSWcesOwKSSvcfbqkFdljAIeRiuF3\n95WSth+weI6kpdn9pZLOKbgvAA1W62f+Ce6+WZKy2/HFtQSgGRp+br+ZdUnqkqThSs/rBqB5aj3y\nbzGziZKU3W7NW9HdF7t7p7t3tmtYjbsDULRaw79M0vzs/nxJ9xTTDoBmqRh+M7tF0uOSTjCzDWZ2\nqaSFks4wsxcknZE9BnAYqfiZ393n5ZROL7gX1Cg1ln/NxNUVts6/rn419npPsn7fv4/OrW3++bTk\ntiO1raaeUB3O8AOCIvxAUIQfCIrwA0
ERfiAowg8ExaW7B4H0z3LrG8qr5MSHL03Wp134VG5tpFYV\n3Q4OAUd+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiKcf5B4Af/+nhu7dqO9KW367Vo1q3J+rfnX5Jb\nG3v3s8lte3burKknVIcjPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EZe7etJ2NsjE+27jid9Hajp+a\nWxtzY3rm9IWT703WJw9p3PUApv4hfS2AGd9JX7rbd+5K1nteO3B+2cFvla/QTt9u1azLkR8IivAD\nQRF+ICjCDwRF+IGgCD8QFOEHgqo4zm9mSySdLWmru5+ULbtK0uckvZqttsDdl1faGeP8rWf3+bOT\n9b2fSY+V/+XkO4ps55BctvH9yfpL54zJre3buKnodlpC0eP8N0g6c4DlP3X3mdmfisEH0Foqht/d\nV0qKd6oUMMjV85n/cjN72syWmNnowjoC0BS1hv83kqZJmilps6Rr8lY0sy4z6zaz7r3aU+PuABSt\npvC7+xZ373H3XknXSpqVWHexu3e6e2e7htXaJ4CC1RR+M5vY7+G5kp4pph0AzVLx0t1mdouk0ySN\nNbMNkq6UdJqZzZTkktZL+nwDewTQAPyeH0lto0alV5g0IVl+6dxxubXzPvVwcttvj0tf17+SS17+\nUG5ty0Vjk9v2rPtnXfsuC7/nB1AR4QeCIvxAUIQfCIrwA0ERfiAohvpQmrZ3TUvWT7n9H8n698av\nqXnfM6++LFmf8IvHan7uMjHUB6Aiwg8ERfiBoAg/EBThB4Ii/EBQhB8IquLv+YFGWfuN9KUfl9cx\njo/KOPIDQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCM8yNpz1nvS9b/My79T+iLC+7Mrc0d+esKex9a\noZ72nr/My61NuuX55LY9de358MCRHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCqjjOb2Ydkm6UdIyk\nXkmL3X2RmY2RdJukKZLWS5rr7q83rtXBq+3E6cn6xqvTf00jbz46t7b7wh019bTfQ+9dlKyPbjuy\njmdPj+Pv7n0zWZ/z3NxkfdJnt+bWera9ltw2gmqO/Pskfc3dT5T0fklfMrMZkq6QtMLdp0takT0G\ncJioGH533+zuq7P7uyStlTRJ0hxJS7PVlko6p1FNAijeIX3mN7Mpkk6WtErSBHffLPX9ByFpfNHN\nAWicqsNvZiMl3SnpK+6+8xC26zKzbjPr3qs9tfQIoAGqCr+Ztasv+De5+13Z4i1mNjGrT5Q04Lcr\n7r7Y3TvdvbNdw4roGUABKobfzEzS9ZLWuvtP+pWWSZqf3Z8v6Z7i2wPQKNX8pPdUSRdLWmNmT2XL\nFkhaKOl2M7tU0suSzm9Mi4Nfx9INyfryyY+nn2BWgc0cpJ6hvLQZj12UrB/7y/RQ4JCHVifrEX6W\nW4+K4Xf3RyTlzfd9erHtAGgWzvADgiL8QFCEHwiK8ANBEX4gKMIPBMWlu1vAYxunpleoNM5fout3\nHJOsX31f/u+9pn/rqdyaJPW+mf5JL+rDkR8IivADQRF+ICjCDwRF+IGgCD8QFOEHgmKcvwV0fGFb\nsv7JO85I1p97IP/S320NHirvuPfVZH3a2j/n1nqLbgaHhCM/EBThB4Ii/EBQhB8IivADQRF+ICjC\nDwTFOH8L6NmSP5W0JL3x4fT2HUqPtTcS18Y/fHHkB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgKobf\nzDrM7EEzW2tmz5rZl7PlV5nZRjN7Kvvzica3C6Ao1Zzks0/S19x9tZkdJelJM3sgq/3U3X/cuPYA\nNErF8Lv7Zkmbs/u7zGytpEmNbgxAYx3SZ34zmyLpZEmrskWXm9nTZrbEzEbnbNNlZt1m1r1Xe+pq\nFkBxqg6/mY2UdKekr7j7Tkm/kTRN0kz1vTO4ZqDt3H2xu3e6e2e7hhXQMoAiVBV+M2tXX/Bvcve7\nJM
ndt7h7j7v3SrpW0qzGtQmgaNV822+Srpe01t1/0m/5xH6rnSvpmeLbA9Ao1Xzbf6qkiyWtMbP9\ncyovkDTPzGZKcknrJX2+IR0CaIhqvu1/RJINUFpefDsAmoUz/ICgCD8QFOEHgiL8QFCEHwiK8ANB\nEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0GZuzdvZ2avSnqp36KxkrY1rYFD06q9tWpfEr3V\nqsje3unu46pZsanhP2jnZt3u3llaAwmt2lur9iXRW63K6o23/UBQhB8IquzwLy55/ymt2lur9iXR\nW61K6a3Uz/wAylP2kR9ASUoJv5mdaWbPm9k6M7uijB7ymNl6M1uTzTzcXXIvS8xsq5k902/ZGDN7\nwMxeyG4HnCatpN5aYubmxMzSpb52rTbjddPf9ptZm6R/SDpD0gZJT0ia5+5/b2ojOcxsvaROdy99\nTNjMPixpt6Qb3f2kbNmPJG1394XZf5yj3f2bLdLbVZJ2lz1zczahzMT+M0tLOkfSZ1Tia5foa65K\neN3KOPLPkrTO3V9097ck3SppTgl9tDx3Xylp+wGL50hamt1fqr5/PE2X01tLcPfN7r46u79L0v6Z\npUt97RJ9laKM8E+S9Eq/xxvUWlN+u6Q/mtmTZtZVdjMDmJBNm75/+vTxJfdzoIozNzfTATNLt8xr\nV8uM10UrI/wDzf7TSkMOp7r7KZLOkvSl7O0tqlPVzM3NMsDM0i2h1hmvi1ZG+DdI6uj3eLKkTSX0\nMSB335TdbpV0t1pv9uEt+ydJzW63ltzP/7TSzM0DzSytFnjtWmnG6zLC/4Sk6WY21cyGSrpA0rIS\n+jiImY3IvoiRmY2Q9DG13uzDyyTNz+7Pl3RPib38n1aZuTlvZmmV/Nq12ozXpZzkkw1l/ExSm6Ql\n7v79pjcxADM7Tn1He6lvEtOby+zNzG6RdJr6fvW1RdKVkn4v6XZJ75D0sqTz3b3pX7zl9Haa+t66\n/m/m5v2fsZvc2wcl/UnSGkm92eIF6vt8Xdprl+hrnkp43TjDDwiKM/yAoAg/EBThB4Ii/EBQhB8I\nivADQRF+ICjCDwT1X00+72AlohAXAAAAAElFTkSuQmCC\n",
167 | "text/plain": [
168 | ""
169 | ]
170 | },
171 | "metadata": {},
172 | "output_type": "display_data"
173 | }
174 | ],
175 | "source": [
176 | "first_image = mnist.train.images[412]\n",
177 | "first_image = np.array(first_image, dtype='float')\n",
178 | "first_image = first_image.reshape((28,28))\n",
179 | "plt.imshow(first_image)\n",
180 | "plt.show()"
181 | ]
182 | },
183 | {
184 | "cell_type": "code",
185 | "execution_count": 32,
186 | "metadata": {},
187 | "outputs": [
188 | {
189 | "name": "stdout",
190 | "output_type": "stream",
191 | "text": [
192 | "[[ 0.27737778 1.9903717 -0.3673577 ..., 1.03369391 -0.64082879\n",
193 | " -0.49761856]\n",
194 | " [-0.33661583 -0.44995251 0.57330441 ..., -0.45720869 1.37859488\n",
195 | " -1.52848101]\n",
196 | " [-1.31242561 -0.03728846 0.51944399 ..., 0.12360641 0.0640543\n",
197 | " -0.36333963]\n",
198 | " ..., \n",
199 | " [ 0.59617668 1.67344177 -1.55677068 ..., 0.87780756 -1.06859076\n",
200 | " -0.78537446]\n",
201 | " [ 0.94489622 -0.14393929 -0.14852545 ..., -0.90294963 1.29174137\n",
202 | " 1.19068635]\n",
203 | " [-1.86703086 -1.80996883 -1.09966731 ..., 0.88350892 -0.92721862\n",
204 | " 0.50529331]]\n"
205 | ]
206 | }
207 | ],
208 | "source": [
209 | "with tf.Session() as sess:\n",
210 | " print(tf.random_normal([784, 256]).eval())"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 33,
216 | "metadata": {
217 | "collapsed": true
218 | },
219 | "outputs": [],
220 | "source": [
221 | "# weights & biases\n",
222 | "n_input = 784\n",
223 | "n_hidden_1 = 256\n",
224 | "n_hidden_2 = 256\n",
225 | "n_classes = 10\n",
226 | "\n",
227 | "weights = {\n",
228 | " 'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),\n",
229 | " 'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),\n",
230 | " 'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))\n",
231 | "}\n",
232 | "\n",
233 | "biases = {\n",
234 | " 'h1': tf.Variable(tf.random_normal([n_hidden_1])),\n",
235 | " 'h2': tf.Variable(tf.random_normal([n_hidden_2])),\n",
236 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n",
237 | "}\n"
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": 34,
243 | "metadata": {},
244 | "outputs": [
245 | {
246 | "data": {
247 | "text/plain": [
248 | "[,\n",
249 | " ,\n",
250 | " ,\n",
251 | " ,\n",
252 | " ,\n",
253 | " ,\n",
254 | " ,\n",
255 | " ,\n",
256 | " ,\n",
257 | " ,\n",
258 | " ,\n",
259 | " ,\n",
260 | " ]"
261 | ]
262 | },
263 | "execution_count": 34,
264 | "metadata": {},
265 | "output_type": "execute_result"
266 | }
267 | ],
268 | "source": [
269 | "tf.trainable_variables()"
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": 35,
275 | "metadata": {
276 | "collapsed": true
277 | },
278 | "outputs": [],
279 | "source": [
280 | "def forward_propagation(x, weights, biases):\n",
281 | " in_layer1 = tf.add(tf.matmul(x, weights['h1']), biases['h1'])\n",
282 | " out_layer1 = tf.nn.relu(in_layer1)\n",
283 | " \n",
284 | " in_layer2 = tf.add(tf.matmul(out_layer1, weights['h2']), biases['h2'])\n",
285 | " out_layer2 = tf.nn.relu(in_layer2)\n",
286 | " \n",
287 | " output = tf.add(tf.matmul(out_layer2, weights['out']), biases['out'])\n",
288 | " return output"
289 | ]
290 | },
291 | {
292 | "cell_type": "code",
293 | "execution_count": 36,
294 | "metadata": {},
295 | "outputs": [],
296 | "source": [
297 | "x = tf.placeholder(\"float\", [None, n_input])\n",
298 | "y =tf.placeholder(tf.int32, [None, n_classes])\n",
299 | "pred = forward_propagation(x, weights, biases)"
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": 37,
305 | "metadata": {},
306 | "outputs": [],
307 | "source": [
308 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels = y))"
309 | ]
310 | },
311 | {
312 | "cell_type": "code",
313 | "execution_count": 38,
314 | "metadata": {
315 | "collapsed": true
316 | },
317 | "outputs": [],
318 | "source": [
319 | "optimizer = tf.train.AdamOptimizer(learning_rate=0.01)\n",
320 | "optimize = optimizer.minimize(cost)"
321 | ]
322 | },
323 | {
324 | "cell_type": "code",
325 | "execution_count": 45,
326 | "metadata": {},
327 | "outputs": [],
328 | "source": [
329 | "sess = tf.Session()\n",
330 | "sess.run(tf.global_variables_initializer())"
331 | ]
332 | },
333 | {
334 | "cell_type": "code",
335 | "execution_count": 46,
336 | "metadata": {},
337 | "outputs": [
338 | {
339 | "name": "stdout",
340 | "output_type": "stream",
341 | "text": [
342 | "25293.5888176\n",
343 | "5315.23647187\n",
344 | "2712.12287982\n",
345 | "1838.5750719\n",
346 | "1445.4085486\n",
347 | "1305.87004675\n",
348 | "1111.03826703\n",
349 | "944.198320175\n",
350 | "811.903118697\n",
351 | "734.364716449\n",
352 | "693.887420736\n",
353 | "637.789177042\n",
354 | "533.368781224\n",
355 | "453.776671396\n",
356 | "394.457511591\n",
357 | "360.047066105\n",
358 | "338.558640063\n",
359 | "290.97362693\n",
360 | "289.484946223\n",
361 | "220.941823438\n",
362 | "208.768152281\n",
363 | "192.970408337\n",
364 | "200.511622493\n",
365 | "144.955585311\n",
366 | "141.870637628\n"
367 | ]
368 | }
369 | ],
370 | "source": [
371 | "batch_size = 100\n",
372 | "for i in range(25):\n",
373 | " num_batches = int(mnist.train.num_examples/batch_size)\n",
374 | " total_cost = 0\n",
375 | " for j in range(num_batches):\n",
376 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n",
377 | " c, _ = sess.run([cost,optimize], feed_dict={x:batch_x , y:batch_y})\n",
378 | " total_cost += c\n",
379 | " print(total_cost)"
380 | ]
381 | },
382 | {
383 | "cell_type": "code",
384 | "execution_count": 47,
385 | "metadata": {},
386 | "outputs": [
387 | {
388 | "data": {
389 | "text/plain": [
390 | "9608"
391 | ]
392 | },
393 | "execution_count": 47,
394 | "metadata": {},
395 | "output_type": "execute_result"
396 | }
397 | ],
398 | "source": [
399 | "predictions = tf.argmax(pred, 1)\n",
400 | "correct_labels = tf.argmax(y, 1)\n",
401 | "correct_predictions = tf.equal(predictions, correct_labels)\n",
402 | "predictions,correct_predictions = sess.run([predictions, correct_predictions], feed_dict={x:mnist.test.images,\n",
403 | " y:mnist.test.labels})\n",
404 | "correct_predictions.sum()"
405 | ]
406 | }
407 | ],
408 | "metadata": {
409 | "kernelspec": {
410 | "display_name": "Python 3",
411 | "language": "python",
412 | "name": "python3"
413 | },
414 | "language_info": {
415 | "codemirror_mode": {
416 | "name": "ipython",
417 | "version": 3
418 | },
419 | "file_extension": ".py",
420 | "mimetype": "text/x-python",
421 | "name": "python",
422 | "nbconvert_exporter": "python",
423 | "pygments_lexer": "ipython3",
424 | "version": "3.6.6"
425 | }
426 | },
427 | "nbformat": 4,
428 | "nbformat_minor": 2
429 | }
430 |
--------------------------------------------------------------------------------
/tensorflow/.ipynb_checkpoints/MNIST-TensorFlow-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/tensorflow/13. MNIST-Tensorflow.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 22,
6 | "metadata": {
7 | "collapsed": true
8 | },
9 | "outputs": [],
10 | "source": [
11 | "import tensorflow as tf"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 23,
17 | "metadata": {
18 | "collapsed": true
19 | },
20 | "outputs": [],
21 | "source": [
22 | "from tensorflow.examples.tutorials.mnist import input_data"
23 | ]
24 | },
25 | {
26 | "cell_type": "code",
27 | "execution_count": 24,
28 | "metadata": {},
29 | "outputs": [
30 | {
31 | "name": "stdout",
32 | "output_type": "stream",
33 | "text": [
34 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n",
35 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n",
36 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n",
37 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n"
38 | ]
39 | }
40 | ],
41 | "source": [
42 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 25,
48 | "metadata": {},
49 | "outputs": [
50 | {
51 | "data": {
52 | "text/plain": [
53 | "Datasets(train=, validation=, test=)"
54 | ]
55 | },
56 | "execution_count": 25,
57 | "metadata": {},
58 | "output_type": "execute_result"
59 | }
60 | ],
61 | "source": [
62 | "mnist"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": 26,
68 | "metadata": {},
69 | "outputs": [
70 | {
71 | "data": {
72 | "text/plain": [
73 | "((55000, 784), (55000, 10))"
74 | ]
75 | },
76 | "execution_count": 26,
77 | "metadata": {},
78 | "output_type": "execute_result"
79 | }
80 | ],
81 | "source": [
82 | "mnist.train.images.shape, mnist.train.labels.shape"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": 27,
88 | "metadata": {},
89 | "outputs": [
90 | {
91 | "data": {
92 | "text/plain": [
93 | "array([ 0., 0., 0., 0., 0., 0., 0., 1., 0., 0.])"
94 | ]
95 | },
96 | "execution_count": 27,
97 | "metadata": {},
98 | "output_type": "execute_result"
99 | }
100 | ],
101 | "source": [
102 | "mnist.train.labels[0]"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 28,
108 | "metadata": {},
109 | "outputs": [
110 | {
111 | "data": {
112 | "text/plain": [
113 | "((10000, 784), (10000, 10))"
114 | ]
115 | },
116 | "execution_count": 28,
117 | "metadata": {},
118 | "output_type": "execute_result"
119 | }
120 | ],
121 | "source": [
122 | "mnist.test.images.shape, mnist.test.labels.shape"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": 29,
128 | "metadata": {},
129 | "outputs": [
130 | {
131 | "data": {
132 | "text/plain": [
133 | "(5000, 784)"
134 | ]
135 | },
136 | "execution_count": 29,
137 | "metadata": {},
138 | "output_type": "execute_result"
139 | }
140 | ],
141 | "source": [
142 | "mnist.validation.images.shape"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": 30,
148 | "metadata": {
149 | "collapsed": true
150 | },
151 | "outputs": [],
152 | "source": [
153 | "import numpy as np\n",
154 | "from matplotlib import pyplot as plt"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 31,
160 | "metadata": {
161 | "scrolled": true
162 | },
163 | "outputs": [
164 | {
165 | "data": {
166 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAADfdJREFUeJzt3XuMXGUZx/Hfw3bbSkuxTS+UdrWl\nVEJDtODaGlGDEhQMScFApQipBF0VSTRqlDQm4JVqRK3XWKChJNwFpIEGJQ1QuVhZKlKwIA0W6MWW\nUtILSml3H//YU7O2e96ZzpyZM93n+0mamTnPOXOeTPvrmZn3zHnN3QUgniPKbgBAOQg/EBThB4Ii\n/EBQhB8IivADQRF+ICjCDwRF+IGghjRzZ0NtmA/XiGbuEgjlTb2ht3yPVbNuXeE3szMlLZLUJuk6\nd1+YWn+4Rmi2nV7PLgEkrPIVVa9b89t+M2uT9CtJZ0maIWmemc2o9fkANFc9n/lnSVrn7i+6+1uS\nbpU0p5i2ADRaPeGfJOmVfo83ZMv+j5l1mVm3mXXv1Z46dgegSPWEf6AvFQ76fbC7L3b3TnfvbNew\nOnYHoEj1hH+DpI5+jydL2lRfOwCapZ7wPyFpuplNNbOhki6QtKyYtgA0Ws1Dfe6+z8wul/QH9Q31\nLXH3ZwvrDEBD1TXO7+7LJS0vqBcATcTpvUBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCE\nHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8E1dQpujGwTd/4QLI+6qP/\nStZ3PHhMbq19V3rf43/9WHoFDFoc+YGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gqLrG+c1svaRdknok\n7XP3ziKaGmyGTMwfh5ek8z79ULJ+5bi/p3fw7vzSjt7/JDf94edmJ+u/e35msj5lkSXr9vjfknWU\np4iTfD7i7tsKeB4ATcTbfiCoesPvkv5oZk+aWVcRDQFojnrf9p/q7pvMbLykB8zsOXdf2X+F7D+F\nLkkariPr3B2AotR15Hf3TdntVkl3S5o1wDqL3b3T3TvbNaye3QEoUM3hN7MRZnbU/vuSPibpmaIa\nA9BY9bztnyDpbjPb/zw3u/v9hXQFoOFqDr+7vyjpPQX2Mmj1TB6XrB8//ImG7fvoI96WrP9gwtN1\n1R+d1Zusf+e4U5J1lIehPiAowg8ERfiBoAg/EBThB4Ii/EBQ5u5N29koG+Oz7fSm7e9w0XbC8cn6\nKwuHJuv7/vr23FrPkem/32En7EjWV3Zel6yPbkufsp36SfFHv/vV5LZjf/t4so6DrfIV2unb07+z\nznDkB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgmKK7BfQ8vy5ZP/bcJjUygA9d8fVk/f7LfpSsTx4y\nMrc29aIXktvu+m2yjDpx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoBjnR9KxD7+RrD9ySUeyfsFR\nrxfZDgrEkR8IivADQRF+ICjCDwRF+IGgCD8QFOEHgqo4zm9mSySdLWmru5+ULRsj6TZJUyStlzTX\n3RnQLUnquv8n3vxictvLx65M1kcc8WiyPr5tRLKO1lXNkf8GSWcesOwKSSvcfbqkFdljAIeRiuF3\n95WSth+weI6kpdn9pZLOKbgvAA1W62f+Ce6+WZKy2/HFtQSgGRp+br+ZdUnqkqThSs/rBqB5aj3y\nbzGziZKU3W7NW9HdF7t7p7t3tmtYjbsDULRaw79M0vzs/nxJ9xTTDoBmqRh+M7tF0uOSTjCzDWZ2\nqaSFks4wsxcknZE9BnAYqfiZ393n5ZROL7gX1Cg1ln/NxNUVts6/rn419npPsn7fv4/OrW3++bTk\ntiO1raaeUB3O8AOCIvxAUIQfCIrwA0
ERfiAowg8ExaW7B4H0z3LrG8qr5MSHL03Wp134VG5tpFYV\n3Q4OAUd+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiKcf5B4Af/+nhu7dqO9KW367Vo1q3J+rfnX5Jb\nG3v3s8lte3burKknVIcjPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EZe7etJ2NsjE+27jid9Hajp+a\nWxtzY3rm9IWT703WJw9p3PUApv4hfS2AGd9JX7rbd+5K1nteO3B+2cFvla/QTt9u1azLkR8IivAD\nQRF+ICjCDwRF+IGgCD8QFOEHgqo4zm9mSySdLWmru5+ULbtK0uckvZqttsDdl1faGeP8rWf3+bOT\n9b2fSY+V/+XkO4ps55BctvH9yfpL54zJre3buKnodlpC0eP8N0g6c4DlP3X3mdmfisEH0Foqht/d\nV0qKd6oUMMjV85n/cjN72syWmNnowjoC0BS1hv83kqZJmilps6Rr8lY0sy4z6zaz7r3aU+PuABSt\npvC7+xZ373H3XknXSpqVWHexu3e6e2e7htXaJ4CC1RR+M5vY7+G5kp4pph0AzVLx0t1mdouk0ySN\nNbMNkq6UdJqZzZTkktZL+nwDewTQAPyeH0lto0alV5g0IVl+6dxxubXzPvVwcttvj0tf17+SS17+\nUG5ty0Vjk9v2rPtnXfsuC7/nB1AR4QeCIvxAUIQfCIrwA0ERfiAohvpQmrZ3TUvWT7n9H8n698av\nqXnfM6++LFmf8IvHan7uMjHUB6Aiwg8ERfiBoAg/EBThB4Ii/EBQhB8IquLv+YFGWfuN9KUfl9cx\njo/KOPIDQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCM8yNpz1nvS9b/My79T+iLC+7Mrc0d+esKex9a\noZ72nr/My61NuuX55LY9de358MCRHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCqjjOb2Ydkm6UdIyk\nXkmL3X2RmY2RdJukKZLWS5rr7q83rtXBq+3E6cn6xqvTf00jbz46t7b7wh019bTfQ+9dlKyPbjuy\njmdPj+Pv7n0zWZ/z3NxkfdJnt+bWera9ltw2gmqO/Pskfc3dT5T0fklfMrMZkq6QtMLdp0takT0G\ncJioGH533+zuq7P7uyStlTRJ0hxJS7PVlko6p1FNAijeIX3mN7Mpkk6WtErSBHffLPX9ByFpfNHN\nAWicqsNvZiMl3SnpK+6+8xC26zKzbjPr3qs9tfQIoAGqCr+Ztasv+De5+13Z4i1mNjGrT5Q04Lcr\n7r7Y3TvdvbNdw4roGUABKobfzEzS9ZLWuvtP+pWWSZqf3Z8v6Z7i2wPQKNX8pPdUSRdLWmNmT2XL\nFkhaKOl2M7tU0suSzm9Mi4Nfx9INyfryyY+nn2BWgc0cpJ6hvLQZj12UrB/7y/RQ4JCHVifrEX6W\nW4+K4Xf3RyTlzfd9erHtAGgWzvADgiL8QFCEHwiK8ANBEX4gKMIPBMWlu1vAYxunpleoNM5fout3\nHJOsX31f/u+9pn/rqdyaJPW+mf5JL+rDkR8IivADQRF+ICjCDwRF+IGgCD8QFOEHgmKcvwV0fGFb\nsv7JO85I1p97IP/S320NHirvuPfVZH3a2j/n1nqLbgaHhCM/EBThB4Ii/EBQhB8IivADQRF+ICjC\nDwTFOH8L6NmSP5W0JL3x4fT2HUqPtTcS18Y/fHHkB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgKobf\nzDrM7EEzW2tmz5rZl7PlV5nZRjN7Kvvzica3C6Ao1Zzks0/S19x9tZkdJelJM3sgq/3U3X/cuPYA\nNErF8Lv7Zkmbs/u7zGytpEmNbgxAYx3SZ34zmyLpZEmrskWXm9nTZrbEzEbnbNNlZt1m1r1Xe+pq\nFkBxqg6/mY2UdKekr7j7Tkm/kTRN0kz1vTO4ZqDt3H2xu3e6e2e7hhXQMoAiVBV+M2tXX/Bvcve7\nJM
ndt7h7j7v3SrpW0qzGtQmgaNV822+Srpe01t1/0m/5xH6rnSvpmeLbA9Ao1Xzbf6qkiyWtMbP9\ncyovkDTPzGZKcknrJX2+IR0CaIhqvu1/RJINUFpefDsAmoUz/ICgCD8QFOEHgiL8QFCEHwiK8ANB\nEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0GZuzdvZ2avSnqp36KxkrY1rYFD06q9tWpfEr3V\nqsje3unu46pZsanhP2jnZt3u3llaAwmt2lur9iXRW63K6o23/UBQhB8IquzwLy55/ymt2lur9iXR\nW61K6a3Uz/wAylP2kR9ASUoJv5mdaWbPm9k6M7uijB7ymNl6M1uTzTzcXXIvS8xsq5k902/ZGDN7\nwMxeyG4HnCatpN5aYubmxMzSpb52rTbjddPf9ptZm6R/SDpD0gZJT0ia5+5/b2ojOcxsvaROdy99\nTNjMPixpt6Qb3f2kbNmPJG1394XZf5yj3f2bLdLbVZJ2lz1zczahzMT+M0tLOkfSZ1Tia5foa65K\neN3KOPLPkrTO3V9097ck3SppTgl9tDx3Xylp+wGL50hamt1fqr5/PE2X01tLcPfN7r46u79L0v6Z\npUt97RJ9laKM8E+S9Eq/xxvUWlN+u6Q/mtmTZtZVdjMDmJBNm75/+vTxJfdzoIozNzfTATNLt8xr\nV8uM10UrI/wDzf7TSkMOp7r7KZLOkvSl7O0tqlPVzM3NMsDM0i2h1hmvi1ZG+DdI6uj3eLKkTSX0\nMSB335TdbpV0t1pv9uEt+ydJzW63ltzP/7TSzM0DzSytFnjtWmnG6zLC/4Sk6WY21cyGSrpA0rIS\n+jiImY3IvoiRmY2Q9DG13uzDyyTNz+7Pl3RPib38n1aZuTlvZmmV/Nq12ozXpZzkkw1l/ExSm6Ql\n7v79pjcxADM7Tn1He6lvEtOby+zNzG6RdJr6fvW1RdKVkn4v6XZJ75D0sqTz3b3pX7zl9Haa+t66\n/m/m5v2fsZvc2wcl/UnSGkm92eIF6vt8Xdprl+hrnkp43TjDDwiKM/yAoAg/EBThB4Ii/EBQhB8I\nivADQRF+ICjCDwT1X00+72AlohAXAAAAAElFTkSuQmCC\n",
167 | "text/plain": [
168 | ""
169 | ]
170 | },
171 | "metadata": {},
172 | "output_type": "display_data"
173 | }
174 | ],
175 | "source": [
176 | "first_image = mnist.train.images[412]\n",
177 | "first_image = np.array(first_image, dtype='float')\n",
178 | "first_image = first_image.reshape((28,28))\n",
179 | "plt.imshow(first_image)\n",
180 | "plt.show()"
181 | ]
182 | },
183 | {
184 | "cell_type": "code",
185 | "execution_count": 32,
186 | "metadata": {},
187 | "outputs": [
188 | {
189 | "name": "stdout",
190 | "output_type": "stream",
191 | "text": [
192 | "[[ 0.27737778 1.9903717 -0.3673577 ..., 1.03369391 -0.64082879\n",
193 | " -0.49761856]\n",
194 | " [-0.33661583 -0.44995251 0.57330441 ..., -0.45720869 1.37859488\n",
195 | " -1.52848101]\n",
196 | " [-1.31242561 -0.03728846 0.51944399 ..., 0.12360641 0.0640543\n",
197 | " -0.36333963]\n",
198 | " ..., \n",
199 | " [ 0.59617668 1.67344177 -1.55677068 ..., 0.87780756 -1.06859076\n",
200 | " -0.78537446]\n",
201 | " [ 0.94489622 -0.14393929 -0.14852545 ..., -0.90294963 1.29174137\n",
202 | " 1.19068635]\n",
203 | " [-1.86703086 -1.80996883 -1.09966731 ..., 0.88350892 -0.92721862\n",
204 | " 0.50529331]]\n"
205 | ]
206 | }
207 | ],
208 | "source": [
209 | "with tf.Session() as sess:\n",
210 | " print(tf.random_normal([784, 256]).eval())"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 33,
216 | "metadata": {
217 | "collapsed": true
218 | },
219 | "outputs": [],
220 | "source": [
221 | "# weights & biases\n",
222 | "n_input = 784\n",
223 | "n_hidden_1 = 256\n",
224 | "n_hidden_2 = 256\n",
225 | "n_classes = 10\n",
226 | "\n",
227 | "weights = {\n",
228 | " 'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),\n",
229 | " 'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),\n",
230 | " 'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))\n",
231 | "}\n",
232 | "\n",
233 | "biases = {\n",
234 | " 'h1': tf.Variable(tf.random_normal([n_hidden_1])),\n",
235 | " 'h2': tf.Variable(tf.random_normal([n_hidden_2])),\n",
236 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n",
237 | "}\n"
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": 34,
243 | "metadata": {},
244 | "outputs": [
245 | {
246 | "data": {
247 | "text/plain": [
248 | "[,\n",
249 | " ,\n",
250 | " ,\n",
251 | " ,\n",
252 | " ,\n",
253 | " ,\n",
254 | " ,\n",
255 | " ,\n",
256 | " ,\n",
257 | " ,\n",
258 | " ,\n",
259 | " ,\n",
260 | " ]"
261 | ]
262 | },
263 | "execution_count": 34,
264 | "metadata": {},
265 | "output_type": "execute_result"
266 | }
267 | ],
268 | "source": [
269 | "tf.trainable_variables()"
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": 35,
275 | "metadata": {
276 | "collapsed": true
277 | },
278 | "outputs": [],
279 | "source": [
280 | "def forward_propagation(x, weights, biases):\n",
281 | " in_layer1 = tf.add(tf.matmul(x, weights['h1']), biases['h1'])\n",
282 | " out_layer1 = tf.nn.relu(in_layer1)\n",
283 | " \n",
284 | " in_layer2 = tf.add(tf.matmul(out_layer1, weights['h2']), biases['h2'])\n",
285 | " out_layer2 = tf.nn.relu(in_layer2)\n",
286 | " \n",
287 | " output = tf.add(tf.matmul(out_layer2, weights['out']), biases['out'])\n",
288 | " return output"
289 | ]
290 | },
291 | {
292 | "cell_type": "code",
293 | "execution_count": 36,
294 | "metadata": {},
295 | "outputs": [],
296 | "source": [
297 | "x = tf.placeholder(\"float\", [None, n_input])\n",
298 | "y =tf.placeholder(tf.int32, [None, n_classes])\n",
299 | "pred = forward_propagation(x, weights, biases)"
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": 37,
305 | "metadata": {},
306 | "outputs": [],
307 | "source": [
308 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels = y))"
309 | ]
310 | },
311 | {
312 | "cell_type": "code",
313 | "execution_count": 38,
314 | "metadata": {
315 | "collapsed": true
316 | },
317 | "outputs": [],
318 | "source": [
319 | "optimizer = tf.train.AdamOptimizer(learning_rate=0.01)\n",
320 | "optimize = optimizer.minimize(cost)"
321 | ]
322 | },
323 | {
324 | "cell_type": "code",
325 | "execution_count": 45,
326 | "metadata": {},
327 | "outputs": [],
328 | "source": [
329 | "sess = tf.Session()\n",
330 | "sess.run(tf.global_variables_initializer())"
331 | ]
332 | },
333 | {
334 | "cell_type": "code",
335 | "execution_count": 46,
336 | "metadata": {},
337 | "outputs": [
338 | {
339 | "name": "stdout",
340 | "output_type": "stream",
341 | "text": [
342 | "25293.5888176\n",
343 | "5315.23647187\n",
344 | "2712.12287982\n",
345 | "1838.5750719\n",
346 | "1445.4085486\n",
347 | "1305.87004675\n",
348 | "1111.03826703\n",
349 | "944.198320175\n",
350 | "811.903118697\n",
351 | "734.364716449\n",
352 | "693.887420736\n",
353 | "637.789177042\n",
354 | "533.368781224\n",
355 | "453.776671396\n",
356 | "394.457511591\n",
357 | "360.047066105\n",
358 | "338.558640063\n",
359 | "290.97362693\n",
360 | "289.484946223\n",
361 | "220.941823438\n",
362 | "208.768152281\n",
363 | "192.970408337\n",
364 | "200.511622493\n",
365 | "144.955585311\n",
366 | "141.870637628\n"
367 | ]
368 | }
369 | ],
370 | "source": [
371 | "batch_size = 100\n",
372 | "for i in range(25):\n",
373 | " num_batches = int(mnist.train.num_examples/batch_size)\n",
374 | " total_cost = 0\n",
375 | " for j in range(num_batches):\n",
376 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n",
377 | " c, _ = sess.run([cost,optimize], feed_dict={x:batch_x , y:batch_y})\n",
378 | " total_cost += c\n",
379 | " print(total_cost)"
380 | ]
381 | },
382 | {
383 | "cell_type": "code",
384 | "execution_count": 47,
385 | "metadata": {},
386 | "outputs": [
387 | {
388 | "data": {
389 | "text/plain": [
390 | "9608"
391 | ]
392 | },
393 | "execution_count": 47,
394 | "metadata": {},
395 | "output_type": "execute_result"
396 | }
397 | ],
398 | "source": [
399 | "predictions = tf.argmax(pred, 1)\n",
400 | "correct_labels = tf.argmax(y, 1)\n",
401 | "correct_predictions = tf.equal(predictions, correct_labels)\n",
402 | "predictions,correct_predictions = sess.run([predictions, correct_predictions], feed_dict={x:mnist.test.images,\n",
403 | " y:mnist.test.labels})\n",
404 | "correct_predictions.sum()"
405 | ]
406 | }
407 | ],
408 | "metadata": {
409 | "kernelspec": {
410 | "display_name": "Python 3",
411 | "language": "python",
412 | "name": "python3"
413 | },
414 | "language_info": {
415 | "codemirror_mode": {
416 | "name": "ipython",
417 | "version": 3
418 | },
419 | "file_extension": ".py",
420 | "mimetype": "text/x-python",
421 | "name": "python",
422 | "nbconvert_exporter": "python",
423 | "pygments_lexer": "ipython3",
424 | "version": "3.6.6"
425 | }
426 | },
427 | "nbformat": 4,
428 | "nbformat_minor": 2
429 | }
430 |
--------------------------------------------------------------------------------
/tensorflow/MNIST_data/t10k-images-idx3-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/tensorflow/MNIST_data/t10k-images-idx3-ubyte.gz
--------------------------------------------------------------------------------
/tensorflow/MNIST_data/t10k-labels-idx1-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/tensorflow/MNIST_data/t10k-labels-idx1-ubyte.gz
--------------------------------------------------------------------------------
/tensorflow/MNIST_data/train-images-idx3-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/tensorflow/MNIST_data/train-images-idx3-ubyte.gz
--------------------------------------------------------------------------------
/tensorflow/MNIST_data/train-labels-idx1-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/championballer/coding-ninjas-machine-learning/e4edda4fe0cb0a3b1703a55e30e050994de27810/tensorflow/MNIST_data/train-labels-idx1-ubyte.gz
--------------------------------------------------------------------------------