├── .gitignore
├── Algorithms
│   ├── Clustering
│   │   ├── K_Means-K_Medians
│   │   │   ├── generic_kmeans_medians.py
│   │   │   ├── k_means.py
│   │   │   └── k_means_sklearn.py
│   │   └── Mean_Shift
│   │       ├── mean_shift.py
│   │       └── mean_shift_sklearn.py
│   ├── Deep_Learning
│   │   ├── Deep_Neural_Networks
│   │   │   ├── checkpoint
│   │   │   ├── mnist_nn.ckpt.data-00000-of-00001
│   │   │   ├── mnist_nn.ckpt.index
│   │   │   ├── mnist_nn.ckpt.meta
│   │   │   └── mnist_nn.py
│   │   └── Recurrent_Neural_Networks
│   │       └── mnist_rnn.py
│   ├── K_Nearest_Neighbours
│   │   ├── k_nearest_neighbours.py
│   │   └── knn_sklearn.py
│   ├── Linear_Regression
│   │   ├── linear_regression_sklearn.pickle
│   │   └── linear_regression_sklearn.py
│   ├── README.md
│   ├── Support_Vector_Machine
│   │   └── svm_sklearn.py
│   └── datasets
│       ├── breast-cancer-wisconsin.data.txt
│       ├── mnist
│       │   ├── t10k-images-idx3-ubyte.gz
│       │   ├── t10k-labels-idx1-ubyte.gz
│       │   ├── train-images-idx3-ubyte.gz
│       │   └── train-labels-idx1-ubyte.gz
│       └── titanic.xls
├── Kaggle-Competitions
│   ├── Dog-Breed-Identification
│   │   └── Dog-Breed-Identification.ipynb
│   └── Dogs-vs-Cats-Redux-Kernels
│       └── Dogs-vs-cats-kaggle.ipynb
├── LICENSE
└── README.md
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | datasets/mnist/
3 |
4 | **/.ipynb_checkpoints/**
5 | Kaggle-Competitions/Dogs-vs-Cats-Redux-Kernels/\.ipynb_checkpoints/
6 |
7 | \.DS_Store
8 |
--------------------------------------------------------------------------------
/Algorithms/Clustering/K_Means-K_Medians/generic_kmeans_medians.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from collections import defaultdict
3 | from copy import deepcopy
4 |
5 |
6 | class RepresentativeClustering:
7 |     def __init__(self, random=True, max_itr=200, tolerance=0.001):
8 |         """
9 |         :param max_itr: Maximum number of iterations if convergence is not reached. default = 200
10 |         :param tolerance: Maximum percent change allowed between the updated and old centroids. default = 0.001%
11 |         :param random: Tells whether to initialize seeds randomly or manually. default True
12 |         """
13 |         self.max_itr = max_itr
14 |         self.tolerance = tolerance
15 |         self.random = random
16 |
17 |     def clustering(self, dataset, k, type):
18 |         """
19 |         Creates k clusters in a dataset
20 |         :param dataset: list of data points
21 |         :param k: number of clusters
22 |         :param type: type of clustering algorithm to be applied (1: "k-means", 2: "k-medians")
23 |         :return: set of k clusters found in the dataset
24 |         """
25 |
26 |         centroids = self.initialize_cluster_representatives(k, dataset)
27 |         clusters = None
28 |
29 |         for i in range(self.max_itr):
30 |             clusters = defaultdict(list)
31 |             for data_point in dataset:
32 |                 # calculate distances of data point from all centroids
33 |                 distances = self.distance_function(centroids, data_point, type)
34 |
35 |                 # assign data_point to the closest centroid
36 |                 closest_index = distances.index(min(distances))
37 |                 clusters[closest_index].append(data_point)
38 |
39 |             prev_centroids = deepcopy(centroids)
40 |             print(f"Iteration {i+1}: ")
41 |             print_instance(centroids, clusters)
42 |             # find the new centroids
43 |             self.update_centroids(centroids, clusters, type)
44 |
45 |             # check if convergence is reached
46 |             if self.converged(prev_centroids, centroids):
47 |                 break
48 |
49 |         return centroids, clusters
50 |
51 |     def distance_function(self, centroids, data_point, type):
52 |         """
53 |         Apply distance function over data point and centroids
54 |         :param centroids: set of centroids
55 |         :param data_point: one data point from the dataset
56 |         :param type: algorithm applied (k-means or k-medians)
57 |         :return: returns the distance array of data-point from respective centroids
58 |         """
59 |         if type == 1:
60 |             return self.euclidean_distance(data_point, centroids)
61 |         if type == 2:
62 |             return self.manhattan_distance(data_point, centroids)
63 |
64 |     @staticmethod
65 |     def update_centroids(centroids, clusters, type):
66 |         """
67 |         Updates the centroids based on the clusters formed
68 |         :param centroids: set of centroids
69 |         :param clusters: set of clusters belonging to each centroid
70 |         :param type: algorithm applied (k-means or k-medians)
71 |         """
72 |         for c_indx in clusters:
73 |             if type == 1:
74 |                 centroids[c_indx] = np.average(clusters[c_indx], axis=0)
75 |             if type == 2:
76 |                 centroids[c_indx] = np.median(clusters[c_indx], axis=0)
77 |
78 |     def converged(self, prev_centroids, centroids):
79 |         """
80 |         Check whether the % change between the previous and new centroids is within the tolerance
81 |         :param prev_centroids: centroids from the previous iteration
82 |         :param centroids: updated centroids
83 |         :return: True if every centroid changed by less than the tolerance, else False
84 |         """
85 |         converge = True
86 |         for c in centroids:
87 |             new_centroid = centroids[c]
88 |             old_centroid = prev_centroids[c]
89 |             if np.sum(np.abs((new_centroid - old_centroid) / old_centroid)) * 100 > self.tolerance:
90 |                 converge = False
91 |                 break
92 |         return converge
93 |
94 |     @staticmethod
95 |     def euclidean_distance(data_point, centroids):
96 |         return [np.linalg.norm(data_point - centroids[c]) for c in centroids]
97 |
98 |     @staticmethod
99 |     def manhattan_distance(data_point, centroids):
100 |         return [np.sum(np.abs(data_point - centroids[c]), axis=0) for c in centroids]
101 |
102 |     def initialize_cluster_representatives(self, k, dataset):
103 |         """
104 |         Gets the initial cluster representatives as input
105 |         :param k: Number of cluster representatives
106 |         :param dataset: list of data points
107 |         :return: k initial cluster representatives
108 |         """
109 |         centroids = {}
110 |         if not self.random:
111 |             print(f"Enter {k} cluster representatives [Separated by newline, with "
112 |                   f"{len(dataset[0])} dimensions (, separated)]: ")
113 |             for i in range(k):
114 |                 centroids[i] = np.array(list(map(int, input().strip().split(","))))
115 |         else:
116 |             # Randomly initialized centroids from data points
117 |             for i, data_index in enumerate(np.random.randint(0, len(dataset), k)):
118 |                 centroids[i] = deepcopy(dataset[data_index])
119 |
120 |             # Randomly initialized centroids from data space
121 |             """
122 |             for j in range(k):
123 |                 temp = []
124 |                 for i in range(len(dataset[0])):
125 |                     temp.append(np.random.randint(np.min(dataset[:, i]), np.max(dataset[:, i]+1)))
126 |                 centroids[j] = np.array(temp)
127 |             """
128 |
129 |         print("Initial Centroids: ")
130 |         for j in centroids:
131 |             print(f'Centroid {j+1}: {centroids[j]}')
132 |         print()
133 |
134 |         return centroids
135 |
136 |
137 | def print_instance(centroids, clusters):
138 |     print()
139 |     for c in clusters:
140 |         print(f'Cluster {c+1}: ')
141 |         print('\tCentroid:', centroids[c])
142 |         print('\tCluster:', *clusters[c])
143 |     print("--------------------------------------")
144 |
145 |
146 | file = input("Enter input file name: ")
147 | dataset = []
148 |
149 | try:
150 |     with open(file) as f:
151 |         for line in f:
152 |             row = list(map(float, line.strip().split(",")))
153 |             dataset.append(row)
154 | except FileNotFoundError:
155 |     print("Incorrect file name")
156 |     exit(1)
157 |
158 | dataset = np.array(dataset)
159 |
160 | # Create a RepresentativeClustering object
161 | rc = RepresentativeClustering()
162 |
163 | k = int(input("Enter number of clusters to be created: "))
164 |
165 | choice = int(input("Select the clustering algorithm you want to use:\n 1. K-Means 2. K-Medians\n"))
166 | if 1 <= choice <= 2:
167 |     centroids, clusters = rc.clustering(dataset, k, choice)
168 |
169 |     print("Final Clusters found:")
170 |     print_instance(centroids, clusters)
171 |
172 | else:
173 |     print("Invalid choice")
174 |
--------------------------------------------------------------------------------
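Note: the module above runs as an interactive script (it prompts for a data file, k, and the algorithm), but the class itself can also be driven programmatically. A minimal sketch of such use, assuming the interactive driver at the bottom of the module is removed or guarded behind if __name__ == '__main__'; the data values are made up for illustration:

import numpy as np

data = np.array([[1.0, 2.0], [1.5, 1.8], [8.0, 8.0], [9.0, 11.0]])
rc = RepresentativeClustering(random=True, max_itr=100, tolerance=0.001)
# type=1 -> k-means (Euclidean distance), type=2 -> k-medians (Manhattan distance)
centroids, clusters = rc.clustering(data, k=2, type=1)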
/Algorithms/Clustering/K_Means-K_Medians/k_means.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 | import pandas as pd
4 | from matplotlib import style
5 | from collections import defaultdict
6 | from sklearn import preprocessing
7 |
8 |
9 | class K_Means:
10 |     def __init__(self, k=2, tol=0.001, max_iter=300):
11 |         self.k = k
12 |         self.tol = tol
13 |         self.max_iter = max_iter
14 |
15 |     def fit(self, data):
16 |         self.centroids = {}
17 |
18 |         # Take first k points in data as centroids
19 |         # can take random points as well
20 |         for i in range(self.k):
21 |             self.centroids[i] = data[i]
22 |
23 |         for i in range(self.max_iter):
24 |             self.classifications = defaultdict(list)
25 |
26 |             # Find which centroid each feature is closest to and append it to that centroid's classification
27 |             for features in data:
28 |                 distances = [np.linalg.norm(features - self.centroids[c]) for c in self.centroids]
29 |                 classification = distances.index(min(distances))
30 |                 self.classifications[classification].append(features)
31 |
32 |             prev_centroids = dict(self.centroids)
33 |
34 |             # Calculate the average centroid point for each classification
35 |             # by averaging the features in that classification
36 |             for classification in self.classifications:
37 |                 self.centroids[classification] = np.average(self.classifications[classification], axis=0)
38 |
39 |             # If the desired tolerance is achieved, i.e. the centroids are not changing values
40 |             # more than tolerance %, then simply break the loop
41 |             optimized = True
42 |             for c in self.centroids:
43 |                 original_centroid = prev_centroids[c]
44 |                 new_centroid = self.centroids[c]
45 |                 if np.sum(np.abs((new_centroid - original_centroid) / original_centroid)) * 100.0 > self.tol:  # absolute change, so positive and negative shifts cannot cancel out
46 |                     optimized = False
47 |             if optimized:
48 |                 break
49 |
50 |     # Used to predict which classification a new data point or feature lies in
51 |     def predict(self, data):
52 |         distances = [np.linalg.norm(data - self.centroids[c]) for c in self.centroids]
53 |         classification = distances.index(min(distances))
54 |
55 |         return classification
56 |
57 |
58 | df = pd.read_excel('../../datasets/titanic.xls')
59 |
60 | df.drop(['body', 'name'], 1, inplace=True)
61 | df.fillna(0, inplace=True)
62 |
63 |
64 | def convert_non_numeric_data(df):
65 |     columns = df.columns.values
66 |     for col in columns:
67 |         text_digits = {}
68 |
69 |         def convert_to_int(val):
70 |             return text_digits[val]
71 |
72 |         if df[col].dtype != np.int64 and df[col].dtype != np.float64:  # only convert non-numeric columns ('or' here would remap numeric ones too)
73 |             col_contents = df[col].values.tolist()
74 |             unique_elements = set(col_contents)
75 |
76 |             x = 0
77 |             for unique in unique_elements:
78 |                 if unique not in text_digits:
79 |                     text_digits[unique] = x
80 |                     x += 1
81 |             df[col] = list(map(convert_to_int, df[col]))
82 |
83 |     return df
84 |
85 | df = convert_non_numeric_data(df)
86 |
87 | X = np.array(df.drop(['survived'], 1)).astype(float)
88 | X = preprocessing.scale(X)
89 | y = np.array(df['survived'])
90 |
91 | clf = K_Means()
92 | clf.fit(X)
93 |
94 | correct = 0
95 | for i in range(len(X)):
96 |     feature = np.array(X[i].astype(float))
97 |     prediction = clf.predict(feature)
98 |     if prediction == y[i]:
99 |         correct += 1
100 |
101 | print(correct/len(X))  # cluster ids are arbitrary, so this may print 1 - (true accuracy) on some runs
102 |
103 | '''
104 | style.use('ggplot')
105 |
106 | X = np.array([[1, 2],
107 |               [1.5, 1.8],
108 |               [5, 8],
109 |               [8, 8],
110 |               [1, 0.6],
111 |               [9, 11]])
112 |
113 | colors = ['r', 'g', 'b']*10
114 |
115 | clf = K_Means()
116 | clf.fit(X)
117 |
118 | for c in clf.centroids:
119 |     plt.scatter(clf.centroids[c][0], clf.centroids[c][1], marker='o', color='k', s=150, linewidths=5)
120 |
121 | for classification in clf.classifications:
122 |     color = colors[classification]
123 |     for features in clf.classifications[classification]:
124 |         plt.scatter(features[0], features[1], marker='x', s=150, color=color, linewidths=5)
125 |
126 | unknowns = np.array([[1, 3],
127 |                      [3, 5],
128 |                      [3, 7],
129 |                      [-3, -1],
130 |                      [0, 0],
131 |                      [8, 9]])
132 |
133 | for u in unknowns:
134 |     classification = clf.predict(u)
135 |     plt.scatter(u[0], u[1], marker='*', color=colors[classification], s=150, linewidths=5)
136 | plt.show()
137 | '''
--------------------------------------------------------------------------------
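Note: convert_non_numeric_data builds an explicit value-to-integer mapping per column; pandas ships the same idea as pd.factorize. A shorter equivalent sketch (an alternative, not the repository's code):

import numpy as np
import pandas as pd

def convert_non_numeric_data(df):
    for col in df.columns:
        if df[col].dtype != np.int64 and df[col].dtype != np.float64:
            df[col], _ = pd.factorize(df[col])  # each unique value -> an integer code
    return df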
/Algorithms/Clustering/K_Means-K_Medians/k_means_sklearn.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | from matplotlib import style
3 | import numpy as np
4 | from sklearn.cluster import KMeans
5 | from sklearn import preprocessing
6 | import pandas as pd
7 |
8 | '''
9 | Titanic Dataset column details:-
10 |
11 | Pclass - Passenger Class (1 = 1st class; 2 = 2nd class; 3 = 3rd class)
12 | survival - Survived (0 = No; 1 = Yes)
13 | name - Name of the passenger
14 | sex - Gender of the passenger
15 | sibsp - Number of siblings/spouses on board
16 | parch - Number of parents/children on board
17 | ticket - Ticket number
18 | fare - Passenger fare (British pounds)
19 | cabin - Cabin
20 | embarked - Port of embarkation (C = Cherbourg; Q = Queenstown; S = Southampton)
21 | boat - Lifeboat number
22 | body - Body Identification Number
23 | home.dest - Home or Destination
24 | '''
25 |
26 | df = pd.read_excel('../../datasets/titanic.xls')
27 |
28 | df.drop(['body', 'name'], 1, inplace=True)
29 | df.fillna(0, inplace=True)
30 |
31 |
32 | def convert_non_numeric_data(df):
33 |     columns = df.columns.values
34 |     for col in columns:
35 |         text_digits = {}
36 |
37 |         def convert_to_int(val):
38 |             return text_digits[val]
39 |
40 |         if df[col].dtype != np.int64 and df[col].dtype != np.float64:  # only convert non-numeric columns
41 |             col_contents = df[col].values.tolist()
42 |             unique_elements = set(col_contents)
43 |
44 |             x = 0
45 |             for unique in unique_elements:
46 |                 if unique not in text_digits:
47 |                     text_digits[unique] = x
48 |                     x += 1
49 |             df[col] = list(map(convert_to_int, df[col]))
50 |
51 |     return df
52 |
53 | df = convert_non_numeric_data(df)
54 |
55 | X = np.array(df.drop(['survived'], 1)).astype(float)
56 | X = preprocessing.scale(X)
57 | y = np.array(df['survived'])
58 |
59 | clf = KMeans(n_clusters=2)
60 | clf.fit(X)
61 | labels = clf.labels_
62 |
63 | correct = 0
64 | for i in range(len(y)):
65 |     if y[i] == labels[i]:
66 |         correct += 1
67 |
68 | print('Accuracy of classifying dead or alive:', correct/len(y))  # KMeans labels are arbitrary (0/1 may be swapped), so this can come out as 1 - (true accuracy)
69 |
70 | '''
71 | style.use('ggplot')
72 |
73 | X = np.array([[1, 2],
74 |               [1.5, 1.8],
75 |               [5, 8],
76 |               [8, 8],
77 |               [1, 0.6],
78 |               [9, 11]])
79 |
80 | # plt.scatter(X[:, 0], X[:, 1], s=150)
81 | # plt.show()
82 |
83 | clf = KMeans(n_clusters=2)
84 | clf.fit(X)
85 | centroids = clf.cluster_centers_
86 | labels = clf.labels_
87 |
88 | colors = ['r.', 'g.', 'b.', 'y.']*10
89 |
90 | for i in range(len(X)):
91 |     plt.plot(X[i][0], X[i][1], colors[labels[i]], markersize=25)
92 | plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', s=150, linewidths=100)
93 | plt.show()
94 | '''
--------------------------------------------------------------------------------
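Note: because KMeans numbers its clusters arbitrarily, cluster 0 does not necessarily mean "did not survive"; with two clusters the usual remedy is to score both labelings and keep the better one. A small illustrative sketch (not in the repository):

accuracy = correct / len(y)
# if the cluster ids happen to be flipped, 1 - accuracy is the real score
print('Accuracy of classifying dead or alive:', max(accuracy, 1 - accuracy))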
/Algorithms/Clustering/Mean_Shift/mean_shift.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 | from matplotlib import style
4 | from sklearn.datasets.samples_generator import make_blobs
5 | from collections import defaultdict
6 | style.use('ggplot')
7 |
8 | # n_samples = Number of data points
9 | # centers = Number of groups or classifications
10 | # n_features = Number of features in each sample
11 | X, y = make_blobs(n_samples=30, centers=4, n_features=2)
12 |
13 | colors = ['r', 'g', 'b', 'y', 'c']*10
14 |
15 |
16 | class Mean_Shift:
17 |     # TODO: Bandwidth should be calculated dynamically rather than hardcoded
18 |     def __init__(self, bandwidth=4):
19 |         self.bandwidth = bandwidth
20 |
21 |     def fit(self, data):
22 |         centroids = {}
23 |
24 |         for i in range(len(data)):
25 |             centroids[i] = data[i]
26 |
27 |         while True:
28 |             new_centroids = []
29 |             for i in centroids:
30 |                 in_bandwidth = []
31 |                 centroid = centroids[i]
32 |
33 |                 for feature in data:
34 |                     if np.linalg.norm(feature - centroid) < self.bandwidth:
35 |                         in_bandwidth.append(feature)
36 |
37 |                 new_centroid = np.average(in_bandwidth, axis=0)
38 |                 new_centroids.append(tuple(new_centroid))
39 |
40 |             uniques = sorted(list(set(new_centroids)))
41 |
42 |             prev_centroids = dict(centroids)
43 |
44 |             centroids = {}
45 |             for i in range(len(uniques)):
46 |                 centroids[i] = np.array(uniques[i])
47 |
48 |             optimized = True
49 |
50 |             for i in centroids:
51 |                 if not np.array_equal(centroids[i], prev_centroids[i]):
52 |                     optimized = False
53 |                     break
54 |
55 |             if optimized:
56 |                 break
57 |
58 |         self.centroids = centroids
59 |
60 |         # Find which centroid each feature is closest to and append it to that centroid's classification
61 |         self.classifications = defaultdict(list)
62 |         for feature in data:
63 |             distances = [np.linalg.norm(feature - self.centroids[c]) for c in self.centroids]
64 |             classification = distances.index(min(distances))
65 |             self.classifications[classification].append(feature)
66 |
67 |     def predict(self, data):
68 |         distances = [np.linalg.norm(data - self.centroids[c]) for c in self.centroids]
69 |         classification = distances.index(min(distances))
70 |
71 |         return classification
72 |
73 |
74 | clf = Mean_Shift()
75 | clf.fit(X)
76 |
77 | centroids = clf.centroids
78 |
79 | for classification in clf.classifications:
80 |     color = colors[classification]
81 |     for features in clf.classifications[classification]:
82 |         plt.scatter(features[0], features[1], marker='o', color=color, linewidths=5)
83 |
84 | for i in centroids:
85 |     plt.scatter(centroids[i][0], centroids[i][1], marker='x', color='k', s=150)
86 |
87 | plt.show()
--------------------------------------------------------------------------------
/Algorithms/Clustering/Mean_Shift/mean_shift_sklearn.py:
--------------------------------------------------------------------------------
1 | from sklearn.cluster import MeanShift
2 | from sklearn.datasets.samples_generator import make_blobs
3 | import matplotlib.pyplot as plt
4 | from mpl_toolkits.mplot3d import Axes3D
5 | from matplotlib import style
6 |
7 | style.use('ggplot')
8 |
9 |
10 | # Create random data points whose centers are the following
11 | centers = [[20, 0, 0], [0, 20, 0], [0, 0, 20], [0, 0, 0]]
12 | X, _ = make_blobs(n_samples=200, centers=centers, cluster_std=2)
13 |
14 | # Fit the data with the MeanShift clusterer, search bandwidth = 10
15 | clf = MeanShift(bandwidth=10)
16 | clf.fit(X)
17 |
18 | # Get the label of each data point
19 | # and the cluster centers of the clusters that were formed
20 | labels = clf.labels_
21 | cluster_centers = clf.cluster_centers_
22 | print(cluster_centers)
23 | n_clusters = len(cluster_centers)
24 | print('Number of clusters found:', n_clusters)
25 |
26 | # Plot the data points with their clusters and centers on a 3d graph
27 | colors = 10*['r', 'g', 'b', 'y', 'c']
28 | fig = plt.figure()
29 | ax = fig.add_subplot(111, projection='3d')
30 |
31 | for i in range(len(X)):
32 |     ax.scatter(X[i][0], X[i][1], X[i][2], c=colors[labels[i]], marker='o')
33 |
34 | ax.scatter(cluster_centers[:, 0], cluster_centers[:, 1], cluster_centers[:, 2],
35 |            marker='x', s=150, linewidth=5, zorder=10, color='k')
36 |
37 | plt.show()
38 |
--------------------------------------------------------------------------------
/Algorithms/Deep_Learning/Deep_Neural_Networks/checkpoint:
--------------------------------------------------------------------------------
1 | model_checkpoint_path: "mnist_nn.ckpt"
2 | all_model_checkpoint_paths: "mnist_nn.ckpt"
3 |
--------------------------------------------------------------------------------
/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.ckpt.data-00000-of-00001:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.ckpt.data-00000-of-00001
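Note: the mnist_nn.ckpt.* artifacts in this directory form a standard TensorFlow 1.x Saver checkpoint: the .data shard stores the variable values, .index maps each variable to its place in that shard, and .meta holds the serialized graph. A hedged sketch of peeking inside such a checkpoint (assumes a TF 1.x installation):

import tensorflow as tf

# Illustrative only: list the variable names and shapes stored in the checkpoint.
for name, shape in tf.train.list_variables('mnist_nn.ckpt'):
    print(name, shape)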
--------------------------------------------------------------------------------
/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.ckpt.index:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.ckpt.index
--------------------------------------------------------------------------------
/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.ckpt.meta:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.ckpt.meta
--------------------------------------------------------------------------------
/Algorithms/Deep_Learning/Deep_Neural_Networks/mnist_nn.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from tensorflow.examples.tutorials.mnist import input_data
3 | import os
4 |
5 | # one_hot gives labels as:
6 | # 0 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0]
7 | # 4 = [0, 0, 0, 0, 1, 0, 0, 0, 0, 0] and so on
8 |
9 | # 28x28 = 784 pixel images of numbers
10 | mnist = input_data.read_data_sets('../../datasets/mnist/', one_hot=True)
11 |
12 | # Number of nodes in each hidden layer
13 | nodes_hl1 = 500
14 | nodes_hl2 = 500
15 | nodes_hl3 = 500
16 | # Number of output nodes
17 | n_classes = 10
18 |
19 | # Number of images fed into the Neural Network at a time
20 | batch_size = 100
21 |
22 | # shape of the input data and output labels
23 | # shape parameter is optional
24 | x = tf.placeholder('float', [None, 784])
25 | y = tf.placeholder('float')
26 |
27 |
28 | def nn_model(data):
29 |     hidden_layer_1 = {
30 |         'weights': tf.Variable(tf.truncated_normal([784, nodes_hl1], stddev=0.1)),
31 |         'biases': tf.Variable(tf.constant(0.1, shape=[nodes_hl1]))
32 |     }
33 |
34 |     hidden_layer_2 = {
35 |         'weights': tf.Variable(tf.truncated_normal([nodes_hl1, nodes_hl2], stddev=0.1)),
36 |         'biases': tf.Variable(tf.constant(0.1, shape=[nodes_hl2]))
37 |     }
38 |
39 |     hidden_layer_3 = {
40 |         'weights': tf.Variable(tf.truncated_normal([nodes_hl2, nodes_hl3], stddev=0.1)),
41 |         'biases': tf.Variable(tf.constant(0.1, shape=[nodes_hl3]))
42 |     }
43 |
44 |     output_layer = {
45 |         'weights': tf.Variable(tf.truncated_normal([nodes_hl3, n_classes], stddev=0.1)),
46 |         'biases': tf.Variable(tf.constant(0.1, shape=[n_classes]))
47 |     }
48 |
49 |     # output_for_layer = (input * weights) + biases
50 |     # pass the layer_outputs to the activation function; we used relu in this case
51 |     l1 = tf.add(tf.matmul(data, hidden_layer_1['weights']), hidden_layer_1['biases'])
52 |     l1 = tf.nn.relu(l1)
53 |
54 |     l2 = tf.add(tf.matmul(l1, hidden_layer_2['weights']), hidden_layer_2['biases'])
55 |     l2 = tf.nn.relu(l2)
56 |
57 |     l3 = tf.add(tf.matmul(l2, hidden_layer_3['weights']), hidden_layer_3['biases'])
58 |     l3 = tf.nn.relu(l3)
59 |
60 |     output = tf.add(tf.matmul(l3, output_layer['weights']), output_layer['biases'])
61 |
62 |     return output
63 |
64 |
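Note: every layer in nn_model computes output = relu(input * weights + biases); tracing the tensor shapes makes the wiring concrete. A plain NumPy sketch of the first hidden layer and the output layer (the second and third hidden layers repeat the same pattern; illustrative only, random values stand in for learned weights):

import numpy as np

batch = 100
a0 = np.random.rand(batch, 784)                  # a batch of flattened 28x28 images
W1, b1 = np.random.rand(784, 500), np.zeros(500)
a1 = np.maximum(a0 @ W1 + b1, 0)                 # relu: (100, 784) @ (784, 500) -> (100, 500)
W_out, b_out = np.random.rand(500, 10), np.zeros(10)
logits = a1 @ W_out + b_out                      # (100, 500) @ (500, 10) -> (100, 10)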
65 | def train_nn(x, y):
66 |     prediction = nn_model(x)
67 |
68 |     # Find the cost or error between the prediction made and the labels
69 |     cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
70 |
71 |     # Reduce the cost using an optimizer
72 |     # default learning_rate = 0.001
73 |     optimizer = tf.train.AdamOptimizer().minimize(cost)
74 |
75 |     # one epoch = one full cycle of forward propagation + back propagation over the training data
76 |     epochs = 10
77 |     saver = tf.train.Saver()
78 |
79 |     with tf.Session() as sess:
80 |         sess.run(tf.global_variables_initializer())
81 |         if os.path.isfile('./checkpoint'):
82 |             print('checkpoint restored')
83 |             saver.restore(sess, "mnist_nn.ckpt")
84 |
85 |         # Train the model
86 |         for epoch in range(epochs):
87 |             epoch_loss = 0
88 |             for _ in range(int(mnist.train.num_examples/batch_size)):
89 |                 # Get data and labels in batch_size
90 |                 x_, y_ = mnist.train.next_batch(batch_size)
91 |                 # Run the optimizer to minimize the cost = c
92 |                 _, c = sess.run([optimizer, cost], feed_dict={x: x_, y: y_})
93 |                 epoch_loss += c
94 |             print('Epoch', (epoch + 1), 'completed out of:', epochs, 'loss:', epoch_loss)
95 |
96 |             # Save the model in checkpoint after each epoch
97 |             save_path = saver.save(sess, "mnist_nn.ckpt")
98 |             print("Model saved in file: %s" % save_path)
99 |
100 |         # Test the model accuracy
101 |         correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
102 |         accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
103 |         print('Accuracy:', accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))
104 |
105 | train_nn(x, y)
106 |
--------------------------------------------------------------------------------
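Note: the recurrent network in the next file treats each 28x28 MNIST image as a sequence of 28 rows (n_chunks) of 28 pixels (chunk_size). The transpose/reshape/split steps inside rnn_model turn a [batch, 28, 28] tensor into the list of 28 [batch, 28] tensors that the TF 1.x static_rnn API expects; a NumPy sketch of the same shape bookkeeping (illustrative only):

import numpy as np

batch = 128
x = np.random.rand(batch, 28, 28)     # [batch, n_chunks, chunk_size]
x = x.transpose(1, 0, 2)              # [n_chunks, batch, chunk_size]
chunks = [x[i] for i in range(28)]    # list of 28 arrays, each [batch, chunk_size]
print(len(chunks), chunks[0].shape)   # 28 (128, 28)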
/Algorithms/Deep_Learning/Recurrent_Neural_Networks/mnist_rnn.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from tensorflow.examples.tutorials.mnist import input_data
3 | from tensorflow.contrib import rnn
4 | # one_hot gives labels as:
5 | # 0 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0]
6 | # 4 = [0, 0, 0, 0, 1, 0, 0, 0, 0, 0] and so on
7 |
8 | # 28x28 = 784 pixel images of numbers
9 | mnist = input_data.read_data_sets('../../datasets/mnist/', one_hot=True)
10 |
11 | # one epoch = one full cycle of forward propagation + back propagation over the training data
12 | epochs = 3
13 | # Number of output nodes
14 | n_classes = 10
15 | # Number of images fed into the Neural Network at a time
16 | batch_size = 128
17 |
18 | # Set size of chunks and number of chunks to pass in the RNN
19 | chunk_size = 28
20 | n_chunks = 28
21 |
22 | rnn_size = 128
23 |
24 | # shape of the input data and output labels
25 | # shape parameter is optional
26 | x = tf.placeholder('float', [None, n_chunks, chunk_size])
27 | y = tf.placeholder('float')
28 |
29 |
30 | def rnn_model(x):
31 |     layer = {
32 |         'weights': tf.Variable(tf.random_normal([rnn_size, n_classes])),
33 |         'biases': tf.Variable(tf.random_normal([n_classes]))
34 |     }
35 |
36 |     x = tf.transpose(x, [1, 0, 2])
37 |     x = tf.reshape(x, [-1, chunk_size])
38 |     x = tf.split(x, n_chunks, 0)
39 |
40 |     lstm_cell = rnn.BasicLSTMCell(rnn_size)
41 |     outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32)
42 |
43 |     output = tf.add(tf.matmul(outputs[-1], layer['weights']), layer['biases'])
44 |
45 |     return output
46 |
47 |
48 | def train_nn(x, y):
49 |     prediction = rnn_model(x)
50 |
51 |     # Find the cost or error between the prediction made and the labels
52 |     cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
53 |
54 |     # Reduce the cost using an optimizer
55 |     # default learning_rate = 0.001
56 |     optimizer = tf.train.AdamOptimizer().minimize(cost)
57 |
58 |     with tf.Session() as sess:
59 |         sess.run(tf.global_variables_initializer())
60 |
61 |         # Train the model
62 |         for epoch in range(epochs):
63 |             epoch_loss = 0
64 |             for _ in range(int(mnist.train.num_examples/batch_size)):
65 |                 # Get data and labels in batch_size
66 |                 x_, y_ = mnist.train.next_batch(batch_size)
67 |
68 |                 x_ = x_.reshape((batch_size, n_chunks, chunk_size))
69 |
70 |                 # Run the optimizer to minimize the cost = c
71 |                 _, c = sess.run([optimizer, cost], feed_dict={x: x_, y: y_})
72 |                 epoch_loss += c
73 |             print('Epoch', (epoch + 1), 'completed out of:', epochs, 'loss:', epoch_loss)
74 |
75 |         # Test the model accuracy
76 |         correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
77 |         accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
78 |         print('Accuracy:', accuracy.eval({x: mnist.test.images.reshape((-1, n_chunks, chunk_size)),
79 |                                           y: mnist.test.labels}))
80 |
81 | train_nn(x, y)
82 |
--------------------------------------------------------------------------------
/Algorithms/K_Nearest_Neighbours/k_nearest_neighbours.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | # import matplotlib.pyplot as plt
3 | # from matplotlib import style
4 | from collections import Counter
5 | import warnings
6 | import random
7 | import pandas as pd
8 |
9 |
10 | def k_nearest_neighbors(data, predict, k=5):
11 |     # If k is greater than total number of points in the dataset
12 |     if k > sum(len(v) for v in data.values()):
13 |         warnings.warn('K is set to a value more than total data points!')
14 |     distances = []
15 |
16 |     for label in data:
17 |         for features in data[label]:
18 |             # Calculate the euclidean distance between all the features and prediction points
19 |             # euclidean_distance = np.sqrt(np.sum((np.array(features) - np.array(predict))**2))
20 |             euclidean_distance = np.linalg.norm(np.array(features) - np.array(predict))
21 |
22 |             # Append the distance with the label of the features
23 |             distances.append([euclidean_distance, label])
24 |
25 |     votes = [i[1] for i in sorted(distances)[:k]]
26 |     # Find the most common label from the k closest points
27 |     vote_results = Counter(votes).most_common(1)[0][0]
28 |
29 |     # Find the confidence of prediction
30 |     confidence = Counter(votes).most_common(1)[0][1] / k
31 |     return vote_results, confidence
32 |
33 |
34 | # Load the data set
35 | df = pd.read_csv('../datasets/breast-cancer-wisconsin.data.txt')  # path relative to this directory, matching knn_sklearn.py
36 |
37 | # Replace the unassigned values with -99999 and drop the id column
38 | df.replace('?', -99999, inplace=True)
39 | df.drop(['id'], 1, inplace=True)
40 |
41 | # Extract the data without the table headers in float format
42 | full_data = df.astype(float).values.tolist()
43 | random.shuffle(full_data)
44 |
45 | train_set = {2: [], 4: []}
46 | test_set = {2: [], 4: []}
47 |
48 | # Divide the data set into 80% training and 20% testing data
49 | test_size = 0.2
50 | train_data = full_data[:-int(test_size * len(full_data))]
51 | test_data = full_data[-int(test_size * len(full_data)):]
52 |
53 | # Append the training data into the dict train_set without the last column of labels
54 | for i in train_data:
55 |     train_set[i[-1]].append(i[:-1])
56 |
57 | # Append the testing data into the dict test_set without the last column of labels
58 | for i in test_data:
59 |     test_set[i[-1]].append(i[:-1])
60 |
61 | correct = 0
62 | total = 0
63 |
64 | for label in test_set:
65 |     for predict in test_set[label]:
66 |         vote, confidence = k_nearest_neighbors(train_set, predict, k=5)
67 |
68 |         # If prediction is correct
69 |         if vote == label:
70 |             correct += 1
71 |         # else print the confidence of the wrong prediction
72 |         else:
73 |             print(confidence)
74 |
75 |         total += 1
76 |
77 | print('Accuracy:', correct/total)
78 |
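Note: k is fixed at 5 here; a common refinement is to choose it by cross-validation. A sketch using scikit-learn's GridSearchCV on the same rows (illustrative, not part of the repository; it reuses full_data from the script above):

import numpy as np
from sklearn.model_selection import GridSearchCV
from sklearn.neighbors import KNeighborsClassifier

X_all = np.array([row[:-1] for row in full_data])
y_all = np.array([row[-1] for row in full_data])
search = GridSearchCV(KNeighborsClassifier(), {'n_neighbors': list(range(1, 26, 2))}, cv=5)
search.fit(X_all, y_all)
print(search.best_params_, search.best_score_)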
79 | """
80 | # style.use('fivethirtyeight')
81 |
82 | # {Label: [[data-point], [data-point], ...]}
83 | dataset = {'k': [[1, 2], [2, 3], [3, 1]], 'r': [[6, 5], [7, 7], [8, 6]]}
84 | new_features = [5, 7]
85 |
86 | prediction, confidence = k_nearest_neighbors(dataset, new_features, k=3)
87 | print(prediction, confidence)
88 |
89 | # Visualising dataset and the feature which we want to predict
90 | [[plt.scatter(point[0], point[1], s=100, color=label) for point in dataset[label]] for label in dataset]
91 | plt.scatter(new_features[0], new_features[1], color=prediction)
92 | plt.show()
93 | """
--------------------------------------------------------------------------------
/Algorithms/K_Nearest_Neighbours/knn_sklearn.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from sklearn import neighbors
3 | from sklearn.model_selection import train_test_split
4 | import pandas as pd
5 |
6 | # Load the data set
7 | df = pd.read_csv('../datasets/breast-cancer-wisconsin.data.txt')
8 |
9 | # Replace the unassigned values with -99999 and drop the id column
10 | df.replace('?', -99999, inplace=True)
11 | df.drop(['id'], 1, inplace=True)
12 |
13 | # Get the features and labels
14 | X = np.array(df.drop(['class'], 1))
15 | y = np.array(df['class'])
16 |
17 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
18 |
19 | # Default k = 5
20 | clf = neighbors.KNeighborsClassifier()
21 | # train the model on training data
22 | clf.fit(X_train, y_train)
23 |
24 | # Get the accuracy on the testing data
25 | accuracy = clf.score(X_test, y_test)
26 | print(accuracy)
27 |
28 | # Make predictions on some random values
29 | example_dataset = np.array([[8,3,3,1,2,3,4,4,2], [3,3,2,3,1,3,5,4,1], [5,3,3,1,2,2,4,3,1]])
30 | # example_dataset = example_dataset.reshape(len(example_dataset), -1)
31 | prediction = clf.predict(example_dataset)
32 | print(prediction)
33 |
--------------------------------------------------------------------------------
/Algorithms/Linear_Regression/linear_regression_sklearn.pickle:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/Linear_Regression/linear_regression_sklearn.pickle
--------------------------------------------------------------------------------
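Note: the .pickle entry above is the serialized LinearRegression model that the script below trains and saves; reloading it for inference takes only a few lines. A sketch (assumes the pickle is read with the same scikit-learn version that wrote it):

import pickle

with open('linear_regression_sklearn.pickle', 'rb') as f:
    clf = pickle.load(f)
print(clf.coef_, clf.intercept_)  # learned weights of the saved model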
/Algorithms/Linear_Regression/linear_regression_sklearn.py:
--------------------------------------------------------------------------------
1 | import quandl
2 | import numpy as np
3 | from sklearn import preprocessing
4 | from sklearn.model_selection import train_test_split
5 | from sklearn.linear_model import LinearRegression
6 | import pickle
7 | import os
8 |
9 | """
10 | # Needed if more than 50 requests/day
11 | quandl.ApiConfig.api_key = "Quandl_API_KEY"
12 | """
13 | # Get the data set from quandl
14 | df = quandl.get('WIKI/GOOGL')
15 |
16 | df = df[['Adj. Open', 'Adj. High', 'Adj. Low', 'Adj. Close', 'Adj. Volume']]
17 |
18 | # Refine the data set to our needs
19 | df['HL_Percent'] = ((df['Adj. High'] - df['Adj. Close']) / df['Adj. Close']) * 100
20 | df['Percent_Change'] = ((df['Adj. Close'] - df['Adj. Open']) / df['Adj. Open']) * 100
21 |
22 | # Extract only the relevant features
23 | df = df[['Adj. Close', 'HL_Percent', 'Percent_Change', 'Adj. Volume']]
24 |
25 | # prediction column
26 | forecast_col = 'Adj. Close'
27 |
28 | # Replace null values in the dataset with -99999 so they are treated as outliers rather than dropped
29 | df.fillna(-99999, inplace=True)
30 |
31 | # Number of days in future that we want to predict the price for
32 | future_days = 10
33 |
34 | # define the label as Adj. Close future_days ahead in time
35 | # shift Adj. Close column future_days rows up i.e. future prediction
36 | df['label'] = df[forecast_col].shift(-future_days)
37 |
38 | # Get the features array in X
39 | X = np.array(df.drop(['label'], 1))
40 |
41 | # Standardize the data set across all the features for better training
42 | X = preprocessing.scale(X)
43 |
44 | # Extract the last future_days rows for prediction as they don't have labels due to the shift
45 | predict_X = X[-future_days:]
46 |
47 | # Get the data for training and testing
48 | X = X[:-future_days]
49 |
50 | # Drop the last future_days rows as there is no label for them because we shifted the column up
51 | df.dropna(inplace=True)
52 |
53 | # Get the labels in y
54 | y = np.array(df['label'])
55 |
56 | # Shuffle the data and get Training and Testing data
57 | # Testing data = 20% of total data
58 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
59 |
60 | # File name for the pickled linear regression model
61 | pickle_file_name = 'linear_regression_sklearn.pickle'
62 |
63 | # If a pre-trained pickle exists load it
64 | if os.path.isfile('./' + pickle_file_name):
65 |     # Load the classifier from the pre-trained pickle
66 |     pickle_file = open(pickle_file_name, 'rb')
67 |     clf = pickle.load(pickle_file)
68 | # Otherwise train the classifier and save it in a pickle
69 | else:
70 |     # n_jobs = -1 means training the model in parallel, with as many jobs as possible
71 |     clf = LinearRegression(n_jobs=-1)
72 |
73 |     # train the model on training data
74 |     clf.fit(X_train, y_train)
75 |
76 |     # save the pickle
77 |     with open(pickle_file_name, 'wb') as f:
78 |         pickle.dump(clf, f)
79 |
80 |
81 | # Test the accuracy on the testing data set:
82 | # how well the model predicts the future prices
83 | accuracy = clf.score(X_test, y_test)
84 | print('Accuracy on test data:', accuracy)
85 |
86 | predictions = clf.predict(predict_X)
87 | print('Predictions for next 10 days: ')
88 | print(predictions)
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
--------------------------------------------------------------------------------
/Algorithms/README.md:
--------------------------------------------------------------------------------
1 | * [Clustering Algorithms](Clustering/)
2 |     * [K Means / K Medians](Clustering/K_Means-K_Medians)
3 |         * [Implemented](Clustering/K_Means-K_Medians/k_means.py)
4 |         * [Scikit Learn Implementation](Clustering/K_Means-K_Medians/k_means_sklearn.py)
5 |
6 |     * [Mean Shift](Clustering/Mean_Shift)
7 |         * [Implemented](Clustering/Mean_Shift/mean_shift.py)
8 |         * [Scikit Learn Implementation](Clustering/Mean_Shift/mean_shift_sklearn.py)
9 |
10 | * [Deep Learning](Deep_Learning/)
11 |     * [Deep Neural Networks](Deep_Learning/Deep_Neural_Networks)
12 |     * [Recurrent Neural Networks](Deep_Learning/Recurrent_Neural_Networks)
13 |
14 | * [K Nearest Neighbours](K_Nearest_Neighbours/)
15 |     * [Implemented](K_Nearest_Neighbours/k_nearest_neighbours.py)
16 |     * [Scikit Learn Implementation](K_Nearest_Neighbours/knn_sklearn.py)
17 |
18 | * [Linear Regression](Linear_Regression/)
19 |     * [Scikit Learn Implementation](Linear_Regression/linear_regression_sklearn.py)
20 |
21 | * [Support Vector Machine](Support_Vector_Machine/)
22 |     * [Scikit Learn Implementation](Support_Vector_Machine/svm_sklearn.py)
-------------------------------------------------------------------------------- /Algorithms/Support_Vector_Machine/svm_sklearn.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from sklearn import svm 3 | from sklearn.model_selection import train_test_split 4 | import pandas as pd 5 | 6 | # Load the data set 7 | df = pd.read_csv('../datasets/breast-cancer-wisconsin.data.txt') 8 | 9 | # Replace the unassigned values with -99999 and drop the id column 10 | df.replace('?', -99999, inplace=True) 11 | df.drop(['id'], 1, inplace=True) 12 | 13 | # Get the features and labels 14 | X = np.array(df.drop(['class'], 1)) 15 | y = np.array(df['class']) 16 | 17 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2) 18 | 19 | clf = svm.SVC() 20 | # train the model on training data 21 | clf.fit(X_train, y_train) 22 | 23 | # Get the accuracy on the testing data 24 | accuracy = clf.score(X_test, y_test) 25 | print(accuracy) 26 | 27 | # Make predictions on some random values 28 | example_dataset = np.array([[8,3,3,1,2,3,4,4,2], [3,3,2,3,1,3,5,4,1], [5,3,3,1,2,2,4,3,1]]) 29 | # example_dataset = example_dataset.reshape(len(example_dataset), -1) 30 | prediction = clf.predict(example_dataset) 31 | print(prediction) 32 | -------------------------------------------------------------------------------- /Algorithms/datasets/breast-cancer-wisconsin.data.txt: -------------------------------------------------------------------------------- 1 | id,clump_thickness,unif_cell_size,unif_cell_shape,marg_adhesion,single_epith_cell_size,bare_nuclei,bland_chrom,norm_nucleoli,mitoses,class 2 | 1000025,5,1,1,1,2,1,3,1,1,2 3 | 1002945,5,4,4,5,7,10,3,2,1,2 4 | 1015425,3,1,1,1,2,2,3,1,1,2 5 | 1016277,6,8,8,1,3,4,3,7,1,2 6 | 1017023,4,1,1,3,2,1,3,1,1,2 7 | 1017122,8,10,10,8,7,10,9,7,1,4 8 | 1018099,1,1,1,1,2,10,3,1,1,2 9 | 1018561,2,1,2,1,2,1,3,1,1,2 10 | 1033078,2,1,1,1,2,1,1,1,5,2 11 | 1033078,4,2,1,1,2,1,2,1,1,2 12 | 1035283,1,1,1,1,1,1,3,1,1,2 13 | 1036172,2,1,1,1,2,1,2,1,1,2 14 | 1041801,5,3,3,3,2,3,4,4,1,4 15 | 1043999,1,1,1,1,2,3,3,1,1,2 16 | 1044572,8,7,5,10,7,9,5,5,4,4 17 | 1047630,7,4,6,4,6,1,4,3,1,4 18 | 1048672,4,1,1,1,2,1,2,1,1,2 19 | 1049815,4,1,1,1,2,1,3,1,1,2 20 | 1050670,10,7,7,6,4,10,4,1,2,4 21 | 1050718,6,1,1,1,2,1,3,1,1,2 22 | 1054590,7,3,2,10,5,10,5,4,4,4 23 | 1054593,10,5,5,3,6,7,7,10,1,4 24 | 1056784,3,1,1,1,2,1,2,1,1,2 25 | 1057013,8,4,5,1,2,?,7,3,1,4 26 | 1059552,1,1,1,1,2,1,3,1,1,2 27 | 1065726,5,2,3,4,2,7,3,6,1,4 28 | 1066373,3,2,1,1,1,1,2,1,1,2 29 | 1066979,5,1,1,1,2,1,2,1,1,2 30 | 1067444,2,1,1,1,2,1,2,1,1,2 31 | 1070935,1,1,3,1,2,1,1,1,1,2 32 | 1070935,3,1,1,1,1,1,2,1,1,2 33 | 1071760,2,1,1,1,2,1,3,1,1,2 34 | 1072179,10,7,7,3,8,5,7,4,3,4 35 | 1074610,2,1,1,2,2,1,3,1,1,2 36 | 1075123,3,1,2,1,2,1,2,1,1,2 37 | 1079304,2,1,1,1,2,1,2,1,1,2 38 | 1080185,10,10,10,8,6,1,8,9,1,4 39 | 1081791,6,2,1,1,1,1,7,1,1,2 40 | 1084584,5,4,4,9,2,10,5,6,1,4 41 | 1091262,2,5,3,3,6,7,7,5,1,4 42 | 1096800,6,6,6,9,6,?,7,8,1,2 43 | 1099510,10,4,3,1,3,3,6,5,2,4 44 | 1100524,6,10,10,2,8,10,7,3,3,4 45 | 1102573,5,6,5,6,10,1,3,1,1,4 46 | 1103608,10,10,10,4,8,1,8,10,1,4 47 | 1103722,1,1,1,1,2,1,2,1,2,2 48 | 1105257,3,7,7,4,4,9,4,8,1,4 49 | 1105524,1,1,1,1,2,1,2,1,1,2 50 | 1106095,4,1,1,3,2,1,3,1,1,2 51 | 1106829,7,8,7,2,4,8,3,8,2,4 52 | 1108370,9,5,8,1,2,3,2,1,5,4 53 | 1108449,5,3,3,4,2,4,3,4,1,4 54 | 1110102,10,3,6,2,3,5,4,10,2,4 55 | 1110503,5,5,5,8,10,8,7,3,7,4 56 | 1110524,10,5,5,6,8,8,7,1,1,4 57 | 1111249,10,6,6,3,4,5,3,6,1,4 58 | 
1112209,8,10,10,1,3,6,3,9,1,4 59 | 1113038,8,2,4,1,5,1,5,4,4,4 60 | 1113483,5,2,3,1,6,10,5,1,1,4 61 | 1113906,9,5,5,2,2,2,5,1,1,4 62 | 1115282,5,3,5,5,3,3,4,10,1,4 63 | 1115293,1,1,1,1,2,2,2,1,1,2 64 | 1116116,9,10,10,1,10,8,3,3,1,4 65 | 1116132,6,3,4,1,5,2,3,9,1,4 66 | 1116192,1,1,1,1,2,1,2,1,1,2 67 | 1116998,10,4,2,1,3,2,4,3,10,4 68 | 1117152,4,1,1,1,2,1,3,1,1,2 69 | 1118039,5,3,4,1,8,10,4,9,1,4 70 | 1120559,8,3,8,3,4,9,8,9,8,4 71 | 1121732,1,1,1,1,2,1,3,2,1,2 72 | 1121919,5,1,3,1,2,1,2,1,1,2 73 | 1123061,6,10,2,8,10,2,7,8,10,4 74 | 1124651,1,3,3,2,2,1,7,2,1,2 75 | 1125035,9,4,5,10,6,10,4,8,1,4 76 | 1126417,10,6,4,1,3,4,3,2,3,4 77 | 1131294,1,1,2,1,2,2,4,2,1,2 78 | 1132347,1,1,4,1,2,1,2,1,1,2 79 | 1133041,5,3,1,2,2,1,2,1,1,2 80 | 1133136,3,1,1,1,2,3,3,1,1,2 81 | 1136142,2,1,1,1,3,1,2,1,1,2 82 | 1137156,2,2,2,1,1,1,7,1,1,2 83 | 1143978,4,1,1,2,2,1,2,1,1,2 84 | 1143978,5,2,1,1,2,1,3,1,1,2 85 | 1147044,3,1,1,1,2,2,7,1,1,2 86 | 1147699,3,5,7,8,8,9,7,10,7,4 87 | 1147748,5,10,6,1,10,4,4,10,10,4 88 | 1148278,3,3,6,4,5,8,4,4,1,4 89 | 1148873,3,6,6,6,5,10,6,8,3,4 90 | 1152331,4,1,1,1,2,1,3,1,1,2 91 | 1155546,2,1,1,2,3,1,2,1,1,2 92 | 1156272,1,1,1,1,2,1,3,1,1,2 93 | 1156948,3,1,1,2,2,1,1,1,1,2 94 | 1157734,4,1,1,1,2,1,3,1,1,2 95 | 1158247,1,1,1,1,2,1,2,1,1,2 96 | 1160476,2,1,1,1,2,1,3,1,1,2 97 | 1164066,1,1,1,1,2,1,3,1,1,2 98 | 1165297,2,1,1,2,2,1,1,1,1,2 99 | 1165790,5,1,1,1,2,1,3,1,1,2 100 | 1165926,9,6,9,2,10,6,2,9,10,4 101 | 1166630,7,5,6,10,5,10,7,9,4,4 102 | 1166654,10,3,5,1,10,5,3,10,2,4 103 | 1167439,2,3,4,4,2,5,2,5,1,4 104 | 1167471,4,1,2,1,2,1,3,1,1,2 105 | 1168359,8,2,3,1,6,3,7,1,1,4 106 | 1168736,10,10,10,10,10,1,8,8,8,4 107 | 1169049,7,3,4,4,3,3,3,2,7,4 108 | 1170419,10,10,10,8,2,10,4,1,1,4 109 | 1170420,1,6,8,10,8,10,5,7,1,4 110 | 1171710,1,1,1,1,2,1,2,3,1,2 111 | 1171710,6,5,4,4,3,9,7,8,3,4 112 | 1171795,1,3,1,2,2,2,5,3,2,2 113 | 1171845,8,6,4,3,5,9,3,1,1,4 114 | 1172152,10,3,3,10,2,10,7,3,3,4 115 | 1173216,10,10,10,3,10,8,8,1,1,4 116 | 1173235,3,3,2,1,2,3,3,1,1,2 117 | 1173347,1,1,1,1,2,5,1,1,1,2 118 | 1173347,8,3,3,1,2,2,3,2,1,2 119 | 1173509,4,5,5,10,4,10,7,5,8,4 120 | 1173514,1,1,1,1,4,3,1,1,1,2 121 | 1173681,3,2,1,1,2,2,3,1,1,2 122 | 1174057,1,1,2,2,2,1,3,1,1,2 123 | 1174057,4,2,1,1,2,2,3,1,1,2 124 | 1174131,10,10,10,2,10,10,5,3,3,4 125 | 1174428,5,3,5,1,8,10,5,3,1,4 126 | 1175937,5,4,6,7,9,7,8,10,1,4 127 | 1176406,1,1,1,1,2,1,2,1,1,2 128 | 1176881,7,5,3,7,4,10,7,5,5,4 129 | 1177027,3,1,1,1,2,1,3,1,1,2 130 | 1177399,8,3,5,4,5,10,1,6,2,4 131 | 1177512,1,1,1,1,10,1,1,1,1,2 132 | 1178580,5,1,3,1,2,1,2,1,1,2 133 | 1179818,2,1,1,1,2,1,3,1,1,2 134 | 1180194,5,10,8,10,8,10,3,6,3,4 135 | 1180523,3,1,1,1,2,1,2,2,1,2 136 | 1180831,3,1,1,1,3,1,2,1,1,2 137 | 1181356,5,1,1,1,2,2,3,3,1,2 138 | 1182404,4,1,1,1,2,1,2,1,1,2 139 | 1182410,3,1,1,1,2,1,1,1,1,2 140 | 1183240,4,1,2,1,2,1,2,1,1,2 141 | 1183246,1,1,1,1,1,?,2,1,1,2 142 | 1183516,3,1,1,1,2,1,1,1,1,2 143 | 1183911,2,1,1,1,2,1,1,1,1,2 144 | 1183983,9,5,5,4,4,5,4,3,3,4 145 | 1184184,1,1,1,1,2,5,1,1,1,2 146 | 1184241,2,1,1,1,2,1,2,1,1,2 147 | 1184840,1,1,3,1,2,?,2,1,1,2 148 | 1185609,3,4,5,2,6,8,4,1,1,4 149 | 1185610,1,1,1,1,3,2,2,1,1,2 150 | 1187457,3,1,1,3,8,1,5,8,1,2 151 | 1187805,8,8,7,4,10,10,7,8,7,4 152 | 1188472,1,1,1,1,1,1,3,1,1,2 153 | 1189266,7,2,4,1,6,10,5,4,3,4 154 | 1189286,10,10,8,6,4,5,8,10,1,4 155 | 1190394,4,1,1,1,2,3,1,1,1,2 156 | 1190485,1,1,1,1,2,1,1,1,1,2 157 | 1192325,5,5,5,6,3,10,3,1,1,4 158 | 1193091,1,2,2,1,2,1,2,1,1,2 159 | 1193210,2,1,1,1,2,1,3,1,1,2 160 | 1193683,1,1,2,1,3,?,1,1,1,2 161 | 
1196295,9,9,10,3,6,10,7,10,6,4 162 | 1196915,10,7,7,4,5,10,5,7,2,4 163 | 1197080,4,1,1,1,2,1,3,2,1,2 164 | 1197270,3,1,1,1,2,1,3,1,1,2 165 | 1197440,1,1,1,2,1,3,1,1,7,2 166 | 1197510,5,1,1,1,2,?,3,1,1,2 167 | 1197979,4,1,1,1,2,2,3,2,1,2 168 | 1197993,5,6,7,8,8,10,3,10,3,4 169 | 1198128,10,8,10,10,6,1,3,1,10,4 170 | 1198641,3,1,1,1,2,1,3,1,1,2 171 | 1199219,1,1,1,2,1,1,1,1,1,2 172 | 1199731,3,1,1,1,2,1,1,1,1,2 173 | 1199983,1,1,1,1,2,1,3,1,1,2 174 | 1200772,1,1,1,1,2,1,2,1,1,2 175 | 1200847,6,10,10,10,8,10,10,10,7,4 176 | 1200892,8,6,5,4,3,10,6,1,1,4 177 | 1200952,5,8,7,7,10,10,5,7,1,4 178 | 1201834,2,1,1,1,2,1,3,1,1,2 179 | 1201936,5,10,10,3,8,1,5,10,3,4 180 | 1202125,4,1,1,1,2,1,3,1,1,2 181 | 1202812,5,3,3,3,6,10,3,1,1,4 182 | 1203096,1,1,1,1,1,1,3,1,1,2 183 | 1204242,1,1,1,1,2,1,1,1,1,2 184 | 1204898,6,1,1,1,2,1,3,1,1,2 185 | 1205138,5,8,8,8,5,10,7,8,1,4 186 | 1205579,8,7,6,4,4,10,5,1,1,4 187 | 1206089,2,1,1,1,1,1,3,1,1,2 188 | 1206695,1,5,8,6,5,8,7,10,1,4 189 | 1206841,10,5,6,10,6,10,7,7,10,4 190 | 1207986,5,8,4,10,5,8,9,10,1,4 191 | 1208301,1,2,3,1,2,1,3,1,1,2 192 | 1210963,10,10,10,8,6,8,7,10,1,4 193 | 1211202,7,5,10,10,10,10,4,10,3,4 194 | 1212232,5,1,1,1,2,1,2,1,1,2 195 | 1212251,1,1,1,1,2,1,3,1,1,2 196 | 1212422,3,1,1,1,2,1,3,1,1,2 197 | 1212422,4,1,1,1,2,1,3,1,1,2 198 | 1213375,8,4,4,5,4,7,7,8,2,2 199 | 1213383,5,1,1,4,2,1,3,1,1,2 200 | 1214092,1,1,1,1,2,1,1,1,1,2 201 | 1214556,3,1,1,1,2,1,2,1,1,2 202 | 1214966,9,7,7,5,5,10,7,8,3,4 203 | 1216694,10,8,8,4,10,10,8,1,1,4 204 | 1216947,1,1,1,1,2,1,3,1,1,2 205 | 1217051,5,1,1,1,2,1,3,1,1,2 206 | 1217264,1,1,1,1,2,1,3,1,1,2 207 | 1218105,5,10,10,9,6,10,7,10,5,4 208 | 1218741,10,10,9,3,7,5,3,5,1,4 209 | 1218860,1,1,1,1,1,1,3,1,1,2 210 | 1218860,1,1,1,1,1,1,3,1,1,2 211 | 1219406,5,1,1,1,1,1,3,1,1,2 212 | 1219525,8,10,10,10,5,10,8,10,6,4 213 | 1219859,8,10,8,8,4,8,7,7,1,4 214 | 1220330,1,1,1,1,2,1,3,1,1,2 215 | 1221863,10,10,10,10,7,10,7,10,4,4 216 | 1222047,10,10,10,10,3,10,10,6,1,4 217 | 1222936,8,7,8,7,5,5,5,10,2,4 218 | 1223282,1,1,1,1,2,1,2,1,1,2 219 | 1223426,1,1,1,1,2,1,3,1,1,2 220 | 1223793,6,10,7,7,6,4,8,10,2,4 221 | 1223967,6,1,3,1,2,1,3,1,1,2 222 | 1224329,1,1,1,2,2,1,3,1,1,2 223 | 1225799,10,6,4,3,10,10,9,10,1,4 224 | 1226012,4,1,1,3,1,5,2,1,1,4 225 | 1226612,7,5,6,3,3,8,7,4,1,4 226 | 1227210,10,5,5,6,3,10,7,9,2,4 227 | 1227244,1,1,1,1,2,1,2,1,1,2 228 | 1227481,10,5,7,4,4,10,8,9,1,4 229 | 1228152,8,9,9,5,3,5,7,7,1,4 230 | 1228311,1,1,1,1,1,1,3,1,1,2 231 | 1230175,10,10,10,3,10,10,9,10,1,4 232 | 1230688,7,4,7,4,3,7,7,6,1,4 233 | 1231387,6,8,7,5,6,8,8,9,2,4 234 | 1231706,8,4,6,3,3,1,4,3,1,2 235 | 1232225,10,4,5,5,5,10,4,1,1,4 236 | 1236043,3,3,2,1,3,1,3,6,1,2 237 | 1241232,3,1,4,1,2,?,3,1,1,2 238 | 1241559,10,8,8,2,8,10,4,8,10,4 239 | 1241679,9,8,8,5,6,2,4,10,4,4 240 | 1242364,8,10,10,8,6,9,3,10,10,4 241 | 1243256,10,4,3,2,3,10,5,3,2,4 242 | 1270479,5,1,3,3,2,2,2,3,1,2 243 | 1276091,3,1,1,3,1,1,3,1,1,2 244 | 1277018,2,1,1,1,2,1,3,1,1,2 245 | 128059,1,1,1,1,2,5,5,1,1,2 246 | 1285531,1,1,1,1,2,1,3,1,1,2 247 | 1287775,5,1,1,2,2,2,3,1,1,2 248 | 144888,8,10,10,8,5,10,7,8,1,4 249 | 145447,8,4,4,1,2,9,3,3,1,4 250 | 167528,4,1,1,1,2,1,3,6,1,2 251 | 169356,3,1,1,1,2,?,3,1,1,2 252 | 183913,1,2,2,1,2,1,1,1,1,2 253 | 191250,10,4,4,10,2,10,5,3,3,4 254 | 1017023,6,3,3,5,3,10,3,5,3,2 255 | 1100524,6,10,10,2,8,10,7,3,3,4 256 | 1116116,9,10,10,1,10,8,3,3,1,4 257 | 1168736,5,6,6,2,4,10,3,6,1,4 258 | 1182404,3,1,1,1,2,1,1,1,1,2 259 | 1182404,3,1,1,1,2,1,2,1,1,2 260 | 1198641,3,1,1,1,2,1,3,1,1,2 261 | 242970,5,7,7,1,5,8,3,4,1,2 262 | 
255644,10,5,8,10,3,10,5,1,3,4 263 | 263538,5,10,10,6,10,10,10,6,5,4 264 | 274137,8,8,9,4,5,10,7,8,1,4 265 | 303213,10,4,4,10,6,10,5,5,1,4 266 | 314428,7,9,4,10,10,3,5,3,3,4 267 | 1182404,5,1,4,1,2,1,3,2,1,2 268 | 1198641,10,10,6,3,3,10,4,3,2,4 269 | 320675,3,3,5,2,3,10,7,1,1,4 270 | 324427,10,8,8,2,3,4,8,7,8,4 271 | 385103,1,1,1,1,2,1,3,1,1,2 272 | 390840,8,4,7,1,3,10,3,9,2,4 273 | 411453,5,1,1,1,2,1,3,1,1,2 274 | 320675,3,3,5,2,3,10,7,1,1,4 275 | 428903,7,2,4,1,3,4,3,3,1,4 276 | 431495,3,1,1,1,2,1,3,2,1,2 277 | 432809,3,1,3,1,2,?,2,1,1,2 278 | 434518,3,1,1,1,2,1,2,1,1,2 279 | 452264,1,1,1,1,2,1,2,1,1,2 280 | 456282,1,1,1,1,2,1,3,1,1,2 281 | 476903,10,5,7,3,3,7,3,3,8,4 282 | 486283,3,1,1,1,2,1,3,1,1,2 283 | 486662,2,1,1,2,2,1,3,1,1,2 284 | 488173,1,4,3,10,4,10,5,6,1,4 285 | 492268,10,4,6,1,2,10,5,3,1,4 286 | 508234,7,4,5,10,2,10,3,8,2,4 287 | 527363,8,10,10,10,8,10,10,7,3,4 288 | 529329,10,10,10,10,10,10,4,10,10,4 289 | 535331,3,1,1,1,3,1,2,1,1,2 290 | 543558,6,1,3,1,4,5,5,10,1,4 291 | 555977,5,6,6,8,6,10,4,10,4,4 292 | 560680,1,1,1,1,2,1,1,1,1,2 293 | 561477,1,1,1,1,2,1,3,1,1,2 294 | 563649,8,8,8,1,2,?,6,10,1,4 295 | 601265,10,4,4,6,2,10,2,3,1,4 296 | 606140,1,1,1,1,2,?,2,1,1,2 297 | 606722,5,5,7,8,6,10,7,4,1,4 298 | 616240,5,3,4,3,4,5,4,7,1,2 299 | 61634,5,4,3,1,2,?,2,3,1,2 300 | 625201,8,2,1,1,5,1,1,1,1,2 301 | 63375,9,1,2,6,4,10,7,7,2,4 302 | 635844,8,4,10,5,4,4,7,10,1,4 303 | 636130,1,1,1,1,2,1,3,1,1,2 304 | 640744,10,10,10,7,9,10,7,10,10,4 305 | 646904,1,1,1,1,2,1,3,1,1,2 306 | 653777,8,3,4,9,3,10,3,3,1,4 307 | 659642,10,8,4,4,4,10,3,10,4,4 308 | 666090,1,1,1,1,2,1,3,1,1,2 309 | 666942,1,1,1,1,2,1,3,1,1,2 310 | 667204,7,8,7,6,4,3,8,8,4,4 311 | 673637,3,1,1,1,2,5,5,1,1,2 312 | 684955,2,1,1,1,3,1,2,1,1,2 313 | 688033,1,1,1,1,2,1,1,1,1,2 314 | 691628,8,6,4,10,10,1,3,5,1,4 315 | 693702,1,1,1,1,2,1,1,1,1,2 316 | 704097,1,1,1,1,1,1,2,1,1,2 317 | 704168,4,6,5,6,7,?,4,9,1,2 318 | 706426,5,5,5,2,5,10,4,3,1,4 319 | 709287,6,8,7,8,6,8,8,9,1,4 320 | 718641,1,1,1,1,5,1,3,1,1,2 321 | 721482,4,4,4,4,6,5,7,3,1,2 322 | 730881,7,6,3,2,5,10,7,4,6,4 323 | 733639,3,1,1,1,2,?,3,1,1,2 324 | 733639,3,1,1,1,2,1,3,1,1,2 325 | 733823,5,4,6,10,2,10,4,1,1,4 326 | 740492,1,1,1,1,2,1,3,1,1,2 327 | 743348,3,2,2,1,2,1,2,3,1,2 328 | 752904,10,1,1,1,2,10,5,4,1,4 329 | 756136,1,1,1,1,2,1,2,1,1,2 330 | 760001,8,10,3,2,6,4,3,10,1,4 331 | 760239,10,4,6,4,5,10,7,1,1,4 332 | 76389,10,4,7,2,2,8,6,1,1,4 333 | 764974,5,1,1,1,2,1,3,1,2,2 334 | 770066,5,2,2,2,2,1,2,2,1,2 335 | 785208,5,4,6,6,4,10,4,3,1,4 336 | 785615,8,6,7,3,3,10,3,4,2,4 337 | 792744,1,1,1,1,2,1,1,1,1,2 338 | 797327,6,5,5,8,4,10,3,4,1,4 339 | 798429,1,1,1,1,2,1,3,1,1,2 340 | 704097,1,1,1,1,1,1,2,1,1,2 341 | 806423,8,5,5,5,2,10,4,3,1,4 342 | 809912,10,3,3,1,2,10,7,6,1,4 343 | 810104,1,1,1,1,2,1,3,1,1,2 344 | 814265,2,1,1,1,2,1,1,1,1,2 345 | 814911,1,1,1,1,2,1,1,1,1,2 346 | 822829,7,6,4,8,10,10,9,5,3,4 347 | 826923,1,1,1,1,2,1,1,1,1,2 348 | 830690,5,2,2,2,3,1,1,3,1,2 349 | 831268,1,1,1,1,1,1,1,3,1,2 350 | 832226,3,4,4,10,5,1,3,3,1,4 351 | 832567,4,2,3,5,3,8,7,6,1,4 352 | 836433,5,1,1,3,2,1,1,1,1,2 353 | 837082,2,1,1,1,2,1,3,1,1,2 354 | 846832,3,4,5,3,7,3,4,6,1,2 355 | 850831,2,7,10,10,7,10,4,9,4,4 356 | 855524,1,1,1,1,2,1,2,1,1,2 357 | 857774,4,1,1,1,3,1,2,2,1,2 358 | 859164,5,3,3,1,3,3,3,3,3,4 359 | 859350,8,10,10,7,10,10,7,3,8,4 360 | 866325,8,10,5,3,8,4,4,10,3,4 361 | 873549,10,3,5,4,3,7,3,5,3,4 362 | 877291,6,10,10,10,10,10,8,10,10,4 363 | 877943,3,10,3,10,6,10,5,1,4,4 364 | 888169,3,2,2,1,4,3,2,1,1,2 365 | 888523,4,4,4,2,2,3,2,1,1,2 366 | 
896404,2,1,1,1,2,1,3,1,1,2 367 | 897172,2,1,1,1,2,1,2,1,1,2 368 | 95719,6,10,10,10,8,10,7,10,7,4 369 | 160296,5,8,8,10,5,10,8,10,3,4 370 | 342245,1,1,3,1,2,1,1,1,1,2 371 | 428598,1,1,3,1,1,1,2,1,1,2 372 | 492561,4,3,2,1,3,1,2,1,1,2 373 | 493452,1,1,3,1,2,1,1,1,1,2 374 | 493452,4,1,2,1,2,1,2,1,1,2 375 | 521441,5,1,1,2,2,1,2,1,1,2 376 | 560680,3,1,2,1,2,1,2,1,1,2 377 | 636437,1,1,1,1,2,1,1,1,1,2 378 | 640712,1,1,1,1,2,1,2,1,1,2 379 | 654244,1,1,1,1,1,1,2,1,1,2 380 | 657753,3,1,1,4,3,1,2,2,1,2 381 | 685977,5,3,4,1,4,1,3,1,1,2 382 | 805448,1,1,1,1,2,1,1,1,1,2 383 | 846423,10,6,3,6,4,10,7,8,4,4 384 | 1002504,3,2,2,2,2,1,3,2,1,2 385 | 1022257,2,1,1,1,2,1,1,1,1,2 386 | 1026122,2,1,1,1,2,1,1,1,1,2 387 | 1071084,3,3,2,2,3,1,1,2,3,2 388 | 1080233,7,6,6,3,2,10,7,1,1,4 389 | 1114570,5,3,3,2,3,1,3,1,1,2 390 | 1114570,2,1,1,1,2,1,2,2,1,2 391 | 1116715,5,1,1,1,3,2,2,2,1,2 392 | 1131411,1,1,1,2,2,1,2,1,1,2 393 | 1151734,10,8,7,4,3,10,7,9,1,4 394 | 1156017,3,1,1,1,2,1,2,1,1,2 395 | 1158247,1,1,1,1,1,1,1,1,1,2 396 | 1158405,1,2,3,1,2,1,2,1,1,2 397 | 1168278,3,1,1,1,2,1,2,1,1,2 398 | 1176187,3,1,1,1,2,1,3,1,1,2 399 | 1196263,4,1,1,1,2,1,1,1,1,2 400 | 1196475,3,2,1,1,2,1,2,2,1,2 401 | 1206314,1,2,3,1,2,1,1,1,1,2 402 | 1211265,3,10,8,7,6,9,9,3,8,4 403 | 1213784,3,1,1,1,2,1,1,1,1,2 404 | 1223003,5,3,3,1,2,1,2,1,1,2 405 | 1223306,3,1,1,1,2,4,1,1,1,2 406 | 1223543,1,2,1,3,2,1,1,2,1,2 407 | 1229929,1,1,1,1,2,1,2,1,1,2 408 | 1231853,4,2,2,1,2,1,2,1,1,2 409 | 1234554,1,1,1,1,2,1,2,1,1,2 410 | 1236837,2,3,2,2,2,2,3,1,1,2 411 | 1237674,3,1,2,1,2,1,2,1,1,2 412 | 1238021,1,1,1,1,2,1,2,1,1,2 413 | 1238464,1,1,1,1,1,?,2,1,1,2 414 | 1238633,10,10,10,6,8,4,8,5,1,4 415 | 1238915,5,1,2,1,2,1,3,1,1,2 416 | 1238948,8,5,6,2,3,10,6,6,1,4 417 | 1239232,3,3,2,6,3,3,3,5,1,2 418 | 1239347,8,7,8,5,10,10,7,2,1,4 419 | 1239967,1,1,1,1,2,1,2,1,1,2 420 | 1240337,5,2,2,2,2,2,3,2,2,2 421 | 1253505,2,3,1,1,5,1,1,1,1,2 422 | 1255384,3,2,2,3,2,3,3,1,1,2 423 | 1257200,10,10,10,7,10,10,8,2,1,4 424 | 1257648,4,3,3,1,2,1,3,3,1,2 425 | 1257815,5,1,3,1,2,1,2,1,1,2 426 | 1257938,3,1,1,1,2,1,1,1,1,2 427 | 1258549,9,10,10,10,10,10,10,10,1,4 428 | 1258556,5,3,6,1,2,1,1,1,1,2 429 | 1266154,8,7,8,2,4,2,5,10,1,4 430 | 1272039,1,1,1,1,2,1,2,1,1,2 431 | 1276091,2,1,1,1,2,1,2,1,1,2 432 | 1276091,1,3,1,1,2,1,2,2,1,2 433 | 1276091,5,1,1,3,4,1,3,2,1,2 434 | 1277629,5,1,1,1,2,1,2,2,1,2 435 | 1293439,3,2,2,3,2,1,1,1,1,2 436 | 1293439,6,9,7,5,5,8,4,2,1,2 437 | 1294562,10,8,10,1,3,10,5,1,1,4 438 | 1295186,10,10,10,1,6,1,2,8,1,4 439 | 527337,4,1,1,1,2,1,1,1,1,2 440 | 558538,4,1,3,3,2,1,1,1,1,2 441 | 566509,5,1,1,1,2,1,1,1,1,2 442 | 608157,10,4,3,10,4,10,10,1,1,4 443 | 677910,5,2,2,4,2,4,1,1,1,2 444 | 734111,1,1,1,3,2,3,1,1,1,2 445 | 734111,1,1,1,1,2,2,1,1,1,2 446 | 780555,5,1,1,6,3,1,2,1,1,2 447 | 827627,2,1,1,1,2,1,1,1,1,2 448 | 1049837,1,1,1,1,2,1,1,1,1,2 449 | 1058849,5,1,1,1,2,1,1,1,1,2 450 | 1182404,1,1,1,1,1,1,1,1,1,2 451 | 1193544,5,7,9,8,6,10,8,10,1,4 452 | 1201870,4,1,1,3,1,1,2,1,1,2 453 | 1202253,5,1,1,1,2,1,1,1,1,2 454 | 1227081,3,1,1,3,2,1,1,1,1,2 455 | 1230994,4,5,5,8,6,10,10,7,1,4 456 | 1238410,2,3,1,1,3,1,1,1,1,2 457 | 1246562,10,2,2,1,2,6,1,1,2,4 458 | 1257470,10,6,5,8,5,10,8,6,1,4 459 | 1259008,8,8,9,6,6,3,10,10,1,4 460 | 1266124,5,1,2,1,2,1,1,1,1,2 461 | 1267898,5,1,3,1,2,1,1,1,1,2 462 | 1268313,5,1,1,3,2,1,1,1,1,2 463 | 1268804,3,1,1,1,2,5,1,1,1,2 464 | 1276091,6,1,1,3,2,1,1,1,1,2 465 | 1280258,4,1,1,1,2,1,1,2,1,2 466 | 1293966,4,1,1,1,2,1,1,1,1,2 467 | 1296572,10,9,8,7,6,4,7,10,3,4 468 | 1298416,10,6,6,2,4,10,9,7,1,4 469 | 
1299596,6,6,6,5,4,10,7,6,2,4 470 | 1105524,4,1,1,1,2,1,1,1,1,2 471 | 1181685,1,1,2,1,2,1,2,1,1,2 472 | 1211594,3,1,1,1,1,1,2,1,1,2 473 | 1238777,6,1,1,3,2,1,1,1,1,2 474 | 1257608,6,1,1,1,1,1,1,1,1,2 475 | 1269574,4,1,1,1,2,1,1,1,1,2 476 | 1277145,5,1,1,1,2,1,1,1,1,2 477 | 1287282,3,1,1,1,2,1,1,1,1,2 478 | 1296025,4,1,2,1,2,1,1,1,1,2 479 | 1296263,4,1,1,1,2,1,1,1,1,2 480 | 1296593,5,2,1,1,2,1,1,1,1,2 481 | 1299161,4,8,7,10,4,10,7,5,1,4 482 | 1301945,5,1,1,1,1,1,1,1,1,2 483 | 1302428,5,3,2,4,2,1,1,1,1,2 484 | 1318169,9,10,10,10,10,5,10,10,10,4 485 | 474162,8,7,8,5,5,10,9,10,1,4 486 | 787451,5,1,2,1,2,1,1,1,1,2 487 | 1002025,1,1,1,3,1,3,1,1,1,2 488 | 1070522,3,1,1,1,1,1,2,1,1,2 489 | 1073960,10,10,10,10,6,10,8,1,5,4 490 | 1076352,3,6,4,10,3,3,3,4,1,4 491 | 1084139,6,3,2,1,3,4,4,1,1,4 492 | 1115293,1,1,1,1,2,1,1,1,1,2 493 | 1119189,5,8,9,4,3,10,7,1,1,4 494 | 1133991,4,1,1,1,1,1,2,1,1,2 495 | 1142706,5,10,10,10,6,10,6,5,2,4 496 | 1155967,5,1,2,10,4,5,2,1,1,2 497 | 1170945,3,1,1,1,1,1,2,1,1,2 498 | 1181567,1,1,1,1,1,1,1,1,1,2 499 | 1182404,4,2,1,1,2,1,1,1,1,2 500 | 1204558,4,1,1,1,2,1,2,1,1,2 501 | 1217952,4,1,1,1,2,1,2,1,1,2 502 | 1224565,6,1,1,1,2,1,3,1,1,2 503 | 1238186,4,1,1,1,2,1,2,1,1,2 504 | 1253917,4,1,1,2,2,1,2,1,1,2 505 | 1265899,4,1,1,1,2,1,3,1,1,2 506 | 1268766,1,1,1,1,2,1,1,1,1,2 507 | 1277268,3,3,1,1,2,1,1,1,1,2 508 | 1286943,8,10,10,10,7,5,4,8,7,4 509 | 1295508,1,1,1,1,2,4,1,1,1,2 510 | 1297327,5,1,1,1,2,1,1,1,1,2 511 | 1297522,2,1,1,1,2,1,1,1,1,2 512 | 1298360,1,1,1,1,2,1,1,1,1,2 513 | 1299924,5,1,1,1,2,1,2,1,1,2 514 | 1299994,5,1,1,1,2,1,1,1,1,2 515 | 1304595,3,1,1,1,1,1,2,1,1,2 516 | 1306282,6,6,7,10,3,10,8,10,2,4 517 | 1313325,4,10,4,7,3,10,9,10,1,4 518 | 1320077,1,1,1,1,1,1,1,1,1,2 519 | 1320077,1,1,1,1,1,1,2,1,1,2 520 | 1320304,3,1,2,2,2,1,1,1,1,2 521 | 1330439,4,7,8,3,4,10,9,1,1,4 522 | 333093,1,1,1,1,3,1,1,1,1,2 523 | 369565,4,1,1,1,3,1,1,1,1,2 524 | 412300,10,4,5,4,3,5,7,3,1,4 525 | 672113,7,5,6,10,4,10,5,3,1,4 526 | 749653,3,1,1,1,2,1,2,1,1,2 527 | 769612,3,1,1,2,2,1,1,1,1,2 528 | 769612,4,1,1,1,2,1,1,1,1,2 529 | 798429,4,1,1,1,2,1,3,1,1,2 530 | 807657,6,1,3,2,2,1,1,1,1,2 531 | 8233704,4,1,1,1,1,1,2,1,1,2 532 | 837480,7,4,4,3,4,10,6,9,1,4 533 | 867392,4,2,2,1,2,1,2,1,1,2 534 | 869828,1,1,1,1,1,1,3,1,1,2 535 | 1043068,3,1,1,1,2,1,2,1,1,2 536 | 1056171,2,1,1,1,2,1,2,1,1,2 537 | 1061990,1,1,3,2,2,1,3,1,1,2 538 | 1113061,5,1,1,1,2,1,3,1,1,2 539 | 1116192,5,1,2,1,2,1,3,1,1,2 540 | 1135090,4,1,1,1,2,1,2,1,1,2 541 | 1145420,6,1,1,1,2,1,2,1,1,2 542 | 1158157,5,1,1,1,2,2,2,1,1,2 543 | 1171578,3,1,1,1,2,1,1,1,1,2 544 | 1174841,5,3,1,1,2,1,1,1,1,2 545 | 1184586,4,1,1,1,2,1,2,1,1,2 546 | 1186936,2,1,3,2,2,1,2,1,1,2 547 | 1197527,5,1,1,1,2,1,2,1,1,2 548 | 1222464,6,10,10,10,4,10,7,10,1,4 549 | 1240603,2,1,1,1,1,1,1,1,1,2 550 | 1240603,3,1,1,1,1,1,1,1,1,2 551 | 1241035,7,8,3,7,4,5,7,8,2,4 552 | 1287971,3,1,1,1,2,1,2,1,1,2 553 | 1289391,1,1,1,1,2,1,3,1,1,2 554 | 1299924,3,2,2,2,2,1,4,2,1,2 555 | 1306339,4,4,2,1,2,5,2,1,2,2 556 | 1313658,3,1,1,1,2,1,1,1,1,2 557 | 1313982,4,3,1,1,2,1,4,8,1,2 558 | 1321264,5,2,2,2,1,1,2,1,1,2 559 | 1321321,5,1,1,3,2,1,1,1,1,2 560 | 1321348,2,1,1,1,2,1,2,1,1,2 561 | 1321931,5,1,1,1,2,1,2,1,1,2 562 | 1321942,5,1,1,1,2,1,3,1,1,2 563 | 1321942,5,1,1,1,2,1,3,1,1,2 564 | 1328331,1,1,1,1,2,1,3,1,1,2 565 | 1328755,3,1,1,1,2,1,2,1,1,2 566 | 1331405,4,1,1,1,2,1,3,2,1,2 567 | 1331412,5,7,10,10,5,10,10,10,1,4 568 | 1333104,3,1,2,1,2,1,3,1,1,2 569 | 1334071,4,1,1,1,2,3,2,1,1,2 570 | 1343068,8,4,4,1,6,10,2,5,2,4 571 | 1343374,10,10,8,10,6,5,10,3,1,4 572 | 
1344121,8,10,4,4,8,10,8,2,1,4 573 | 142932,7,6,10,5,3,10,9,10,2,4 574 | 183936,3,1,1,1,2,1,2,1,1,2 575 | 324382,1,1,1,1,2,1,2,1,1,2 576 | 378275,10,9,7,3,4,2,7,7,1,4 577 | 385103,5,1,2,1,2,1,3,1,1,2 578 | 690557,5,1,1,1,2,1,2,1,1,2 579 | 695091,1,1,1,1,2,1,2,1,1,2 580 | 695219,1,1,1,1,2,1,2,1,1,2 581 | 824249,1,1,1,1,2,1,3,1,1,2 582 | 871549,5,1,2,1,2,1,2,1,1,2 583 | 878358,5,7,10,6,5,10,7,5,1,4 584 | 1107684,6,10,5,5,4,10,6,10,1,4 585 | 1115762,3,1,1,1,2,1,1,1,1,2 586 | 1217717,5,1,1,6,3,1,1,1,1,2 587 | 1239420,1,1,1,1,2,1,1,1,1,2 588 | 1254538,8,10,10,10,6,10,10,10,1,4 589 | 1261751,5,1,1,1,2,1,2,2,1,2 590 | 1268275,9,8,8,9,6,3,4,1,1,4 591 | 1272166,5,1,1,1,2,1,1,1,1,2 592 | 1294261,4,10,8,5,4,1,10,1,1,4 593 | 1295529,2,5,7,6,4,10,7,6,1,4 594 | 1298484,10,3,4,5,3,10,4,1,1,4 595 | 1311875,5,1,2,1,2,1,1,1,1,2 596 | 1315506,4,8,6,3,4,10,7,1,1,4 597 | 1320141,5,1,1,1,2,1,2,1,1,2 598 | 1325309,4,1,2,1,2,1,2,1,1,2 599 | 1333063,5,1,3,1,2,1,3,1,1,2 600 | 1333495,3,1,1,1,2,1,2,1,1,2 601 | 1334659,5,2,4,1,1,1,1,1,1,2 602 | 1336798,3,1,1,1,2,1,2,1,1,2 603 | 1344449,1,1,1,1,1,1,2,1,1,2 604 | 1350568,4,1,1,1,2,1,2,1,1,2 605 | 1352663,5,4,6,8,4,1,8,10,1,4 606 | 188336,5,3,2,8,5,10,8,1,2,4 607 | 352431,10,5,10,3,5,8,7,8,3,4 608 | 353098,4,1,1,2,2,1,1,1,1,2 609 | 411453,1,1,1,1,2,1,1,1,1,2 610 | 557583,5,10,10,10,10,10,10,1,1,4 611 | 636375,5,1,1,1,2,1,1,1,1,2 612 | 736150,10,4,3,10,3,10,7,1,2,4 613 | 803531,5,10,10,10,5,2,8,5,1,4 614 | 822829,8,10,10,10,6,10,10,10,10,4 615 | 1016634,2,3,1,1,2,1,2,1,1,2 616 | 1031608,2,1,1,1,1,1,2,1,1,2 617 | 1041043,4,1,3,1,2,1,2,1,1,2 618 | 1042252,3,1,1,1,2,1,2,1,1,2 619 | 1057067,1,1,1,1,1,?,1,1,1,2 620 | 1061990,4,1,1,1,2,1,2,1,1,2 621 | 1073836,5,1,1,1,2,1,2,1,1,2 622 | 1083817,3,1,1,1,2,1,2,1,1,2 623 | 1096352,6,3,3,3,3,2,6,1,1,2 624 | 1140597,7,1,2,3,2,1,2,1,1,2 625 | 1149548,1,1,1,1,2,1,1,1,1,2 626 | 1174009,5,1,1,2,1,1,2,1,1,2 627 | 1183596,3,1,3,1,3,4,1,1,1,2 628 | 1190386,4,6,6,5,7,6,7,7,3,4 629 | 1190546,2,1,1,1,2,5,1,1,1,2 630 | 1213273,2,1,1,1,2,1,1,1,1,2 631 | 1218982,4,1,1,1,2,1,1,1,1,2 632 | 1225382,6,2,3,1,2,1,1,1,1,2 633 | 1235807,5,1,1,1,2,1,2,1,1,2 634 | 1238777,1,1,1,1,2,1,1,1,1,2 635 | 1253955,8,7,4,4,5,3,5,10,1,4 636 | 1257366,3,1,1,1,2,1,1,1,1,2 637 | 1260659,3,1,4,1,2,1,1,1,1,2 638 | 1268952,10,10,7,8,7,1,10,10,3,4 639 | 1275807,4,2,4,3,2,2,2,1,1,2 640 | 1277792,4,1,1,1,2,1,1,1,1,2 641 | 1277792,5,1,1,3,2,1,1,1,1,2 642 | 1285722,4,1,1,3,2,1,1,1,1,2 643 | 1288608,3,1,1,1,2,1,2,1,1,2 644 | 1290203,3,1,1,1,2,1,2,1,1,2 645 | 1294413,1,1,1,1,2,1,1,1,1,2 646 | 1299596,2,1,1,1,2,1,1,1,1,2 647 | 1303489,3,1,1,1,2,1,2,1,1,2 648 | 1311033,1,2,2,1,2,1,1,1,1,2 649 | 1311108,1,1,1,3,2,1,1,1,1,2 650 | 1315807,5,10,10,10,10,2,10,10,10,4 651 | 1318671,3,1,1,1,2,1,2,1,1,2 652 | 1319609,3,1,1,2,3,4,1,1,1,2 653 | 1323477,1,2,1,3,2,1,2,1,1,2 654 | 1324572,5,1,1,1,2,1,2,2,1,2 655 | 1324681,4,1,1,1,2,1,2,1,1,2 656 | 1325159,3,1,1,1,2,1,3,1,1,2 657 | 1326892,3,1,1,1,2,1,2,1,1,2 658 | 1330361,5,1,1,1,2,1,2,1,1,2 659 | 1333877,5,4,5,1,8,1,3,6,1,2 660 | 1334015,7,8,8,7,3,10,7,2,3,4 661 | 1334667,1,1,1,1,2,1,1,1,1,2 662 | 1339781,1,1,1,1,2,1,2,1,1,2 663 | 1339781,4,1,1,1,2,1,3,1,1,2 664 | 13454352,1,1,3,1,2,1,2,1,1,2 665 | 1345452,1,1,3,1,2,1,2,1,1,2 666 | 1345593,3,1,1,3,2,1,2,1,1,2 667 | 1347749,1,1,1,1,2,1,1,1,1,2 668 | 1347943,5,2,2,2,2,1,1,1,2,2 669 | 1348851,3,1,1,1,2,1,3,1,1,2 670 | 1350319,5,7,4,1,6,1,7,10,3,4 671 | 1350423,5,10,10,8,5,5,7,10,1,4 672 | 1352848,3,10,7,8,5,8,7,4,1,4 673 | 1353092,3,2,1,2,2,1,3,1,1,2 674 | 1354840,2,1,1,1,2,1,3,1,1,2 675 | 
1354840,5,3,2,1,3,1,1,1,1,2 676 | 1355260,1,1,1,1,2,1,2,1,1,2 677 | 1365075,4,1,4,1,2,1,1,1,1,2 678 | 1365328,1,1,2,1,2,1,2,1,1,2 679 | 1368267,5,1,1,1,2,1,1,1,1,2 680 | 1368273,1,1,1,1,2,1,1,1,1,2 681 | 1368882,2,1,1,1,2,1,1,1,1,2 682 | 1369821,10,10,10,10,5,10,10,10,7,4 683 | 1371026,5,10,10,10,4,10,5,6,3,4 684 | 1371920,5,1,1,1,2,1,3,2,1,2 685 | 466906,1,1,1,1,2,1,1,1,1,2 686 | 466906,1,1,1,1,2,1,1,1,1,2 687 | 534555,1,1,1,1,2,1,1,1,1,2 688 | 536708,1,1,1,1,2,1,1,1,1,2 689 | 566346,3,1,1,1,2,1,2,3,1,2 690 | 603148,4,1,1,1,2,1,1,1,1,2 691 | 654546,1,1,1,1,2,1,1,1,8,2 692 | 654546,1,1,1,3,2,1,1,1,1,2 693 | 695091,5,10,10,5,4,5,4,4,1,4 694 | 714039,3,1,1,1,2,1,1,1,1,2 695 | 763235,3,1,1,1,2,1,2,1,2,2 696 | 776715,3,1,1,1,3,2,1,1,1,2 697 | 841769,2,1,1,1,2,1,1,1,1,2 698 | 888820,5,10,10,3,7,3,8,10,2,4 699 | 897471,4,8,6,4,3,4,10,6,1,4 700 | 897471,4,8,8,5,4,5,10,4,1,4 701 | -------------------------------------------------------------------------------- /Algorithms/datasets/mnist/t10k-images-idx3-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/datasets/mnist/t10k-images-idx3-ubyte.gz -------------------------------------------------------------------------------- /Algorithms/datasets/mnist/t10k-labels-idx1-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/datasets/mnist/t10k-labels-idx1-ubyte.gz -------------------------------------------------------------------------------- /Algorithms/datasets/mnist/train-images-idx3-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/datasets/mnist/train-images-idx3-ubyte.gz -------------------------------------------------------------------------------- /Algorithms/datasets/mnist/train-labels-idx1-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/datasets/mnist/train-labels-idx1-ubyte.gz -------------------------------------------------------------------------------- /Algorithms/datasets/titanic.xls: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/anubhavshrimal/Machine-Learning/2a51a6bbba2cdd34d88c5bb3c50314f5409d9bc9/Algorithms/datasets/titanic.xls -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Anubhav Shrimal 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the 
Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Machine Learning
2 | 
3 | My Projects, Kaggle Competitions, and implementations of some popular machine learning algorithms.
4 | 
5 | I have also included a list of research papers I have curated in the field of ML/DL/AI.
6 | 
7 | ## Index:
8 | 
9 | - [Projects](#projects)
10 | - [Research Papers (Anubhav Reads)](#research-papers-anubhav-reads)
11 | - [Kaggle Competitions](#kaggle-competitions)
12 | - [Algorithms](#algorithms)
13 | 
14 | ------------------------------------------------------------------------------
15 | ## Content:
16 | 
17 | ### Projects
18 | 
19 | Following are the projects I've done in the Machine Learning field:
20 | 
21 | | Name | Description |
22 | | --- | --- |
23 | | [Reinforcement Learning Algorithms](https://github.com/anubhavshrimal/Reinforcement-Learning) | Implementation of different Reinforcement Learning algorithms such as DQN, Double-DQN, Dueling-DQN, Monte Carlo, Temporal Difference, etc. |
24 | | [RL-DQN-Navigation](https://github.com/anubhavshrimal/Navigation_Udacity_DRLND_P1) | Deep Q-Learning agent that maximizes the reward in the Unity ML-Agents based Banana Collector environment |
25 | | [RL-DDPG-Continuous_Control](https://github.com/anubhavshrimal/Continuous_Control_Udacity_DRLND_P2) | Deep Deterministic Policy Gradient agent that maximizes the reward in the Unity ML-Agents based Reacher continuous-control environment |
26 | | [RL-Multi_Agent_DDPG-Collaboration](https://github.com/anubhavshrimal/Collaboration_Competition_Udacity_DRLND_P3) | Multi-Agent Deep Deterministic Policy Gradient agent that maximizes the reward in the Unity ML-Agents based Tennis continuous-control environment |
27 | | [Image Captioning](https://github.com/anubhavshrimal/Attention-Beam-Image-Captioning) | Encoder (CNN)-Decoder (RNN) model with Attention and a Beam Search heuristic to generate captions for a given image |
28 | | [GANs Generate Faces](https://github.com/anubhavshrimal/GANs-Generate-Faces) | Generative Adversarial Networks (GANs) used to generate new images of human faces |
29 | | [SageMaker PyTorch Model Deployment](https://github.com/anubhavshrimal/SageMaker-PyTorch-Model-Deployment) | Deployed a PyTorch model that predicts the sentiment of a review using Amazon SageMaker |
30 | | [RNN Generate TV Scripts](https://github.com/anubhavshrimal/RNN-Generate-TV-Scripts) | An LSTM implementation in PyTorch that generates a new, "fake" TV script using the Seinfeld dataset of scripts from 9 seasons |
31 | | [SML Malaria Detection](https://github.com/anubhavshrimal/SML-Malaria-Detection) | Comparison of Naive Bayes, SVM, XGBoost, Bagging, AdaBoost, K-Nearest Neighbors, and Random Forests for classifying malaria cells |
32 | | [Quick Draw](https://github.com/anubhavshrimal/Quick-Draw) | Implementation of the Google Quick Draw doodle recognition game in PyTorch and other classifiers |
33 | | [CNN Dog Breed Classifier](https://github.com/anubhavshrimal/CNN-Dog-Breed-Classifier) | CNN implementation in PyTorch to identify a dog's breed from a given image; if supplied an image of a human face, the code identifies the resembling dog breed |
34 | | [Neural Networks Bike Sharing Prediction](https://github.com/anubhavshrimal/Neural-Networks-Bike-Sharing-Prediction) | Neural network implemented from scratch in NumPy to predict daily bike rentals on an hourly basis |
35 | | [Face Recognition](https://github.com/anubhavshrimal/Face-Recognition) | Project to recognize people in an image or video, similar to Facebook |
36 | | [Simulated Self Driving Car](https://github.com/anubhavshrimal/Simulated_Self_Driving_Car) | Training a CNN model to drive a car in a simulator |
37 | | [Chess AI](https://github.com/anubhavshrimal/Chess-AI) | Chess AI that uses Alpha-Beta Pruning to select the best moves |
38 | | [Amazon Alexa Skills](https://github.com/anubhavshrimal/Amazon-Alexa-Skills) | Amazon Alexa Skills made using the Alexa Skills Kit and AWS Lambda functions |
39 | 
40 | There are a few other projects I've done that are currently not on GitHub:
41 | 
42 | | Name | Description |
43 | | --- | --- |
44 | | Game Bot using Reinforcement Learning | Game bot trained using Deep Q-Networks and Q-learning to play games such as Pac-Man, FrozenLake, etc. |
45 | | Course Recommendation System | Apriori-algorithm-based recommendation system that suggests courses based on past students' data at IIIT Delhi |
46 | 
47 | ### Research Papers (Anubhav Reads)
48 | 
49 | A list of research papers in the domains of machine learning, deep learning, and related fields. The papers can be browsed by differentiating criteria such as conference venue, year published, topic covered, authors, etc.
50 | 
51 | You can find the curated list of research papers in [this repository](https://github.com/anubhavshrimal/Machine-Learning-Research-Papers).
52 | 
53 | 
54 | ### Kaggle Competitions
55 | 
56 | | Algorithm/Topic | Description |
57 | | --- | --- |
58 | | [Dogs-vs-Cats-Redux-Kernels](Kaggle-Competitions/Dogs-vs-Cats-Redux-Kernels/) | Dogs vs Cats classification using transfer learning with `Resnet34` (CNN) in PyTorch (see the fine-tuning sketch below) |
59 | | [Dog-Breed-Identification](Kaggle-Competitions/Dog-Breed-Identification/) | Dog breed identification using transfer learning with `Resnet101_64` (CNN) in PyTorch |
60 | 
61 | ### Algorithms
62 | 
63 | | Algorithm/Topic | Description |
64 | | --- | --- |
65 | | [Clustering Algorithms](Algorithms/Clustering/) | Unsupervised algorithms such as K-Means and Mean-Shift |
66 | | [Deep Learning](Algorithms/Deep_Learning/) | Deep Neural Networks and Recurrent Neural Networks |
67 | | [K Nearest Neighbours](Algorithms/K_Nearest_Neighbours/) | From-scratch and scikit-learn implementations of K-Nearest Neighbours (see the dataset-loading sketch below) |
68 | | [Linear Regression](Algorithms/Linear_Regression/) | Linear regression using scikit-learn |
69 | | [Support Vector Machine](Algorithms/Support_Vector_Machine/) | Support vector machine classification using scikit-learn |
--------------------------------------------------------------------------------
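For reference, here is a minimal sketch of how the `breast-cancer-wisconsin.data.txt` file included above can be loaded and classified with scikit-learn's K-Nearest Neighbours. It assumes the standard UCI layout visible in the data (a sample id, nine 1-10 cytology features, and a class label where 2 = benign and 4 = malignant); the column names, the handling of the dataset's `?` missing values, and the hyperparameters are illustrative choices, not necessarily what the repository's own scripts do.

```python
# Illustrative sketch: KNN on the Wisconsin breast cancer data (assumptions noted above).
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

cols = ['id', 'clump_thickness', 'cell_size', 'cell_shape', 'adhesion',
        'epithelial_size', 'bare_nuclei', 'bland_chromatin',
        'normal_nucleoli', 'mitoses', 'class']  # class: 2 = benign, 4 = malignant

df = pd.read_csv('Algorithms/datasets/breast-cancer-wisconsin.data.txt',
                 names=cols, na_values='?')  # a few rows mark missing values with '?'
df = df.dropna().drop(columns=['id'])        # the sample id carries no predictive signal

X_train, X_test, y_train, y_test = train_test_split(
    df.drop(columns=['class']), df['class'], test_size=0.2, random_state=42)

clf = KNeighborsClassifier(n_neighbors=5)
clf.fit(X_train, y_train)
print(f'test accuracy: {clf.score(X_test, y_test):.3f}')
```

Dropping the `id` column matters for a distance-based method like KNN: the sample code number is arbitrary, and left in place it would dominate the Euclidean distance as one large, meaningless feature.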
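Similarly, a minimal PyTorch sketch of the transfer-learning recipe named in the Kaggle table: fine-tuning a pretrained ResNet-34 as a two-class dogs-vs-cats classifier. The freezing strategy, optimizer, and dummy batch are illustrative assumptions; the actual notebooks may configure the model differently.

```python
# Illustrative sketch: transfer learning with a pretrained ResNet-34 in PyTorch.
import torch
import torch.nn as nn
from torchvision import models

model = models.resnet34(pretrained=True)        # ImageNet weights
for param in model.parameters():                # freeze the convolutional backbone
    param.requires_grad = False
model.fc = nn.Linear(model.fc.in_features, 2)   # new trainable 2-class head

optimizer = torch.optim.Adam(model.fc.parameters(), lr=1e-3)
criterion = nn.CrossEntropyLoss()

# one illustrative training step on a dummy batch of 224x224 RGB images
images, labels = torch.randn(4, 3, 224, 224), torch.tensor([0, 1, 1, 0])
loss = criterion(model(images), labels)
optimizer.zero_grad()
loss.backward()
optimizer.step()
print(f'loss: {loss.item():.3f}')
```

Training only the new head is the cheapest variant of transfer learning; a common next step is to unfreeze the last residual blocks and continue training them at a lower learning rate.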