├── Part 3 - Classification ├── Section 17 - Kernel SVM │ ├── Kernel_SVM │ │ ├── untitled1.py │ │ ├── classification_template.R │ │ ├── kernel_svm.R │ │ ├── classification_template.py │ │ ├── kernel_svm.py │ │ └── Social_Network_Ads.csv │ └── P14-Kernel-SVM.zip ├── Section 16 - Support Vector Machine (SVM) │ ├── P14-SVM.zip │ └── SVM │ │ ├── classification_template.R │ │ ├── svm.R │ │ ├── classification_template.py │ │ ├── svm.py │ │ ├── untitled0.py │ │ └── Social_Network_Ads.csv ├── Section 14 - Logistic Regression │ ├── P14-Logistic-Regression.zip │ ├── P14-Classification-Template.zip │ ├── Logistic_Regression │ │ ├── socialnetworkads.py │ │ ├── logistic_regression.R │ │ ├── logistic_regression.py │ │ └── Social_Network_Ads.csv │ └── Classification_Template │ │ ├── classification_template.R │ │ └── classification_template.py ├── Section 15 - K-Nearest Neighbors (K-NN) │ ├── classification_template.R │ ├── knn.R │ ├── classification_template.py │ ├── knn.py │ ├── knearesrt neighbour0.py │ └── Social_Network_Ads.csv └── Section 18 - Naive Bayes │ └── Naive_Bayes │ ├── classification_template.R │ ├── naive_bayes.R │ ├── classification_template.py │ ├── naive_bayes.py │ └── Social_Network_Ads.csv └── README.md /Part 3 - Classification/Section 17 - Kernel SVM/Kernel_SVM/untitled1.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Part 3 - Classification/Section 17 - Kernel SVM/P14-Kernel-SVM.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sayantann11/all-classification-templetes-for-ML/HEAD/Part 3 - Classification/Section 17 - Kernel SVM/P14-Kernel-SVM.zip -------------------------------------------------------------------------------- /Part 3 - Classification/Section 16 - Support Vector Machine (SVM)/P14-SVM.zip: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/sayantann11/all-classification-templetes-for-ML/HEAD/Part 3 - Classification/Section 16 - Support Vector Machine (SVM)/P14-SVM.zip -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/P14-Logistic-Regression.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sayantann11/all-classification-templetes-for-ML/HEAD/Part 3 - Classification/Section 14 - Logistic Regression/P14-Logistic-Regression.zip -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/P14-Classification-Template.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sayantann11/all-classification-templetes-for-ML/HEAD/Part 3 - Classification/Section 14 - Logistic Regression/P14-Classification-Template.zip -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/Logistic_Regression/socialnetworkads.py: -------------------------------------------------------------------------------- 1 | # Data Preprocessing Template 2 | 3 | # Importing the libraries 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | # Importing the dataset 9 | dataset = pd.read_csv('Social_Network_Ads.csv') 10 | X = dataset.iloc[:, [2,3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc_X = 
StandardScaler() 20 | X_train = sc_X.fit_transform(X_train) 21 | X_test = sc_X.transform(X_test) 22 | 23 | #fittig logistic regression to the training set 24 | from sklearn.linear_model import LogisticRegression 25 | classifier = LogisticRegression(random_state = 0) 26 | classifier.fit(X_train,y_train) 27 | 28 | 29 | #predicting the test set 30 | 31 | 32 | y_pred = classifier.predict(X_test) 33 | 34 | #making the confusion matrix 35 | 36 | from sklearn.metrics import confusion_matrix 37 | cm = confusion_matrix(y_test, y_pred) 38 | 39 | #visualising the training set result 40 | 41 | -------------------------------------------------------------------------------- /Part 3 - Classification/Section 17 - Kernel SVM/Kernel_SVM/classification_template.R: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting classifier to the Training set 23 | # Create your classifier here 24 | 25 | # Predicting the Test set results 26 | y_pred = predict(classifier, newdata = test_set[-3]) 27 | 28 | # Making the Confusion Matrix 29 | cm = table(test_set[, 3], y_pred) 30 | 31 | # Visualising the Training set results 32 | library(ElemStatLearn) 33 | set = training_set 34 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 35 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, 
by = 0.01) 36 | grid_set = expand.grid(X1, X2) 37 | colnames(grid_set) = c('Age', 'EstimatedSalary') 38 | y_grid = predict(classifier, newdata = grid_set) 39 | plot(set[, -3], 40 | main = 'Classifier (Training set)', 41 | xlab = 'Age', ylab = 'Estimated Salary', 42 | xlim = range(X1), ylim = range(X2)) 43 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 44 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 45 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 46 | 47 | # Visualising the Test set results 48 | library(ElemStatLearn) 49 | set = test_set 50 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 51 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 52 | grid_set = expand.grid(X1, X2) 53 | colnames(grid_set) = c('Age', 'EstimatedSalary') 54 | y_grid = predict(classifier, newdata = grid_set) 55 | plot(set[, -3], main = 'Classifier (Test set)', 56 | xlab = 'Age', ylab = 'Estimated Salary', 57 | xlim = range(X1), ylim = range(X2)) 58 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 59 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 60 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 15 - K-Nearest Neighbors (K-NN)/classification_template.R: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, 
SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting classifier to the Training set 23 | # Create your classifier here 24 | 25 | # Predicting the Test set results 26 | y_pred = predict(classifier, newdata = test_set[-3]) 27 | 28 | # Making the Confusion Matrix 29 | cm = table(test_set[, 3], y_pred) 30 | 31 | # Visualising the Training set results 32 | library(ElemStatLearn) 33 | set = training_set 34 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 35 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 36 | grid_set = expand.grid(X1, X2) 37 | colnames(grid_set) = c('Age', 'EstimatedSalary') 38 | y_grid = predict(classifier, newdata = grid_set) 39 | plot(set[, -3], 40 | main = 'Classifier (Training set)', 41 | xlab = 'Age', ylab = 'Estimated Salary', 42 | xlim = range(X1), ylim = range(X2)) 43 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 44 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 45 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 46 | 47 | # Visualising the Test set results 48 | library(ElemStatLearn) 49 | set = test_set 50 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 51 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 52 | grid_set = expand.grid(X1, X2) 53 | colnames(grid_set) = c('Age', 'EstimatedSalary') 54 | y_grid = predict(classifier, newdata = grid_set) 55 | plot(set[, -3], main = 'Classifier (Test set)', 56 | xlab = 'Age', ylab = 'Estimated Salary', 57 | xlim = range(X1), ylim = range(X2)) 58 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 59 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 60 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 
'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 18 - Naive Bayes/Naive_Bayes/classification_template.R: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting classifier to the Training set 23 | # Create your classifier here 24 | 25 | # Predicting the Test set results 26 | y_pred = predict(classifier, newdata = test_set[-3]) 27 | 28 | # Making the Confusion Matrix 29 | cm = table(test_set[, 3], y_pred) 30 | 31 | # Visualising the Training set results 32 | library(ElemStatLearn) 33 | set = training_set 34 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 35 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 36 | grid_set = expand.grid(X1, X2) 37 | colnames(grid_set) = c('Age', 'EstimatedSalary') 38 | y_grid = predict(classifier, newdata = grid_set) 39 | plot(set[, -3], 40 | main = 'Classifier (Training set)', 41 | xlab = 'Age', ylab = 'Estimated Salary', 42 | xlim = range(X1), ylim = range(X2)) 43 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 44 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 45 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 46 | 47 | # 
Visualising the Test set results 48 | library(ElemStatLearn) 49 | set = test_set 50 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 51 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 52 | grid_set = expand.grid(X1, X2) 53 | colnames(grid_set) = c('Age', 'EstimatedSalary') 54 | y_grid = predict(classifier, newdata = grid_set) 55 | plot(set[, -3], main = 'Classifier (Test set)', 56 | xlab = 'Age', ylab = 'Estimated Salary', 57 | xlim = range(X1), ylim = range(X2)) 58 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 59 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 60 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 16 - Support Vector Machine (SVM)/SVM/classification_template.R: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting classifier to the Training set 23 | # Create your classifier here 24 | 25 | # Predicting the Test set results 26 | y_pred = predict(classifier, newdata = test_set[-3]) 27 | 28 | # Making the Confusion Matrix 29 | cm = table(test_set[, 3], y_pred) 30 | 31 | # Visualising the Training set results 32 | 
library(ElemStatLearn) 33 | set = training_set 34 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 35 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 36 | grid_set = expand.grid(X1, X2) 37 | colnames(grid_set) = c('Age', 'EstimatedSalary') 38 | y_grid = predict(classifier, newdata = grid_set) 39 | plot(set[, -3], 40 | main = 'Classifier (Training set)', 41 | xlab = 'Age', ylab = 'Estimated Salary', 42 | xlim = range(X1), ylim = range(X2)) 43 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 44 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 45 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 46 | 47 | # Visualising the Test set results 48 | library(ElemStatLearn) 49 | set = test_set 50 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 51 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 52 | grid_set = expand.grid(X1, X2) 53 | colnames(grid_set) = c('Age', 'EstimatedSalary') 54 | y_grid = predict(classifier, newdata = grid_set) 55 | plot(set[, -3], main = 'Classifier (Test set)', 56 | xlab = 'Age', ylab = 'Estimated Salary', 57 | xlim = range(X1), ylim = range(X2)) 58 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 59 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 60 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/Classification_Template/classification_template.R: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # 
Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting classifier to the Training set 23 | # Create your classifier here 24 | 25 | # Predicting the Test set results 26 | y_pred = predict(classifier, newdata = test_set[-3]) 27 | 28 | # Making the Confusion Matrix 29 | cm = table(test_set[, 3], y_pred) 30 | 31 | # Visualising the Training set results 32 | library(ElemStatLearn) 33 | set = training_set 34 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 35 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 36 | grid_set = expand.grid(X1, X2) 37 | colnames(grid_set) = c('Age', 'EstimatedSalary') 38 | y_grid = predict(classifier, newdata = grid_set) 39 | plot(set[, -3], 40 | main = 'Classifier (Training set)', 41 | xlab = 'Age', ylab = 'Estimated Salary', 42 | xlim = range(X1), ylim = range(X2)) 43 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 44 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 45 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 46 | 47 | # Visualising the Test set results 48 | library(ElemStatLearn) 49 | set = test_set 50 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 51 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 52 | grid_set = expand.grid(X1, X2) 53 | colnames(grid_set) = c('Age', 'EstimatedSalary') 54 | y_grid = predict(classifier, newdata = grid_set) 55 | plot(set[, -3], main = 'Classifier (Test set)', 56 | xlab = 'Age', ylab = 'Estimated Salary', 57 | xlim = range(X1), ylim = range(X2)) 58 | contour(X1, X2, matrix(as.numeric(y_grid), 
length(X1), length(X2)), add = TRUE) 59 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 60 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 18 - Naive Bayes/Naive_Bayes/naive_bayes.R: -------------------------------------------------------------------------------- 1 | # Naive Bayes 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting SVM to the Training set 23 | # install.packages('e1071') 24 | library(e1071) 25 | classifier = naiveBayes(x = training_set[-3], 26 | y = training_set$Purchased) 27 | 28 | # Predicting the Test set results 29 | y_pred = predict(classifier, newdata = test_set[-3]) 30 | 31 | # Making the Confusion Matrix 32 | cm = table(test_set[, 3], y_pred) 33 | 34 | # Visualising the Training set results 35 | library(ElemStatLearn) 36 | set = training_set 37 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 38 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 39 | grid_set = expand.grid(X1, X2) 40 | colnames(grid_set) = c('Age', 'EstimatedSalary') 41 | y_grid = predict(classifier, newdata = grid_set) 42 | plot(set[, -3], 43 | main = 'SVM (Training set)', 44 | xlab = 'Age', ylab = 'Estimated Salary', 45 | xlim = range(X1), ylim = range(X2)) 46 | contour(X1, X2, 
matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 47 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 48 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 49 | 50 | # Visualising the Test set results 51 | library(ElemStatLearn) 52 | set = test_set 53 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 54 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 55 | grid_set = expand.grid(X1, X2) 56 | colnames(grid_set) = c('Age', 'EstimatedSalary') 57 | y_grid = predict(classifier, newdata = grid_set) 58 | plot(set[, -3], main = 'SVM (Test set)', 59 | xlab = 'Age', ylab = 'Estimated Salary', 60 | xlim = range(X1), ylim = range(X2)) 61 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 62 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 63 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 15 - K-Nearest Neighbors (K-NN)/knn.R: -------------------------------------------------------------------------------- 1 | # K-Nearest Neighbors (K-NN) 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting K-NN to the Training set and Predicting the Test set results 23 | library(class) 24 | y_pred = 
knn(train = training_set[, -3], 25 | test = test_set[, -3], 26 | cl = training_set[, 3], 27 | k = 5, 28 | prob = TRUE) 29 | 30 | # Making the Confusion Matrix 31 | cm = table(test_set[, 3], y_pred) 32 | 33 | # Visualising the Training set results 34 | library(ElemStatLearn) 35 | set = training_set 36 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 37 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 38 | grid_set = expand.grid(X1, X2) 39 | colnames(grid_set) = c('Age', 'EstimatedSalary') 40 | y_grid = knn(train = training_set[, -3], test = grid_set, cl = training_set[, 3], k = 5) 41 | plot(set[, -3], 42 | main = 'K-NN (Training set)', 43 | xlab = 'Age', ylab = 'Estimated Salary', 44 | xlim = range(X1), ylim = range(X2)) 45 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 46 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 47 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 48 | 49 | # Visualising the Test set results 50 | library(ElemStatLearn) 51 | set = test_set 52 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 53 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 54 | grid_set = expand.grid(X1, X2) 55 | colnames(grid_set) = c('Age', 'EstimatedSalary') 56 | y_grid = knn(train = training_set[, -3], test = grid_set, cl = training_set[, 3], k = 5) 57 | plot(set[, -3], 58 | main = 'K-NN (Test set)', 59 | xlab = 'Age', ylab = 'Estimated Salary', 60 | xlim = range(X1), ylim = range(X2)) 61 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 62 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 63 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 16 - Support Vector Machine (SVM)/SVM/svm.R: 
-------------------------------------------------------------------------------- 1 | # Support Vector Machine (SVM) 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting SVM to the Training set 23 | # install.packages('e1071') 24 | library(e1071) 25 | classifier = svm(formula = Purchased ~ ., 26 | data = training_set, 27 | type = 'C-classification', 28 | kernel = 'linear') 29 | 30 | # Predicting the Test set results 31 | y_pred = predict(classifier, newdata = test_set[-3]) 32 | 33 | # Making the Confusion Matrix 34 | cm = table(test_set[, 3], y_pred) 35 | 36 | # Visualising the Training set results 37 | library(ElemStatLearn) 38 | set = training_set 39 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 40 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 41 | grid_set = expand.grid(X1, X2) 42 | colnames(grid_set) = c('Age', 'EstimatedSalary') 43 | y_grid = predict(classifier, newdata = grid_set) 44 | plot(set[, -3], 45 | main = 'SVM (Training set)', 46 | xlab = 'Age', ylab = 'Estimated Salary', 47 | xlim = range(X1), ylim = range(X2)) 48 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 49 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 50 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 51 | 52 | # Visualising the Test set results 53 | library(ElemStatLearn) 
54 | set = test_set 55 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 56 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 57 | grid_set = expand.grid(X1, X2) 58 | colnames(grid_set) = c('Age', 'EstimatedSalary') 59 | y_grid = predict(classifier, newdata = grid_set) 60 | plot(set[, -3], main = 'SVM (Test set)', 61 | xlab = 'Age', ylab = 'Estimated Salary', 62 | xlim = range(X1), ylim = range(X2)) 63 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 64 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 65 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 17 - Kernel SVM/Kernel_SVM/kernel_svm.R: -------------------------------------------------------------------------------- 1 | # Kernel SVM 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting Kernel SVM to the Training set 23 | # install.packages('e1071') 24 | library(e1071) 25 | classifier = svm(formula = Purchased ~ ., 26 | data = training_set, 27 | type = 'C-classification', 28 | kernel = 'radial') 29 | 30 | # Predicting the Test set results 31 | y_pred = predict(classifier, newdata = test_set[-3]) 32 | 33 | # Making the Confusion Matrix 34 | cm = table(test_set[, 3], y_pred) 35 | 36 | # 
Visualising the Training set results 37 | library(ElemStatLearn) 38 | set = training_set 39 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 40 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 41 | grid_set = expand.grid(X1, X2) 42 | colnames(grid_set) = c('Age', 'EstimatedSalary') 43 | y_grid = predict(classifier, newdata = grid_set) 44 | plot(set[, -3], 45 | main = 'Kernel SVM (Training set)', 46 | xlab = 'Age', ylab = 'Estimated Salary', 47 | xlim = range(X1), ylim = range(X2)) 48 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 49 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 50 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 51 | 52 | # Visualising the Test set results 53 | library(ElemStatLearn) 54 | set = test_set 55 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 56 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 57 | grid_set = expand.grid(X1, X2) 58 | colnames(grid_set) = c('Age', 'EstimatedSalary') 59 | y_grid = predict(classifier, newdata = grid_set) 60 | plot(set[, -3], main = 'Kernel SVM (Test set)', 61 | xlab = 'Age', ylab = 'Estimated Salary', 62 | xlim = range(X1), ylim = range(X2)) 63 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 64 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 65 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/Logistic_Regression/logistic_regression.R: -------------------------------------------------------------------------------- 1 | # Logistic Regression 2 | 3 | # Importing the dataset 4 | dataset = read.csv('Social_Network_Ads.csv') 5 | dataset = dataset[3:5] 6 | 7 | # Encoding the target feature as factor 8 | dataset$Purchased = factor(dataset$Purchased, 
levels = c(0, 1)) 9 | 10 | # Splitting the dataset into the Training set and Test set 11 | # install.packages('caTools') 12 | library(caTools) 13 | set.seed(123) 14 | split = sample.split(dataset$Purchased, SplitRatio = 0.75) 15 | training_set = subset(dataset, split == TRUE) 16 | test_set = subset(dataset, split == FALSE) 17 | 18 | # Feature Scaling 19 | training_set[-3] = scale(training_set[-3]) 20 | test_set[-3] = scale(test_set[-3]) 21 | 22 | # Fitting Logistic Regression to the Training set 23 | classifier = glm(formula = Purchased ~ ., 24 | family = binomial, 25 | data = training_set) 26 | 27 | # Predicting the Test set results 28 | prob_pred = predict(classifier, type = 'response', newdata = test_set[-3]) 29 | y_pred = ifelse(prob_pred > 0.5, 1, 0) 30 | 31 | # Making the Confusion Matrix 32 | cm = table(test_set[, 3], y_pred > 0.5) 33 | 34 | # Visualising the Training set results 35 | library(ElemStatLearn) 36 | set = training_set 37 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 38 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 39 | grid_set = expand.grid(X1, X2) 40 | colnames(grid_set) = c('Age', 'EstimatedSalary') 41 | prob_set = predict(classifier, type = 'response', newdata = grid_set) 42 | y_grid = ifelse(prob_set > 0.5, 1, 0) 43 | plot(set[, -3], 44 | main = 'Logistic Regression (Training set)', 45 | xlab = 'Age', ylab = 'Estimated Salary', 46 | xlim = range(X1), ylim = range(X2)) 47 | contour(X1, X2, matrix(as.numeric(y_grid), length(X1), length(X2)), add = TRUE) 48 | points(grid_set, pch = '.', col = ifelse(y_grid == 1, 'springgreen3', 'tomato')) 49 | points(set, pch = 21, bg = ifelse(set[, 3] == 1, 'green4', 'red3')) 50 | 51 | # Visualising the Test set results 52 | library(ElemStatLearn) 53 | set = test_set 54 | X1 = seq(min(set[, 1]) - 1, max(set[, 1]) + 1, by = 0.01) 55 | X2 = seq(min(set[, 2]) - 1, max(set[, 2]) + 1, by = 0.01) 56 | grid_set = expand.grid(X1, X2) 57 | colnames(grid_set) = c('Age', 'EstimatedSalary') 58 
# Classification template
#
# Loads the Social Network Ads dataset (Age, EstimatedSalary -> Purchased),
# splits and scales it, fits a user-supplied classifier, reports a confusion
# matrix, and visualises the decision regions on the training and test sets.

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values  # features: Age, EstimatedSalary
y = dataset.iloc[:, 4].values       # target: Purchased (0/1)

# Splitting the dataset into the Training set and Test set.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0)

# Feature Scaling -- fit on the training set only, then reuse the same
# statistics on the test set to avoid information leakage.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fitting classifier to the Training set
# Create your classifier here (any estimator exposing .fit / .predict)

# Predicting the Test set results
y_pred = classifier.predict(X_test)

# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)

from matplotlib.colors import ListedColormap


def _plot_decision_boundary(X_set, y_set, title):
    """Shade the classifier's decision regions and overlay the scaled points.

    X_set: 2-column scaled feature array; y_set: matching 0/1 labels.
    Reads the module-level ``classifier`` fitted above.
    """
    # Dense grid over the (padded) feature range; 0.01 step in scaled units.
    X1, X2 = np.meshgrid(
        np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01),
        np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01))
    plt.contourf(
        X1, X2,
        classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
        alpha=0.75, cmap=ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    # FIX: index a plain color tuple instead of calling a ListedColormap with
    # an integer and passing the result as ``c`` (deprecated / warning-prone).
    colors = ('red', 'green')
    for i, j in enumerate(np.unique(y_set)):
        plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                    color=colors[i], label=j)
    plt.title(title)
    plt.xlabel('Age')
    plt.ylabel('Estimated Salary')
    plt.legend()
    plt.show()


# Visualising the Training set results
_plot_decision_boundary(X_train, y_train, 'Classifier (Training set)')

# Visualising the Test set results
_plot_decision_boundary(X_test, y_test, 'Classifier (Test set)')
10 | X = dataset.iloc[:, [2, 3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc = StandardScaler() 20 | X_train = sc.fit_transform(X_train) 21 | X_test = sc.transform(X_test) 22 | 23 | # Fitting classifier to the Training set 24 | # Create your classifier here 25 | 26 | # Predicting the Test set results 27 | y_pred = classifier.predict(X_test) 28 | 29 | # Making the Confusion Matrix 30 | from sklearn.metrics import confusion_matrix 31 | cm = confusion_matrix(y_test, y_pred) 32 | 33 | # Visualising the Training set results 34 | from matplotlib.colors import ListedColormap 35 | X_set, y_set = X_train, y_train 36 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 37 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 38 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 39 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 40 | plt.xlim(X1.min(), X1.max()) 41 | plt.ylim(X2.min(), X2.max()) 42 | for i, j in enumerate(np.unique(y_set)): 43 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 44 | c = ListedColormap(('red', 'green'))(i), label = j) 45 | plt.title('Classifier (Training set)') 46 | plt.xlabel('Age') 47 | plt.ylabel('Estimated Salary') 48 | plt.legend() 49 | plt.show() 50 | 51 | # Visualising the Test set results 52 | from matplotlib.colors import ListedColormap 53 | X_set, y_set = X_test, y_test 54 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 55 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 56 | 
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 57 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 58 | plt.xlim(X1.min(), X1.max()) 59 | plt.ylim(X2.min(), X2.max()) 60 | for i, j in enumerate(np.unique(y_set)): 61 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 62 | c = ListedColormap(('red', 'green'))(i), label = j) 63 | plt.title('Classifier (Test set)') 64 | plt.xlabel('Age') 65 | plt.ylabel('Estimated Salary') 66 | plt.legend() 67 | plt.show() -------------------------------------------------------------------------------- /Part 3 - Classification/Section 15 - K-Nearest Neighbors (K-NN)/classification_template.py: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the libraries 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | # Importing the dataset 9 | dataset = pd.read_csv('Social_Network_Ads.csv') 10 | X = dataset.iloc[:, [2, 3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc = StandardScaler() 20 | X_train = sc.fit_transform(X_train) 21 | X_test = sc.transform(X_test) 22 | 23 | # Fitting classifier to the Training set 24 | # Create your classifier here 25 | 26 | # Predicting the Test set results 27 | y_pred = classifier.predict(X_test) 28 | 29 | # Making the Confusion Matrix 30 | from sklearn.metrics import confusion_matrix 31 | cm = confusion_matrix(y_test, y_pred) 32 | 33 | # Visualising the Training set results 34 | from matplotlib.colors import ListedColormap 35 | X_set, y_set = X_train, y_train 36 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 
1, stop = X_set[:, 0].max() + 1, step = 0.01), 37 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 38 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 39 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 40 | plt.xlim(X1.min(), X1.max()) 41 | plt.ylim(X2.min(), X2.max()) 42 | for i, j in enumerate(np.unique(y_set)): 43 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 44 | c = ListedColormap(('red', 'green'))(i), label = j) 45 | plt.title('Classifier (Training set)') 46 | plt.xlabel('Age') 47 | plt.ylabel('Estimated Salary') 48 | plt.legend() 49 | plt.show() 50 | 51 | # Visualising the Test set results 52 | from matplotlib.colors import ListedColormap 53 | X_set, y_set = X_test, y_test 54 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 55 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 56 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 57 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 58 | plt.xlim(X1.min(), X1.max()) 59 | plt.ylim(X2.min(), X2.max()) 60 | for i, j in enumerate(np.unique(y_set)): 61 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 62 | c = ListedColormap(('red', 'green'))(i), label = j) 63 | plt.title('Classifier (Test set)') 64 | plt.xlabel('Age') 65 | plt.ylabel('Estimated Salary') 66 | plt.legend() 67 | plt.show() -------------------------------------------------------------------------------- /Part 3 - Classification/Section 16 - Support Vector Machine (SVM)/SVM/classification_template.py: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the libraries 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | # Importing the dataset 9 | dataset = 
pd.read_csv('Social_Network_Ads.csv') 10 | X = dataset.iloc[:, [2, 3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc = StandardScaler() 20 | X_train = sc.fit_transform(X_train) 21 | X_test = sc.transform(X_test) 22 | 23 | # Fitting classifier to the Training set 24 | # Create your classifier here 25 | 26 | # Predicting the Test set results 27 | y_pred = classifier.predict(X_test) 28 | 29 | # Making the Confusion Matrix 30 | from sklearn.metrics import confusion_matrix 31 | cm = confusion_matrix(y_test, y_pred) 32 | 33 | # Visualising the Training set results 34 | from matplotlib.colors import ListedColormap 35 | X_set, y_set = X_train, y_train 36 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 37 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 38 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 39 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 40 | plt.xlim(X1.min(), X1.max()) 41 | plt.ylim(X2.min(), X2.max()) 42 | for i, j in enumerate(np.unique(y_set)): 43 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 44 | c = ListedColormap(('red', 'green'))(i), label = j) 45 | plt.title('Classifier (Training set)') 46 | plt.xlabel('Age') 47 | plt.ylabel('Estimated Salary') 48 | plt.legend() 49 | plt.show() 50 | 51 | # Visualising the Test set results 52 | from matplotlib.colors import ListedColormap 53 | X_set, y_set = X_test, y_test 54 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 55 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 
1].max() + 1, step = 0.01)) 56 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 57 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 58 | plt.xlim(X1.min(), X1.max()) 59 | plt.ylim(X2.min(), X2.max()) 60 | for i, j in enumerate(np.unique(y_set)): 61 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 62 | c = ListedColormap(('red', 'green'))(i), label = j) 63 | plt.title('Classifier (Test set)') 64 | plt.xlabel('Age') 65 | plt.ylabel('Estimated Salary') 66 | plt.legend() 67 | plt.show() -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/Classification_Template/classification_template.py: -------------------------------------------------------------------------------- 1 | # Classification template 2 | 3 | # Importing the libraries 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | # Importing the dataset 9 | dataset = pd.read_csv('Social_Network_Ads.csv') 10 | X = dataset.iloc[:, [2, 3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc = StandardScaler() 20 | X_train = sc.fit_transform(X_train) 21 | X_test = sc.transform(X_test) 22 | 23 | # Fitting classifier to the Training set 24 | # Create your classifier here 25 | 26 | # Predicting the Test set results 27 | y_pred = classifier.predict(X_test) 28 | 29 | # Making the Confusion Matrix 30 | from sklearn.metrics import confusion_matrix 31 | cm = confusion_matrix(y_test, y_pred) 32 | 33 | # Visualising the Training set results 34 | from matplotlib.colors import ListedColormap 35 | X_set, y_set = X_train, y_train 36 | X1, X2 = 
np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 37 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 38 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 39 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 40 | plt.xlim(X1.min(), X1.max()) 41 | plt.ylim(X2.min(), X2.max()) 42 | for i, j in enumerate(np.unique(y_set)): 43 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 44 | c = ListedColormap(('red', 'green'))(i), label = j) 45 | plt.title('Classifier (Training set)') 46 | plt.xlabel('Age') 47 | plt.ylabel('Estimated Salary') 48 | plt.legend() 49 | plt.show() 50 | 51 | # Visualising the Test set results 52 | from matplotlib.colors import ListedColormap 53 | X_set, y_set = X_test, y_test 54 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 55 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 56 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 57 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 58 | plt.xlim(X1.min(), X1.max()) 59 | plt.ylim(X2.min(), X2.max()) 60 | for i, j in enumerate(np.unique(y_set)): 61 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 62 | c = ListedColormap(('red', 'green'))(i), label = j) 63 | plt.title('Classifier (Test set)') 64 | plt.xlabel('Age') 65 | plt.ylabel('Estimated Salary') 66 | plt.legend() 67 | plt.show() -------------------------------------------------------------------------------- /Part 3 - Classification/Section 18 - Naive Bayes/Naive_Bayes/naive_bayes.py: -------------------------------------------------------------------------------- 1 | # Naive Bayes 2 | 3 | # Importing the libraries 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | # Importing the dataset 9 | dataset = 
pd.read_csv('Social_Network_Ads.csv') 10 | X = dataset.iloc[:, [2, 3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc = StandardScaler() 20 | X_train = sc.fit_transform(X_train) 21 | X_test = sc.transform(X_test) 22 | 23 | # Fitting Naive Bayes to the Training set 24 | from sklearn.naive_bayes import GaussianNB 25 | classifier = GaussianNB() 26 | classifier.fit(X_train, y_train) 27 | 28 | # Predicting the Test set results 29 | y_pred = classifier.predict(X_test) 30 | 31 | # Making the Confusion Matrix 32 | from sklearn.metrics import confusion_matrix 33 | cm = confusion_matrix(y_test, y_pred) 34 | 35 | # Visualising the Training set results 36 | from matplotlib.colors import ListedColormap 37 | X_set, y_set = X_train, y_train 38 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 39 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 40 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 41 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 42 | plt.xlim(X1.min(), X1.max()) 43 | plt.ylim(X2.min(), X2.max()) 44 | for i, j in enumerate(np.unique(y_set)): 45 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 46 | c = ListedColormap(('red', 'green'))(i), label = j) 47 | plt.title('Naive Bayes (Training set)') 48 | plt.xlabel('Age') 49 | plt.ylabel('Estimated Salary') 50 | plt.legend() 51 | plt.show() 52 | 53 | # Visualising the Test set results 54 | from matplotlib.colors import ListedColormap 55 | X_set, y_set = X_test, y_test 56 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 
# Kernel SVM
#
# Fits an RBF-kernel support vector classifier on the Social Network Ads
# dataset (Age, EstimatedSalary -> Purchased) and visualises the resulting
# non-linear decision boundary on the training and test sets.

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values  # features: Age, EstimatedSalary
y = dataset.iloc[:, 4].values       # target: Purchased (0/1)

# Splitting the dataset into the Training set and Test set.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0)

# Feature Scaling -- essential for SVMs; fit on the training set only.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fitting Kernel SVM to the Training set (Gaussian/RBF kernel handles the
# non-linearly-separable classes in this dataset).
from sklearn.svm import SVC
classifier = SVC(kernel='rbf', random_state=0)
classifier.fit(X_train, y_train)

# Predicting the Test set results
y_pred = classifier.predict(X_test)

# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)

from matplotlib.colors import ListedColormap


def _plot_decision_boundary(X_set, y_set, title):
    """Shade the fitted SVM's decision regions and overlay the scaled points."""
    X1, X2 = np.meshgrid(
        np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01),
        np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01))
    plt.contourf(
        X1, X2,
        classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
        alpha=0.75, cmap=ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    # FIX: use a plain color per class rather than calling a ListedColormap
    # with an integer (deprecated / warning-prone as a scatter ``c`` value).
    colors = ('red', 'green')
    for i, j in enumerate(np.unique(y_set)):
        plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                    color=colors[i], label=j)
    plt.title(title)
    plt.xlabel('Age')
    plt.ylabel('Estimated Salary')
    plt.legend()
    plt.show()


# Visualising the Training set results
_plot_decision_boundary(X_train, y_train, 'Kernel SVM (Training set)')

# Visualising the Test set results
_plot_decision_boundary(X_test, y_test, 'Kernel SVM (Test set)')
Importing the libraries 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | 8 | # Importing the dataset 9 | dataset = pd.read_csv('Social_Network_Ads.csv') 10 | X = dataset.iloc[:, [2, 3]].values 11 | y = dataset.iloc[:, 4].values 12 | 13 | # Splitting the dataset into the Training set and Test set 14 | from sklearn.cross_validation import train_test_split 15 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0) 16 | 17 | # Feature Scaling 18 | from sklearn.preprocessing import StandardScaler 19 | sc = StandardScaler() 20 | X_train = sc.fit_transform(X_train) 21 | X_test = sc.transform(X_test) 22 | 23 | # Fitting SVM to the Training set 24 | from sklearn.svm import SVC 25 | classifier = SVC(kernel = 'linear', random_state = 0) 26 | classifier.fit(X_train, y_train) 27 | 28 | # Predicting the Test set results 29 | y_pred = classifier.predict(X_test) 30 | 31 | # Making the Confusion Matrix 32 | from sklearn.metrics import confusion_matrix 33 | cm = confusion_matrix(y_test, y_pred) 34 | 35 | # Visualising the Training set results 36 | from matplotlib.colors import ListedColormap 37 | X_set, y_set = X_train, y_train 38 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 39 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 40 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 41 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 42 | plt.xlim(X1.min(), X1.max()) 43 | plt.ylim(X2.min(), X2.max()) 44 | for i, j in enumerate(np.unique(y_set)): 45 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 46 | c = ListedColormap(('red', 'green'))(i), label = j) 47 | plt.title('SVM (Training set)') 48 | plt.xlabel('Age') 49 | plt.ylabel('Estimated Salary') 50 | plt.legend() 51 | plt.show() 52 | 53 | # Visualising the Test set results 54 | from 
# K-Nearest Neighbors (K-NN)
#
# Fits a 5-nearest-neighbour classifier on the Social Network Ads dataset
# (Age, EstimatedSalary -> Purchased) and visualises the decision regions
# on the training and test sets.

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values  # features: Age, EstimatedSalary
y = dataset.iloc[:, 4].values       # target: Purchased (0/1)

# Splitting the dataset into the Training set and Test set.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0)

# Feature Scaling -- K-NN is distance-based, so both features must be on
# comparable scales; fit on the training set only.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fitting K-NN to the Training set.
# Minkowski metric with p=2 is the Euclidean distance.
from sklearn.neighbors import KNeighborsClassifier
classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
classifier.fit(X_train, y_train)

# Predicting the Test set results
y_pred = classifier.predict(X_test)

# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)

from matplotlib.colors import ListedColormap


def _plot_decision_boundary(X_set, y_set, title):
    """Shade the fitted K-NN decision regions and overlay the scaled points."""
    X1, X2 = np.meshgrid(
        np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01),
        np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01))
    plt.contourf(
        X1, X2,
        classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
        alpha=0.75, cmap=ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    # FIX: use a plain color per class rather than calling a ListedColormap
    # with an integer (deprecated / warning-prone as a scatter ``c`` value).
    colors = ('red', 'green')
    for i, j in enumerate(np.unique(y_set)):
        plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                    color=colors[i], label=j)
    plt.title(title)
    plt.xlabel('Age')
    plt.ylabel('Estimated Salary')
    plt.legend()
    plt.show()


# Visualising the Training set results
_plot_decision_boundary(X_train, y_train, 'K-NN (Training set)')

# Visualising the Test set results
_plot_decision_boundary(X_test, y_test, 'K-NN (Test set)')
# Logistic Regression
#
# Fits a logistic-regression classifier on the Social Network Ads dataset
# (Age, EstimatedSalary -> Purchased) and visualises the linear decision
# boundary on the training and test sets.

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values  # features: Age, EstimatedSalary
y = dataset.iloc[:, 4].values       # target: Purchased (0/1)

# Splitting the dataset into the Training set and Test set.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0)

# Feature Scaling -- fit on the training set only, reuse on the test set.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fitting Logistic Regression to the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state=0)
classifier.fit(X_train, y_train)

# Predicting the Test set results
y_pred = classifier.predict(X_test)

# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)

from matplotlib.colors import ListedColormap


def _plot_decision_boundary(X_set, y_set, title):
    """Shade the fitted model's decision regions and overlay the scaled points."""
    X1, X2 = np.meshgrid(
        np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01),
        np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01))
    plt.contourf(
        X1, X2,
        classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
        alpha=0.75, cmap=ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    # FIX: use a plain color per class rather than calling a ListedColormap
    # with an integer (deprecated / warning-prone as a scatter ``c`` value).
    colors = ('red', 'green')
    for i, j in enumerate(np.unique(y_set)):
        plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                    color=colors[i], label=j)
    plt.title(title)
    plt.xlabel('Age')
    plt.ylabel('Estimated Salary')
    plt.legend()
    plt.show()


# Visualising the Training set results
_plot_decision_boundary(X_train, y_train, 'Logistic Regression (Training set)')

# Visualising the Test set results
_plot_decision_boundary(X_test, y_test, 'Logistic Regression (Test set)')
X_test = sc.transform(X_test) 22 | 23 | # Fitting classifier to the Training set 24 | # Create your classifier here 25 | from sklearn.svm import SVC 26 | classifier = SVC(kernel = 'poly',random_state = 0) 27 | classifier.fit(X_train,y_train) 28 | # Predicting the Test set results 29 | y_pred = classifier.predict(X_test) 30 | 31 | # Making the Confusion Matrix 32 | from sklearn.metrics import confusion_matrix 33 | cm = confusion_matrix(y_test, y_pred) 34 | 35 | # Visualising the Training set results 36 | from matplotlib.colors import ListedColormap 37 | X_set, y_set = X_train, y_train 38 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 39 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 40 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 41 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 42 | plt.xlim(X1.min(), X1.max()) 43 | plt.ylim(X2.min(), X2.max()) 44 | for i, j in enumerate(np.unique(y_set)): 45 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 46 | c = ListedColormap(('red', 'green'))(i), label = j) 47 | plt.title('Classifier (Training set)') 48 | plt.xlabel('Age') 49 | plt.ylabel('Estimated Salary') 50 | plt.legend() 51 | plt.show() 52 | 53 | # Visualising the Test set results 54 | from matplotlib.colors import ListedColormap 55 | X_set, y_set = X_test, y_test 56 | X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), 57 | np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) 58 | plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), 59 | alpha = 0.75, cmap = ListedColormap(('red', 'green'))) 60 | plt.xlim(X1.min(), X1.max()) 61 | plt.ylim(X2.min(), X2.max()) 62 | for i, j in enumerate(np.unique(y_set)): 63 | plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1], 64 | c = 
# K-Nearest Neighbors (K-NN) classification on the Social Network Ads dataset.
#
# Reads Age / EstimatedSalary as features and Purchased (0/1) as the target,
# fits a K-NN classifier, reports a confusion matrix, and plots the decision
# regions for both the training and the test set.

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values  # columns 2,3 = Age, EstimatedSalary
y = dataset.iloc[:, 4].values       # column 4 = Purchased

# Splitting the dataset into the Training set and Test set.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0)

# Feature Scaling — K-NN is distance-based, so both features must share a
# scale. The scaler is fit on the training set only to avoid leakage.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fitting the K-NN classifier to the Training set.
# metric='minkowski' with p=2 is the Euclidean distance.
from sklearn.neighbors import KNeighborsClassifier
classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
classifier.fit(X_train, y_train)

# Predicting the Test set results
y_pred = classifier.predict(X_test)

# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)


def _plot_decision_regions(X_set, y_set, title):
    """Plot the classifier's decision regions and the labelled points.

    The original template duplicated this code verbatim for the training
    and test sets; it is factored out here so both plots stay consistent.
    """
    from matplotlib.colors import ListedColormap
    X1, X2 = np.meshgrid(
        np.arange(start=X_set[:, 0].min() - 1, stop=X_set[:, 0].max() + 1, step=0.01),
        np.arange(start=X_set[:, 1].min() - 1, stop=X_set[:, 1].max() + 1, step=0.01))
    # Predict every point of the mesh to colour the decision regions.
    plt.contourf(X1, X2,
                 classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
                 alpha=0.75, cmap=ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    for i, j in enumerate(np.unique(y_set)):
        plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                    c=ListedColormap(('red', 'green'))(i), label=j)
    plt.title(title)
    plt.xlabel('Age')
    plt.ylabel('Estimated Salary')
    plt.legend()
    plt.show()


# Visualising the Training set results
_plot_decision_regions(X_train, y_train, 'Classifier (Training set)')

# Visualising the Test set results
_plot_decision_regions(X_test, y_test, 'Classifier (Test set)')
[Introduction](#introduction) 7 | 2. [Classification Algorithms](#classification-algorithms) 8 | - [Logistic Regression](#logistic-regression) 9 | - [K-Nearest Neighbors (KNN)](#k-nearest-neighbors-knn) 10 | - [Support Vector Machines (SVM)](#support-vector-machines-svm) 11 | - [Naive Bayes Classifier](#naive-bayes-classifier) 12 | - [Decision Tree Classifier](#decision-tree-classifier) 13 | - [Random Forest Classifier](#random-forest-classifier) 14 | 3. [Use Cases](#use-cases) 15 | 4. [Installation](#installation) 16 | 5. [Usage](#usage) 17 | 6. [Contributing](#contributing) 18 | 7. [License](#license) 19 | 20 | ## Introduction 21 | Classification is a type of supervised learning that predicts the class or category of a data point. It is best used when the output variable is discrete. This repository explores different classification algorithms and their implementation using Python. 22 | 23 | ## Classification Algorithms 24 | 25 | ### Logistic Regression 26 | Logistic Regression is used for binary classification problems. It models the probability distribution of the output variable using a sigmoid function. 27 | 28 | ![Logistic Regression Example](https://www.simplilearn.com/ice9/free_resources_article_thumb/logistic-regression-example-graph.JPG) 29 | 30 | ### K-Nearest Neighbors (KNN) 31 | KNN is a simple, instance-based learning algorithm that assigns a data point to the class of its nearest neighbors. 32 | 33 | ![KNN Classification](https://www.simplilearn.com/ice9/free_resources_article_thumb/knn-classification-machine-learning.JPG) 34 | 35 | ### Support Vector Machines (SVM) 36 | SVMs are powerful classifiers that find the hyperplane which best separates the classes. They can handle linear and non-linear data using kernel tricks. 
37 | 38 | ![SVM Example](https://www.simplilearn.com/ice9/free_resources_article_thumb/support-vector-machines-graph-machine-learning.JPG) 39 | 40 | ### Naive Bayes Classifier 41 | Naive Bayes is a probabilistic classifier based on Bayes' Theorem. It assumes independence between features given the class label. 42 | 43 | ![Naive Bayes Example](https://www.simplilearn.com/ice9/free_resources_article_thumb/naive-bayes-spam-machine-learning.JPG) 44 | 45 | ### Decision Tree Classifier 46 | Decision Trees classify data by splitting it into subsets based on feature values, creating a tree of decisions. 47 | 48 | ![Decision Tree Example](https://www.simplilearn.com/ice9/free_resources_article_thumb/decision-tree-classifier-machine-learning.JPG) 49 | 50 | ### Random Forest Classifier 51 | Random Forests are ensembles of Decision Trees, which improve classification accuracy by reducing overfitting. 52 | 53 | ![Random Forest Example](https://www.simplilearn.com/ice9/free_resources_article_thumb/random-forest-classifier-graph.JPG) 54 | 55 | ## Use Cases 56 | - **Spam Detection**: Classifying emails as spam or ham. 57 | - **Customer Segmentation**: Identifying different customer segments based on purchase behavior. 58 | - **Loan Approval**: Predicting if a bank loan should be granted. 59 | - **Sentiment Analysis**: Classifying social media posts as positive or negative. 60 | 61 | ## Installation 62 | Clone the repository and install the necessary dependencies. 63 | 64 | ```bash 65 | git clone https://github.com/your-username/ml-classification-algorithms.git 66 | cd ml-classification-algorithms 67 | pip install -r requirements.txt 68 | ``` 69 | 70 | ## Usage 71 | Examples of using each classifier can be found in the `examples` directory. Each example includes data loading, model training, and prediction. 
72 | 73 | ```python 74 | from sklearn.model_selection import train_test_split 75 | from sklearn.metrics import accuracy_score 76 | from logistic_regression import LogisticRegressionModel 77 | 78 | # Load dataset 79 | X, y = load_data('dataset.csv') 80 | 81 | # Split data 82 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) 83 | 84 | # Initialize and train model 85 | model = LogisticRegressionModel() 86 | model.train(X_train, y_train) 87 | 88 | # Make predictions 89 | y_pred = model.predict(X_test) 90 | 91 | # Evaluate model 92 | accuracy = accuracy_score(y_test, y_pred) 93 | print(f'Accuracy: {accuracy}') 94 | ``` 95 | 96 | ## Contributing 97 | Contributions are welcome! Please read the [contributing guidelines](CONTRIBUTING.md) before submitting a pull request. 98 | 99 | ## License 100 | This project is licensed under the MIT License. See the [LICENSE](LICENSE) file for details. 101 | 102 | --- 103 | 104 | For detailed tutorials and theoretical explanations, please refer to the included markdown files and Jupyter notebooks. If you have any questions or need further clarification, feel free to open an issue or contact the repository maintainers. 
105 | -------------------------------------------------------------------------------- /Part 3 - Classification/Section 17 - Kernel SVM/Kernel_SVM/Social_Network_Ads.csv: -------------------------------------------------------------------------------- 1 | User ID,Gender,Age,EstimatedSalary,Purchased 2 | 15624510,Male,19,19000,0 3 | 15810944,Male,35,20000,0 4 | 15668575,Female,26,43000,0 5 | 15603246,Female,27,57000,0 6 | 15804002,Male,19,76000,0 7 | 15728773,Male,27,58000,0 8 | 15598044,Female,27,84000,0 9 | 15694829,Female,32,150000,1 10 | 15600575,Male,25,33000,0 11 | 15727311,Female,35,65000,0 12 | 15570769,Female,26,80000,0 13 | 15606274,Female,26,52000,0 14 | 15746139,Male,20,86000,0 15 | 15704987,Male,32,18000,0 16 | 15628972,Male,18,82000,0 17 | 15697686,Male,29,80000,0 18 | 15733883,Male,47,25000,1 19 | 15617482,Male,45,26000,1 20 | 15704583,Male,46,28000,1 21 | 15621083,Female,48,29000,1 22 | 15649487,Male,45,22000,1 23 | 15736760,Female,47,49000,1 24 | 15714658,Male,48,41000,1 25 | 15599081,Female,45,22000,1 26 | 15705113,Male,46,23000,1 27 | 15631159,Male,47,20000,1 28 | 15792818,Male,49,28000,1 29 | 15633531,Female,47,30000,1 30 | 15744529,Male,29,43000,0 31 | 15669656,Male,31,18000,0 32 | 15581198,Male,31,74000,0 33 | 15729054,Female,27,137000,1 34 | 15573452,Female,21,16000,0 35 | 15776733,Female,28,44000,0 36 | 15724858,Male,27,90000,0 37 | 15713144,Male,35,27000,0 38 | 15690188,Female,33,28000,0 39 | 15689425,Male,30,49000,0 40 | 15671766,Female,26,72000,0 41 | 15782806,Female,27,31000,0 42 | 15764419,Female,27,17000,0 43 | 15591915,Female,33,51000,0 44 | 15772798,Male,35,108000,0 45 | 15792008,Male,30,15000,0 46 | 15715541,Female,28,84000,0 47 | 15639277,Male,23,20000,0 48 | 15798850,Male,25,79000,0 49 | 15776348,Female,27,54000,0 50 | 15727696,Male,30,135000,1 51 | 15793813,Female,31,89000,0 52 | 15694395,Female,24,32000,0 53 | 15764195,Female,18,44000,0 54 | 15744919,Female,29,83000,0 55 | 15671655,Female,35,23000,0 56 | 
15654901,Female,27,58000,0 57 | 15649136,Female,24,55000,0 58 | 15775562,Female,23,48000,0 59 | 15807481,Male,28,79000,0 60 | 15642885,Male,22,18000,0 61 | 15789109,Female,32,117000,0 62 | 15814004,Male,27,20000,0 63 | 15673619,Male,25,87000,0 64 | 15595135,Female,23,66000,0 65 | 15583681,Male,32,120000,1 66 | 15605000,Female,59,83000,0 67 | 15718071,Male,24,58000,0 68 | 15679760,Male,24,19000,0 69 | 15654574,Female,23,82000,0 70 | 15577178,Female,22,63000,0 71 | 15595324,Female,31,68000,0 72 | 15756932,Male,25,80000,0 73 | 15726358,Female,24,27000,0 74 | 15595228,Female,20,23000,0 75 | 15782530,Female,33,113000,0 76 | 15592877,Male,32,18000,0 77 | 15651983,Male,34,112000,1 78 | 15746737,Male,18,52000,0 79 | 15774179,Female,22,27000,0 80 | 15667265,Female,28,87000,0 81 | 15655123,Female,26,17000,0 82 | 15595917,Male,30,80000,0 83 | 15668385,Male,39,42000,0 84 | 15709476,Male,20,49000,0 85 | 15711218,Male,35,88000,0 86 | 15798659,Female,30,62000,0 87 | 15663939,Female,31,118000,1 88 | 15694946,Male,24,55000,0 89 | 15631912,Female,28,85000,0 90 | 15768816,Male,26,81000,0 91 | 15682268,Male,35,50000,0 92 | 15684801,Male,22,81000,0 93 | 15636428,Female,30,116000,0 94 | 15809823,Male,26,15000,0 95 | 15699284,Female,29,28000,0 96 | 15786993,Female,29,83000,0 97 | 15709441,Female,35,44000,0 98 | 15710257,Female,35,25000,0 99 | 15582492,Male,28,123000,1 100 | 15575694,Male,35,73000,0 101 | 15756820,Female,28,37000,0 102 | 15766289,Male,27,88000,0 103 | 15593014,Male,28,59000,0 104 | 15584545,Female,32,86000,0 105 | 15675949,Female,33,149000,1 106 | 15672091,Female,19,21000,0 107 | 15801658,Male,21,72000,0 108 | 15706185,Female,26,35000,0 109 | 15789863,Male,27,89000,0 110 | 15720943,Male,26,86000,0 111 | 15697997,Female,38,80000,0 112 | 15665416,Female,39,71000,0 113 | 15660200,Female,37,71000,0 114 | 15619653,Male,38,61000,0 115 | 15773447,Male,37,55000,0 116 | 15739160,Male,42,80000,0 117 | 15689237,Male,40,57000,0 118 | 15679297,Male,35,75000,0 119 | 
15591433,Male,36,52000,0 120 | 15642725,Male,40,59000,0 121 | 15701962,Male,41,59000,0 122 | 15811613,Female,36,75000,0 123 | 15741049,Male,37,72000,0 124 | 15724423,Female,40,75000,0 125 | 15574305,Male,35,53000,0 126 | 15678168,Female,41,51000,0 127 | 15697020,Female,39,61000,0 128 | 15610801,Male,42,65000,0 129 | 15745232,Male,26,32000,0 130 | 15722758,Male,30,17000,0 131 | 15792102,Female,26,84000,0 132 | 15675185,Male,31,58000,0 133 | 15801247,Male,33,31000,0 134 | 15725660,Male,30,87000,0 135 | 15638963,Female,21,68000,0 136 | 15800061,Female,28,55000,0 137 | 15578006,Male,23,63000,0 138 | 15668504,Female,20,82000,0 139 | 15687491,Male,30,107000,1 140 | 15610403,Female,28,59000,0 141 | 15741094,Male,19,25000,0 142 | 15807909,Male,19,85000,0 143 | 15666141,Female,18,68000,0 144 | 15617134,Male,35,59000,0 145 | 15783029,Male,30,89000,0 146 | 15622833,Female,34,25000,0 147 | 15746422,Female,24,89000,0 148 | 15750839,Female,27,96000,1 149 | 15749130,Female,41,30000,0 150 | 15779862,Male,29,61000,0 151 | 15767871,Male,20,74000,0 152 | 15679651,Female,26,15000,0 153 | 15576219,Male,41,45000,0 154 | 15699247,Male,31,76000,0 155 | 15619087,Female,36,50000,0 156 | 15605327,Male,40,47000,0 157 | 15610140,Female,31,15000,0 158 | 15791174,Male,46,59000,0 159 | 15602373,Male,29,75000,0 160 | 15762605,Male,26,30000,0 161 | 15598840,Female,32,135000,1 162 | 15744279,Male,32,100000,1 163 | 15670619,Male,25,90000,0 164 | 15599533,Female,37,33000,0 165 | 15757837,Male,35,38000,0 166 | 15697574,Female,33,69000,0 167 | 15578738,Female,18,86000,0 168 | 15762228,Female,22,55000,0 169 | 15614827,Female,35,71000,0 170 | 15789815,Male,29,148000,1 171 | 15579781,Female,29,47000,0 172 | 15587013,Male,21,88000,0 173 | 15570932,Male,34,115000,0 174 | 15794661,Female,26,118000,0 175 | 15581654,Female,34,43000,0 176 | 15644296,Female,34,72000,0 177 | 15614420,Female,23,28000,0 178 | 15609653,Female,35,47000,0 179 | 15594577,Male,25,22000,0 180 | 15584114,Male,24,23000,0 181 | 
15673367,Female,31,34000,0 182 | 15685576,Male,26,16000,0 183 | 15774727,Female,31,71000,0 184 | 15694288,Female,32,117000,1 185 | 15603319,Male,33,43000,0 186 | 15759066,Female,33,60000,0 187 | 15814816,Male,31,66000,0 188 | 15724402,Female,20,82000,0 189 | 15571059,Female,33,41000,0 190 | 15674206,Male,35,72000,0 191 | 15715160,Male,28,32000,0 192 | 15730448,Male,24,84000,0 193 | 15662067,Female,19,26000,0 194 | 15779581,Male,29,43000,0 195 | 15662901,Male,19,70000,0 196 | 15689751,Male,28,89000,0 197 | 15667742,Male,34,43000,0 198 | 15738448,Female,30,79000,0 199 | 15680243,Female,20,36000,0 200 | 15745083,Male,26,80000,0 201 | 15708228,Male,35,22000,0 202 | 15628523,Male,35,39000,0 203 | 15708196,Male,49,74000,0 204 | 15735549,Female,39,134000,1 205 | 15809347,Female,41,71000,0 206 | 15660866,Female,58,101000,1 207 | 15766609,Female,47,47000,0 208 | 15654230,Female,55,130000,1 209 | 15794566,Female,52,114000,0 210 | 15800890,Female,40,142000,1 211 | 15697424,Female,46,22000,0 212 | 15724536,Female,48,96000,1 213 | 15735878,Male,52,150000,1 214 | 15707596,Female,59,42000,0 215 | 15657163,Male,35,58000,0 216 | 15622478,Male,47,43000,0 217 | 15779529,Female,60,108000,1 218 | 15636023,Male,49,65000,0 219 | 15582066,Male,40,78000,0 220 | 15666675,Female,46,96000,0 221 | 15732987,Male,59,143000,1 222 | 15789432,Female,41,80000,0 223 | 15663161,Male,35,91000,1 224 | 15694879,Male,37,144000,1 225 | 15593715,Male,60,102000,1 226 | 15575002,Female,35,60000,0 227 | 15622171,Male,37,53000,0 228 | 15795224,Female,36,126000,1 229 | 15685346,Male,56,133000,1 230 | 15691808,Female,40,72000,0 231 | 15721007,Female,42,80000,1 232 | 15794253,Female,35,147000,1 233 | 15694453,Male,39,42000,0 234 | 15813113,Male,40,107000,1 235 | 15614187,Male,49,86000,1 236 | 15619407,Female,38,112000,0 237 | 15646227,Male,46,79000,1 238 | 15660541,Male,40,57000,0 239 | 15753874,Female,37,80000,0 240 | 15617877,Female,46,82000,0 241 | 15772073,Female,53,143000,1 242 | 15701537,Male,42,149000,1 243 
| 15736228,Male,38,59000,0 244 | 15780572,Female,50,88000,1 245 | 15769596,Female,56,104000,1 246 | 15586996,Female,41,72000,0 247 | 15722061,Female,51,146000,1 248 | 15638003,Female,35,50000,0 249 | 15775590,Female,57,122000,1 250 | 15730688,Male,41,52000,0 251 | 15753102,Female,35,97000,1 252 | 15810075,Female,44,39000,0 253 | 15723373,Male,37,52000,0 254 | 15795298,Female,48,134000,1 255 | 15584320,Female,37,146000,1 256 | 15724161,Female,50,44000,0 257 | 15750056,Female,52,90000,1 258 | 15609637,Female,41,72000,0 259 | 15794493,Male,40,57000,0 260 | 15569641,Female,58,95000,1 261 | 15815236,Female,45,131000,1 262 | 15811177,Female,35,77000,0 263 | 15680587,Male,36,144000,1 264 | 15672821,Female,55,125000,1 265 | 15767681,Female,35,72000,0 266 | 15600379,Male,48,90000,1 267 | 15801336,Female,42,108000,1 268 | 15721592,Male,40,75000,0 269 | 15581282,Male,37,74000,0 270 | 15746203,Female,47,144000,1 271 | 15583137,Male,40,61000,0 272 | 15680752,Female,43,133000,0 273 | 15688172,Female,59,76000,1 274 | 15791373,Male,60,42000,1 275 | 15589449,Male,39,106000,1 276 | 15692819,Female,57,26000,1 277 | 15727467,Male,57,74000,1 278 | 15734312,Male,38,71000,0 279 | 15764604,Male,49,88000,1 280 | 15613014,Female,52,38000,1 281 | 15759684,Female,50,36000,1 282 | 15609669,Female,59,88000,1 283 | 15685536,Male,35,61000,0 284 | 15750447,Male,37,70000,1 285 | 15663249,Female,52,21000,1 286 | 15638646,Male,48,141000,0 287 | 15734161,Female,37,93000,1 288 | 15631070,Female,37,62000,0 289 | 15761950,Female,48,138000,1 290 | 15649668,Male,41,79000,0 291 | 15713912,Female,37,78000,1 292 | 15586757,Male,39,134000,1 293 | 15596522,Male,49,89000,1 294 | 15625395,Male,55,39000,1 295 | 15760570,Male,37,77000,0 296 | 15566689,Female,35,57000,0 297 | 15725794,Female,36,63000,0 298 | 15673539,Male,42,73000,1 299 | 15705298,Female,43,112000,1 300 | 15675791,Male,45,79000,0 301 | 15747043,Male,46,117000,1 302 | 15736397,Female,58,38000,1 303 | 15678201,Male,48,74000,1 304 | 
15720745,Female,37,137000,1 305 | 15637593,Male,37,79000,1 306 | 15598070,Female,40,60000,0 307 | 15787550,Male,42,54000,0 308 | 15603942,Female,51,134000,0 309 | 15733973,Female,47,113000,1 310 | 15596761,Male,36,125000,1 311 | 15652400,Female,38,50000,0 312 | 15717893,Female,42,70000,0 313 | 15622585,Male,39,96000,1 314 | 15733964,Female,38,50000,0 315 | 15753861,Female,49,141000,1 316 | 15747097,Female,39,79000,0 317 | 15594762,Female,39,75000,1 318 | 15667417,Female,54,104000,1 319 | 15684861,Male,35,55000,0 320 | 15742204,Male,45,32000,1 321 | 15623502,Male,36,60000,0 322 | 15774872,Female,52,138000,1 323 | 15611191,Female,53,82000,1 324 | 15674331,Male,41,52000,0 325 | 15619465,Female,48,30000,1 326 | 15575247,Female,48,131000,1 327 | 15695679,Female,41,60000,0 328 | 15713463,Male,41,72000,0 329 | 15785170,Female,42,75000,0 330 | 15796351,Male,36,118000,1 331 | 15639576,Female,47,107000,1 332 | 15693264,Male,38,51000,0 333 | 15589715,Female,48,119000,1 334 | 15769902,Male,42,65000,0 335 | 15587177,Male,40,65000,0 336 | 15814553,Male,57,60000,1 337 | 15601550,Female,36,54000,0 338 | 15664907,Male,58,144000,1 339 | 15612465,Male,35,79000,0 340 | 15810800,Female,38,55000,0 341 | 15665760,Male,39,122000,1 342 | 15588080,Female,53,104000,1 343 | 15776844,Male,35,75000,0 344 | 15717560,Female,38,65000,0 345 | 15629739,Female,47,51000,1 346 | 15729908,Male,47,105000,1 347 | 15716781,Female,41,63000,0 348 | 15646936,Male,53,72000,1 349 | 15768151,Female,54,108000,1 350 | 15579212,Male,39,77000,0 351 | 15721835,Male,38,61000,0 352 | 15800515,Female,38,113000,1 353 | 15591279,Male,37,75000,0 354 | 15587419,Female,42,90000,1 355 | 15750335,Female,37,57000,0 356 | 15699619,Male,36,99000,1 357 | 15606472,Male,60,34000,1 358 | 15778368,Male,54,70000,1 359 | 15671387,Female,41,72000,0 360 | 15573926,Male,40,71000,1 361 | 15709183,Male,42,54000,0 362 | 15577514,Male,43,129000,1 363 | 15778830,Female,53,34000,1 364 | 15768072,Female,47,50000,1 365 | 15768293,Female,42,79000,0 
366 | 15654456,Male,42,104000,1 367 | 15807525,Female,59,29000,1 368 | 15574372,Female,58,47000,1 369 | 15671249,Male,46,88000,1 370 | 15779744,Male,38,71000,0 371 | 15624755,Female,54,26000,1 372 | 15611430,Female,60,46000,1 373 | 15774744,Male,60,83000,1 374 | 15629885,Female,39,73000,0 375 | 15708791,Male,59,130000,1 376 | 15793890,Female,37,80000,0 377 | 15646091,Female,46,32000,1 378 | 15596984,Female,46,74000,0 379 | 15800215,Female,42,53000,0 380 | 15577806,Male,41,87000,1 381 | 15749381,Female,58,23000,1 382 | 15683758,Male,42,64000,0 383 | 15670615,Male,48,33000,1 384 | 15715622,Female,44,139000,1 385 | 15707634,Male,49,28000,1 386 | 15806901,Female,57,33000,1 387 | 15775335,Male,56,60000,1 388 | 15724150,Female,49,39000,1 389 | 15627220,Male,39,71000,0 390 | 15672330,Male,47,34000,1 391 | 15668521,Female,48,35000,1 392 | 15807837,Male,48,33000,1 393 | 15592570,Male,47,23000,1 394 | 15748589,Female,45,45000,1 395 | 15635893,Male,60,42000,1 396 | 15757632,Female,39,59000,0 397 | 15691863,Female,46,41000,1 398 | 15706071,Male,51,23000,1 399 | 15654296,Female,50,20000,1 400 | 15755018,Male,36,33000,0 401 | 15594041,Female,49,36000,1 -------------------------------------------------------------------------------- /Part 3 - Classification/Section 18 - Naive Bayes/Naive_Bayes/Social_Network_Ads.csv: -------------------------------------------------------------------------------- 1 | User ID,Gender,Age,EstimatedSalary,Purchased 2 | 15624510,Male,19,19000,0 3 | 15810944,Male,35,20000,0 4 | 15668575,Female,26,43000,0 5 | 15603246,Female,27,57000,0 6 | 15804002,Male,19,76000,0 7 | 15728773,Male,27,58000,0 8 | 15598044,Female,27,84000,0 9 | 15694829,Female,32,150000,1 10 | 15600575,Male,25,33000,0 11 | 15727311,Female,35,65000,0 12 | 15570769,Female,26,80000,0 13 | 15606274,Female,26,52000,0 14 | 15746139,Male,20,86000,0 15 | 15704987,Male,32,18000,0 16 | 15628972,Male,18,82000,0 17 | 15697686,Male,29,80000,0 18 | 15733883,Male,47,25000,1 19 | 
15617482,Male,45,26000,1 20 | 15704583,Male,46,28000,1 21 | 15621083,Female,48,29000,1 22 | 15649487,Male,45,22000,1 23 | 15736760,Female,47,49000,1 24 | 15714658,Male,48,41000,1 25 | 15599081,Female,45,22000,1 26 | 15705113,Male,46,23000,1 27 | 15631159,Male,47,20000,1 28 | 15792818,Male,49,28000,1 29 | 15633531,Female,47,30000,1 30 | 15744529,Male,29,43000,0 31 | 15669656,Male,31,18000,0 32 | 15581198,Male,31,74000,0 33 | 15729054,Female,27,137000,1 34 | 15573452,Female,21,16000,0 35 | 15776733,Female,28,44000,0 36 | 15724858,Male,27,90000,0 37 | 15713144,Male,35,27000,0 38 | 15690188,Female,33,28000,0 39 | 15689425,Male,30,49000,0 40 | 15671766,Female,26,72000,0 41 | 15782806,Female,27,31000,0 42 | 15764419,Female,27,17000,0 43 | 15591915,Female,33,51000,0 44 | 15772798,Male,35,108000,0 45 | 15792008,Male,30,15000,0 46 | 15715541,Female,28,84000,0 47 | 15639277,Male,23,20000,0 48 | 15798850,Male,25,79000,0 49 | 15776348,Female,27,54000,0 50 | 15727696,Male,30,135000,1 51 | 15793813,Female,31,89000,0 52 | 15694395,Female,24,32000,0 53 | 15764195,Female,18,44000,0 54 | 15744919,Female,29,83000,0 55 | 15671655,Female,35,23000,0 56 | 15654901,Female,27,58000,0 57 | 15649136,Female,24,55000,0 58 | 15775562,Female,23,48000,0 59 | 15807481,Male,28,79000,0 60 | 15642885,Male,22,18000,0 61 | 15789109,Female,32,117000,0 62 | 15814004,Male,27,20000,0 63 | 15673619,Male,25,87000,0 64 | 15595135,Female,23,66000,0 65 | 15583681,Male,32,120000,1 66 | 15605000,Female,59,83000,0 67 | 15718071,Male,24,58000,0 68 | 15679760,Male,24,19000,0 69 | 15654574,Female,23,82000,0 70 | 15577178,Female,22,63000,0 71 | 15595324,Female,31,68000,0 72 | 15756932,Male,25,80000,0 73 | 15726358,Female,24,27000,0 74 | 15595228,Female,20,23000,0 75 | 15782530,Female,33,113000,0 76 | 15592877,Male,32,18000,0 77 | 15651983,Male,34,112000,1 78 | 15746737,Male,18,52000,0 79 | 15774179,Female,22,27000,0 80 | 15667265,Female,28,87000,0 81 | 15655123,Female,26,17000,0 82 | 15595917,Male,30,80000,0 83 | 
15668385,Male,39,42000,0 84 | 15709476,Male,20,49000,0 85 | 15711218,Male,35,88000,0 86 | 15798659,Female,30,62000,0 87 | 15663939,Female,31,118000,1 88 | 15694946,Male,24,55000,0 89 | 15631912,Female,28,85000,0 90 | 15768816,Male,26,81000,0 91 | 15682268,Male,35,50000,0 92 | 15684801,Male,22,81000,0 93 | 15636428,Female,30,116000,0 94 | 15809823,Male,26,15000,0 95 | 15699284,Female,29,28000,0 96 | 15786993,Female,29,83000,0 97 | 15709441,Female,35,44000,0 98 | 15710257,Female,35,25000,0 99 | 15582492,Male,28,123000,1 100 | 15575694,Male,35,73000,0 101 | 15756820,Female,28,37000,0 102 | 15766289,Male,27,88000,0 103 | 15593014,Male,28,59000,0 104 | 15584545,Female,32,86000,0 105 | 15675949,Female,33,149000,1 106 | 15672091,Female,19,21000,0 107 | 15801658,Male,21,72000,0 108 | 15706185,Female,26,35000,0 109 | 15789863,Male,27,89000,0 110 | 15720943,Male,26,86000,0 111 | 15697997,Female,38,80000,0 112 | 15665416,Female,39,71000,0 113 | 15660200,Female,37,71000,0 114 | 15619653,Male,38,61000,0 115 | 15773447,Male,37,55000,0 116 | 15739160,Male,42,80000,0 117 | 15689237,Male,40,57000,0 118 | 15679297,Male,35,75000,0 119 | 15591433,Male,36,52000,0 120 | 15642725,Male,40,59000,0 121 | 15701962,Male,41,59000,0 122 | 15811613,Female,36,75000,0 123 | 15741049,Male,37,72000,0 124 | 15724423,Female,40,75000,0 125 | 15574305,Male,35,53000,0 126 | 15678168,Female,41,51000,0 127 | 15697020,Female,39,61000,0 128 | 15610801,Male,42,65000,0 129 | 15745232,Male,26,32000,0 130 | 15722758,Male,30,17000,0 131 | 15792102,Female,26,84000,0 132 | 15675185,Male,31,58000,0 133 | 15801247,Male,33,31000,0 134 | 15725660,Male,30,87000,0 135 | 15638963,Female,21,68000,0 136 | 15800061,Female,28,55000,0 137 | 15578006,Male,23,63000,0 138 | 15668504,Female,20,82000,0 139 | 15687491,Male,30,107000,1 140 | 15610403,Female,28,59000,0 141 | 15741094,Male,19,25000,0 142 | 15807909,Male,19,85000,0 143 | 15666141,Female,18,68000,0 144 | 15617134,Male,35,59000,0 145 | 15783029,Male,30,89000,0 146 | 
15622833,Female,34,25000,0 147 | 15746422,Female,24,89000,0 148 | 15750839,Female,27,96000,1 149 | 15749130,Female,41,30000,0 150 | 15779862,Male,29,61000,0 151 | 15767871,Male,20,74000,0 152 | 15679651,Female,26,15000,0 153 | 15576219,Male,41,45000,0 154 | 15699247,Male,31,76000,0 155 | 15619087,Female,36,50000,0 156 | 15605327,Male,40,47000,0 157 | 15610140,Female,31,15000,0 158 | 15791174,Male,46,59000,0 159 | 15602373,Male,29,75000,0 160 | 15762605,Male,26,30000,0 161 | 15598840,Female,32,135000,1 162 | 15744279,Male,32,100000,1 163 | 15670619,Male,25,90000,0 164 | 15599533,Female,37,33000,0 165 | 15757837,Male,35,38000,0 166 | 15697574,Female,33,69000,0 167 | 15578738,Female,18,86000,0 168 | 15762228,Female,22,55000,0 169 | 15614827,Female,35,71000,0 170 | 15789815,Male,29,148000,1 171 | 15579781,Female,29,47000,0 172 | 15587013,Male,21,88000,0 173 | 15570932,Male,34,115000,0 174 | 15794661,Female,26,118000,0 175 | 15581654,Female,34,43000,0 176 | 15644296,Female,34,72000,0 177 | 15614420,Female,23,28000,0 178 | 15609653,Female,35,47000,0 179 | 15594577,Male,25,22000,0 180 | 15584114,Male,24,23000,0 181 | 15673367,Female,31,34000,0 182 | 15685576,Male,26,16000,0 183 | 15774727,Female,31,71000,0 184 | 15694288,Female,32,117000,1 185 | 15603319,Male,33,43000,0 186 | 15759066,Female,33,60000,0 187 | 15814816,Male,31,66000,0 188 | 15724402,Female,20,82000,0 189 | 15571059,Female,33,41000,0 190 | 15674206,Male,35,72000,0 191 | 15715160,Male,28,32000,0 192 | 15730448,Male,24,84000,0 193 | 15662067,Female,19,26000,0 194 | 15779581,Male,29,43000,0 195 | 15662901,Male,19,70000,0 196 | 15689751,Male,28,89000,0 197 | 15667742,Male,34,43000,0 198 | 15738448,Female,30,79000,0 199 | 15680243,Female,20,36000,0 200 | 15745083,Male,26,80000,0 201 | 15708228,Male,35,22000,0 202 | 15628523,Male,35,39000,0 203 | 15708196,Male,49,74000,0 204 | 15735549,Female,39,134000,1 205 | 15809347,Female,41,71000,0 206 | 15660866,Female,58,101000,1 207 | 15766609,Female,47,47000,0 208 | 
15654230,Female,55,130000,1 209 | 15794566,Female,52,114000,0 210 | 15800890,Female,40,142000,1 211 | 15697424,Female,46,22000,0 212 | 15724536,Female,48,96000,1 213 | 15735878,Male,52,150000,1 214 | 15707596,Female,59,42000,0 215 | 15657163,Male,35,58000,0 216 | 15622478,Male,47,43000,0 217 | 15779529,Female,60,108000,1 218 | 15636023,Male,49,65000,0 219 | 15582066,Male,40,78000,0 220 | 15666675,Female,46,96000,0 221 | 15732987,Male,59,143000,1 222 | 15789432,Female,41,80000,0 223 | 15663161,Male,35,91000,1 224 | 15694879,Male,37,144000,1 225 | 15593715,Male,60,102000,1 226 | 15575002,Female,35,60000,0 227 | 15622171,Male,37,53000,0 228 | 15795224,Female,36,126000,1 229 | 15685346,Male,56,133000,1 230 | 15691808,Female,40,72000,0 231 | 15721007,Female,42,80000,1 232 | 15794253,Female,35,147000,1 233 | 15694453,Male,39,42000,0 234 | 15813113,Male,40,107000,1 235 | 15614187,Male,49,86000,1 236 | 15619407,Female,38,112000,0 237 | 15646227,Male,46,79000,1 238 | 15660541,Male,40,57000,0 239 | 15753874,Female,37,80000,0 240 | 15617877,Female,46,82000,0 241 | 15772073,Female,53,143000,1 242 | 15701537,Male,42,149000,1 243 | 15736228,Male,38,59000,0 244 | 15780572,Female,50,88000,1 245 | 15769596,Female,56,104000,1 246 | 15586996,Female,41,72000,0 247 | 15722061,Female,51,146000,1 248 | 15638003,Female,35,50000,0 249 | 15775590,Female,57,122000,1 250 | 15730688,Male,41,52000,0 251 | 15753102,Female,35,97000,1 252 | 15810075,Female,44,39000,0 253 | 15723373,Male,37,52000,0 254 | 15795298,Female,48,134000,1 255 | 15584320,Female,37,146000,1 256 | 15724161,Female,50,44000,0 257 | 15750056,Female,52,90000,1 258 | 15609637,Female,41,72000,0 259 | 15794493,Male,40,57000,0 260 | 15569641,Female,58,95000,1 261 | 15815236,Female,45,131000,1 262 | 15811177,Female,35,77000,0 263 | 15680587,Male,36,144000,1 264 | 15672821,Female,55,125000,1 265 | 15767681,Female,35,72000,0 266 | 15600379,Male,48,90000,1 267 | 15801336,Female,42,108000,1 268 | 15721592,Male,40,75000,0 269 | 
15581282,Male,37,74000,0 270 | 15746203,Female,47,144000,1 271 | 15583137,Male,40,61000,0 272 | 15680752,Female,43,133000,0 273 | 15688172,Female,59,76000,1 274 | 15791373,Male,60,42000,1 275 | 15589449,Male,39,106000,1 276 | 15692819,Female,57,26000,1 277 | 15727467,Male,57,74000,1 278 | 15734312,Male,38,71000,0 279 | 15764604,Male,49,88000,1 280 | 15613014,Female,52,38000,1 281 | 15759684,Female,50,36000,1 282 | 15609669,Female,59,88000,1 283 | 15685536,Male,35,61000,0 284 | 15750447,Male,37,70000,1 285 | 15663249,Female,52,21000,1 286 | 15638646,Male,48,141000,0 287 | 15734161,Female,37,93000,1 288 | 15631070,Female,37,62000,0 289 | 15761950,Female,48,138000,1 290 | 15649668,Male,41,79000,0 291 | 15713912,Female,37,78000,1 292 | 15586757,Male,39,134000,1 293 | 15596522,Male,49,89000,1 294 | 15625395,Male,55,39000,1 295 | 15760570,Male,37,77000,0 296 | 15566689,Female,35,57000,0 297 | 15725794,Female,36,63000,0 298 | 15673539,Male,42,73000,1 299 | 15705298,Female,43,112000,1 300 | 15675791,Male,45,79000,0 301 | 15747043,Male,46,117000,1 302 | 15736397,Female,58,38000,1 303 | 15678201,Male,48,74000,1 304 | 15720745,Female,37,137000,1 305 | 15637593,Male,37,79000,1 306 | 15598070,Female,40,60000,0 307 | 15787550,Male,42,54000,0 308 | 15603942,Female,51,134000,0 309 | 15733973,Female,47,113000,1 310 | 15596761,Male,36,125000,1 311 | 15652400,Female,38,50000,0 312 | 15717893,Female,42,70000,0 313 | 15622585,Male,39,96000,1 314 | 15733964,Female,38,50000,0 315 | 15753861,Female,49,141000,1 316 | 15747097,Female,39,79000,0 317 | 15594762,Female,39,75000,1 318 | 15667417,Female,54,104000,1 319 | 15684861,Male,35,55000,0 320 | 15742204,Male,45,32000,1 321 | 15623502,Male,36,60000,0 322 | 15774872,Female,52,138000,1 323 | 15611191,Female,53,82000,1 324 | 15674331,Male,41,52000,0 325 | 15619465,Female,48,30000,1 326 | 15575247,Female,48,131000,1 327 | 15695679,Female,41,60000,0 328 | 15713463,Male,41,72000,0 329 | 15785170,Female,42,75000,0 330 | 15796351,Male,36,118000,1 
331 | 15639576,Female,47,107000,1 332 | 15693264,Male,38,51000,0 333 | 15589715,Female,48,119000,1 334 | 15769902,Male,42,65000,0 335 | 15587177,Male,40,65000,0 336 | 15814553,Male,57,60000,1 337 | 15601550,Female,36,54000,0 338 | 15664907,Male,58,144000,1 339 | 15612465,Male,35,79000,0 340 | 15810800,Female,38,55000,0 341 | 15665760,Male,39,122000,1 342 | 15588080,Female,53,104000,1 343 | 15776844,Male,35,75000,0 344 | 15717560,Female,38,65000,0 345 | 15629739,Female,47,51000,1 346 | 15729908,Male,47,105000,1 347 | 15716781,Female,41,63000,0 348 | 15646936,Male,53,72000,1 349 | 15768151,Female,54,108000,1 350 | 15579212,Male,39,77000,0 351 | 15721835,Male,38,61000,0 352 | 15800515,Female,38,113000,1 353 | 15591279,Male,37,75000,0 354 | 15587419,Female,42,90000,1 355 | 15750335,Female,37,57000,0 356 | 15699619,Male,36,99000,1 357 | 15606472,Male,60,34000,1 358 | 15778368,Male,54,70000,1 359 | 15671387,Female,41,72000,0 360 | 15573926,Male,40,71000,1 361 | 15709183,Male,42,54000,0 362 | 15577514,Male,43,129000,1 363 | 15778830,Female,53,34000,1 364 | 15768072,Female,47,50000,1 365 | 15768293,Female,42,79000,0 366 | 15654456,Male,42,104000,1 367 | 15807525,Female,59,29000,1 368 | 15574372,Female,58,47000,1 369 | 15671249,Male,46,88000,1 370 | 15779744,Male,38,71000,0 371 | 15624755,Female,54,26000,1 372 | 15611430,Female,60,46000,1 373 | 15774744,Male,60,83000,1 374 | 15629885,Female,39,73000,0 375 | 15708791,Male,59,130000,1 376 | 15793890,Female,37,80000,0 377 | 15646091,Female,46,32000,1 378 | 15596984,Female,46,74000,0 379 | 15800215,Female,42,53000,0 380 | 15577806,Male,41,87000,1 381 | 15749381,Female,58,23000,1 382 | 15683758,Male,42,64000,0 383 | 15670615,Male,48,33000,1 384 | 15715622,Female,44,139000,1 385 | 15707634,Male,49,28000,1 386 | 15806901,Female,57,33000,1 387 | 15775335,Male,56,60000,1 388 | 15724150,Female,49,39000,1 389 | 15627220,Male,39,71000,0 390 | 15672330,Male,47,34000,1 391 | 15668521,Female,48,35000,1 392 | 15807837,Male,48,33000,1 393 | 
15592570,Male,47,23000,1 394 | 15748589,Female,45,45000,1 395 | 15635893,Male,60,42000,1 396 | 15757632,Female,39,59000,0 397 | 15691863,Female,46,41000,1 398 | 15706071,Male,51,23000,1 399 | 15654296,Female,50,20000,1 400 | 15755018,Male,36,33000,0 401 | 15594041,Female,49,36000,1 -------------------------------------------------------------------------------- /Part 3 - Classification/Section 15 - K-Nearest Neighbors (K-NN)/Social_Network_Ads.csv: -------------------------------------------------------------------------------- 1 | User ID,Gender,Age,EstimatedSalary,Purchased 2 | 15624510,Male,19,19000,0 3 | 15810944,Male,35,20000,0 4 | 15668575,Female,26,43000,0 5 | 15603246,Female,27,57000,0 6 | 15804002,Male,19,76000,0 7 | 15728773,Male,27,58000,0 8 | 15598044,Female,27,84000,0 9 | 15694829,Female,32,150000,1 10 | 15600575,Male,25,33000,0 11 | 15727311,Female,35,65000,0 12 | 15570769,Female,26,80000,0 13 | 15606274,Female,26,52000,0 14 | 15746139,Male,20,86000,0 15 | 15704987,Male,32,18000,0 16 | 15628972,Male,18,82000,0 17 | 15697686,Male,29,80000,0 18 | 15733883,Male,47,25000,1 19 | 15617482,Male,45,26000,1 20 | 15704583,Male,46,28000,1 21 | 15621083,Female,48,29000,1 22 | 15649487,Male,45,22000,1 23 | 15736760,Female,47,49000,1 24 | 15714658,Male,48,41000,1 25 | 15599081,Female,45,22000,1 26 | 15705113,Male,46,23000,1 27 | 15631159,Male,47,20000,1 28 | 15792818,Male,49,28000,1 29 | 15633531,Female,47,30000,1 30 | 15744529,Male,29,43000,0 31 | 15669656,Male,31,18000,0 32 | 15581198,Male,31,74000,0 33 | 15729054,Female,27,137000,1 34 | 15573452,Female,21,16000,0 35 | 15776733,Female,28,44000,0 36 | 15724858,Male,27,90000,0 37 | 15713144,Male,35,27000,0 38 | 15690188,Female,33,28000,0 39 | 15689425,Male,30,49000,0 40 | 15671766,Female,26,72000,0 41 | 15782806,Female,27,31000,0 42 | 15764419,Female,27,17000,0 43 | 15591915,Female,33,51000,0 44 | 15772798,Male,35,108000,0 45 | 15792008,Male,30,15000,0 46 | 15715541,Female,28,84000,0 47 | 15639277,Male,23,20000,0 
48 | 15798850,Male,25,79000,0 49 | 15776348,Female,27,54000,0 50 | 15727696,Male,30,135000,1 51 | 15793813,Female,31,89000,0 52 | 15694395,Female,24,32000,0 53 | 15764195,Female,18,44000,0 54 | 15744919,Female,29,83000,0 55 | 15671655,Female,35,23000,0 56 | 15654901,Female,27,58000,0 57 | 15649136,Female,24,55000,0 58 | 15775562,Female,23,48000,0 59 | 15807481,Male,28,79000,0 60 | 15642885,Male,22,18000,0 61 | 15789109,Female,32,117000,0 62 | 15814004,Male,27,20000,0 63 | 15673619,Male,25,87000,0 64 | 15595135,Female,23,66000,0 65 | 15583681,Male,32,120000,1 66 | 15605000,Female,59,83000,0 67 | 15718071,Male,24,58000,0 68 | 15679760,Male,24,19000,0 69 | 15654574,Female,23,82000,0 70 | 15577178,Female,22,63000,0 71 | 15595324,Female,31,68000,0 72 | 15756932,Male,25,80000,0 73 | 15726358,Female,24,27000,0 74 | 15595228,Female,20,23000,0 75 | 15782530,Female,33,113000,0 76 | 15592877,Male,32,18000,0 77 | 15651983,Male,34,112000,1 78 | 15746737,Male,18,52000,0 79 | 15774179,Female,22,27000,0 80 | 15667265,Female,28,87000,0 81 | 15655123,Female,26,17000,0 82 | 15595917,Male,30,80000,0 83 | 15668385,Male,39,42000,0 84 | 15709476,Male,20,49000,0 85 | 15711218,Male,35,88000,0 86 | 15798659,Female,30,62000,0 87 | 15663939,Female,31,118000,1 88 | 15694946,Male,24,55000,0 89 | 15631912,Female,28,85000,0 90 | 15768816,Male,26,81000,0 91 | 15682268,Male,35,50000,0 92 | 15684801,Male,22,81000,0 93 | 15636428,Female,30,116000,0 94 | 15809823,Male,26,15000,0 95 | 15699284,Female,29,28000,0 96 | 15786993,Female,29,83000,0 97 | 15709441,Female,35,44000,0 98 | 15710257,Female,35,25000,0 99 | 15582492,Male,28,123000,1 100 | 15575694,Male,35,73000,0 101 | 15756820,Female,28,37000,0 102 | 15766289,Male,27,88000,0 103 | 15593014,Male,28,59000,0 104 | 15584545,Female,32,86000,0 105 | 15675949,Female,33,149000,1 106 | 15672091,Female,19,21000,0 107 | 15801658,Male,21,72000,0 108 | 15706185,Female,26,35000,0 109 | 15789863,Male,27,89000,0 110 | 15720943,Male,26,86000,0 111 | 
15697997,Female,38,80000,0 112 | 15665416,Female,39,71000,0 113 | 15660200,Female,37,71000,0 114 | 15619653,Male,38,61000,0 115 | 15773447,Male,37,55000,0 116 | 15739160,Male,42,80000,0 117 | 15689237,Male,40,57000,0 118 | 15679297,Male,35,75000,0 119 | 15591433,Male,36,52000,0 120 | 15642725,Male,40,59000,0 121 | 15701962,Male,41,59000,0 122 | 15811613,Female,36,75000,0 123 | 15741049,Male,37,72000,0 124 | 15724423,Female,40,75000,0 125 | 15574305,Male,35,53000,0 126 | 15678168,Female,41,51000,0 127 | 15697020,Female,39,61000,0 128 | 15610801,Male,42,65000,0 129 | 15745232,Male,26,32000,0 130 | 15722758,Male,30,17000,0 131 | 15792102,Female,26,84000,0 132 | 15675185,Male,31,58000,0 133 | 15801247,Male,33,31000,0 134 | 15725660,Male,30,87000,0 135 | 15638963,Female,21,68000,0 136 | 15800061,Female,28,55000,0 137 | 15578006,Male,23,63000,0 138 | 15668504,Female,20,82000,0 139 | 15687491,Male,30,107000,1 140 | 15610403,Female,28,59000,0 141 | 15741094,Male,19,25000,0 142 | 15807909,Male,19,85000,0 143 | 15666141,Female,18,68000,0 144 | 15617134,Male,35,59000,0 145 | 15783029,Male,30,89000,0 146 | 15622833,Female,34,25000,0 147 | 15746422,Female,24,89000,0 148 | 15750839,Female,27,96000,1 149 | 15749130,Female,41,30000,0 150 | 15779862,Male,29,61000,0 151 | 15767871,Male,20,74000,0 152 | 15679651,Female,26,15000,0 153 | 15576219,Male,41,45000,0 154 | 15699247,Male,31,76000,0 155 | 15619087,Female,36,50000,0 156 | 15605327,Male,40,47000,0 157 | 15610140,Female,31,15000,0 158 | 15791174,Male,46,59000,0 159 | 15602373,Male,29,75000,0 160 | 15762605,Male,26,30000,0 161 | 15598840,Female,32,135000,1 162 | 15744279,Male,32,100000,1 163 | 15670619,Male,25,90000,0 164 | 15599533,Female,37,33000,0 165 | 15757837,Male,35,38000,0 166 | 15697574,Female,33,69000,0 167 | 15578738,Female,18,86000,0 168 | 15762228,Female,22,55000,0 169 | 15614827,Female,35,71000,0 170 | 15789815,Male,29,148000,1 171 | 15579781,Female,29,47000,0 172 | 15587013,Male,21,88000,0 173 | 
15570932,Male,34,115000,0 174 | 15794661,Female,26,118000,0 175 | 15581654,Female,34,43000,0 176 | 15644296,Female,34,72000,0 177 | 15614420,Female,23,28000,0 178 | 15609653,Female,35,47000,0 179 | 15594577,Male,25,22000,0 180 | 15584114,Male,24,23000,0 181 | 15673367,Female,31,34000,0 182 | 15685576,Male,26,16000,0 183 | 15774727,Female,31,71000,0 184 | 15694288,Female,32,117000,1 185 | 15603319,Male,33,43000,0 186 | 15759066,Female,33,60000,0 187 | 15814816,Male,31,66000,0 188 | 15724402,Female,20,82000,0 189 | 15571059,Female,33,41000,0 190 | 15674206,Male,35,72000,0 191 | 15715160,Male,28,32000,0 192 | 15730448,Male,24,84000,0 193 | 15662067,Female,19,26000,0 194 | 15779581,Male,29,43000,0 195 | 15662901,Male,19,70000,0 196 | 15689751,Male,28,89000,0 197 | 15667742,Male,34,43000,0 198 | 15738448,Female,30,79000,0 199 | 15680243,Female,20,36000,0 200 | 15745083,Male,26,80000,0 201 | 15708228,Male,35,22000,0 202 | 15628523,Male,35,39000,0 203 | 15708196,Male,49,74000,0 204 | 15735549,Female,39,134000,1 205 | 15809347,Female,41,71000,0 206 | 15660866,Female,58,101000,1 207 | 15766609,Female,47,47000,0 208 | 15654230,Female,55,130000,1 209 | 15794566,Female,52,114000,0 210 | 15800890,Female,40,142000,1 211 | 15697424,Female,46,22000,0 212 | 15724536,Female,48,96000,1 213 | 15735878,Male,52,150000,1 214 | 15707596,Female,59,42000,0 215 | 15657163,Male,35,58000,0 216 | 15622478,Male,47,43000,0 217 | 15779529,Female,60,108000,1 218 | 15636023,Male,49,65000,0 219 | 15582066,Male,40,78000,0 220 | 15666675,Female,46,96000,0 221 | 15732987,Male,59,143000,1 222 | 15789432,Female,41,80000,0 223 | 15663161,Male,35,91000,1 224 | 15694879,Male,37,144000,1 225 | 15593715,Male,60,102000,1 226 | 15575002,Female,35,60000,0 227 | 15622171,Male,37,53000,0 228 | 15795224,Female,36,126000,1 229 | 15685346,Male,56,133000,1 230 | 15691808,Female,40,72000,0 231 | 15721007,Female,42,80000,1 232 | 15794253,Female,35,147000,1 233 | 15694453,Male,39,42000,0 234 | 15813113,Male,40,107000,1 
235 | 15614187,Male,49,86000,1 236 | 15619407,Female,38,112000,0 237 | 15646227,Male,46,79000,1 238 | 15660541,Male,40,57000,0 239 | 15753874,Female,37,80000,0 240 | 15617877,Female,46,82000,0 241 | 15772073,Female,53,143000,1 242 | 15701537,Male,42,149000,1 243 | 15736228,Male,38,59000,0 244 | 15780572,Female,50,88000,1 245 | 15769596,Female,56,104000,1 246 | 15586996,Female,41,72000,0 247 | 15722061,Female,51,146000,1 248 | 15638003,Female,35,50000,0 249 | 15775590,Female,57,122000,1 250 | 15730688,Male,41,52000,0 251 | 15753102,Female,35,97000,1 252 | 15810075,Female,44,39000,0 253 | 15723373,Male,37,52000,0 254 | 15795298,Female,48,134000,1 255 | 15584320,Female,37,146000,1 256 | 15724161,Female,50,44000,0 257 | 15750056,Female,52,90000,1 258 | 15609637,Female,41,72000,0 259 | 15794493,Male,40,57000,0 260 | 15569641,Female,58,95000,1 261 | 15815236,Female,45,131000,1 262 | 15811177,Female,35,77000,0 263 | 15680587,Male,36,144000,1 264 | 15672821,Female,55,125000,1 265 | 15767681,Female,35,72000,0 266 | 15600379,Male,48,90000,1 267 | 15801336,Female,42,108000,1 268 | 15721592,Male,40,75000,0 269 | 15581282,Male,37,74000,0 270 | 15746203,Female,47,144000,1 271 | 15583137,Male,40,61000,0 272 | 15680752,Female,43,133000,0 273 | 15688172,Female,59,76000,1 274 | 15791373,Male,60,42000,1 275 | 15589449,Male,39,106000,1 276 | 15692819,Female,57,26000,1 277 | 15727467,Male,57,74000,1 278 | 15734312,Male,38,71000,0 279 | 15764604,Male,49,88000,1 280 | 15613014,Female,52,38000,1 281 | 15759684,Female,50,36000,1 282 | 15609669,Female,59,88000,1 283 | 15685536,Male,35,61000,0 284 | 15750447,Male,37,70000,1 285 | 15663249,Female,52,21000,1 286 | 15638646,Male,48,141000,0 287 | 15734161,Female,37,93000,1 288 | 15631070,Female,37,62000,0 289 | 15761950,Female,48,138000,1 290 | 15649668,Male,41,79000,0 291 | 15713912,Female,37,78000,1 292 | 15586757,Male,39,134000,1 293 | 15596522,Male,49,89000,1 294 | 15625395,Male,55,39000,1 295 | 15760570,Male,37,77000,0 296 | 
15566689,Female,35,57000,0 297 | 15725794,Female,36,63000,0 298 | 15673539,Male,42,73000,1 299 | 15705298,Female,43,112000,1 300 | 15675791,Male,45,79000,0 301 | 15747043,Male,46,117000,1 302 | 15736397,Female,58,38000,1 303 | 15678201,Male,48,74000,1 304 | 15720745,Female,37,137000,1 305 | 15637593,Male,37,79000,1 306 | 15598070,Female,40,60000,0 307 | 15787550,Male,42,54000,0 308 | 15603942,Female,51,134000,0 309 | 15733973,Female,47,113000,1 310 | 15596761,Male,36,125000,1 311 | 15652400,Female,38,50000,0 312 | 15717893,Female,42,70000,0 313 | 15622585,Male,39,96000,1 314 | 15733964,Female,38,50000,0 315 | 15753861,Female,49,141000,1 316 | 15747097,Female,39,79000,0 317 | 15594762,Female,39,75000,1 318 | 15667417,Female,54,104000,1 319 | 15684861,Male,35,55000,0 320 | 15742204,Male,45,32000,1 321 | 15623502,Male,36,60000,0 322 | 15774872,Female,52,138000,1 323 | 15611191,Female,53,82000,1 324 | 15674331,Male,41,52000,0 325 | 15619465,Female,48,30000,1 326 | 15575247,Female,48,131000,1 327 | 15695679,Female,41,60000,0 328 | 15713463,Male,41,72000,0 329 | 15785170,Female,42,75000,0 330 | 15796351,Male,36,118000,1 331 | 15639576,Female,47,107000,1 332 | 15693264,Male,38,51000,0 333 | 15589715,Female,48,119000,1 334 | 15769902,Male,42,65000,0 335 | 15587177,Male,40,65000,0 336 | 15814553,Male,57,60000,1 337 | 15601550,Female,36,54000,0 338 | 15664907,Male,58,144000,1 339 | 15612465,Male,35,79000,0 340 | 15810800,Female,38,55000,0 341 | 15665760,Male,39,122000,1 342 | 15588080,Female,53,104000,1 343 | 15776844,Male,35,75000,0 344 | 15717560,Female,38,65000,0 345 | 15629739,Female,47,51000,1 346 | 15729908,Male,47,105000,1 347 | 15716781,Female,41,63000,0 348 | 15646936,Male,53,72000,1 349 | 15768151,Female,54,108000,1 350 | 15579212,Male,39,77000,0 351 | 15721835,Male,38,61000,0 352 | 15800515,Female,38,113000,1 353 | 15591279,Male,37,75000,0 354 | 15587419,Female,42,90000,1 355 | 15750335,Female,37,57000,0 356 | 15699619,Male,36,99000,1 357 | 
15606472,Male,60,34000,1 358 | 15778368,Male,54,70000,1 359 | 15671387,Female,41,72000,0 360 | 15573926,Male,40,71000,1 361 | 15709183,Male,42,54000,0 362 | 15577514,Male,43,129000,1 363 | 15778830,Female,53,34000,1 364 | 15768072,Female,47,50000,1 365 | 15768293,Female,42,79000,0 366 | 15654456,Male,42,104000,1 367 | 15807525,Female,59,29000,1 368 | 15574372,Female,58,47000,1 369 | 15671249,Male,46,88000,1 370 | 15779744,Male,38,71000,0 371 | 15624755,Female,54,26000,1 372 | 15611430,Female,60,46000,1 373 | 15774744,Male,60,83000,1 374 | 15629885,Female,39,73000,0 375 | 15708791,Male,59,130000,1 376 | 15793890,Female,37,80000,0 377 | 15646091,Female,46,32000,1 378 | 15596984,Female,46,74000,0 379 | 15800215,Female,42,53000,0 380 | 15577806,Male,41,87000,1 381 | 15749381,Female,58,23000,1 382 | 15683758,Male,42,64000,0 383 | 15670615,Male,48,33000,1 384 | 15715622,Female,44,139000,1 385 | 15707634,Male,49,28000,1 386 | 15806901,Female,57,33000,1 387 | 15775335,Male,56,60000,1 388 | 15724150,Female,49,39000,1 389 | 15627220,Male,39,71000,0 390 | 15672330,Male,47,34000,1 391 | 15668521,Female,48,35000,1 392 | 15807837,Male,48,33000,1 393 | 15592570,Male,47,23000,1 394 | 15748589,Female,45,45000,1 395 | 15635893,Male,60,42000,1 396 | 15757632,Female,39,59000,0 397 | 15691863,Female,46,41000,1 398 | 15706071,Male,51,23000,1 399 | 15654296,Female,50,20000,1 400 | 15755018,Male,36,33000,0 401 | 15594041,Female,49,36000,1 -------------------------------------------------------------------------------- /Part 3 - Classification/Section 16 - Support Vector Machine (SVM)/SVM/Social_Network_Ads.csv: -------------------------------------------------------------------------------- 1 | User ID,Gender,Age,EstimatedSalary,Purchased 2 | 15624510,Male,19,19000,0 3 | 15810944,Male,35,20000,0 4 | 15668575,Female,26,43000,0 5 | 15603246,Female,27,57000,0 6 | 15804002,Male,19,76000,0 7 | 15728773,Male,27,58000,0 8 | 15598044,Female,27,84000,0 9 | 15694829,Female,32,150000,1 10 | 
15600575,Male,25,33000,0 11 | 15727311,Female,35,65000,0 12 | 15570769,Female,26,80000,0 13 | 15606274,Female,26,52000,0 14 | 15746139,Male,20,86000,0 15 | 15704987,Male,32,18000,0 16 | 15628972,Male,18,82000,0 17 | 15697686,Male,29,80000,0 18 | 15733883,Male,47,25000,1 19 | 15617482,Male,45,26000,1 20 | 15704583,Male,46,28000,1 21 | 15621083,Female,48,29000,1 22 | 15649487,Male,45,22000,1 23 | 15736760,Female,47,49000,1 24 | 15714658,Male,48,41000,1 25 | 15599081,Female,45,22000,1 26 | 15705113,Male,46,23000,1 27 | 15631159,Male,47,20000,1 28 | 15792818,Male,49,28000,1 29 | 15633531,Female,47,30000,1 30 | 15744529,Male,29,43000,0 31 | 15669656,Male,31,18000,0 32 | 15581198,Male,31,74000,0 33 | 15729054,Female,27,137000,1 34 | 15573452,Female,21,16000,0 35 | 15776733,Female,28,44000,0 36 | 15724858,Male,27,90000,0 37 | 15713144,Male,35,27000,0 38 | 15690188,Female,33,28000,0 39 | 15689425,Male,30,49000,0 40 | 15671766,Female,26,72000,0 41 | 15782806,Female,27,31000,0 42 | 15764419,Female,27,17000,0 43 | 15591915,Female,33,51000,0 44 | 15772798,Male,35,108000,0 45 | 15792008,Male,30,15000,0 46 | 15715541,Female,28,84000,0 47 | 15639277,Male,23,20000,0 48 | 15798850,Male,25,79000,0 49 | 15776348,Female,27,54000,0 50 | 15727696,Male,30,135000,1 51 | 15793813,Female,31,89000,0 52 | 15694395,Female,24,32000,0 53 | 15764195,Female,18,44000,0 54 | 15744919,Female,29,83000,0 55 | 15671655,Female,35,23000,0 56 | 15654901,Female,27,58000,0 57 | 15649136,Female,24,55000,0 58 | 15775562,Female,23,48000,0 59 | 15807481,Male,28,79000,0 60 | 15642885,Male,22,18000,0 61 | 15789109,Female,32,117000,0 62 | 15814004,Male,27,20000,0 63 | 15673619,Male,25,87000,0 64 | 15595135,Female,23,66000,0 65 | 15583681,Male,32,120000,1 66 | 15605000,Female,59,83000,0 67 | 15718071,Male,24,58000,0 68 | 15679760,Male,24,19000,0 69 | 15654574,Female,23,82000,0 70 | 15577178,Female,22,63000,0 71 | 15595324,Female,31,68000,0 72 | 15756932,Male,25,80000,0 73 | 15726358,Female,24,27000,0 74 | 
15595228,Female,20,23000,0 75 | 15782530,Female,33,113000,0 76 | 15592877,Male,32,18000,0 77 | 15651983,Male,34,112000,1 78 | 15746737,Male,18,52000,0 79 | 15774179,Female,22,27000,0 80 | 15667265,Female,28,87000,0 81 | 15655123,Female,26,17000,0 82 | 15595917,Male,30,80000,0 83 | 15668385,Male,39,42000,0 84 | 15709476,Male,20,49000,0 85 | 15711218,Male,35,88000,0 86 | 15798659,Female,30,62000,0 87 | 15663939,Female,31,118000,1 88 | 15694946,Male,24,55000,0 89 | 15631912,Female,28,85000,0 90 | 15768816,Male,26,81000,0 91 | 15682268,Male,35,50000,0 92 | 15684801,Male,22,81000,0 93 | 15636428,Female,30,116000,0 94 | 15809823,Male,26,15000,0 95 | 15699284,Female,29,28000,0 96 | 15786993,Female,29,83000,0 97 | 15709441,Female,35,44000,0 98 | 15710257,Female,35,25000,0 99 | 15582492,Male,28,123000,1 100 | 15575694,Male,35,73000,0 101 | 15756820,Female,28,37000,0 102 | 15766289,Male,27,88000,0 103 | 15593014,Male,28,59000,0 104 | 15584545,Female,32,86000,0 105 | 15675949,Female,33,149000,1 106 | 15672091,Female,19,21000,0 107 | 15801658,Male,21,72000,0 108 | 15706185,Female,26,35000,0 109 | 15789863,Male,27,89000,0 110 | 15720943,Male,26,86000,0 111 | 15697997,Female,38,80000,0 112 | 15665416,Female,39,71000,0 113 | 15660200,Female,37,71000,0 114 | 15619653,Male,38,61000,0 115 | 15773447,Male,37,55000,0 116 | 15739160,Male,42,80000,0 117 | 15689237,Male,40,57000,0 118 | 15679297,Male,35,75000,0 119 | 15591433,Male,36,52000,0 120 | 15642725,Male,40,59000,0 121 | 15701962,Male,41,59000,0 122 | 15811613,Female,36,75000,0 123 | 15741049,Male,37,72000,0 124 | 15724423,Female,40,75000,0 125 | 15574305,Male,35,53000,0 126 | 15678168,Female,41,51000,0 127 | 15697020,Female,39,61000,0 128 | 15610801,Male,42,65000,0 129 | 15745232,Male,26,32000,0 130 | 15722758,Male,30,17000,0 131 | 15792102,Female,26,84000,0 132 | 15675185,Male,31,58000,0 133 | 15801247,Male,33,31000,0 134 | 15725660,Male,30,87000,0 135 | 15638963,Female,21,68000,0 136 | 15800061,Female,28,55000,0 137 | 
15578006,Male,23,63000,0 138 | 15668504,Female,20,82000,0 139 | 15687491,Male,30,107000,1 140 | 15610403,Female,28,59000,0 141 | 15741094,Male,19,25000,0 142 | 15807909,Male,19,85000,0 143 | 15666141,Female,18,68000,0 144 | 15617134,Male,35,59000,0 145 | 15783029,Male,30,89000,0 146 | 15622833,Female,34,25000,0 147 | 15746422,Female,24,89000,0 148 | 15750839,Female,27,96000,1 149 | 15749130,Female,41,30000,0 150 | 15779862,Male,29,61000,0 151 | 15767871,Male,20,74000,0 152 | 15679651,Female,26,15000,0 153 | 15576219,Male,41,45000,0 154 | 15699247,Male,31,76000,0 155 | 15619087,Female,36,50000,0 156 | 15605327,Male,40,47000,0 157 | 15610140,Female,31,15000,0 158 | 15791174,Male,46,59000,0 159 | 15602373,Male,29,75000,0 160 | 15762605,Male,26,30000,0 161 | 15598840,Female,32,135000,1 162 | 15744279,Male,32,100000,1 163 | 15670619,Male,25,90000,0 164 | 15599533,Female,37,33000,0 165 | 15757837,Male,35,38000,0 166 | 15697574,Female,33,69000,0 167 | 15578738,Female,18,86000,0 168 | 15762228,Female,22,55000,0 169 | 15614827,Female,35,71000,0 170 | 15789815,Male,29,148000,1 171 | 15579781,Female,29,47000,0 172 | 15587013,Male,21,88000,0 173 | 15570932,Male,34,115000,0 174 | 15794661,Female,26,118000,0 175 | 15581654,Female,34,43000,0 176 | 15644296,Female,34,72000,0 177 | 15614420,Female,23,28000,0 178 | 15609653,Female,35,47000,0 179 | 15594577,Male,25,22000,0 180 | 15584114,Male,24,23000,0 181 | 15673367,Female,31,34000,0 182 | 15685576,Male,26,16000,0 183 | 15774727,Female,31,71000,0 184 | 15694288,Female,32,117000,1 185 | 15603319,Male,33,43000,0 186 | 15759066,Female,33,60000,0 187 | 15814816,Male,31,66000,0 188 | 15724402,Female,20,82000,0 189 | 15571059,Female,33,41000,0 190 | 15674206,Male,35,72000,0 191 | 15715160,Male,28,32000,0 192 | 15730448,Male,24,84000,0 193 | 15662067,Female,19,26000,0 194 | 15779581,Male,29,43000,0 195 | 15662901,Male,19,70000,0 196 | 15689751,Male,28,89000,0 197 | 15667742,Male,34,43000,0 198 | 15738448,Female,30,79000,0 199 | 
15680243,Female,20,36000,0 200 | 15745083,Male,26,80000,0 201 | 15708228,Male,35,22000,0 202 | 15628523,Male,35,39000,0 203 | 15708196,Male,49,74000,0 204 | 15735549,Female,39,134000,1 205 | 15809347,Female,41,71000,0 206 | 15660866,Female,58,101000,1 207 | 15766609,Female,47,47000,0 208 | 15654230,Female,55,130000,1 209 | 15794566,Female,52,114000,0 210 | 15800890,Female,40,142000,1 211 | 15697424,Female,46,22000,0 212 | 15724536,Female,48,96000,1 213 | 15735878,Male,52,150000,1 214 | 15707596,Female,59,42000,0 215 | 15657163,Male,35,58000,0 216 | 15622478,Male,47,43000,0 217 | 15779529,Female,60,108000,1 218 | 15636023,Male,49,65000,0 219 | 15582066,Male,40,78000,0 220 | 15666675,Female,46,96000,0 221 | 15732987,Male,59,143000,1 222 | 15789432,Female,41,80000,0 223 | 15663161,Male,35,91000,1 224 | 15694879,Male,37,144000,1 225 | 15593715,Male,60,102000,1 226 | 15575002,Female,35,60000,0 227 | 15622171,Male,37,53000,0 228 | 15795224,Female,36,126000,1 229 | 15685346,Male,56,133000,1 230 | 15691808,Female,40,72000,0 231 | 15721007,Female,42,80000,1 232 | 15794253,Female,35,147000,1 233 | 15694453,Male,39,42000,0 234 | 15813113,Male,40,107000,1 235 | 15614187,Male,49,86000,1 236 | 15619407,Female,38,112000,0 237 | 15646227,Male,46,79000,1 238 | 15660541,Male,40,57000,0 239 | 15753874,Female,37,80000,0 240 | 15617877,Female,46,82000,0 241 | 15772073,Female,53,143000,1 242 | 15701537,Male,42,149000,1 243 | 15736228,Male,38,59000,0 244 | 15780572,Female,50,88000,1 245 | 15769596,Female,56,104000,1 246 | 15586996,Female,41,72000,0 247 | 15722061,Female,51,146000,1 248 | 15638003,Female,35,50000,0 249 | 15775590,Female,57,122000,1 250 | 15730688,Male,41,52000,0 251 | 15753102,Female,35,97000,1 252 | 15810075,Female,44,39000,0 253 | 15723373,Male,37,52000,0 254 | 15795298,Female,48,134000,1 255 | 15584320,Female,37,146000,1 256 | 15724161,Female,50,44000,0 257 | 15750056,Female,52,90000,1 258 | 15609637,Female,41,72000,0 259 | 15794493,Male,40,57000,0 260 | 
15569641,Female,58,95000,1 261 | 15815236,Female,45,131000,1 262 | 15811177,Female,35,77000,0 263 | 15680587,Male,36,144000,1 264 | 15672821,Female,55,125000,1 265 | 15767681,Female,35,72000,0 266 | 15600379,Male,48,90000,1 267 | 15801336,Female,42,108000,1 268 | 15721592,Male,40,75000,0 269 | 15581282,Male,37,74000,0 270 | 15746203,Female,47,144000,1 271 | 15583137,Male,40,61000,0 272 | 15680752,Female,43,133000,0 273 | 15688172,Female,59,76000,1 274 | 15791373,Male,60,42000,1 275 | 15589449,Male,39,106000,1 276 | 15692819,Female,57,26000,1 277 | 15727467,Male,57,74000,1 278 | 15734312,Male,38,71000,0 279 | 15764604,Male,49,88000,1 280 | 15613014,Female,52,38000,1 281 | 15759684,Female,50,36000,1 282 | 15609669,Female,59,88000,1 283 | 15685536,Male,35,61000,0 284 | 15750447,Male,37,70000,1 285 | 15663249,Female,52,21000,1 286 | 15638646,Male,48,141000,0 287 | 15734161,Female,37,93000,1 288 | 15631070,Female,37,62000,0 289 | 15761950,Female,48,138000,1 290 | 15649668,Male,41,79000,0 291 | 15713912,Female,37,78000,1 292 | 15586757,Male,39,134000,1 293 | 15596522,Male,49,89000,1 294 | 15625395,Male,55,39000,1 295 | 15760570,Male,37,77000,0 296 | 15566689,Female,35,57000,0 297 | 15725794,Female,36,63000,0 298 | 15673539,Male,42,73000,1 299 | 15705298,Female,43,112000,1 300 | 15675791,Male,45,79000,0 301 | 15747043,Male,46,117000,1 302 | 15736397,Female,58,38000,1 303 | 15678201,Male,48,74000,1 304 | 15720745,Female,37,137000,1 305 | 15637593,Male,37,79000,1 306 | 15598070,Female,40,60000,0 307 | 15787550,Male,42,54000,0 308 | 15603942,Female,51,134000,0 309 | 15733973,Female,47,113000,1 310 | 15596761,Male,36,125000,1 311 | 15652400,Female,38,50000,0 312 | 15717893,Female,42,70000,0 313 | 15622585,Male,39,96000,1 314 | 15733964,Female,38,50000,0 315 | 15753861,Female,49,141000,1 316 | 15747097,Female,39,79000,0 317 | 15594762,Female,39,75000,1 318 | 15667417,Female,54,104000,1 319 | 15684861,Male,35,55000,0 320 | 15742204,Male,45,32000,1 321 | 15623502,Male,36,60000,0 
322 | 15774872,Female,52,138000,1 323 | 15611191,Female,53,82000,1 324 | 15674331,Male,41,52000,0 325 | 15619465,Female,48,30000,1 326 | 15575247,Female,48,131000,1 327 | 15695679,Female,41,60000,0 328 | 15713463,Male,41,72000,0 329 | 15785170,Female,42,75000,0 330 | 15796351,Male,36,118000,1 331 | 15639576,Female,47,107000,1 332 | 15693264,Male,38,51000,0 333 | 15589715,Female,48,119000,1 334 | 15769902,Male,42,65000,0 335 | 15587177,Male,40,65000,0 336 | 15814553,Male,57,60000,1 337 | 15601550,Female,36,54000,0 338 | 15664907,Male,58,144000,1 339 | 15612465,Male,35,79000,0 340 | 15810800,Female,38,55000,0 341 | 15665760,Male,39,122000,1 342 | 15588080,Female,53,104000,1 343 | 15776844,Male,35,75000,0 344 | 15717560,Female,38,65000,0 345 | 15629739,Female,47,51000,1 346 | 15729908,Male,47,105000,1 347 | 15716781,Female,41,63000,0 348 | 15646936,Male,53,72000,1 349 | 15768151,Female,54,108000,1 350 | 15579212,Male,39,77000,0 351 | 15721835,Male,38,61000,0 352 | 15800515,Female,38,113000,1 353 | 15591279,Male,37,75000,0 354 | 15587419,Female,42,90000,1 355 | 15750335,Female,37,57000,0 356 | 15699619,Male,36,99000,1 357 | 15606472,Male,60,34000,1 358 | 15778368,Male,54,70000,1 359 | 15671387,Female,41,72000,0 360 | 15573926,Male,40,71000,1 361 | 15709183,Male,42,54000,0 362 | 15577514,Male,43,129000,1 363 | 15778830,Female,53,34000,1 364 | 15768072,Female,47,50000,1 365 | 15768293,Female,42,79000,0 366 | 15654456,Male,42,104000,1 367 | 15807525,Female,59,29000,1 368 | 15574372,Female,58,47000,1 369 | 15671249,Male,46,88000,1 370 | 15779744,Male,38,71000,0 371 | 15624755,Female,54,26000,1 372 | 15611430,Female,60,46000,1 373 | 15774744,Male,60,83000,1 374 | 15629885,Female,39,73000,0 375 | 15708791,Male,59,130000,1 376 | 15793890,Female,37,80000,0 377 | 15646091,Female,46,32000,1 378 | 15596984,Female,46,74000,0 379 | 15800215,Female,42,53000,0 380 | 15577806,Male,41,87000,1 381 | 15749381,Female,58,23000,1 382 | 15683758,Male,42,64000,0 383 | 15670615,Male,48,33000,1 
384 | 15715622,Female,44,139000,1 385 | 15707634,Male,49,28000,1 386 | 15806901,Female,57,33000,1 387 | 15775335,Male,56,60000,1 388 | 15724150,Female,49,39000,1 389 | 15627220,Male,39,71000,0 390 | 15672330,Male,47,34000,1 391 | 15668521,Female,48,35000,1 392 | 15807837,Male,48,33000,1 393 | 15592570,Male,47,23000,1 394 | 15748589,Female,45,45000,1 395 | 15635893,Male,60,42000,1 396 | 15757632,Female,39,59000,0 397 | 15691863,Female,46,41000,1 398 | 15706071,Male,51,23000,1 399 | 15654296,Female,50,20000,1 400 | 15755018,Male,36,33000,0 401 | 15594041,Female,49,36000,1 -------------------------------------------------------------------------------- /Part 3 - Classification/Section 14 - Logistic Regression/Logistic_Regression/Social_Network_Ads.csv: -------------------------------------------------------------------------------- 1 | User ID,Gender,Age,EstimatedSalary,Purchased 2 | 15624510,Male,19,19000,0 3 | 15810944,Male,35,20000,0 4 | 15668575,Female,26,43000,0 5 | 15603246,Female,27,57000,0 6 | 15804002,Male,19,76000,0 7 | 15728773,Male,27,58000,0 8 | 15598044,Female,27,84000,0 9 | 15694829,Female,32,150000,1 10 | 15600575,Male,25,33000,0 11 | 15727311,Female,35,65000,0 12 | 15570769,Female,26,80000,0 13 | 15606274,Female,26,52000,0 14 | 15746139,Male,20,86000,0 15 | 15704987,Male,32,18000,0 16 | 15628972,Male,18,82000,0 17 | 15697686,Male,29,80000,0 18 | 15733883,Male,47,25000,1 19 | 15617482,Male,45,26000,1 20 | 15704583,Male,46,28000,1 21 | 15621083,Female,48,29000,1 22 | 15649487,Male,45,22000,1 23 | 15736760,Female,47,49000,1 24 | 15714658,Male,48,41000,1 25 | 15599081,Female,45,22000,1 26 | 15705113,Male,46,23000,1 27 | 15631159,Male,47,20000,1 28 | 15792818,Male,49,28000,1 29 | 15633531,Female,47,30000,1 30 | 15744529,Male,29,43000,0 31 | 15669656,Male,31,18000,0 32 | 15581198,Male,31,74000,0 33 | 15729054,Female,27,137000,1 34 | 15573452,Female,21,16000,0 35 | 15776733,Female,28,44000,0 36 | 15724858,Male,27,90000,0 37 | 15713144,Male,35,27000,0 38 | 
15690188,Female,33,28000,0 39 | 15689425,Male,30,49000,0 40 | 15671766,Female,26,72000,0 41 | 15782806,Female,27,31000,0 42 | 15764419,Female,27,17000,0 43 | 15591915,Female,33,51000,0 44 | 15772798,Male,35,108000,0 45 | 15792008,Male,30,15000,0 46 | 15715541,Female,28,84000,0 47 | 15639277,Male,23,20000,0 48 | 15798850,Male,25,79000,0 49 | 15776348,Female,27,54000,0 50 | 15727696,Male,30,135000,1 51 | 15793813,Female,31,89000,0 52 | 15694395,Female,24,32000,0 53 | 15764195,Female,18,44000,0 54 | 15744919,Female,29,83000,0 55 | 15671655,Female,35,23000,0 56 | 15654901,Female,27,58000,0 57 | 15649136,Female,24,55000,0 58 | 15775562,Female,23,48000,0 59 | 15807481,Male,28,79000,0 60 | 15642885,Male,22,18000,0 61 | 15789109,Female,32,117000,0 62 | 15814004,Male,27,20000,0 63 | 15673619,Male,25,87000,0 64 | 15595135,Female,23,66000,0 65 | 15583681,Male,32,120000,1 66 | 15605000,Female,59,83000,0 67 | 15718071,Male,24,58000,0 68 | 15679760,Male,24,19000,0 69 | 15654574,Female,23,82000,0 70 | 15577178,Female,22,63000,0 71 | 15595324,Female,31,68000,0 72 | 15756932,Male,25,80000,0 73 | 15726358,Female,24,27000,0 74 | 15595228,Female,20,23000,0 75 | 15782530,Female,33,113000,0 76 | 15592877,Male,32,18000,0 77 | 15651983,Male,34,112000,1 78 | 15746737,Male,18,52000,0 79 | 15774179,Female,22,27000,0 80 | 15667265,Female,28,87000,0 81 | 15655123,Female,26,17000,0 82 | 15595917,Male,30,80000,0 83 | 15668385,Male,39,42000,0 84 | 15709476,Male,20,49000,0 85 | 15711218,Male,35,88000,0 86 | 15798659,Female,30,62000,0 87 | 15663939,Female,31,118000,1 88 | 15694946,Male,24,55000,0 89 | 15631912,Female,28,85000,0 90 | 15768816,Male,26,81000,0 91 | 15682268,Male,35,50000,0 92 | 15684801,Male,22,81000,0 93 | 15636428,Female,30,116000,0 94 | 15809823,Male,26,15000,0 95 | 15699284,Female,29,28000,0 96 | 15786993,Female,29,83000,0 97 | 15709441,Female,35,44000,0 98 | 15710257,Female,35,25000,0 99 | 15582492,Male,28,123000,1 100 | 15575694,Male,35,73000,0 101 | 15756820,Female,28,37000,0 
102 | 15766289,Male,27,88000,0 103 | 15593014,Male,28,59000,0 104 | 15584545,Female,32,86000,0 105 | 15675949,Female,33,149000,1 106 | 15672091,Female,19,21000,0 107 | 15801658,Male,21,72000,0 108 | 15706185,Female,26,35000,0 109 | 15789863,Male,27,89000,0 110 | 15720943,Male,26,86000,0 111 | 15697997,Female,38,80000,0 112 | 15665416,Female,39,71000,0 113 | 15660200,Female,37,71000,0 114 | 15619653,Male,38,61000,0 115 | 15773447,Male,37,55000,0 116 | 15739160,Male,42,80000,0 117 | 15689237,Male,40,57000,0 118 | 15679297,Male,35,75000,0 119 | 15591433,Male,36,52000,0 120 | 15642725,Male,40,59000,0 121 | 15701962,Male,41,59000,0 122 | 15811613,Female,36,75000,0 123 | 15741049,Male,37,72000,0 124 | 15724423,Female,40,75000,0 125 | 15574305,Male,35,53000,0 126 | 15678168,Female,41,51000,0 127 | 15697020,Female,39,61000,0 128 | 15610801,Male,42,65000,0 129 | 15745232,Male,26,32000,0 130 | 15722758,Male,30,17000,0 131 | 15792102,Female,26,84000,0 132 | 15675185,Male,31,58000,0 133 | 15801247,Male,33,31000,0 134 | 15725660,Male,30,87000,0 135 | 15638963,Female,21,68000,0 136 | 15800061,Female,28,55000,0 137 | 15578006,Male,23,63000,0 138 | 15668504,Female,20,82000,0 139 | 15687491,Male,30,107000,1 140 | 15610403,Female,28,59000,0 141 | 15741094,Male,19,25000,0 142 | 15807909,Male,19,85000,0 143 | 15666141,Female,18,68000,0 144 | 15617134,Male,35,59000,0 145 | 15783029,Male,30,89000,0 146 | 15622833,Female,34,25000,0 147 | 15746422,Female,24,89000,0 148 | 15750839,Female,27,96000,1 149 | 15749130,Female,41,30000,0 150 | 15779862,Male,29,61000,0 151 | 15767871,Male,20,74000,0 152 | 15679651,Female,26,15000,0 153 | 15576219,Male,41,45000,0 154 | 15699247,Male,31,76000,0 155 | 15619087,Female,36,50000,0 156 | 15605327,Male,40,47000,0 157 | 15610140,Female,31,15000,0 158 | 15791174,Male,46,59000,0 159 | 15602373,Male,29,75000,0 160 | 15762605,Male,26,30000,0 161 | 15598840,Female,32,135000,1 162 | 15744279,Male,32,100000,1 163 | 15670619,Male,25,90000,0 164 | 
15599533,Female,37,33000,0 165 | 15757837,Male,35,38000,0 166 | 15697574,Female,33,69000,0 167 | 15578738,Female,18,86000,0 168 | 15762228,Female,22,55000,0 169 | 15614827,Female,35,71000,0 170 | 15789815,Male,29,148000,1 171 | 15579781,Female,29,47000,0 172 | 15587013,Male,21,88000,0 173 | 15570932,Male,34,115000,0 174 | 15794661,Female,26,118000,0 175 | 15581654,Female,34,43000,0 176 | 15644296,Female,34,72000,0 177 | 15614420,Female,23,28000,0 178 | 15609653,Female,35,47000,0 179 | 15594577,Male,25,22000,0 180 | 15584114,Male,24,23000,0 181 | 15673367,Female,31,34000,0 182 | 15685576,Male,26,16000,0 183 | 15774727,Female,31,71000,0 184 | 15694288,Female,32,117000,1 185 | 15603319,Male,33,43000,0 186 | 15759066,Female,33,60000,0 187 | 15814816,Male,31,66000,0 188 | 15724402,Female,20,82000,0 189 | 15571059,Female,33,41000,0 190 | 15674206,Male,35,72000,0 191 | 15715160,Male,28,32000,0 192 | 15730448,Male,24,84000,0 193 | 15662067,Female,19,26000,0 194 | 15779581,Male,29,43000,0 195 | 15662901,Male,19,70000,0 196 | 15689751,Male,28,89000,0 197 | 15667742,Male,34,43000,0 198 | 15738448,Female,30,79000,0 199 | 15680243,Female,20,36000,0 200 | 15745083,Male,26,80000,0 201 | 15708228,Male,35,22000,0 202 | 15628523,Male,35,39000,0 203 | 15708196,Male,49,74000,0 204 | 15735549,Female,39,134000,1 205 | 15809347,Female,41,71000,0 206 | 15660866,Female,58,101000,1 207 | 15766609,Female,47,47000,0 208 | 15654230,Female,55,130000,1 209 | 15794566,Female,52,114000,0 210 | 15800890,Female,40,142000,1 211 | 15697424,Female,46,22000,0 212 | 15724536,Female,48,96000,1 213 | 15735878,Male,52,150000,1 214 | 15707596,Female,59,42000,0 215 | 15657163,Male,35,58000,0 216 | 15622478,Male,47,43000,0 217 | 15779529,Female,60,108000,1 218 | 15636023,Male,49,65000,0 219 | 15582066,Male,40,78000,0 220 | 15666675,Female,46,96000,0 221 | 15732987,Male,59,143000,1 222 | 15789432,Female,41,80000,0 223 | 15663161,Male,35,91000,1 224 | 15694879,Male,37,144000,1 225 | 15593715,Male,60,102000,1 226 
| 15575002,Female,35,60000,0 227 | 15622171,Male,37,53000,0 228 | 15795224,Female,36,126000,1 229 | 15685346,Male,56,133000,1 230 | 15691808,Female,40,72000,0 231 | 15721007,Female,42,80000,1 232 | 15794253,Female,35,147000,1 233 | 15694453,Male,39,42000,0 234 | 15813113,Male,40,107000,1 235 | 15614187,Male,49,86000,1 236 | 15619407,Female,38,112000,0 237 | 15646227,Male,46,79000,1 238 | 15660541,Male,40,57000,0 239 | 15753874,Female,37,80000,0 240 | 15617877,Female,46,82000,0 241 | 15772073,Female,53,143000,1 242 | 15701537,Male,42,149000,1 243 | 15736228,Male,38,59000,0 244 | 15780572,Female,50,88000,1 245 | 15769596,Female,56,104000,1 246 | 15586996,Female,41,72000,0 247 | 15722061,Female,51,146000,1 248 | 15638003,Female,35,50000,0 249 | 15775590,Female,57,122000,1 250 | 15730688,Male,41,52000,0 251 | 15753102,Female,35,97000,1 252 | 15810075,Female,44,39000,0 253 | 15723373,Male,37,52000,0 254 | 15795298,Female,48,134000,1 255 | 15584320,Female,37,146000,1 256 | 15724161,Female,50,44000,0 257 | 15750056,Female,52,90000,1 258 | 15609637,Female,41,72000,0 259 | 15794493,Male,40,57000,0 260 | 15569641,Female,58,95000,1 261 | 15815236,Female,45,131000,1 262 | 15811177,Female,35,77000,0 263 | 15680587,Male,36,144000,1 264 | 15672821,Female,55,125000,1 265 | 15767681,Female,35,72000,0 266 | 15600379,Male,48,90000,1 267 | 15801336,Female,42,108000,1 268 | 15721592,Male,40,75000,0 269 | 15581282,Male,37,74000,0 270 | 15746203,Female,47,144000,1 271 | 15583137,Male,40,61000,0 272 | 15680752,Female,43,133000,0 273 | 15688172,Female,59,76000,1 274 | 15791373,Male,60,42000,1 275 | 15589449,Male,39,106000,1 276 | 15692819,Female,57,26000,1 277 | 15727467,Male,57,74000,1 278 | 15734312,Male,38,71000,0 279 | 15764604,Male,49,88000,1 280 | 15613014,Female,52,38000,1 281 | 15759684,Female,50,36000,1 282 | 15609669,Female,59,88000,1 283 | 15685536,Male,35,61000,0 284 | 15750447,Male,37,70000,1 285 | 15663249,Female,52,21000,1 286 | 15638646,Male,48,141000,0 287 | 
15734161,Female,37,93000,1 288 | 15631070,Female,37,62000,0 289 | 15761950,Female,48,138000,1 290 | 15649668,Male,41,79000,0 291 | 15713912,Female,37,78000,1 292 | 15586757,Male,39,134000,1 293 | 15596522,Male,49,89000,1 294 | 15625395,Male,55,39000,1 295 | 15760570,Male,37,77000,0 296 | 15566689,Female,35,57000,0 297 | 15725794,Female,36,63000,0 298 | 15673539,Male,42,73000,1 299 | 15705298,Female,43,112000,1 300 | 15675791,Male,45,79000,0 301 | 15747043,Male,46,117000,1 302 | 15736397,Female,58,38000,1 303 | 15678201,Male,48,74000,1 304 | 15720745,Female,37,137000,1 305 | 15637593,Male,37,79000,1 306 | 15598070,Female,40,60000,0 307 | 15787550,Male,42,54000,0 308 | 15603942,Female,51,134000,0 309 | 15733973,Female,47,113000,1 310 | 15596761,Male,36,125000,1 311 | 15652400,Female,38,50000,0 312 | 15717893,Female,42,70000,0 313 | 15622585,Male,39,96000,1 314 | 15733964,Female,38,50000,0 315 | 15753861,Female,49,141000,1 316 | 15747097,Female,39,79000,0 317 | 15594762,Female,39,75000,1 318 | 15667417,Female,54,104000,1 319 | 15684861,Male,35,55000,0 320 | 15742204,Male,45,32000,1 321 | 15623502,Male,36,60000,0 322 | 15774872,Female,52,138000,1 323 | 15611191,Female,53,82000,1 324 | 15674331,Male,41,52000,0 325 | 15619465,Female,48,30000,1 326 | 15575247,Female,48,131000,1 327 | 15695679,Female,41,60000,0 328 | 15713463,Male,41,72000,0 329 | 15785170,Female,42,75000,0 330 | 15796351,Male,36,118000,1 331 | 15639576,Female,47,107000,1 332 | 15693264,Male,38,51000,0 333 | 15589715,Female,48,119000,1 334 | 15769902,Male,42,65000,0 335 | 15587177,Male,40,65000,0 336 | 15814553,Male,57,60000,1 337 | 15601550,Female,36,54000,0 338 | 15664907,Male,58,144000,1 339 | 15612465,Male,35,79000,0 340 | 15810800,Female,38,55000,0 341 | 15665760,Male,39,122000,1 342 | 15588080,Female,53,104000,1 343 | 15776844,Male,35,75000,0 344 | 15717560,Female,38,65000,0 345 | 15629739,Female,47,51000,1 346 | 15729908,Male,47,105000,1 347 | 15716781,Female,41,63000,0 348 | 
15646936,Male,53,72000,1 349 | 15768151,Female,54,108000,1 350 | 15579212,Male,39,77000,0 351 | 15721835,Male,38,61000,0 352 | 15800515,Female,38,113000,1 353 | 15591279,Male,37,75000,0 354 | 15587419,Female,42,90000,1 355 | 15750335,Female,37,57000,0 356 | 15699619,Male,36,99000,1 357 | 15606472,Male,60,34000,1 358 | 15778368,Male,54,70000,1 359 | 15671387,Female,41,72000,0 360 | 15573926,Male,40,71000,1 361 | 15709183,Male,42,54000,0 362 | 15577514,Male,43,129000,1 363 | 15778830,Female,53,34000,1 364 | 15768072,Female,47,50000,1 365 | 15768293,Female,42,79000,0 366 | 15654456,Male,42,104000,1 367 | 15807525,Female,59,29000,1 368 | 15574372,Female,58,47000,1 369 | 15671249,Male,46,88000,1 370 | 15779744,Male,38,71000,0 371 | 15624755,Female,54,26000,1 372 | 15611430,Female,60,46000,1 373 | 15774744,Male,60,83000,1 374 | 15629885,Female,39,73000,0 375 | 15708791,Male,59,130000,1 376 | 15793890,Female,37,80000,0 377 | 15646091,Female,46,32000,1 378 | 15596984,Female,46,74000,0 379 | 15800215,Female,42,53000,0 380 | 15577806,Male,41,87000,1 381 | 15749381,Female,58,23000,1 382 | 15683758,Male,42,64000,0 383 | 15670615,Male,48,33000,1 384 | 15715622,Female,44,139000,1 385 | 15707634,Male,49,28000,1 386 | 15806901,Female,57,33000,1 387 | 15775335,Male,56,60000,1 388 | 15724150,Female,49,39000,1 389 | 15627220,Male,39,71000,0 390 | 15672330,Male,47,34000,1 391 | 15668521,Female,48,35000,1 392 | 15807837,Male,48,33000,1 393 | 15592570,Male,47,23000,1 394 | 15748589,Female,45,45000,1 395 | 15635893,Male,60,42000,1 396 | 15757632,Female,39,59000,0 397 | 15691863,Female,46,41000,1 398 | 15706071,Male,51,23000,1 399 | 15654296,Female,50,20000,1 400 | 15755018,Male,36,33000,0 401 | 15594041,Female,49,36000,1 --------------------------------------------------------------------------------