├── Chatbot and Restaurant Order Script
│   ├── chatbot.py
│   └── restaurant.py
├── Data Cleaning
│   ├── Data Cleaning.ipynb
│   └── house_price.csv
├── Data Frame Handling
│   ├── Dataframe Handling.ipynb
│   ├── student.csv
│   └── students2.csv
├── Diabetes Prediction
│   ├── Diabetes Prediction.ipynb
│   ├── Diabetes Test Model.ipynb
│   ├── Predictions.csv
│   ├── bars.png
│   ├── diabetes.csv
│   ├── diabetes_predictor_script.py
│   ├── diabetes_rf_model.pkl
│   ├── diabetes_test.csv
│   ├── heat.png
│   └── heat2.png
├── Digit Recognition with Keras and Tensorflow
│   └── Digit_Recognition.ipynb
├── Fish Weight Prediction
│   ├── BoxPlot.png
│   ├── Fish_Predictor_Script.py
│   ├── Weight of Fish Prediction.ipynb
│   ├── barplots.png
│   ├── corrmap.png
│   ├── fish_predictor.pkl
│   ├── fish_proj.csv
│   └── pairplots.png
├── HomePrices Prediction
│   ├── HomePrices Prediction.ipynb
│   ├── homemodel.pkl
│   └── homeprices.csv
├── IMDB Movie Reviews Sentiment Analysis with NLP
│   ├── IMDB Movie Reviews Sentiment Analysis 2.ipynb
│   ├── IMDB Movie Reviews Sentiment Analysis.ipynb
│   ├── imdb_reviews.csv
│   ├── imdb_sentiment_analyser_script.py
│   └── imdb_sentiment_analyzer.pkl
├── Image Recognition with Keras
│   ├── Image_Classification.ipynb
│   ├── class.png
│   └── number.png
├── Iris Flower Classification
│   ├── Iris Flower Prediction.ipynb
│   ├── Predicted Classes Using OVR and OVO Approach.csv
│   ├── heatmap.png
│   ├── heatmap2.png
│   ├── hh.py
│   ├── iris.csv
│   ├── iris_prediction_script.py
│   └── one_vs_one_classifier.pkl
├── Iris Flower KMeans Classifier
│   ├── Iris Classification using KMeans Clustering.ipynb
│   ├── iris2.png
│   ├── iris_classifier_kmeans.pkl
│   ├── iris_kmeans.csv
│   ├── k_elbow_plot.png
│   ├── petal.png
│   └── petal2.png
├── Iris KMeans Classifier
│   ├── Iris Classification using KMeans Clustering.ipynb
│   ├── iris2.png
│   ├── iris_classifier_kmeans.pkl
│   ├── iris_kmeans.csv
│   ├── k_elbow_plot.png
│   ├── petal.png
│   └── petal2.png
├── KMeans Clustering
│   ├── K-Means Clustering.ipynb
│   ├── income.csv
│   ├── kmeans_classifier.pkl
│   ├── scatter.png
│   ├── scatter2.png
│   ├── scatter3.png
│   └── scatter5.png
├── LICENSE
├── Market Basket Association
│   ├── 005 Transactions.txt
│   ├── Market Basket Association Rules.ipynb
│   └── Market Basket Association.ipynb
├── Movie Recommendation System
│   ├── Movie Recommendation System With Collaborative Filtering II.ipynb
│   ├── Movie Recommendation System using Collaborative Filtering .ipynb
│   ├── ratings.csv
│   └── userRatings.csv
├── Multi Fruit Classification
│   ├── Fruit Classification.ipynb
│   ├── Fruit Predicted Classes Using OVR and OVO Approach.csv
│   ├── fruit.xlsx
│   ├── fruit_data.csv
│   ├── fruit_one_vs_one_classifier.pkl
│   ├── heatmap3.png
│   └── heatmap4.png
├── Object Recognition
│   └── Object_Recognition.ipynb
├── README.md
├── Salary Prediction
│   ├── Salary Prediction.ipynb
│   ├── salaries.csv
│   └── salary_model.pkl
├── Spam Detection with NLP
│   ├── Spam Detection with NLP.ipynb
│   └── spam.csv
├── Titanic Prediction
│   ├── Titanic Survival Prediction.ipynb
│   ├── heat3.png
│   ├── heat4.png
│   ├── titanic.csv
│   ├── titanic_predictor_script.py
│   └── titanic_rf_model.pkl
└── Video Game Sales Prediction
    ├── Video Game Sales Prediction.ipynb
    ├── apps
    │   └── endpoints
    │       ├── __init__.py
    │       ├── admin.py
    │       ├── apps.py
    │       ├── migrations
    │       │   └── __init__.py
    │       ├── models.py
    │       ├── tests.py
    │       └── views.py
    ├── db.sqlite3
    ├── manage.py
    ├── pycharmtut
    │   ├── __init__.py
    │   ├── __pycache__
    │   │   ├── __init__.cpython-310.pyc
    │   │   ├── settings.cpython-310.pyc
    │   │   ├── urls.cpython-310.pyc
    │   │   └── wsgi.cpython-310.pyc
    │   ├── asgi.py
    │   ├── settings.py
    │   ├── urls.py
    │   └── wsgi.py
    ├── video_games_sales.csv
    └── videogame_sale_model.pkl
/Chatbot and Restaurant Order Script/chatbot.py:
--------------------------------------------------------------------------------
1 | print("This is a Simple Chatbot")
2 |
3 | print("Marvis: Hello There!")
4 |
5 | # Keep chatting until the user types "bye"; follow-up questions read a
6 | # fresh reply instead of re-testing the original input.
7 | user_input = input("you: ").lower()
8 | while user_input != "bye":
9 |     if user_input in ["hi", "hello"]:
10 |         print("Marvis: Hi, how can I help you?")
11 |
12 |     elif user_input == "who are you?":
13 |         print("Marvis: I am your chatbot... do you remember me?")
14 |         if input("you: ").lower() == "yes":
15 |             print("Marvis: I am glad to see you remember me!")
16 |         else:
17 |             print("Marvis: I am sorry to hear that!")
18 |
19 |     elif user_input == "how are you?":
20 |         print("Marvis: I am fine, thank you for asking. What about you?")
21 |         if input("you: ").lower() == "fine":
22 |             print("Marvis: I am glad to hear that!")
23 |
24 |     elif user_input == "what is your name?":
25 |         print("Marvis: My name is Marvis. What is yours?")
26 |         name = input("you: ")
27 |         if name:
28 |             print(f"Marvis: Nice to meet you, {name}!")
29 |
30 |     elif user_input in ["what is your age?", "how old are you?"]:
31 |         print("Marvis: I am a computer program, I was never born. How old are you?")
32 |         if input("you: "):
33 |             print("Marvis: I am glad to hear that!")
34 |
35 |     elif user_input == "what is your favorite color?":
36 |         print("Marvis: I like blue. What is yours?")
37 |         if input("you: "):
38 |             print("Marvis: That is nice!")
39 |
40 |     elif user_input == "what is your favorite food?":
41 |         print("Marvis: I like pizza. What is yours?")
42 |         if input("you: "):
43 |             print("Marvis: I like that too!")
44 |
45 |     elif user_input == "what is your favorite sport?":
46 |         print("Marvis: I like football")
47 |         if input("you: ").lower() == "football":
48 |             print("Marvis: I like that too!")
49 |         else:
50 |             print("Marvis: That is a cool sport!")
51 |
52 |     elif user_input == "do you think liverpool will win the premier league?":
53 |         print("Marvis: I think they will")
54 |         if input("you: ").lower() == "are you a liverpool fan?":
55 |             print("Marvis: Yes I am a Liverpool fan! What team do you support?")
56 |             if input("you: ").lower() == "liverpool":
57 |                 print("Marvis: I support Liverpool too!")
58 |             else:
59 |                 print("Marvis: That's a good team!")
60 |
61 |     elif user_input == "do you think liverpool will win the champions league?":
62 |         print("Marvis: I think they will")
63 |         if input("you: ").lower() == "i don't think so":
64 |             print("Marvis: I am sorry to hear that!")
65 |
66 |     else:
67 |         print("Marvis: I don't understand")
68 |
69 |     user_input = input("you: ").lower()
--------------------------------------------------------------------------------
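Note: the if/elif chain in chatbot.py grows by one branch per supported
prompt. A minimal sketch of an alternative (hypothetical, not a file in this
repository): keep prompt -> reply pairs in a dict, so adding a response is
one line of data rather than a new branch.

    # Hypothetical dict-based dispatch; not part of the repository.
    responses = {
        "hi": "Hi, how can I help you?",
        "hello": "Hi, how can I help you?",
        "who are you?": "I am your chatbot... do you remember me?",
        "how are you?": "I am fine, thank you for asking. What about you?",
        "what is your name?": "My name is Marvis. What is yours?",
    }

    while True:
        text = input("you: ").strip().lower()
        if text == "bye":
            break
        # Unknown prompts fall back to a default reply.
        print("Marvis:", responses.get(text, "I don't understand"))
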
/Chatbot and Restaurant Order Script/restaurant.py:
--------------------------------------------------------------------------------
1 | print("This is a Simple Food Delivery Script Made with Python ")
2 | def steps():
3 | # Opening the app
4 | open_app = input("Do you want to use the app? Type y if yes.\n")
5 | if open_app == "y":
6 | print("Opening app...")
7 | else:
8 | return "Exit"
9 |
10 | # Entering Email and Password
11 | print("Input Your Username/Email Address and Password")
12 | user_login_email = input("Email address: ")
13 | user_login_password = input("Password: ")
14 |
15 |
16 | order_food = input("Do you want to order food? type y if yes.\n")
17 | if order_food == "y":
18 | print("Ordering food")
19 | else:
20 | return "Okay no problem"
21 | type_of_food = input("What kind of food do you want?\n")
22 | price = len(type_of_food)
23 | print(f"You ordered {type_of_food}")
24 | if price>=5:
25 | print("This will cost you:$ ", price*100)
26 | else:
27 | print("This will cost you:$ ", price*50)
28 |
29 |
30 | available_restaurants = ["Uber Eats", "The Place "]
31 | print(f"These are the restaurants available {available_restaurants}")
32 | select_restaurant = input("What Restaurant Do you want to order from: ")
33 | if select_restaurant in available_restaurants:
34 | print(f"Ordering from {select_restaurant}")
35 | else:
36 | print("The restaurant you entered ", select_restaurant, " is not available")
37 |
38 | location = input("Do you want your food delivered? Type y for Yes\n")
39 | if location == "y":
40 | delivery_locations = ["Ajah", "VGC", "Lekki", "Ikorodu"]
41 | print(f"These are our Delivery locations\n {delivery_locations} ")
42 | user_location=input("Choose a location:\n")
43 | if user_location in delivery_locations:
44 | print("Your food is going to be delivered to this location: ", user_location)
45 | else:
46 | print("This location is not available")
47 |
48 | list_of_payment = ["Bank Transfer", "Credit Card", "Pay on Delivery"]
49 | print("These are the payment methods that are available", list_of_payment)
50 | user_payment = input("Select your payment method:\n")
51 | if user_payment in list_of_payment:
52 | print(f"You have chosen the {user_payment} method of payment\n")
53 | print("Your order has now been placed!\n")
54 | print("This is your order summary:\n")
55 | print(f"Username: {user_login_email}\n")
56 | print(f"What you ordered: {type_of_food}\n")
57 | print(f"Price of food: {len(type_of_food)}\n")
58 | print(f"Restaurant Selected: {select_restaurant}")
59 | print(f"Delivery Location: {user_location}\n")
60 | print(f"Payment Method: {user_payment}")
61 | else:
62 | print("This payment method is either unavailable or not offered by us.\nPlease Choose from the available payment methods")
63 | print("Your order is invalid. Please try again!")
64 |
65 |
66 |
67 | steps()
--------------------------------------------------------------------------------
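Note: in restaurant.py the price is derived from the length of the food's
name: names of five or more characters cost 100 per character, shorter names
50 per character. A quick worked example of that rule (hypothetical helper,
not a file in this repository):

    def food_price(name: str) -> int:
        # Same rule as restaurant.py: >= 5 characters -> 100 per
        # character, otherwise 50 per character.
        n = len(name)
        return n * 100 if n >= 5 else n * 50

    print(food_price("pizza"))  # 5 characters -> 500
    print(food_price("rice"))   # 4 characters -> 200
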
/Diabetes Prediction/Predictions.csv:
--------------------------------------------------------------------------------
1 | ,Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Predicted Outcome
2 | 0,6,148,72,35,0,33.6,0.627,Positive
3 | 1,1,85,66,29,0,26.6,0.351,Negative
4 | 2,8,183,64,0,0,23.3,0.672,Positive
5 | 3,1,89,66,23,94,28.1,0.167,Negative
6 | 4,0,137,40,35,168,43.1,2.288,Positive
7 | 5,5,116,74,0,0,25.6,0.201,Negative
8 | 6,3,78,50,32,88,31.0,0.248,Positive
9 | 7,10,115,0,0,0,35.3,0.134,Negative
10 | 8,2,197,70,45,543,30.5,0.158,Positive
11 | 9,8,125,96,0,0,0.0,0.232,Positive
12 | 10,4,110,92,0,0,37.6,0.191,Negative
13 | 11,10,168,74,0,0,38.0,0.537,Positive
14 | 12,10,139,80,0,0,27.1,1.441,Negative
15 | 13,1,189,60,23,846,30.1,0.398,Positive
16 | 14,5,166,72,19,175,25.8,0.587,Positive
17 | 15,7,100,0,0,0,30.0,0.484,Positive
18 | 16,0,118,84,47,230,45.8,0.551,Positive
19 | 17,7,107,74,0,0,29.6,0.254,Negative
20 | 18,1,103,30,38,83,43.3,0.183,Negative
21 | 19,1,115,70,30,96,34.6,0.529,Negative
22 | 20,3,126,88,41,235,39.3,0.704,Negative
23 | 21,8,99,84,0,0,35.4,0.388,Negative
24 | 22,7,196,90,0,0,39.8,0.451,Positive
25 | 23,9,119,80,35,0,29.0,0.263,Positive
26 | 24,11,143,94,33,146,36.6,0.254,Positive
27 | 25,10,125,70,26,115,31.1,0.205,Positive
28 | 26,7,147,76,0,0,39.4,0.257,Positive
29 | 27,1,97,66,15,140,23.2,0.487,Negative
30 | 28,13,145,82,19,110,22.2,0.245,Negative
31 | 29,5,117,92,0,0,34.1,0.337,Negative
32 | 30,5,109,75,26,0,36.0,0.546,Negative
33 | 31,3,158,76,36,245,31.6,0.851,Positive
34 | 32,3,88,58,11,54,24.8,0.267,Negative
35 | 33,6,92,92,0,0,19.9,0.188,Negative
36 | 34,10,122,78,31,0,27.6,0.512,Positive
37 | 35,4,103,60,33,192,24.0,0.966,Negative
38 | 36,11,138,76,0,0,33.2,0.42,Negative
39 | 37,9,102,76,37,0,32.9,0.665,Positive
40 | 38,2,90,68,42,0,38.2,0.503,Positive
41 | 39,4,111,72,47,207,37.1,1.39,Positive
42 | 40,3,180,64,25,70,34.0,0.271,Negative
43 | 41,7,133,84,0,0,40.2,0.696,Negative
44 | 42,7,106,92,18,0,22.7,0.235,Negative
45 | 43,9,171,110,24,240,45.4,0.721,Positive
46 | 44,7,159,64,0,0,27.4,0.294,Positive
47 | 45,0,180,66,39,0,42.0,1.893,Positive
48 | 46,1,146,56,0,0,29.7,0.564,Negative
49 | 47,2,71,70,27,0,28.0,0.586,Negative
50 | 48,7,103,66,32,0,39.1,0.344,Positive
51 |
--------------------------------------------------------------------------------
/Diabetes Prediction/bars.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Diabetes Prediction/bars.png
--------------------------------------------------------------------------------
/Diabetes Prediction/diabetes.csv:
--------------------------------------------------------------------------------
1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
2 | 6,148,72,35,0,33.6,0.627,50,1
3 | 1,85,66,29,0,26.6,0.351,31,0
4 | 8,183,64,0,0,23.3,0.672,32,1
5 | 1,89,66,23,94,28.1,0.167,21,0
6 | 0,137,40,35,168,43.1,2.288,33,1
7 | 5,116,74,0,0,25.6,0.201,30,0
8 | 3,78,50,32,88,31,0.248,26,1
9 | 10,115,0,0,0,35.3,0.134,29,0
10 | 2,197,70,45,543,30.5,0.158,53,1
11 | 8,125,96,0,0,0,0.232,54,1
12 | 4,110,92,0,0,37.6,0.191,30,0
13 | 10,168,74,0,0,38,0.537,34,1
14 | 10,139,80,0,0,27.1,1.441,57,0
15 | 1,189,60,23,846,30.1,0.398,59,1
16 | 5,166,72,19,175,25.8,0.587,51,1
17 | 7,100,0,0,0,30,0.484,32,1
18 | 0,118,84,47,230,45.8,0.551,31,1
19 | 7,107,74,0,0,29.6,0.254,31,1
20 | 1,103,30,38,83,43.3,0.183,33,0
21 | 1,115,70,30,96,34.6,0.529,32,1
22 | 3,126,88,41,235,39.3,0.704,27,0
23 | 8,99,84,0,0,35.4,0.388,50,0
24 | 7,196,90,0,0,39.8,0.451,41,1
25 | 9,119,80,35,0,29,0.263,29,1
26 | 11,143,94,33,146,36.6,0.254,51,1
27 | 10,125,70,26,115,31.1,0.205,41,1
28 | 7,147,76,0,0,39.4,0.257,43,1
29 | 1,97,66,15,140,23.2,0.487,22,0
30 | 13,145,82,19,110,22.2,0.245,57,0
31 | 5,117,92,0,0,34.1,0.337,38,0
32 | 5,109,75,26,0,36,0.546,60,0
33 | 3,158,76,36,245,31.6,0.851,28,1
34 | 3,88,58,11,54,24.8,0.267,22,0
35 | 6,92,92,0,0,19.9,0.188,28,0
36 | 10,122,78,31,0,27.6,0.512,45,0
37 | 4,103,60,33,192,24,0.966,33,0
38 | 11,138,76,0,0,33.2,0.42,35,0
39 | 9,102,76,37,0,32.9,0.665,46,1
40 | 2,90,68,42,0,38.2,0.503,27,1
41 | 4,111,72,47,207,37.1,1.39,56,1
42 | 3,180,64,25,70,34,0.271,26,0
43 | 7,133,84,0,0,40.2,0.696,37,0
44 | 7,106,92,18,0,22.7,0.235,48,0
45 | 9,171,110,24,240,45.4,0.721,54,1
46 | 7,159,64,0,0,27.4,0.294,40,0
47 | 0,180,66,39,0,42,1.893,25,1
48 | 1,146,56,0,0,29.7,0.564,29,0
49 | 2,71,70,27,0,28,0.586,22,0
50 | 7,103,66,32,0,39.1,0.344,31,1
51 | 7,105,0,0,0,0,0.305,24,0
52 | 1,103,80,11,82,19.4,0.491,22,0
53 | 1,101,50,15,36,24.2,0.526,26,0
54 | 5,88,66,21,23,24.4,0.342,30,0
55 | 8,176,90,34,300,33.7,0.467,58,1
56 | 7,150,66,42,342,34.7,0.718,42,0
57 | 1,73,50,10,0,23,0.248,21,0
58 | 7,187,68,39,304,37.7,0.254,41,1
59 | 0,100,88,60,110,46.8,0.962,31,0
60 | 0,146,82,0,0,40.5,1.781,44,0
61 | 0,105,64,41,142,41.5,0.173,22,0
62 | 2,84,0,0,0,0,0.304,21,0
63 | 8,133,72,0,0,32.9,0.27,39,1
64 | 5,44,62,0,0,25,0.587,36,0
65 | 2,141,58,34,128,25.4,0.699,24,0
66 | 7,114,66,0,0,32.8,0.258,42,1
67 | 5,99,74,27,0,29,0.203,32,0
68 | 0,109,88,30,0,32.5,0.855,38,1
69 | 2,109,92,0,0,42.7,0.845,54,0
70 | 1,95,66,13,38,19.6,0.334,25,0
71 | 4,146,85,27,100,28.9,0.189,27,0
72 | 2,100,66,20,90,32.9,0.867,28,1
73 | 5,139,64,35,140,28.6,0.411,26,0
74 | 13,126,90,0,0,43.4,0.583,42,1
75 | 4,129,86,20,270,35.1,0.231,23,0
76 | 1,79,75,30,0,32,0.396,22,0
77 | 1,0,48,20,0,24.7,0.14,22,0
78 | 7,62,78,0,0,32.6,0.391,41,0
79 | 5,95,72,33,0,37.7,0.37,27,0
80 | 0,131,0,0,0,43.2,0.27,26,1
81 | 2,112,66,22,0,25,0.307,24,0
82 | 3,113,44,13,0,22.4,0.14,22,0
83 | 2,74,0,0,0,0,0.102,22,0
84 | 7,83,78,26,71,29.3,0.767,36,0
85 | 0,101,65,28,0,24.6,0.237,22,0
86 | 5,137,108,0,0,48.8,0.227,37,1
87 | 2,110,74,29,125,32.4,0.698,27,0
88 | 13,106,72,54,0,36.6,0.178,45,0
89 | 2,100,68,25,71,38.5,0.324,26,0
90 | 15,136,70,32,110,37.1,0.153,43,1
91 | 1,107,68,19,0,26.5,0.165,24,0
92 | 1,80,55,0,0,19.1,0.258,21,0
93 | 4,123,80,15,176,32,0.443,34,0
94 | 7,81,78,40,48,46.7,0.261,42,0
95 | 4,134,72,0,0,23.8,0.277,60,1
96 | 2,142,82,18,64,24.7,0.761,21,0
97 | 6,144,72,27,228,33.9,0.255,40,0
98 | 2,92,62,28,0,31.6,0.13,24,0
99 | 1,71,48,18,76,20.4,0.323,22,0
100 | 6,93,50,30,64,28.7,0.356,23,0
101 | 1,122,90,51,220,49.7,0.325,31,1
102 | 1,163,72,0,0,39,1.222,33,1
103 | 1,151,60,0,0,26.1,0.179,22,0
104 | 0,125,96,0,0,22.5,0.262,21,0
105 | 1,81,72,18,40,26.6,0.283,24,0
106 | 2,85,65,0,0,39.6,0.93,27,0
107 | 1,126,56,29,152,28.7,0.801,21,0
108 | 1,96,122,0,0,22.4,0.207,27,0
109 | 4,144,58,28,140,29.5,0.287,37,0
110 | 3,83,58,31,18,34.3,0.336,25,0
111 | 0,95,85,25,36,37.4,0.247,24,1
112 | 3,171,72,33,135,33.3,0.199,24,1
113 | 8,155,62,26,495,34,0.543,46,1
114 | 1,89,76,34,37,31.2,0.192,23,0
115 | 4,76,62,0,0,34,0.391,25,0
116 | 7,160,54,32,175,30.5,0.588,39,1
117 | 4,146,92,0,0,31.2,0.539,61,1
118 | 5,124,74,0,0,34,0.22,38,1
119 | 5,78,48,0,0,33.7,0.654,25,0
120 | 4,97,60,23,0,28.2,0.443,22,0
121 | 4,99,76,15,51,23.2,0.223,21,0
122 | 0,162,76,56,100,53.2,0.759,25,1
123 | 6,111,64,39,0,34.2,0.26,24,0
124 | 2,107,74,30,100,33.6,0.404,23,0
125 | 5,132,80,0,0,26.8,0.186,69,0
126 | 0,113,76,0,0,33.3,0.278,23,1
127 | 1,88,30,42,99,55,0.496,26,1
128 | 3,120,70,30,135,42.9,0.452,30,0
129 | 1,118,58,36,94,33.3,0.261,23,0
130 | 1,117,88,24,145,34.5,0.403,40,1
131 | 0,105,84,0,0,27.9,0.741,62,1
132 | 4,173,70,14,168,29.7,0.361,33,1
133 | 9,122,56,0,0,33.3,1.114,33,1
134 | 3,170,64,37,225,34.5,0.356,30,1
135 | 8,84,74,31,0,38.3,0.457,39,0
136 | 2,96,68,13,49,21.1,0.647,26,0
137 | 2,125,60,20,140,33.8,0.088,31,0
138 | 0,100,70,26,50,30.8,0.597,21,0
139 | 0,93,60,25,92,28.7,0.532,22,0
140 | 0,129,80,0,0,31.2,0.703,29,0
141 | 5,105,72,29,325,36.9,0.159,28,0
142 | 3,128,78,0,0,21.1,0.268,55,0
143 | 5,106,82,30,0,39.5,0.286,38,0
144 | 2,108,52,26,63,32.5,0.318,22,0
145 | 10,108,66,0,0,32.4,0.272,42,1
146 | 4,154,62,31,284,32.8,0.237,23,0
147 | 0,102,75,23,0,0,0.572,21,0
148 | 9,57,80,37,0,32.8,0.096,41,0
149 | 2,106,64,35,119,30.5,1.4,34,0
150 | 5,147,78,0,0,33.7,0.218,65,0
151 | 2,90,70,17,0,27.3,0.085,22,0
152 | 1,136,74,50,204,37.4,0.399,24,0
153 | 4,114,65,0,0,21.9,0.432,37,0
154 | 9,156,86,28,155,34.3,1.189,42,1
155 | 1,153,82,42,485,40.6,0.687,23,0
156 | 8,188,78,0,0,47.9,0.137,43,1
157 | 7,152,88,44,0,50,0.337,36,1
158 | 2,99,52,15,94,24.6,0.637,21,0
159 | 1,109,56,21,135,25.2,0.833,23,0
160 | 2,88,74,19,53,29,0.229,22,0
161 | 17,163,72,41,114,40.9,0.817,47,1
162 | 4,151,90,38,0,29.7,0.294,36,0
163 | 7,102,74,40,105,37.2,0.204,45,0
164 | 0,114,80,34,285,44.2,0.167,27,0
165 | 2,100,64,23,0,29.7,0.368,21,0
166 | 0,131,88,0,0,31.6,0.743,32,1
167 | 6,104,74,18,156,29.9,0.722,41,1
168 | 3,148,66,25,0,32.5,0.256,22,0
169 | 4,120,68,0,0,29.6,0.709,34,0
170 | 4,110,66,0,0,31.9,0.471,29,0
171 | 3,111,90,12,78,28.4,0.495,29,0
172 | 6,102,82,0,0,30.8,0.18,36,1
173 | 6,134,70,23,130,35.4,0.542,29,1
174 | 2,87,0,23,0,28.9,0.773,25,0
175 | 1,79,60,42,48,43.5,0.678,23,0
176 | 2,75,64,24,55,29.7,0.37,33,0
177 | 8,179,72,42,130,32.7,0.719,36,1
178 | 6,85,78,0,0,31.2,0.382,42,0
179 | 0,129,110,46,130,67.1,0.319,26,1
180 | 5,143,78,0,0,45,0.19,47,0
181 | 5,130,82,0,0,39.1,0.956,37,1
182 | 6,87,80,0,0,23.2,0.084,32,0
183 | 0,119,64,18,92,34.9,0.725,23,0
184 | 1,0,74,20,23,27.7,0.299,21,0
185 | 5,73,60,0,0,26.8,0.268,27,0
186 | 4,141,74,0,0,27.6,0.244,40,0
187 | 7,194,68,28,0,35.9,0.745,41,1
188 | 8,181,68,36,495,30.1,0.615,60,1
189 | 1,128,98,41,58,32,1.321,33,1
190 | 8,109,76,39,114,27.9,0.64,31,1
191 | 5,139,80,35,160,31.6,0.361,25,1
192 | 3,111,62,0,0,22.6,0.142,21,0
193 | 9,123,70,44,94,33.1,0.374,40,0
194 | 7,159,66,0,0,30.4,0.383,36,1
195 | 11,135,0,0,0,52.3,0.578,40,1
196 | 8,85,55,20,0,24.4,0.136,42,0
197 | 5,158,84,41,210,39.4,0.395,29,1
198 | 1,105,58,0,0,24.3,0.187,21,0
199 | 3,107,62,13,48,22.9,0.678,23,1
200 | 4,109,64,44,99,34.8,0.905,26,1
201 | 4,148,60,27,318,30.9,0.15,29,1
202 | 0,113,80,16,0,31,0.874,21,0
203 | 1,138,82,0,0,40.1,0.236,28,0
204 | 0,108,68,20,0,27.3,0.787,32,0
205 | 2,99,70,16,44,20.4,0.235,27,0
206 | 6,103,72,32,190,37.7,0.324,55,0
207 | 5,111,72,28,0,23.9,0.407,27,0
208 | 8,196,76,29,280,37.5,0.605,57,1
209 | 5,162,104,0,0,37.7,0.151,52,1
210 | 1,96,64,27,87,33.2,0.289,21,0
211 | 7,184,84,33,0,35.5,0.355,41,1
212 | 2,81,60,22,0,27.7,0.29,25,0
213 | 0,147,85,54,0,42.8,0.375,24,0
214 | 7,179,95,31,0,34.2,0.164,60,0
215 | 0,140,65,26,130,42.6,0.431,24,1
216 | 9,112,82,32,175,34.2,0.26,36,1
217 | 12,151,70,40,271,41.8,0.742,38,1
218 | 5,109,62,41,129,35.8,0.514,25,1
219 | 6,125,68,30,120,30,0.464,32,0
220 | 5,85,74,22,0,29,1.224,32,1
221 | 5,112,66,0,0,37.8,0.261,41,1
222 | 0,177,60,29,478,34.6,1.072,21,1
223 | 2,158,90,0,0,31.6,0.805,66,1
224 | 7,119,0,0,0,25.2,0.209,37,0
225 | 7,142,60,33,190,28.8,0.687,61,0
226 | 1,100,66,15,56,23.6,0.666,26,0
227 | 1,87,78,27,32,34.6,0.101,22,0
228 | 0,101,76,0,0,35.7,0.198,26,0
229 | 3,162,52,38,0,37.2,0.652,24,1
230 | 4,197,70,39,744,36.7,2.329,31,0
231 | 0,117,80,31,53,45.2,0.089,24,0
232 | 4,142,86,0,0,44,0.645,22,1
233 | 6,134,80,37,370,46.2,0.238,46,1
234 | 1,79,80,25,37,25.4,0.583,22,0
235 | 4,122,68,0,0,35,0.394,29,0
236 | 3,74,68,28,45,29.7,0.293,23,0
237 | 4,171,72,0,0,43.6,0.479,26,1
238 | 7,181,84,21,192,35.9,0.586,51,1
239 | 0,179,90,27,0,44.1,0.686,23,1
240 | 9,164,84,21,0,30.8,0.831,32,1
241 | 0,104,76,0,0,18.4,0.582,27,0
242 | 1,91,64,24,0,29.2,0.192,21,0
243 | 4,91,70,32,88,33.1,0.446,22,0
244 | 3,139,54,0,0,25.6,0.402,22,1
245 | 6,119,50,22,176,27.1,1.318,33,1
246 | 2,146,76,35,194,38.2,0.329,29,0
247 | 9,184,85,15,0,30,1.213,49,1
248 | 10,122,68,0,0,31.2,0.258,41,0
249 | 0,165,90,33,680,52.3,0.427,23,0
250 | 9,124,70,33,402,35.4,0.282,34,0
251 | 1,111,86,19,0,30.1,0.143,23,0
252 | 9,106,52,0,0,31.2,0.38,42,0
253 | 2,129,84,0,0,28,0.284,27,0
254 | 2,90,80,14,55,24.4,0.249,24,0
255 | 0,86,68,32,0,35.8,0.238,25,0
256 | 12,92,62,7,258,27.6,0.926,44,1
257 | 1,113,64,35,0,33.6,0.543,21,1
258 | 3,111,56,39,0,30.1,0.557,30,0
259 | 2,114,68,22,0,28.7,0.092,25,0
260 | 1,193,50,16,375,25.9,0.655,24,0
261 | 11,155,76,28,150,33.3,1.353,51,1
262 | 3,191,68,15,130,30.9,0.299,34,0
263 | 3,141,0,0,0,30,0.761,27,1
264 | 4,95,70,32,0,32.1,0.612,24,0
265 | 3,142,80,15,0,32.4,0.2,63,0
266 | 4,123,62,0,0,32,0.226,35,1
267 | 5,96,74,18,67,33.6,0.997,43,0
268 | 0,138,0,0,0,36.3,0.933,25,1
269 | 2,128,64,42,0,40,1.101,24,0
270 | 0,102,52,0,0,25.1,0.078,21,0
271 | 2,146,0,0,0,27.5,0.24,28,1
272 | 10,101,86,37,0,45.6,1.136,38,1
273 | 2,108,62,32,56,25.2,0.128,21,0
274 | 3,122,78,0,0,23,0.254,40,0
275 | 1,71,78,50,45,33.2,0.422,21,0
276 | 13,106,70,0,0,34.2,0.251,52,0
277 | 2,100,70,52,57,40.5,0.677,25,0
278 | 7,106,60,24,0,26.5,0.296,29,1
279 | 0,104,64,23,116,27.8,0.454,23,0
280 | 5,114,74,0,0,24.9,0.744,57,0
281 | 2,108,62,10,278,25.3,0.881,22,0
282 | 0,146,70,0,0,37.9,0.334,28,1
283 | 10,129,76,28,122,35.9,0.28,39,0
284 | 7,133,88,15,155,32.4,0.262,37,0
285 | 7,161,86,0,0,30.4,0.165,47,1
286 | 2,108,80,0,0,27,0.259,52,1
287 | 7,136,74,26,135,26,0.647,51,0
288 | 5,155,84,44,545,38.7,0.619,34,0
289 | 1,119,86,39,220,45.6,0.808,29,1
290 | 4,96,56,17,49,20.8,0.34,26,0
291 | 5,108,72,43,75,36.1,0.263,33,0
292 | 0,78,88,29,40,36.9,0.434,21,0
293 | 0,107,62,30,74,36.6,0.757,25,1
294 | 2,128,78,37,182,43.3,1.224,31,1
295 | 1,128,48,45,194,40.5,0.613,24,1
296 | 0,161,50,0,0,21.9,0.254,65,0
297 | 6,151,62,31,120,35.5,0.692,28,0
298 | 2,146,70,38,360,28,0.337,29,1
299 | 0,126,84,29,215,30.7,0.52,24,0
300 | 14,100,78,25,184,36.6,0.412,46,1
301 | 8,112,72,0,0,23.6,0.84,58,0
302 | 0,167,0,0,0,32.3,0.839,30,1
303 | 2,144,58,33,135,31.6,0.422,25,1
304 | 5,77,82,41,42,35.8,0.156,35,0
305 | 5,115,98,0,0,52.9,0.209,28,1
306 | 3,150,76,0,0,21,0.207,37,0
307 | 2,120,76,37,105,39.7,0.215,29,0
308 | 10,161,68,23,132,25.5,0.326,47,1
309 | 0,137,68,14,148,24.8,0.143,21,0
310 | 0,128,68,19,180,30.5,1.391,25,1
311 | 2,124,68,28,205,32.9,0.875,30,1
312 | 6,80,66,30,0,26.2,0.313,41,0
313 | 0,106,70,37,148,39.4,0.605,22,0
314 | 2,155,74,17,96,26.6,0.433,27,1
315 | 3,113,50,10,85,29.5,0.626,25,0
316 | 7,109,80,31,0,35.9,1.127,43,1
317 | 2,112,68,22,94,34.1,0.315,26,0
318 | 3,99,80,11,64,19.3,0.284,30,0
319 | 3,182,74,0,0,30.5,0.345,29,1
320 | 3,115,66,39,140,38.1,0.15,28,0
321 | 6,194,78,0,0,23.5,0.129,59,1
322 | 4,129,60,12,231,27.5,0.527,31,0
323 | 3,112,74,30,0,31.6,0.197,25,1
324 | 0,124,70,20,0,27.4,0.254,36,1
325 | 13,152,90,33,29,26.8,0.731,43,1
326 | 2,112,75,32,0,35.7,0.148,21,0
327 | 1,157,72,21,168,25.6,0.123,24,0
328 | 1,122,64,32,156,35.1,0.692,30,1
329 | 10,179,70,0,0,35.1,0.2,37,0
330 | 2,102,86,36,120,45.5,0.127,23,1
331 | 6,105,70,32,68,30.8,0.122,37,0
332 | 8,118,72,19,0,23.1,1.476,46,0
333 | 2,87,58,16,52,32.7,0.166,25,0
334 | 1,180,0,0,0,43.3,0.282,41,1
335 | 12,106,80,0,0,23.6,0.137,44,0
336 | 1,95,60,18,58,23.9,0.26,22,0
337 | 0,165,76,43,255,47.9,0.259,26,0
338 | 0,117,0,0,0,33.8,0.932,44,0
339 | 5,115,76,0,0,31.2,0.343,44,1
340 | 9,152,78,34,171,34.2,0.893,33,1
341 | 7,178,84,0,0,39.9,0.331,41,1
342 | 1,130,70,13,105,25.9,0.472,22,0
343 | 1,95,74,21,73,25.9,0.673,36,0
344 | 1,0,68,35,0,32,0.389,22,0
345 | 5,122,86,0,0,34.7,0.29,33,0
346 | 8,95,72,0,0,36.8,0.485,57,0
347 | 8,126,88,36,108,38.5,0.349,49,0
348 | 1,139,46,19,83,28.7,0.654,22,0
349 | 3,116,0,0,0,23.5,0.187,23,0
350 | 3,99,62,19,74,21.8,0.279,26,0
351 | 5,0,80,32,0,41,0.346,37,1
352 | 4,92,80,0,0,42.2,0.237,29,0
353 | 4,137,84,0,0,31.2,0.252,30,0
354 | 3,61,82,28,0,34.4,0.243,46,0
355 | 1,90,62,12,43,27.2,0.58,24,0
356 | 3,90,78,0,0,42.7,0.559,21,0
357 | 9,165,88,0,0,30.4,0.302,49,1
358 | 1,125,50,40,167,33.3,0.962,28,1
359 | 13,129,0,30,0,39.9,0.569,44,1
360 | 12,88,74,40,54,35.3,0.378,48,0
361 | 1,196,76,36,249,36.5,0.875,29,1
362 | 5,189,64,33,325,31.2,0.583,29,1
363 | 5,158,70,0,0,29.8,0.207,63,0
364 | 5,103,108,37,0,39.2,0.305,65,0
365 | 4,146,78,0,0,38.5,0.52,67,1
366 | 4,147,74,25,293,34.9,0.385,30,0
367 | 5,99,54,28,83,34,0.499,30,0
368 | 6,124,72,0,0,27.6,0.368,29,1
369 | 0,101,64,17,0,21,0.252,21,0
370 | 3,81,86,16,66,27.5,0.306,22,0
371 | 1,133,102,28,140,32.8,0.234,45,1
372 | 3,173,82,48,465,38.4,2.137,25,1
373 | 0,118,64,23,89,0,1.731,21,0
374 | 0,84,64,22,66,35.8,0.545,21,0
375 | 2,105,58,40,94,34.9,0.225,25,0
376 | 2,122,52,43,158,36.2,0.816,28,0
377 | 12,140,82,43,325,39.2,0.528,58,1
378 | 0,98,82,15,84,25.2,0.299,22,0
379 | 1,87,60,37,75,37.2,0.509,22,0
380 | 4,156,75,0,0,48.3,0.238,32,1
381 | 0,93,100,39,72,43.4,1.021,35,0
382 | 1,107,72,30,82,30.8,0.821,24,0
383 | 0,105,68,22,0,20,0.236,22,0
384 | 1,109,60,8,182,25.4,0.947,21,0
385 | 1,90,62,18,59,25.1,1.268,25,0
386 | 1,125,70,24,110,24.3,0.221,25,0
387 | 1,119,54,13,50,22.3,0.205,24,0
388 | 5,116,74,29,0,32.3,0.66,35,1
389 | 8,105,100,36,0,43.3,0.239,45,1
390 | 5,144,82,26,285,32,0.452,58,1
391 | 3,100,68,23,81,31.6,0.949,28,0
392 | 1,100,66,29,196,32,0.444,42,0
393 | 5,166,76,0,0,45.7,0.34,27,1
394 | 1,131,64,14,415,23.7,0.389,21,0
395 | 4,116,72,12,87,22.1,0.463,37,0
396 | 4,158,78,0,0,32.9,0.803,31,1
397 | 2,127,58,24,275,27.7,1.6,25,0
398 | 3,96,56,34,115,24.7,0.944,39,0
399 | 0,131,66,40,0,34.3,0.196,22,1
400 | 3,82,70,0,0,21.1,0.389,25,0
401 | 3,193,70,31,0,34.9,0.241,25,1
402 | 4,95,64,0,0,32,0.161,31,1
403 | 6,137,61,0,0,24.2,0.151,55,0
404 | 5,136,84,41,88,35,0.286,35,1
405 | 9,72,78,25,0,31.6,0.28,38,0
406 | 5,168,64,0,0,32.9,0.135,41,1
407 | 2,123,48,32,165,42.1,0.52,26,0
408 | 4,115,72,0,0,28.9,0.376,46,1
409 | 0,101,62,0,0,21.9,0.336,25,0
410 | 8,197,74,0,0,25.9,1.191,39,1
411 | 1,172,68,49,579,42.4,0.702,28,1
412 | 6,102,90,39,0,35.7,0.674,28,0
413 | 1,112,72,30,176,34.4,0.528,25,0
414 | 1,143,84,23,310,42.4,1.076,22,0
415 | 1,143,74,22,61,26.2,0.256,21,0
416 | 0,138,60,35,167,34.6,0.534,21,1
417 | 3,173,84,33,474,35.7,0.258,22,1
418 | 1,97,68,21,0,27.2,1.095,22,0
419 | 4,144,82,32,0,38.5,0.554,37,1
420 | 1,83,68,0,0,18.2,0.624,27,0
421 | 3,129,64,29,115,26.4,0.219,28,1
422 | 1,119,88,41,170,45.3,0.507,26,0
423 | 2,94,68,18,76,26,0.561,21,0
424 | 0,102,64,46,78,40.6,0.496,21,0
425 | 2,115,64,22,0,30.8,0.421,21,0
426 | 8,151,78,32,210,42.9,0.516,36,1
427 | 4,184,78,39,277,37,0.264,31,1
428 | 0,94,0,0,0,0,0.256,25,0
429 | 1,181,64,30,180,34.1,0.328,38,1
430 | 0,135,94,46,145,40.6,0.284,26,0
431 | 1,95,82,25,180,35,0.233,43,1
432 | 2,99,0,0,0,22.2,0.108,23,0
433 | 3,89,74,16,85,30.4,0.551,38,0
434 | 1,80,74,11,60,30,0.527,22,0
435 | 2,139,75,0,0,25.6,0.167,29,0
436 | 1,90,68,8,0,24.5,1.138,36,0
437 | 0,141,0,0,0,42.4,0.205,29,1
438 | 12,140,85,33,0,37.4,0.244,41,0
439 | 5,147,75,0,0,29.9,0.434,28,0
440 | 1,97,70,15,0,18.2,0.147,21,0
441 | 6,107,88,0,0,36.8,0.727,31,0
442 | 0,189,104,25,0,34.3,0.435,41,1
443 | 2,83,66,23,50,32.2,0.497,22,0
444 | 4,117,64,27,120,33.2,0.23,24,0
445 | 8,108,70,0,0,30.5,0.955,33,1
446 | 4,117,62,12,0,29.7,0.38,30,1
447 | 0,180,78,63,14,59.4,2.42,25,1
448 | 1,100,72,12,70,25.3,0.658,28,0
449 | 0,95,80,45,92,36.5,0.33,26,0
450 | 0,104,64,37,64,33.6,0.51,22,1
451 | 0,120,74,18,63,30.5,0.285,26,0
452 | 1,82,64,13,95,21.2,0.415,23,0
453 | 2,134,70,0,0,28.9,0.542,23,1
454 | 0,91,68,32,210,39.9,0.381,25,0
455 | 2,119,0,0,0,19.6,0.832,72,0
456 | 2,100,54,28,105,37.8,0.498,24,0
457 | 14,175,62,30,0,33.6,0.212,38,1
458 | 1,135,54,0,0,26.7,0.687,62,0
459 | 5,86,68,28,71,30.2,0.364,24,0
460 | 10,148,84,48,237,37.6,1.001,51,1
461 | 9,134,74,33,60,25.9,0.46,81,0
462 | 9,120,72,22,56,20.8,0.733,48,0
463 | 1,71,62,0,0,21.8,0.416,26,0
464 | 8,74,70,40,49,35.3,0.705,39,0
465 | 5,88,78,30,0,27.6,0.258,37,0
466 | 10,115,98,0,0,24,1.022,34,0
467 | 0,124,56,13,105,21.8,0.452,21,0
468 | 0,74,52,10,36,27.8,0.269,22,0
469 | 0,97,64,36,100,36.8,0.6,25,0
470 | 8,120,0,0,0,30,0.183,38,1
471 | 6,154,78,41,140,46.1,0.571,27,0
472 | 1,144,82,40,0,41.3,0.607,28,0
473 | 0,137,70,38,0,33.2,0.17,22,0
474 | 0,119,66,27,0,38.8,0.259,22,0
475 | 7,136,90,0,0,29.9,0.21,50,0
476 | 4,114,64,0,0,28.9,0.126,24,0
477 | 0,137,84,27,0,27.3,0.231,59,0
478 | 2,105,80,45,191,33.7,0.711,29,1
479 | 7,114,76,17,110,23.8,0.466,31,0
480 | 8,126,74,38,75,25.9,0.162,39,0
481 | 4,132,86,31,0,28,0.419,63,0
482 | 3,158,70,30,328,35.5,0.344,35,1
483 | 0,123,88,37,0,35.2,0.197,29,0
484 | 4,85,58,22,49,27.8,0.306,28,0
485 | 0,84,82,31,125,38.2,0.233,23,0
486 | 0,145,0,0,0,44.2,0.63,31,1
487 | 0,135,68,42,250,42.3,0.365,24,1
488 | 1,139,62,41,480,40.7,0.536,21,0
489 | 0,173,78,32,265,46.5,1.159,58,0
490 | 4,99,72,17,0,25.6,0.294,28,0
491 | 8,194,80,0,0,26.1,0.551,67,0
492 | 2,83,65,28,66,36.8,0.629,24,0
493 | 2,89,90,30,0,33.5,0.292,42,0
494 | 4,99,68,38,0,32.8,0.145,33,0
495 | 4,125,70,18,122,28.9,1.144,45,1
496 | 3,80,0,0,0,0,0.174,22,0
497 | 6,166,74,0,0,26.6,0.304,66,0
498 | 5,110,68,0,0,26,0.292,30,0
499 | 2,81,72,15,76,30.1,0.547,25,0
500 | 7,195,70,33,145,25.1,0.163,55,1
501 | 6,154,74,32,193,29.3,0.839,39,0
502 | 2,117,90,19,71,25.2,0.313,21,0
503 | 3,84,72,32,0,37.2,0.267,28,0
504 | 6,0,68,41,0,39,0.727,41,1
505 | 7,94,64,25,79,33.3,0.738,41,0
506 | 3,96,78,39,0,37.3,0.238,40,0
507 | 10,75,82,0,0,33.3,0.263,38,0
508 | 0,180,90,26,90,36.5,0.314,35,1
509 | 1,130,60,23,170,28.6,0.692,21,0
510 | 2,84,50,23,76,30.4,0.968,21,0
511 | 8,120,78,0,0,25,0.409,64,0
512 | 12,84,72,31,0,29.7,0.297,46,1
513 | 0,139,62,17,210,22.1,0.207,21,0
514 | 9,91,68,0,0,24.2,0.2,58,0
515 | 2,91,62,0,0,27.3,0.525,22,0
516 | 3,99,54,19,86,25.6,0.154,24,0
517 | 3,163,70,18,105,31.6,0.268,28,1
518 | 9,145,88,34,165,30.3,0.771,53,1
519 | 7,125,86,0,0,37.6,0.304,51,0
520 | 13,76,60,0,0,32.8,0.18,41,0
521 | 6,129,90,7,326,19.6,0.582,60,0
522 | 2,68,70,32,66,25,0.187,25,0
523 | 3,124,80,33,130,33.2,0.305,26,0
524 | 6,114,0,0,0,0,0.189,26,0
525 | 9,130,70,0,0,34.2,0.652,45,1
526 | 3,125,58,0,0,31.6,0.151,24,0
527 | 3,87,60,18,0,21.8,0.444,21,0
528 | 1,97,64,19,82,18.2,0.299,21,0
529 | 3,116,74,15,105,26.3,0.107,24,0
530 | 0,117,66,31,188,30.8,0.493,22,0
531 | 0,111,65,0,0,24.6,0.66,31,0
532 | 2,122,60,18,106,29.8,0.717,22,0
533 | 0,107,76,0,0,45.3,0.686,24,0
534 | 1,86,66,52,65,41.3,0.917,29,0
535 | 6,91,0,0,0,29.8,0.501,31,0
536 | 1,77,56,30,56,33.3,1.251,24,0
537 | 4,132,0,0,0,32.9,0.302,23,1
538 | 0,105,90,0,0,29.6,0.197,46,0
539 | 0,57,60,0,0,21.7,0.735,67,0
540 | 0,127,80,37,210,36.3,0.804,23,0
541 | 3,129,92,49,155,36.4,0.968,32,1
542 | 8,100,74,40,215,39.4,0.661,43,1
543 | 3,128,72,25,190,32.4,0.549,27,1
544 | 10,90,85,32,0,34.9,0.825,56,1
545 | 4,84,90,23,56,39.5,0.159,25,0
546 | 1,88,78,29,76,32,0.365,29,0
547 | 8,186,90,35,225,34.5,0.423,37,1
548 | 5,187,76,27,207,43.6,1.034,53,1
549 | 4,131,68,21,166,33.1,0.16,28,0
550 | 1,164,82,43,67,32.8,0.341,50,0
551 | 4,189,110,31,0,28.5,0.68,37,0
552 | 1,116,70,28,0,27.4,0.204,21,0
553 | 3,84,68,30,106,31.9,0.591,25,0
554 | 6,114,88,0,0,27.8,0.247,66,0
555 | 1,88,62,24,44,29.9,0.422,23,0
556 | 1,84,64,23,115,36.9,0.471,28,0
557 | 7,124,70,33,215,25.5,0.161,37,0
558 | 1,97,70,40,0,38.1,0.218,30,0
559 | 8,110,76,0,0,27.8,0.237,58,0
560 | 11,103,68,40,0,46.2,0.126,42,0
561 | 11,85,74,0,0,30.1,0.3,35,0
562 | 6,125,76,0,0,33.8,0.121,54,1
563 | 0,198,66,32,274,41.3,0.502,28,1
564 | 1,87,68,34,77,37.6,0.401,24,0
565 | 6,99,60,19,54,26.9,0.497,32,0
566 | 0,91,80,0,0,32.4,0.601,27,0
567 | 2,95,54,14,88,26.1,0.748,22,0
568 | 1,99,72,30,18,38.6,0.412,21,0
569 | 6,92,62,32,126,32,0.085,46,0
570 | 4,154,72,29,126,31.3,0.338,37,0
571 | 0,121,66,30,165,34.3,0.203,33,1
572 | 3,78,70,0,0,32.5,0.27,39,0
573 | 2,130,96,0,0,22.6,0.268,21,0
574 | 3,111,58,31,44,29.5,0.43,22,0
575 | 2,98,60,17,120,34.7,0.198,22,0
576 | 1,143,86,30,330,30.1,0.892,23,0
577 | 1,119,44,47,63,35.5,0.28,25,0
578 | 6,108,44,20,130,24,0.813,35,0
579 | 2,118,80,0,0,42.9,0.693,21,1
580 | 10,133,68,0,0,27,0.245,36,0
581 | 2,197,70,99,0,34.7,0.575,62,1
582 | 0,151,90,46,0,42.1,0.371,21,1
583 | 6,109,60,27,0,25,0.206,27,0
584 | 12,121,78,17,0,26.5,0.259,62,0
585 | 8,100,76,0,0,38.7,0.19,42,0
586 | 8,124,76,24,600,28.7,0.687,52,1
587 | 1,93,56,11,0,22.5,0.417,22,0
588 | 8,143,66,0,0,34.9,0.129,41,1
589 | 6,103,66,0,0,24.3,0.249,29,0
590 | 3,176,86,27,156,33.3,1.154,52,1
591 | 0,73,0,0,0,21.1,0.342,25,0
592 | 11,111,84,40,0,46.8,0.925,45,1
593 | 2,112,78,50,140,39.4,0.175,24,0
594 | 3,132,80,0,0,34.4,0.402,44,1
595 | 2,82,52,22,115,28.5,1.699,25,0
596 | 6,123,72,45,230,33.6,0.733,34,0
597 | 0,188,82,14,185,32,0.682,22,1
598 | 0,67,76,0,0,45.3,0.194,46,0
599 | 1,89,24,19,25,27.8,0.559,21,0
600 | 1,173,74,0,0,36.8,0.088,38,1
601 | 1,109,38,18,120,23.1,0.407,26,0
602 | 1,108,88,19,0,27.1,0.4,24,0
603 | 6,96,0,0,0,23.7,0.19,28,0
604 | 1,124,74,36,0,27.8,0.1,30,0
605 | 7,150,78,29,126,35.2,0.692,54,1
606 | 4,183,0,0,0,28.4,0.212,36,1
607 | 1,124,60,32,0,35.8,0.514,21,0
608 | 1,181,78,42,293,40,1.258,22,1
609 | 1,92,62,25,41,19.5,0.482,25,0
610 | 0,152,82,39,272,41.5,0.27,27,0
611 | 1,111,62,13,182,24,0.138,23,0
612 | 3,106,54,21,158,30.9,0.292,24,0
613 | 3,174,58,22,194,32.9,0.593,36,1
614 | 7,168,88,42,321,38.2,0.787,40,1
615 | 6,105,80,28,0,32.5,0.878,26,0
616 | 11,138,74,26,144,36.1,0.557,50,1
617 | 3,106,72,0,0,25.8,0.207,27,0
618 | 6,117,96,0,0,28.7,0.157,30,0
619 | 2,68,62,13,15,20.1,0.257,23,0
620 | 9,112,82,24,0,28.2,1.282,50,1
621 | 0,119,0,0,0,32.4,0.141,24,1
622 | 2,112,86,42,160,38.4,0.246,28,0
623 | 2,92,76,20,0,24.2,1.698,28,0
624 | 6,183,94,0,0,40.8,1.461,45,0
625 | 0,94,70,27,115,43.5,0.347,21,0
626 | 2,108,64,0,0,30.8,0.158,21,0
627 | 4,90,88,47,54,37.7,0.362,29,0
628 | 0,125,68,0,0,24.7,0.206,21,0
629 | 0,132,78,0,0,32.4,0.393,21,0
630 | 5,128,80,0,0,34.6,0.144,45,0
631 | 4,94,65,22,0,24.7,0.148,21,0
632 | 7,114,64,0,0,27.4,0.732,34,1
633 | 0,102,78,40,90,34.5,0.238,24,0
634 | 2,111,60,0,0,26.2,0.343,23,0
635 | 1,128,82,17,183,27.5,0.115,22,0
636 | 10,92,62,0,0,25.9,0.167,31,0
637 | 13,104,72,0,0,31.2,0.465,38,1
638 | 5,104,74,0,0,28.8,0.153,48,0
639 | 2,94,76,18,66,31.6,0.649,23,0
640 | 7,97,76,32,91,40.9,0.871,32,1
641 | 1,100,74,12,46,19.5,0.149,28,0
642 | 0,102,86,17,105,29.3,0.695,27,0
643 | 4,128,70,0,0,34.3,0.303,24,0
644 | 6,147,80,0,0,29.5,0.178,50,1
645 | 4,90,0,0,0,28,0.61,31,0
646 | 3,103,72,30,152,27.6,0.73,27,0
647 | 2,157,74,35,440,39.4,0.134,30,0
648 | 1,167,74,17,144,23.4,0.447,33,1
649 | 0,179,50,36,159,37.8,0.455,22,1
650 | 11,136,84,35,130,28.3,0.26,42,1
651 | 0,107,60,25,0,26.4,0.133,23,0
652 | 1,91,54,25,100,25.2,0.234,23,0
653 | 1,117,60,23,106,33.8,0.466,27,0
654 | 5,123,74,40,77,34.1,0.269,28,0
655 | 2,120,54,0,0,26.8,0.455,27,0
656 | 1,106,70,28,135,34.2,0.142,22,0
657 | 2,155,52,27,540,38.7,0.24,25,1
658 | 2,101,58,35,90,21.8,0.155,22,0
659 | 1,120,80,48,200,38.9,1.162,41,0
660 | 11,127,106,0,0,39,0.19,51,0
661 | 3,80,82,31,70,34.2,1.292,27,1
662 | 10,162,84,0,0,27.7,0.182,54,0
663 | 1,199,76,43,0,42.9,1.394,22,1
664 | 8,167,106,46,231,37.6,0.165,43,1
665 | 9,145,80,46,130,37.9,0.637,40,1
666 | 6,115,60,39,0,33.7,0.245,40,1
667 | 1,112,80,45,132,34.8,0.217,24,0
668 | 4,145,82,18,0,32.5,0.235,70,1
669 | 10,111,70,27,0,27.5,0.141,40,1
670 | 6,98,58,33,190,34,0.43,43,0
671 | 9,154,78,30,100,30.9,0.164,45,0
672 | 6,165,68,26,168,33.6,0.631,49,0
673 | 1,99,58,10,0,25.4,0.551,21,0
674 | 10,68,106,23,49,35.5,0.285,47,0
675 | 3,123,100,35,240,57.3,0.88,22,0
676 | 8,91,82,0,0,35.6,0.587,68,0
677 | 6,195,70,0,0,30.9,0.328,31,1
678 | 9,156,86,0,0,24.8,0.23,53,1
679 | 0,93,60,0,0,35.3,0.263,25,0
680 | 3,121,52,0,0,36,0.127,25,1
681 | 2,101,58,17,265,24.2,0.614,23,0
682 | 2,56,56,28,45,24.2,0.332,22,0
683 | 0,162,76,36,0,49.6,0.364,26,1
684 | 0,95,64,39,105,44.6,0.366,22,0
685 | 4,125,80,0,0,32.3,0.536,27,1
686 | 5,136,82,0,0,0,0.64,69,0
687 | 2,129,74,26,205,33.2,0.591,25,0
688 | 3,130,64,0,0,23.1,0.314,22,0
689 | 1,107,50,19,0,28.3,0.181,29,0
690 | 1,140,74,26,180,24.1,0.828,23,0
691 | 1,144,82,46,180,46.1,0.335,46,1
692 | 8,107,80,0,0,24.6,0.856,34,0
693 | 13,158,114,0,0,42.3,0.257,44,1
694 | 2,121,70,32,95,39.1,0.886,23,0
695 | 7,129,68,49,125,38.5,0.439,43,1
696 | 2,90,60,0,0,23.5,0.191,25,0
697 | 7,142,90,24,480,30.4,0.128,43,1
698 | 3,169,74,19,125,29.9,0.268,31,1
699 | 0,99,0,0,0,25,0.253,22,0
700 | 4,127,88,11,155,34.5,0.598,28,0
701 | 4,118,70,0,0,44.5,0.904,26,0
702 | 2,122,76,27,200,35.9,0.483,26,0
703 | 6,125,78,31,0,27.6,0.565,49,1
704 | 1,168,88,29,0,35,0.905,52,1
705 | 2,129,0,0,0,38.5,0.304,41,0
706 | 4,110,76,20,100,28.4,0.118,27,0
707 | 6,80,80,36,0,39.8,0.177,28,0
708 | 10,115,0,0,0,0,0.261,30,1
709 | 2,127,46,21,335,34.4,0.176,22,0
710 | 9,164,78,0,0,32.8,0.148,45,1
711 | 2,93,64,32,160,38,0.674,23,1
712 | 3,158,64,13,387,31.2,0.295,24,0
713 | 5,126,78,27,22,29.6,0.439,40,0
714 | 10,129,62,36,0,41.2,0.441,38,1
715 | 0,134,58,20,291,26.4,0.352,21,0
716 | 3,102,74,0,0,29.5,0.121,32,0
717 | 7,187,50,33,392,33.9,0.826,34,1
718 | 3,173,78,39,185,33.8,0.97,31,1
719 | 10,94,72,18,0,23.1,0.595,56,0
720 | 1,108,60,46,178,35.5,0.415,24,0
721 | 5,97,76,27,0,35.6,0.378,52,1
722 | 4,83,86,19,0,29.3,0.317,34,0
723 | 1,114,66,36,200,38.1,0.289,21,0
724 | 1,149,68,29,127,29.3,0.349,42,1
725 | 5,117,86,30,105,39.1,0.251,42,0
726 | 1,111,94,0,0,32.8,0.265,45,0
727 | 4,112,78,40,0,39.4,0.236,38,0
728 | 1,116,78,29,180,36.1,0.496,25,0
729 | 0,141,84,26,0,32.4,0.433,22,0
730 | 2,175,88,0,0,22.9,0.326,22,0
731 | 2,92,52,0,0,30.1,0.141,22,0
732 | 3,130,78,23,79,28.4,0.323,34,1
733 | 8,120,86,0,0,28.4,0.259,22,1
734 | 2,174,88,37,120,44.5,0.646,24,1
735 | 2,106,56,27,165,29,0.426,22,0
736 | 2,105,75,0,0,23.3,0.56,53,0
737 | 4,95,60,32,0,35.4,0.284,28,0
738 | 0,126,86,27,120,27.4,0.515,21,0
739 | 8,65,72,23,0,32,0.6,42,0
740 | 2,99,60,17,160,36.6,0.453,21,0
741 | 1,102,74,0,0,39.5,0.293,42,1
742 | 11,120,80,37,150,42.3,0.785,48,1
743 | 3,102,44,20,94,30.8,0.4,26,0
744 | 1,109,58,18,116,28.5,0.219,22,0
745 | 9,140,94,0,0,32.7,0.734,45,1
746 | 13,153,88,37,140,40.6,1.174,39,0
747 | 12,100,84,33,105,30,0.488,46,0
748 | 1,147,94,41,0,49.3,0.358,27,1
749 | 1,81,74,41,57,46.3,1.096,32,0
750 | 3,187,70,22,200,36.4,0.408,36,1
751 | 6,162,62,0,0,24.3,0.178,50,1
752 | 4,136,70,0,0,31.2,1.182,22,1
753 | 1,121,78,39,74,39,0.261,28,0
754 | 3,108,62,24,0,26,0.223,25,0
755 | 0,181,88,44,510,43.3,0.222,26,1
756 | 8,154,78,32,0,32.4,0.443,45,1
757 | 1,128,88,39,110,36.5,1.057,37,1
758 | 7,137,90,41,0,32,0.391,39,0
759 | 0,123,72,0,0,36.3,0.258,52,1
760 | 1,106,76,0,0,37.5,0.197,26,0
761 | 6,190,92,0,0,35.5,0.278,66,1
762 | 2,88,58,26,16,28.4,0.766,22,0
763 | 9,170,74,31,0,44,0.403,43,1
764 | 9,89,62,0,0,22.5,0.142,33,0
765 | 10,101,76,48,180,32.9,0.171,63,0
766 | 2,122,70,27,0,36.8,0.34,27,0
767 | 5,121,72,23,112,26.2,0.245,30,0
768 | 1,126,60,0,0,30.1,0.349,47,1
769 | 1,93,70,31,0,30.4,0.315,23,0
--------------------------------------------------------------------------------
/Diabetes Prediction/diabetes_predictor_script.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import joblib
3 | model = joblib.load('diabetes_rf_model.pkl')
4 |
5 | print("This is a Python to Script that Predicts Diabetes\n")
6 | a= int(input('Pregnancies: '))
7 | b= int(input('Glucose: '))
8 | c= int(input('BloodPressure: '))
9 | d= int(input('SkinThickness: '))
10 | e= int(input('Insulin: '))
11 | f = float(input('BMI: '))
12 | g = float(input('DiabetesPedigreeFunction: '))
13 |
14 | features = np.array([[a,b,c,d,e,f,g]])
15 |
16 | pred1 = model.predict(features)
17 | if pred1[0] == 0:
18 |     print("You tested Negative for Diabetes")
19 | else:
20 |     print("You tested Positive for Diabetes")
21 |
--------------------------------------------------------------------------------
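Note: the training step that produced diabetes_rf_model.pkl is not shown in
this dump. A minimal sketch of how such a model could be built from
diabetes.csv (assuming scikit-learn's RandomForestClassifier, as the
filename suggests, and the same seven feature columns the script reads):

    # Hedged reconstruction of a plausible training step; the actual
    # notebook (Diabetes Prediction.ipynb) may differ.
    import pandas as pd
    import joblib
    from sklearn.ensemble import RandomForestClassifier
    from sklearn.model_selection import train_test_split

    df = pd.read_csv('diabetes.csv')
    features = ['Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness',
                'Insulin', 'BMI', 'DiabetesPedigreeFunction']
    X_train, X_test, y_train, y_test = train_test_split(
        df[features], df['Outcome'], test_size=0.2, random_state=42)

    model = RandomForestClassifier(random_state=42)
    model.fit(X_train, y_train)
    print("test accuracy:", model.score(X_test, y_test))

    joblib.dump(model, 'diabetes_rf_model.pkl')
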
/Diabetes Prediction/diabetes_rf_model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Diabetes Prediction/diabetes_rf_model.pkl
--------------------------------------------------------------------------------
/Diabetes Prediction/diabetes_test.csv:
--------------------------------------------------------------------------------
1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction
2 | 6,148,72,35,0,33.6,0.627
3 | 1,85,66,29,0,26.6,0.351
4 | 8,183,64,0,0,23.3,0.672
5 | 1,89,66,23,94,28.1,0.167
6 | 0,137,40,35,168,43.1,2.288
7 | 5,116,74,0,0,25.6,0.201
8 | 3,78,50,32,88,31,0.248
9 | 10,115,0,0,0,35.3,0.134
10 | 2,197,70,45,543,30.5,0.158
11 | 8,125,96,0,0,0,0.232
12 | 4,110,92,0,0,37.6,0.191
13 | 10,168,74,0,0,38,0.537
14 | 10,139,80,0,0,27.1,1.441
15 | 1,189,60,23,846,30.1,0.398
16 | 5,166,72,19,175,25.8,0.587
17 | 7,100,0,0,0,30,0.484
18 | 0,118,84,47,230,45.8,0.551
19 | 7,107,74,0,0,29.6,0.254
20 | 1,103,30,38,83,43.3,0.183
21 | 1,115,70,30,96,34.6,0.529
22 | 3,126,88,41,235,39.3,0.704
23 | 8,99,84,0,0,35.4,0.388
24 | 7,196,90,0,0,39.8,0.451
25 | 9,119,80,35,0,29,0.263
26 | 11,143,94,33,146,36.6,0.254
27 | 10,125,70,26,115,31.1,0.205
28 | 7,147,76,0,0,39.4,0.257
29 | 1,97,66,15,140,23.2,0.487
30 | 13,145,82,19,110,22.2,0.245
31 | 5,117,92,0,0,34.1,0.337
32 | 5,109,75,26,0,36,0.546
33 | 3,158,76,36,245,31.6,0.851
34 | 3,88,58,11,54,24.8,0.267
35 | 6,92,92,0,0,19.9,0.188
36 | 10,122,78,31,0,27.6,0.512
37 | 4,103,60,33,192,24,0.966
38 | 11,138,76,0,0,33.2,0.42
39 | 9,102,76,37,0,32.9,0.665
40 | 2,90,68,42,0,38.2,0.503
41 | 4,111,72,47,207,37.1,1.39
42 | 3,180,64,25,70,34,0.271
43 | 7,133,84,0,0,40.2,0.696
44 | 7,106,92,18,0,22.7,0.235
45 | 9,171,110,24,240,45.4,0.721
46 | 7,159,64,0,0,27.4,0.294
47 | 0,180,66,39,0,42,1.893
48 | 1,146,56,0,0,29.7,0.564
49 | 2,71,70,27,0,28,0.586
50 | 7,103,66,32,0,39.1,0.344
51 |
--------------------------------------------------------------------------------
/Diabetes Prediction/heat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Diabetes Prediction/heat.png
--------------------------------------------------------------------------------
/Diabetes Prediction/heat2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Diabetes Prediction/heat2.png
--------------------------------------------------------------------------------
/Fish Weight Prediction/BoxPlot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Fish Weight Prediction/BoxPlot.png
--------------------------------------------------------------------------------
/Fish Weight Prediction/Fish_Predictor_Script.py:
--------------------------------------------------------------------------------
1 | import joblib
2 |
3 | model = joblib.load('fish_predictor.pkl')
4 |
5 | print("This is a model to predict the Weight of a fish given the input parameters")
6 |
7 | vert = float(input("Vertical Length:\n"))
8 | diag = float(input('Diagonal Length:\n'))
9 | hori = float(input('Horizontal Length:\n'))
10 | cross = float(input('Cross Length:\n'))
11 | height = float(input('Height:\n'))
12 |
13 | predictions = model.predict([[vert,diag,hori,cross,height]])
14 | print("This is the predicted value: ", predictions)
--------------------------------------------------------------------------------
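Note: the training step behind fish_predictor.pkl is likewise not shown. A
minimal sketch, assuming a scikit-learn LinearRegression fitted on the five
measurement columns of fish_proj.csv (the actual notebook may use a
different estimator or column mapping):

    # Hedged reconstruction of a plausible training step for
    # fish_predictor.pkl; the column choice is an assumption.
    import pandas as pd
    import joblib
    from sklearn.linear_model import LinearRegression

    df = pd.read_csv('fish_proj.csv')
    X = df[['Length1', 'Length2', 'Length3', 'Height', 'Width']]
    y = df['Weight']

    model = LinearRegression().fit(X, y)
    joblib.dump(model, 'fish_predictor.pkl')
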
/Fish Weight Prediction/barplots.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Fish Weight Prediction/barplots.png
--------------------------------------------------------------------------------
/Fish Weight Prediction/corrmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Fish Weight Prediction/corrmap.png
--------------------------------------------------------------------------------
/Fish Weight Prediction/fish_predictor.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Fish Weight Prediction/fish_predictor.pkl
--------------------------------------------------------------------------------
/Fish Weight Prediction/fish_proj.csv:
--------------------------------------------------------------------------------
1 | Species,Weight,Length1,Length2,Length3,Height,Width
2 | Bream,242,23.2,25.4,30,11.52,4.02
3 | Bream,290,24,26.3,31.2,12.48,4.3056
4 | Bream,340,23.9,26.5,31.1,12.3778,4.6961
5 | Bream,363,26.3,29,33.5,12.73,4.4555
6 | Bream,430,26.5,29,34,12.444,5.134
7 | Bream,450,26.8,29.7,34.7,13.6024,4.9274
8 | Bream,500,26.8,29.7,34.5,14.1795,5.2785
9 | Bream,390,27.6,30,35,12.67,4.69
10 | Bream,450,27.6,30,35.1,14.0049,4.8438
11 | Bream,500,28.5,30.7,36.2,14.2266,4.9594
12 | Bream,475,28.4,31,36.2,14.2628,5.1042
13 | Bream,500,28.7,31,36.2,14.3714,4.8146
14 | Bream,500,29.1,31.5,36.4,13.7592,4.368
15 | Bream,340,29.5,32,37.3,13.9129,5.0728
16 | Bream,600,29.4,32,37.2,14.9544,5.1708
17 | Bream,600,29.4,32,37.2,15.438,5.58
18 | Bream,700,30.4,33,38.3,14.8604,5.2854
19 | Bream,700,30.4,33,38.5,14.938,5.1975
20 | Bream,610,30.9,33.5,38.6,15.633,5.1338
21 | Bream,650,31,33.5,38.7,14.4738,5.7276
22 | Bream,575,31.3,34,39.5,15.1285,5.5695
23 | Bream,685,31.4,34,39.2,15.9936,5.3704
24 | Bream,620,31.5,34.5,39.7,15.5227,5.2801
25 | Bream,680,31.8,35,40.6,15.4686,6.1306
26 | Bream,700,31.9,35,40.5,16.2405,5.589
27 | Bream,725,31.8,35,40.9,16.36,6.0532
28 | Bream,720,32,35,40.6,16.3618,6.09
29 | Bream,714,32.7,36,41.5,16.517,5.8515
30 | Bream,850,32.8,36,41.6,16.8896,6.1984
31 | Bream,1000,33.5,37,42.6,18.957,6.603
32 | Bream,920,35,38.5,44.1,18.0369,6.3063
33 | Bream,955,35,38.5,44,18.084,6.292
34 | Bream,925,36.2,39.5,45.3,18.7542,6.7497
35 | Bream,975,37.4,41,45.9,18.6354,6.7473
36 | Bream,950,38,41,46.5,17.6235,6.3705
37 | Roach,40,12.9,14.1,16.2,4.1472,2.268
38 | Roach,69,16.5,18.2,20.3,5.2983,2.8217
39 | Roach,78,17.5,18.8,21.2,5.5756,2.9044
40 | Roach,87,18.2,19.8,22.2,5.6166,3.1746
41 | Roach,120,18.6,20,22.2,6.216,3.5742
42 | Roach,0,19,20.5,22.8,6.4752,3.3516
43 | Roach,110,19.1,20.8,23.1,6.1677,3.3957
44 | Roach,120,19.4,21,23.7,6.1146,3.2943
45 | Roach,150,20.4,22,24.7,5.8045,3.7544
46 | Roach,145,20.5,22,24.3,6.6339,3.5478
47 | Roach,160,20.5,22.5,25.3,7.0334,3.8203
48 | Roach,140,21,22.5,25,6.55,3.325
49 | Roach,160,21.1,22.5,25,6.4,3.8
50 | Roach,169,22,24,27.2,7.5344,3.8352
51 | Roach,161,22,23.4,26.7,6.9153,3.6312
52 | Roach,200,22.1,23.5,26.8,7.3968,4.1272
53 | Roach,180,23.6,25.2,27.9,7.0866,3.906
54 | Roach,290,24,26,29.2,8.8768,4.4968
55 | Roach,272,25,27,30.6,8.568,4.7736
56 | Roach,390,29.5,31.7,35,9.485,5.355
57 | Whitefish,270,23.6,26,28.7,8.3804,4.2476
58 | Whitefish,270,24.1,26.5,29.3,8.1454,4.2485
59 | Whitefish,306,25.6,28,30.8,8.778,4.6816
60 | Whitefish,540,28.5,31,34,10.744,6.562
61 | Whitefish,800,33.7,36.4,39.6,11.7612,6.5736
62 | Whitefish,1000,37.3,40,43.5,12.354,6.525
63 | Parkki,55,13.5,14.7,16.5,6.8475,2.3265
64 | Parkki,60,14.3,15.5,17.4,6.5772,2.3142
65 | Parkki,90,16.3,17.7,19.8,7.4052,2.673
66 | Parkki,120,17.5,19,21.3,8.3922,2.9181
67 | Parkki,150,18.4,20,22.4,8.8928,3.2928
68 | Parkki,140,19,20.7,23.2,8.5376,3.2944
69 | Parkki,170,19,20.7,23.2,9.396,3.4104
70 | Parkki,145,19.8,21.5,24.1,9.7364,3.1571
71 | Parkki,200,21.2,23,25.8,10.3458,3.6636
72 | Parkki,273,23,25,28,11.088,4.144
73 | Parkki,300,24,26,29,11.368,4.234
74 | Perch,5.9,7.5,8.4,8.8,2.112,1.408
75 | Perch,32,12.5,13.7,14.7,3.528,1.9992
76 | Perch,40,13.8,15,16,3.824,2.432
77 | Perch,51.5,15,16.2,17.2,4.5924,2.6316
78 | Perch,70,15.7,17.4,18.5,4.588,2.9415
79 | Perch,100,16.2,18,19.2,5.2224,3.3216
80 | Perch,78,16.8,18.7,19.4,5.1992,3.1234
81 | Perch,80,17.2,19,20.2,5.6358,3.0502
82 | Perch,85,17.8,19.6,20.8,5.1376,3.0368
83 | Perch,85,18.2,20,21,5.082,2.772
84 | Perch,110,19,21,22.5,5.6925,3.555
85 | Perch,115,19,21,22.5,5.9175,3.3075
86 | Perch,125,19,21,22.5,5.6925,3.6675
87 | Perch,130,19.3,21.3,22.8,6.384,3.534
88 | Perch,120,20,22,23.5,6.11,3.4075
89 | Perch,120,20,22,23.5,5.64,3.525
90 | Perch,130,20,22,23.5,6.11,3.525
91 | Perch,135,20,22,23.5,5.875,3.525
92 | Perch,110,20,22,23.5,5.5225,3.995
93 | Perch,130,20.5,22.5,24,5.856,3.624
94 | Perch,150,20.5,22.5,24,6.792,3.624
95 | Perch,145,20.7,22.7,24.2,5.9532,3.63
96 | Perch,150,21,23,24.5,5.2185,3.626
97 | Perch,170,21.5,23.5,25,6.275,3.725
98 | Perch,225,22,24,25.5,7.293,3.723
99 | Perch,145,22,24,25.5,6.375,3.825
100 | Perch,188,22.6,24.6,26.2,6.7334,4.1658
101 | Perch,180,23,25,26.5,6.4395,3.6835
102 | Perch,197,23.5,25.6,27,6.561,4.239
103 | Perch,218,25,26.5,28,7.168,4.144
104 | Perch,300,25.2,27.3,28.7,8.323,5.1373
105 | Perch,260,25.4,27.5,28.9,7.1672,4.335
106 | Perch,265,25.4,27.5,28.9,7.0516,4.335
107 | Perch,250,25.4,27.5,28.9,7.2828,4.5662
108 | Perch,250,25.9,28,29.4,7.8204,4.2042
109 | Perch,300,26.9,28.7,30.1,7.5852,4.6354
110 | Perch,320,27.8,30,31.6,7.6156,4.7716
111 | Perch,514,30.5,32.8,34,10.03,6.018
112 | Perch,556,32,34.5,36.5,10.2565,6.3875
113 | Perch,840,32.5,35,37.3,11.4884,7.7957
114 | Perch,685,34,36.5,39,10.881,6.864
115 | Perch,700,34,36,38.3,10.6091,6.7408
116 | Perch,700,34.5,37,39.4,10.835,6.2646
117 | Perch,690,34.6,37,39.3,10.5717,6.3666
118 | Perch,900,36.5,39,41.4,11.1366,7.4934
119 | Perch,650,36.5,39,41.4,11.1366,6.003
120 | Perch,820,36.6,39,41.3,12.4313,7.3514
121 | Perch,850,36.9,40,42.3,11.9286,7.1064
122 | Perch,900,37,40,42.5,11.73,7.225
123 | Perch,1015,37,40,42.4,12.3808,7.4624
124 | Perch,820,37.1,40,42.5,11.135,6.63
125 | Perch,1100,39,42,44.6,12.8002,6.8684
126 | Perch,1000,39.8,43,45.2,11.9328,7.2772
127 | Perch,1100,40.1,43,45.5,12.5125,7.4165
128 | Perch,1000,40.2,43.5,46,12.604,8.142
129 | Perch,1000,41.1,44,46.6,12.4888,7.5958
130 | Pike,200,30,32.3,34.8,5.568,3.3756
131 | Pike,300,31.7,34,37.8,5.7078,4.158
132 | Pike,300,32.7,35,38.8,5.9364,4.3844
133 | Pike,300,34.8,37.3,39.8,6.2884,4.0198
134 | Pike,430,35.5,38,40.5,7.29,4.5765
135 | Pike,345,36,38.5,41,6.396,3.977
136 | Pike,456,40,42.5,45.5,7.28,4.3225
137 | Pike,510,40,42.5,45.5,6.825,4.459
138 | Pike,540,40.1,43,45.8,7.786,5.1296
139 | Pike,500,42,45,48,6.96,4.896
140 | Pike,567,43.2,46,48.7,7.792,4.87
141 | Pike,770,44.8,48,51.2,7.68,5.376
142 | Pike,950,48.3,51.7,55.1,8.9262,6.1712
143 | Pike,1250,52,56,59.7,10.6863,6.9849
144 | Pike,1600,56,60,64,9.6,6.144
145 | Pike,1550,56,60,64,9.6,6.144
146 | Pike,1650,59,63.4,68,10.812,7.48
147 | Smelt,6.7,9.3,9.8,10.8,1.7388,1.0476
148 | Smelt,7.5,10,10.5,11.6,1.972,1.16
149 | Smelt,7,10.1,10.6,11.6,1.7284,1.1484
150 | Smelt,9.7,10.4,11,12,2.196,1.38
151 | Smelt,9.8,10.7,11.2,12.4,2.0832,1.2772
152 | Smelt,8.7,10.8,11.3,12.6,1.9782,1.2852
153 | Smelt,10,11.3,11.8,13.1,2.2139,1.2838
154 | Smelt,9.9,11.3,11.8,13.1,2.2139,1.1659
155 | Smelt,9.8,11.4,12,13.2,2.2044,1.1484
156 | Smelt,12.2,11.5,12.2,13.4,2.0904,1.3936
157 | Smelt,13.4,11.7,12.4,13.5,2.43,1.269
158 | Smelt,12.2,12.1,13,13.8,2.277,1.2558
159 | Smelt,19.7,13.2,14.3,15.2,2.8728,2.0672
160 | Smelt,19.9,13.8,15,16.2,2.9322,1.8792
161 |
--------------------------------------------------------------------------------
/Fish Weight Prediction/pairplots.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Fish Weight Prediction/pairplots.png
--------------------------------------------------------------------------------
/HomePrices Prediction/HomePrices Prediction.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "id": "48192837",
7 | "metadata": {},
8 | "outputs": [
9 | {
10 | "data": {
11 | "text/html": [
12 | "
\n",
13 | "\n",
26 | "
\n",
27 | " \n",
28 | " \n",
29 | " | \n",
30 | " area | \n",
31 | " bedrooms | \n",
32 | " age | \n",
33 | " price | \n",
34 | "
\n",
35 | " \n",
36 | " \n",
37 | " \n",
38 | " 0 | \n",
39 | " 2600 | \n",
40 | " 3.0 | \n",
41 | " 20 | \n",
42 | " 550000 | \n",
43 | "
\n",
44 | " \n",
45 | " 1 | \n",
46 | " 3000 | \n",
47 | " 4.0 | \n",
48 | " 15 | \n",
49 | " 565000 | \n",
50 | "
\n",
51 | " \n",
52 | " 2 | \n",
53 | " 3200 | \n",
54 | " NaN | \n",
55 | " 18 | \n",
56 | " 610000 | \n",
57 | "
\n",
58 | " \n",
59 | " 3 | \n",
60 | " 3600 | \n",
61 | " 3.0 | \n",
62 | " 30 | \n",
63 | " 595000 | \n",
64 | "
\n",
65 | " \n",
66 | " 4 | \n",
67 | " 4000 | \n",
68 | " 5.0 | \n",
69 | " 8 | \n",
70 | " 760000 | \n",
71 | "
\n",
72 | " \n",
73 | "
\n",
74 | "
"
75 | ],
76 | "text/plain": [
77 | " area bedrooms age price\n",
78 | "0 2600 3.0 20 550000\n",
79 | "1 3000 4.0 15 565000\n",
80 | "2 3200 NaN 18 610000\n",
81 | "3 3600 3.0 30 595000\n",
82 | "4 4000 5.0 8 760000"
83 | ]
84 | },
85 | "execution_count": 1,
86 | "metadata": {},
87 | "output_type": "execute_result"
88 | }
89 | ],
90 | "source": [
91 | "import pandas as pd\n",
92 | "df = pd.read_csv('homeprices.csv')\n",
93 | "df.head()"
94 | ]
95 | },
96 | {
97 | "cell_type": "code",
98 | "execution_count": 2,
99 | "id": "e39d650c",
100 | "metadata": {},
101 | "outputs": [],
102 | "source": [
103 | "df= df.fillna(df.median())"
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "execution_count": 3,
109 | "id": "32b74aad",
110 | "metadata": {},
111 | "outputs": [
112 | {
113 | "data": {
114 | "text/html": [
115 | "\n",
116 | "\n",
129 | "
\n",
130 | " \n",
131 | " \n",
132 | " | \n",
133 | " area | \n",
134 | " bedrooms | \n",
135 | " age | \n",
136 | " price | \n",
137 | "
\n",
138 | " \n",
139 | " \n",
140 | " \n",
141 | " 0 | \n",
142 | " 2600 | \n",
143 | " 3.0 | \n",
144 | " 20 | \n",
145 | " 550000 | \n",
146 | "
\n",
147 | " \n",
148 | " 1 | \n",
149 | " 3000 | \n",
150 | " 4.0 | \n",
151 | " 15 | \n",
152 | " 565000 | \n",
153 | "
\n",
154 | " \n",
155 | " 2 | \n",
156 | " 3200 | \n",
157 | " 4.0 | \n",
158 | " 18 | \n",
159 | " 610000 | \n",
160 | "
\n",
161 | " \n",
162 | " 3 | \n",
163 | " 3600 | \n",
164 | " 3.0 | \n",
165 | " 30 | \n",
166 | " 595000 | \n",
167 | "
\n",
168 | " \n",
169 | " 4 | \n",
170 | " 4000 | \n",
171 | " 5.0 | \n",
172 | " 8 | \n",
173 | " 760000 | \n",
174 | "
\n",
175 | " \n",
176 | "
\n",
177 | "
"
178 | ],
179 | "text/plain": [
180 | " area bedrooms age price\n",
181 | "0 2600 3.0 20 550000\n",
182 | "1 3000 4.0 15 565000\n",
183 | "2 3200 4.0 18 610000\n",
184 | "3 3600 3.0 30 595000\n",
185 | "4 4000 5.0 8 760000"
186 | ]
187 | },
188 | "execution_count": 3,
189 | "metadata": {},
190 | "output_type": "execute_result"
191 | }
192 | ],
193 | "source": [
194 | "df.head()"
195 | ]
196 | },
197 | {
198 | "cell_type": "code",
199 | "execution_count": 4,
200 | "id": "5095c1f5",
201 | "metadata": {},
202 | "outputs": [
203 | {
204 | "name": "stdout",
205 | "output_type": "stream",
206 | "text": [
207 | "\n",
208 | "RangeIndex: 6 entries, 0 to 5\n",
209 | "Data columns (total 4 columns):\n",
210 | " # Column Non-Null Count Dtype \n",
211 | "--- ------ -------------- ----- \n",
212 | " 0 area 6 non-null int64 \n",
213 | " 1 bedrooms 6 non-null float64\n",
214 | " 2 age 6 non-null int64 \n",
215 | " 3 price 6 non-null int64 \n",
216 | "dtypes: float64(1), int64(3)\n",
217 | "memory usage: 320.0 bytes\n"
218 | ]
219 | }
220 | ],
221 | "source": [
222 | "df.info()"
223 | ]
224 | },
225 | {
226 | "cell_type": "code",
227 | "execution_count": 5,
228 | "id": "2c7a2a03",
229 | "metadata": {},
230 | "outputs": [
231 | {
232 | "data": {
233 | "text/plain": [
234 | "(6, 4)"
235 | ]
236 | },
237 | "execution_count": 5,
238 | "metadata": {},
239 | "output_type": "execute_result"
240 | }
241 | ],
242 | "source": [
243 | "df.shape"
244 | ]
245 | },
246 | {
247 | "cell_type": "code",
248 | "execution_count": 6,
249 | "id": "2bd26c3d",
250 | "metadata": {},
251 | "outputs": [],
252 | "source": [
253 | "from sklearn.linear_model import LinearRegression"
254 | ]
255 | },
256 | {
257 | "cell_type": "code",
258 | "execution_count": 7,
259 | "id": "00950c74",
260 | "metadata": {},
261 | "outputs": [],
262 | "source": [
263 | "model = LinearRegression()"
264 | ]
265 | },
266 | {
267 | "cell_type": "code",
268 | "execution_count": 8,
269 | "id": "76a394c8",
270 | "metadata": {},
271 | "outputs": [],
272 | "source": [
273 | "X = df.drop('price', axis=1)\n",
274 | "y = df.price"
275 | ]
276 | },
277 | {
278 | "cell_type": "code",
279 | "execution_count": 9,
280 | "id": "82f8870c",
281 | "metadata": {},
282 | "outputs": [
283 | {
284 | "data": {
285 | "text/html": [
286 | "\n",
287 | "\n",
300 | "
\n",
301 | " \n",
302 | " \n",
303 | " | \n",
304 | " area | \n",
305 | " bedrooms | \n",
306 | " age | \n",
307 | "
\n",
308 | " \n",
309 | " \n",
310 | " \n",
311 | " 0 | \n",
312 | " 2600 | \n",
313 | " 3.0 | \n",
314 | " 20 | \n",
315 | "
\n",
316 | " \n",
317 | " 1 | \n",
318 | " 3000 | \n",
319 | " 4.0 | \n",
320 | " 15 | \n",
321 | "
\n",
322 | " \n",
323 | " 2 | \n",
324 | " 3200 | \n",
325 | " 4.0 | \n",
326 | " 18 | \n",
327 | "
\n",
328 | " \n",
329 | " 3 | \n",
330 | " 3600 | \n",
331 | " 3.0 | \n",
332 | " 30 | \n",
333 | "
\n",
334 | " \n",
335 | " 4 | \n",
336 | " 4000 | \n",
337 | " 5.0 | \n",
338 | " 8 | \n",
339 | "
\n",
340 | " \n",
341 | "
\n",
342 | "
"
343 | ],
344 | "text/plain": [
345 | " area bedrooms age\n",
346 | "0 2600 3.0 20\n",
347 | "1 3000 4.0 15\n",
348 | "2 3200 4.0 18\n",
349 | "3 3600 3.0 30\n",
350 | "4 4000 5.0 8"
351 | ]
352 | },
353 | "execution_count": 9,
354 | "metadata": {},
355 | "output_type": "execute_result"
356 | }
357 | ],
358 | "source": [
359 | "X.head()"
360 | ]
361 | },
362 | {
363 | "cell_type": "code",
364 | "execution_count": 10,
365 | "id": "8c1d270e",
366 | "metadata": {},
367 | "outputs": [
368 | {
369 | "data": {
370 | "text/plain": [
371 | "0 550000\n",
372 | "1 565000\n",
373 | "2 610000\n",
374 | "3 595000\n",
375 | "4 760000\n",
376 | "Name: price, dtype: int64"
377 | ]
378 | },
379 | "execution_count": 10,
380 | "metadata": {},
381 | "output_type": "execute_result"
382 | }
383 | ],
384 | "source": [
385 | "y.head()"
386 | ]
387 | },
388 | {
389 | "cell_type": "code",
390 | "execution_count": 11,
391 | "id": "a189db6c",
392 | "metadata": {},
393 | "outputs": [
394 | {
395 | "data": {
396 | "text/plain": [
397 | "LinearRegression()"
398 | ]
399 | },
400 | "execution_count": 11,
401 | "metadata": {},
402 | "output_type": "execute_result"
403 | }
404 | ],
405 | "source": [
406 | "model.fit(X,y)"
407 | ]
408 | },
409 | {
410 | "cell_type": "code",
411 | "execution_count": 12,
412 | "id": "bcc0e40e",
413 | "metadata": {},
414 | "outputs": [
415 | {
416 | "data": {
417 | "text/plain": [
418 | "array([ 112.06244194, 23388.88007794, -3231.71790863])"
419 | ]
420 | },
421 | "execution_count": 12,
422 | "metadata": {},
423 | "output_type": "execute_result"
424 | }
425 | ],
426 | "source": [
427 | "model.coef_"
428 | ]
429 | },
430 | {
431 | "cell_type": "code",
432 | "execution_count": 13,
433 | "id": "d1421d9f",
434 | "metadata": {},
435 | "outputs": [
436 | {
437 | "data": {
438 | "text/plain": [
439 | "221323.00186540408"
440 | ]
441 | },
442 | "execution_count": 13,
443 | "metadata": {},
444 | "output_type": "execute_result"
445 | }
446 | ],
447 | "source": [
448 | "model.intercept_"
449 | ]
450 | },
451 | {
452 | "cell_type": "code",
453 | "execution_count": 14,
454 | "id": "0dd8c380",
455 | "metadata": {},
456 | "outputs": [
457 | {
458 | "data": {
459 | "text/plain": [
460 | "['homemodel.pkl']"
461 | ]
462 | },
463 | "execution_count": 14,
464 | "metadata": {},
465 | "output_type": "execute_result"
466 | }
467 | ],
468 | "source": [
469 | "import joblib\n",
470 | "\n",
471 | "joblib.dump(model, 'homemodel.pkl')"
472 | ]
473 | },
474 | {
475 | "cell_type": "code",
476 | "execution_count": 15,
477 | "id": "0236cd41",
478 | "metadata": {},
479 | "outputs": [
480 | {
481 | "name": "stdout",
482 | "output_type": "stream",
483 | "text": [
484 | "This is a model to predict the Price of a House\n",
485 | "Age of the House:\n",
486 | "20\n",
487 | "Number of bedrooms:\n",
488 | "3\n",
489 | "Area of the house:\n",
490 | "2900\n",
491 | "The price of the house is $[551836.36555875]\n"
492 | ]
493 | }
494 | ],
495 | "source": [
496 | "joblib.load('homemodel.pkl')\n",
497 | "print(\"This is a model to predict the Price of a House\")\n",
498 | "age = int(input(\"Age of the House:\\n\"))\n",
499 | "bed = int(input('Number of bedrooms:\\n'))\n",
500 | "area_of = int(input('Area of the house:\\n'))\n",
501 | "\n",
502 | "predictions = model.predict([[area_of,bed,age]])\n",
503 | "print(str(f\"The price of the house is ${predictions}\"))"
504 | ]
505 | },
506 | {
507 | "cell_type": "code",
508 | "execution_count": null,
509 | "id": "eda80a52",
510 | "metadata": {},
511 | "outputs": [],
512 | "source": []
513 | }
514 | ],
515 | "metadata": {
516 | "kernelspec": {
517 | "display_name": "Python 3 (ipykernel)",
518 | "language": "python",
519 | "name": "python3"
520 | },
521 | "language_info": {
522 | "codemirror_mode": {
523 | "name": "ipython",
524 | "version": 3
525 | },
526 | "file_extension": ".py",
527 | "mimetype": "text/x-python",
528 | "name": "python",
529 | "nbconvert_exporter": "python",
530 | "pygments_lexer": "ipython3",
531 | "version": "3.9.7"
532 | }
533 | },
534 | "nbformat": 4,
535 | "nbformat_minor": 5
536 | }
537 |
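
The last code cell above calls joblib.load without assigning its result, so the prediction actually comes from the model object still in memory. As a sanity check, here is a minimal sketch (not part of the notebook) of how a fresh session could reload homemodel.pkl and predict; the input values are arbitrary examples, and passing a DataFrame with the training column names avoids scikit-learn's feature-name warning.

import joblib
import pandas as pd

# Reload the model saved by the notebook above.
model = joblib.load('homemodel.pkl')

# One-row frame with the same columns the model was fitted on: area, bedrooms, age.
house = pd.DataFrame([[2900, 3, 20]], columns=['area', 'bedrooms', 'age'])

price = model.predict(house)[0]
print(f"The predicted price of the house is ${price:,.2f}")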
--------------------------------------------------------------------------------
/HomePrices Prediction/homemodel.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/HomePrices Prediction/homemodel.pkl
--------------------------------------------------------------------------------
/HomePrices Prediction/homeprices.csv:
--------------------------------------------------------------------------------
1 | area,bedrooms,age,price
2 | 2600,3,20,550000
3 | 3000,4,15,565000
4 | 3200,,18,610000
5 | 3600,3,30,595000
6 | 4000,5,8,760000
7 | 4100,6,8,810000
8 |
--------------------------------------------------------------------------------
/IMDB Movie Reviews Sentiment Analysis with NLP/IMDB Movie Reviews Sentiment Analysis 2.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "2b41306f",
6 | "metadata": {},
7 | "source": [
8 | "# IMDB Movie Reviews Sentiment Analysis\n",
9 | "\n",
10 | "In this project I try to perform sentiment analysis of IMDB movie reviews using NLP techniques"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 58,
16 | "id": "1183182e",
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import pandas as pd"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 59,
26 | "id": "50b75380",
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "data": {
31 | "text/html": [
32 | "\n",
33 | "\n",
46 | "
\n",
47 | " \n",
48 | " \n",
49 | " | \n",
50 | " review | \n",
51 | " sentiment | \n",
52 | "
\n",
53 | " \n",
54 | " \n",
55 | " \n",
56 | " 0 | \n",
57 | " One of the other reviewers has mentioned that ... | \n",
58 | " positive | \n",
59 | "
\n",
60 | " \n",
61 | " 1 | \n",
62 | " A wonderful little production. <br /><br />The... | \n",
63 | " positive | \n",
64 | "
\n",
65 | " \n",
66 | " 2 | \n",
67 | " I thought this was a wonderful way to spend ti... | \n",
68 | " positive | \n",
69 | "
\n",
70 | " \n",
71 | " 3 | \n",
72 | " Basically there's a family where a little boy ... | \n",
73 | " negative | \n",
74 | "
\n",
75 | " \n",
76 | " 4 | \n",
77 | " Petter Mattei's \"Love in the Time of Money\" is... | \n",
78 | " positive | \n",
79 | "
\n",
80 | " \n",
81 | "
\n",
82 | "
"
83 | ],
84 | "text/plain": [
85 | " review sentiment\n",
86 | "0 One of the other reviewers has mentioned that ... positive\n",
87 | "1 A wonderful little production.
The... positive\n",
88 | "2 I thought this was a wonderful way to spend ti... positive\n",
89 | "3 Basically there's a family where a little boy ... negative\n",
90 | "4 Petter Mattei's \"Love in the Time of Money\" is... positive"
91 | ]
92 | },
93 | "execution_count": 59,
94 | "metadata": {},
95 | "output_type": "execute_result"
96 | }
97 | ],
98 | "source": [
99 | "data = pd.read_csv('imdb.csv')\n",
100 | "\n",
101 | "data.head()"
102 | ]
103 | },
104 | {
105 | "cell_type": "markdown",
106 | "id": "17b97644",
107 | "metadata": {},
108 | "source": [
109 | "The next two lines convert the positive and negative sentiments to 1 and 0 respectively so we can use it later for our ML Model."
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": 60,
115 | "id": "29d19e60",
116 | "metadata": {},
117 | "outputs": [],
118 | "source": [
119 | "data.loc[data['sentiment']=='positive','sentiment'] = 1"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": 61,
125 | "id": "cdb54873",
126 | "metadata": {},
127 | "outputs": [],
128 | "source": [
129 | "data.loc[data['sentiment']=='negative','sentiment'] = 0"
130 | ]
131 | },
132 | {
133 | "cell_type": "code",
134 | "execution_count": 69,
135 | "id": "82f77c48",
136 | "metadata": {},
137 | "outputs": [
138 | {
139 | "data": {
140 | "text/html": [
141 | "\n",
142 | "\n",
155 | "
\n",
156 | " \n",
157 | " \n",
158 | " | \n",
159 | " review | \n",
160 | " sentiment | \n",
161 | "
\n",
162 | " \n",
163 | " \n",
164 | " \n",
165 | " 0 | \n",
166 | " One of the other reviewers has mentioned that ... | \n",
167 | " 1 | \n",
168 | "
\n",
169 | " \n",
170 | " 1 | \n",
171 | " A wonderful little production. <br /><br />The... | \n",
172 | " 1 | \n",
173 | "
\n",
174 | " \n",
175 | " 2 | \n",
176 | " I thought this was a wonderful way to spend ti... | \n",
177 | " 1 | \n",
178 | "
\n",
179 | " \n",
180 | " 3 | \n",
181 | " Basically there's a family where a little boy ... | \n",
182 | " 0 | \n",
183 | "
\n",
184 | " \n",
185 | " 4 | \n",
186 | " Petter Mattei's \"Love in the Time of Money\" is... | \n",
187 | " 1 | \n",
188 | "
\n",
189 | " \n",
190 | " ... | \n",
191 | " ... | \n",
192 | " ... | \n",
193 | "
\n",
194 | " \n",
195 | " 49995 | \n",
196 | " I thought this movie did a down right good job... | \n",
197 | " 1 | \n",
198 | "
\n",
199 | " \n",
200 | " 49996 | \n",
201 | " Bad plot, bad dialogue, bad acting, idiotic di... | \n",
202 | " 0 | \n",
203 | "
\n",
204 | " \n",
205 | " 49997 | \n",
206 | " I am a Catholic taught in parochial elementary... | \n",
207 | " 0 | \n",
208 | "
\n",
209 | " \n",
210 | " 49998 | \n",
211 | " I'm going to have to disagree with the previou... | \n",
212 | " 0 | \n",
213 | "
\n",
214 | " \n",
215 | " 49999 | \n",
216 | " No one expects the Star Trek movies to be high... | \n",
217 | " 0 | \n",
218 | "
\n",
219 | " \n",
220 | "
\n",
221 | "
50000 rows × 2 columns
\n",
222 | "
"
223 | ],
224 | "text/plain": [
225 | " review sentiment\n",
226 | "0 One of the other reviewers has mentioned that ... 1\n",
227 | "1 A wonderful little production.
The... 1\n",
228 | "2 I thought this was a wonderful way to spend ti... 1\n",
229 | "3 Basically there's a family where a little boy ... 0\n",
230 | "4 Petter Mattei's \"Love in the Time of Money\" is... 1\n",
231 | "... ... ...\n",
232 | "49995 I thought this movie did a down right good job... 1\n",
233 | "49996 Bad plot, bad dialogue, bad acting, idiotic di... 0\n",
234 | "49997 I am a Catholic taught in parochial elementary... 0\n",
235 | "49998 I'm going to have to disagree with the previou... 0\n",
236 | "49999 No one expects the Star Trek movies to be high... 0\n",
237 | "\n",
238 | "[50000 rows x 2 columns]"
239 | ]
240 | },
241 | "execution_count": 69,
242 | "metadata": {},
243 | "output_type": "execute_result"
244 | }
245 | ],
246 | "source": [
247 | "data[\"sentiment\"] = pd.to_numeric(data[\"sentiment\"])\n",
248 | "data"
249 | ]
250 | },
251 | {
252 | "cell_type": "markdown",
253 | "id": "36cb9968",
254 | "metadata": {},
255 | "source": [
256 | "Now we have two columns, the review and the sentiment."
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 70,
262 | "id": "cba75c10",
263 | "metadata": {},
264 | "outputs": [],
265 | "source": [
266 | "review = data['review']\n",
267 | "\n",
268 | "label = data['sentiment']"
269 | ]
270 | },
271 | {
272 | "cell_type": "markdown",
273 | "id": "bc055e02",
274 | "metadata": {},
275 | "source": [
276 | "We save the reviews column to a variable called **review** and the labels to a variable called **label**."
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": 71,
282 | "id": "34f259aa",
283 | "metadata": {},
284 | "outputs": [
285 | {
286 | "data": {
287 | "text/plain": [
288 | "0 One of the other reviewers has mentioned that ...\n",
289 | "1 A wonderful little production.
The...\n",
290 | "2 I thought this was a wonderful way to spend ti...\n",
291 | "3 Basically there's a family where a little boy ...\n",
292 | "4 Petter Mattei's \"Love in the Time of Money\" is...\n",
293 | "Name: review, dtype: object"
294 | ]
295 | },
296 | "execution_count": 71,
297 | "metadata": {},
298 | "output_type": "execute_result"
299 | }
300 | ],
301 | "source": [
302 | "review.head()"
303 | ]
304 | },
305 | {
306 | "cell_type": "code",
307 | "execution_count": 72,
308 | "id": "cd1ded62",
309 | "metadata": {},
310 | "outputs": [
311 | {
312 | "data": {
313 | "text/plain": [
314 | "0 1\n",
315 | "1 1\n",
316 | "2 1\n",
317 | "3 0\n",
318 | "4 1\n",
319 | "Name: sentiment, dtype: int64"
320 | ]
321 | },
322 | "execution_count": 72,
323 | "metadata": {},
324 | "output_type": "execute_result"
325 | }
326 | ],
327 | "source": [
328 | "label.head()"
329 | ]
330 | },
331 | {
332 | "cell_type": "markdown",
333 | "id": "b2f3d41a",
334 | "metadata": {},
335 | "source": [
336 | "## Pre Processing\n",
337 | "Now in this section we have to process the data by:\n",
338 | "1. Converting all the rows to lower case.\n",
339 | "2. Removing stop words like i, me , you, our, your etc\n",
340 | "3. Removing hyperlinks,numbers,punctuations etc."
341 | ]
342 | },
343 | {
344 | "cell_type": "markdown",
345 | "id": "e1ea6106",
346 | "metadata": {},
347 | "source": [
348 | "Now we import the nltk library. NLTK is a toolkit build for working with NLP in Python. It provides us various text processing libraries with a lot of test datasets."
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "execution_count": 73,
354 | "id": "2ef3d0a2",
355 | "metadata": {},
356 | "outputs": [],
357 | "source": [
358 | "import nltk\n",
359 | "import re\n",
360 | "import string"
361 | ]
362 | },
363 | {
364 | "cell_type": "code",
365 | "execution_count": 74,
366 | "id": "f754db9c",
367 | "metadata": {},
368 | "outputs": [
369 | {
370 | "name": "stderr",
371 | "output_type": "stream",
372 | "text": [
373 | "[nltk_data] Downloading package stopwords to C:\\Users\\IFEANYI\n",
374 | "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
375 | "[nltk_data] Package stopwords is already up-to-date!\n"
376 | ]
377 | }
378 | ],
379 | "source": [
380 | "nltk.download('stopwords')\n",
381 | "\n",
382 | "stop_words = nltk.corpus.stopwords.words('english')"
383 | ]
384 | },
385 | {
386 | "cell_type": "markdown",
387 | "id": "f2e93633",
388 | "metadata": {},
389 | "source": [
390 | "We download the stopwords we want to remove from the dataset."
391 | ]
392 | },
393 | {
394 | "cell_type": "code",
395 | "execution_count": 75,
396 | "id": "f5e52573",
397 | "metadata": {},
398 | "outputs": [
399 | {
400 | "name": "stderr",
401 | "output_type": "stream",
402 | "text": [
403 | "[nltk_data] Downloading package punkt to C:\\Users\\IFEANYI\n",
404 | "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
405 | "[nltk_data] Package punkt is already up-to-date!\n"
406 | ]
407 | }
408 | ],
409 | "source": [
410 | "nltk.download('punkt')\n",
411 | "\n",
412 | "from nltk.tokenize import word_tokenize"
413 | ]
414 | },
415 | {
416 | "cell_type": "code",
417 | "execution_count": 76,
418 | "id": "5d9f2bc9",
419 | "metadata": {},
420 | "outputs": [],
421 | "source": [
422 | "def pre_process(txt):\n",
423 | " lowered_text = txt.lower()\n",
424 | " \n",
425 | " removed_numbers = re.sub(r'\\d+','',lowered_text) # re. is for regular expressions. Substitutes digits with an empty string.\n",
426 | " \n",
427 | " removed_punctuation = removed_numbers.translate(str.maketrans('','',string.punctuation)) # This removes punctuation from the text and replaces it with an empty string\n",
428 | " \n",
429 | " # now we split the text to obtain tokens and then remove the stopwords.\n",
430 | " \n",
431 | " word_tokens = word_tokenize(removed_punctuation)\n",
432 | " \n",
433 | " processed_text = ''.join([word for word in word_tokens if word not in stop_words])\n",
434 | " \n",
435 | " return processed_text"
436 | ]
437 | },
438 | {
439 | "cell_type": "code",
440 | "execution_count": 77,
441 | "id": "f1117ecc",
442 | "metadata": {},
443 | "outputs": [
444 | {
445 | "data": {
446 | "text/plain": [
447 | "0 onereviewersmentionedwatchingozepisodeyoullhoo...\n",
448 | "1 wonderfullittleproductionbrbrfilmingtechniqueu...\n",
449 | "2 thoughtwonderfulwayspendtimehotsummerweekendsi...\n",
450 | "3 basicallytheresfamilylittleboyjakethinkstheres...\n",
451 | "4 pettermatteislovetimemoneyvisuallystunningfilm...\n",
452 | " ... \n",
453 | "49995 thoughtmovierightgoodjobwasntcreativeoriginalf...\n",
454 | "49996 badplotbaddialoguebadactingidioticdirectingann...\n",
455 | "49997 catholictaughtparochialelementaryschoolsnunsta...\n",
456 | "49998 imgoingdisagreepreviouscommentsidemaltinonesec...\n",
457 | "49999 oneexpectsstartrekmovieshighartfansexpectmovie...\n",
458 | "Name: review, Length: 50000, dtype: object"
459 | ]
460 | },
461 | "execution_count": 77,
462 | "metadata": {},
463 | "output_type": "execute_result"
464 | }
465 | ],
466 | "source": [
467 | "processed = review.apply(pre_process) #.apply applies a function across a pandas dataframe.\n",
468 | "\n",
469 | "processed"
470 | ]
471 | },
472 | {
473 | "cell_type": "markdown",
474 | "id": "cc68f4b2",
475 | "metadata": {},
476 | "source": [
477 | "We have now processed the text but we still need to tokenize it."
478 | ]
479 | },
480 | {
481 | "cell_type": "code",
482 | "execution_count": 78,
483 | "id": "a3f72cf3",
484 | "metadata": {},
485 | "outputs": [
486 | {
487 | "data": {
488 | "text/plain": [
489 | "<50000x54131 sparse matrix of type ''\n",
490 | "\twith 54605 stored elements in Compressed Sparse Row format>"
491 | ]
492 | },
493 | "execution_count": 78,
494 | "metadata": {},
495 | "output_type": "execute_result"
496 | }
497 | ],
498 | "source": [
499 | "from sklearn.feature_extraction.text import CountVectorizer\n",
500 | "\n",
501 | "vectorizer = CountVectorizer()\n",
502 | "\n",
503 | "input_data = vectorizer.fit_transform(processed)\n",
504 | "input_data"
505 | ]
506 | },
507 | {
508 | "cell_type": "markdown",
509 | "id": "12af08c3",
510 | "metadata": {},
511 | "source": [
512 | "We have now created our sparse matrix with number of reviews as rows(50000) and all the words in the dataset as columns after removing the stopwords(54605)"
513 | ]
514 | },
515 | {
516 | "cell_type": "code",
517 | "execution_count": 79,
518 | "id": "a590526c",
519 | "metadata": {},
520 | "outputs": [
521 | {
522 | "name": "stdout",
523 | "output_type": "stream",
524 | "text": [
525 | " (0, 33523)\t1\n",
526 | " (1, 52330)\t1\n",
527 | " (2, 47296)\t1\n",
528 | " (3, 3292)\t1\n",
529 | " (4, 34949)\t1\n",
530 | " (5, 36048)\t1\n",
531 | " (6, 45390)\t1\n",
532 | " (7, 42384)\t1\n",
533 | " (8, 10557)\t1\n",
534 | " (9, 24508)\t1\n",
535 | " (10, 34963)\t1\n",
536 | " (11, 40135)\t1\n",
537 | " (12, 20553)\t1\n",
538 | " (13, 5754)\t1\n",
539 | " (14, 12330)\t1\n",
540 | " (15, 22987)\t1\n",
541 | " (16, 13935)\t1\n",
542 | " (17, 29071)\t1\n",
543 | " (18, 38317)\t1\n",
544 | " (19, 2837)\t1\n",
545 | " (20, 45084)\t1\n",
546 | " (21, 46037)\t1\n",
547 | " (22, 173)\t1\n",
548 | " (23, 14792)\t1\n",
549 | " (24, 52689)\t1\n",
550 | " :\t:\n",
551 | " (49977, 9287)\t1\n",
552 | " (49977, 23754)\t1\n",
553 | " (49977, 21977)\t1\n",
554 | " (49978, 18524)\t1\n",
555 | " (49979, 40021)\t1\n",
556 | " (49980, 44949)\t1\n",
557 | " (49981, 38674)\t1\n",
558 | " (49982, 19772)\t1\n",
559 | " (49983, 25347)\t1\n",
560 | " (49984, 19141)\t1\n",
561 | " (49985, 20457)\t1\n",
562 | " (49986, 28372)\t1\n",
563 | " (49987, 38251)\t1\n",
564 | " (49988, 15399)\t1\n",
565 | " (49989, 17655)\t1\n",
566 | " (49990, 23482)\t1\n",
567 | " (49991, 23902)\t1\n",
568 | " (49992, 22565)\t1\n",
569 | " (49993, 39103)\t1\n",
570 | " (49994, 48636)\t1\n",
571 | " (49995, 47217)\t1\n",
572 | " (49996, 3100)\t1\n",
573 | " (49997, 5777)\t1\n",
574 | " (49998, 20717)\t1\n",
575 | " (49999, 33005)\t1\n"
576 | ]
577 | }
578 | ],
579 | "source": [
580 | "print(input_data)"
581 | ]
582 | },
583 | {
584 | "cell_type": "markdown",
585 | "id": "7de2c8be",
586 | "metadata": {},
587 | "source": [
588 | "Now we can feed the matrix to a machine learning model. In this case we'll use the Logistic Regression model since we are trying to classify it into positive or negative."
589 | ]
590 | },
591 | {
592 | "cell_type": "code",
593 | "execution_count": 80,
594 | "id": "46e82cd1",
595 | "metadata": {},
596 | "outputs": [
597 | {
598 | "data": {
599 | "text/plain": [
600 | "LogisticRegression()"
601 | ]
602 | },
603 | "execution_count": 80,
604 | "metadata": {},
605 | "output_type": "execute_result"
606 | }
607 | ],
608 | "source": [
609 | "from sklearn.linear_model import LogisticRegression\n",
610 | "\n",
611 | "model = LogisticRegression()\n",
612 | "model.fit(input_data, label)"
613 | ]
614 | },
615 | {
616 | "cell_type": "code",
617 | "execution_count": 86,
618 | "id": "987923f7",
619 | "metadata": {},
620 | "outputs": [],
621 | "source": [
622 | "def prediction_input(sentence):\n",
623 | " processed = pre_process(sentence)\n",
624 | " input_data = vectorizer.transform([processed])\n",
625 | " prediction = model.predict(input_data)\n",
626 | " \n",
627 | " if (prediction[0] == 1):\n",
628 | " print('This is a Positive Sentiment Sentence.')\n",
629 | " elif (prediction[0] == 0):\n",
630 | " print('This is a Negative Sentiment Sentence.')"
631 | ]
632 | },
633 | {
634 | "cell_type": "code",
635 | "execution_count": 87,
636 | "id": "413980cd",
637 | "metadata": {},
638 | "outputs": [
639 | {
640 | "name": "stdout",
641 | "output_type": "stream",
642 | "text": [
643 | "What is your review: that movie was bad\n",
644 | "This is a Positive Sentiment Sentence.\n"
645 | ]
646 | }
647 | ],
648 | "source": [
649 | "review_input = input(\"What is your review: \")\n",
650 | "prediction_input(review_input)"
651 | ]
652 | },
653 | {
654 | "cell_type": "code",
655 | "execution_count": 88,
656 | "id": "74ed18e9",
657 | "metadata": {},
658 | "outputs": [
659 | {
660 | "name": "stdout",
661 | "output_type": "stream",
662 | "text": [
663 | "accuracy_score without data pre-processing = 100.00 %\n"
664 | ]
665 | }
666 | ],
667 | "source": [
668 | "from sklearn import metrics\n",
669 | "accuracy_score = metrics.accuracy_score(model.predict(input_data), label)\n",
670 | "print(\"accuracy_score without data pre-processing = \" + str('{:04.2f}'.format(accuracy_score*100))+\" %\")"
671 | ]
672 | }
673 | ],
674 | "metadata": {
675 | "kernelspec": {
676 | "display_name": "Python 3 (ipykernel)",
677 | "language": "python",
678 | "name": "python3"
679 | },
680 | "language_info": {
681 | "codemirror_mode": {
682 | "name": "ipython",
683 | "version": 3
684 | },
685 | "file_extension": ".py",
686 | "mimetype": "text/x-python",
687 | "name": "python",
688 | "nbconvert_exporter": "python",
689 | "pygments_lexer": "ipython3",
690 | "version": "3.9.7"
691 | }
692 | },
693 | "nbformat": 4,
694 | "nbformat_minor": 5
695 | }
696 |
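
The 100% score above is computed on the same rows the model was fitted on, so it is a training score rather than evidence of generalization. Below is a minimal sketch of scoring on a held-out split instead, assuming the same imdb.csv layout loaded above (that file is read by the notebook but not included in this dump); the stop-word handling here uses CountVectorizer's built-in English list rather than NLTK's.

import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

data = pd.read_csv('imdb.csv')
y = (data['sentiment'] == 'positive').astype(int)

X_train, X_test, y_train, y_test = train_test_split(
    data['review'], y, test_size=0.2, random_state=42)

# Fit the vectorizer on the training split only to avoid leakage.
vectorizer = CountVectorizer(stop_words='english')
Xtr = vectorizer.fit_transform(X_train)
Xte = vectorizer.transform(X_test)

model = LogisticRegression(max_iter=1000)
model.fit(Xtr, y_train)

print(f"held-out accuracy = {accuracy_score(y_test, model.predict(Xte)):.4f}")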
--------------------------------------------------------------------------------
/IMDB Movie Reviews Sentiment Analysis with NLP/IMDB Movie Reviews Sentiment Analysis.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "6025239e",
6 | "metadata": {},
7 | "source": [
8 | "# IMDB Movie Reviews Sentiment Analysis\n",
9 | "\n",
10 | "In this project I try to perform sentiment analysis of IMDB movie reviews using NLP techniques"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 36,
16 | "id": "e1182bec",
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import pandas as pd"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 37,
26 | "id": "0bf8d1f3",
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "data": {
31 | "text/html": [
32 | "\n",
33 | "\n",
46 | "
\n",
47 | " \n",
48 | " \n",
49 | " | \n",
50 | " review | \n",
51 | " sentiment | \n",
52 | "
\n",
53 | " \n",
54 | " \n",
55 | " \n",
56 | " 0 | \n",
57 | " One of the other reviewers has mentioned that ... | \n",
58 | " positive | \n",
59 | "
\n",
60 | " \n",
61 | " 1 | \n",
62 | " A wonderful little production. <br /><br />The... | \n",
63 | " positive | \n",
64 | "
\n",
65 | " \n",
66 | " 2 | \n",
67 | " I thought this was a wonderful way to spend ti... | \n",
68 | " positive | \n",
69 | "
\n",
70 | " \n",
71 | " 3 | \n",
72 | " Basically there's a family where a little boy ... | \n",
73 | " negative | \n",
74 | "
\n",
75 | " \n",
76 | " 4 | \n",
77 | " Petter Mattei's \"Love in the Time of Money\" is... | \n",
78 | " positive | \n",
79 | "
\n",
80 | " \n",
81 | "
\n",
82 | "
"
83 | ],
84 | "text/plain": [
85 | " review sentiment\n",
86 | "0 One of the other reviewers has mentioned that ... positive\n",
87 | "1 A wonderful little production.
The... positive\n",
88 | "2 I thought this was a wonderful way to spend ti... positive\n",
89 | "3 Basically there's a family where a little boy ... negative\n",
90 | "4 Petter Mattei's \"Love in the Time of Money\" is... positive"
91 | ]
92 | },
93 | "execution_count": 37,
94 | "metadata": {},
95 | "output_type": "execute_result"
96 | }
97 | ],
98 | "source": [
99 | "data = pd.read_csv('imdb_reviews.csv')\n",
100 | "\n",
101 | "data.head()"
102 | ]
103 | },
104 | {
105 | "cell_type": "markdown",
106 | "id": "faafc3a3",
107 | "metadata": {},
108 | "source": [
109 | "Because the dataset doesn't have any headers it uses the first row as the header. Therefore we have to add the argument **'header = None'** when reading the dataset.\""
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": 5,
115 | "id": "7234a95f",
116 | "metadata": {},
117 | "outputs": [
118 | {
119 | "data": {
120 | "text/html": [
121 | "\n",
122 | "\n",
135 | "
\n",
136 | " \n",
137 | " \n",
138 | " | \n",
139 | " 0 | \n",
140 | " 1 | \n",
141 | "
\n",
142 | " \n",
143 | " \n",
144 | " \n",
145 | " 0 | \n",
146 | " A very, very, very slow-moving, aimless movie ... | \n",
147 | " 0 | \n",
148 | "
\n",
149 | " \n",
150 | " 1 | \n",
151 | " Not sure who was more lost - the flat characte... | \n",
152 | " 0 | \n",
153 | "
\n",
154 | " \n",
155 | " 2 | \n",
156 | " Attempting artiness with black & white and cle... | \n",
157 | " 0 | \n",
158 | "
\n",
159 | " \n",
160 | " 3 | \n",
161 | " Very little music or anything to speak of. | \n",
162 | " 0 | \n",
163 | "
\n",
164 | " \n",
165 | " 4 | \n",
166 | " The best scene in the movie was when Gerardo i... | \n",
167 | " 1 | \n",
168 | "
\n",
169 | " \n",
170 | "
\n",
171 | "
"
172 | ],
173 | "text/plain": [
174 | " 0 1\n",
175 | "0 A very, very, very slow-moving, aimless movie ... 0\n",
176 | "1 Not sure who was more lost - the flat characte... 0\n",
177 | "2 Attempting artiness with black & white and cle... 0\n",
178 | "3 Very little music or anything to speak of. 0\n",
179 | "4 The best scene in the movie was when Gerardo i... 1"
180 | ]
181 | },
182 | "execution_count": 5,
183 | "metadata": {},
184 | "output_type": "execute_result"
185 | }
186 | ],
187 | "source": [
188 | "data = pd.read_csv('imdb_reviews.csv', header = None)\n",
189 | "data.head()"
190 | ]
191 | },
192 | {
193 | "cell_type": "markdown",
194 | "id": "b78e8691",
195 | "metadata": {},
196 | "source": [
197 | "Now we have two columns, the review and the labels. 0 indicates a negative review while 1 indicates a positive review"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 6,
203 | "id": "aa8b3663",
204 | "metadata": {},
205 | "outputs": [],
206 | "source": [
207 | "review = data[0]\n",
208 | "\n",
209 | "label = data[1]"
210 | ]
211 | },
212 | {
213 | "cell_type": "markdown",
214 | "id": "6c4205ac",
215 | "metadata": {},
216 | "source": [
217 | "We save the reviews column to a variable called **review** and the labels to a variable called **label**."
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": 7,
223 | "id": "a24998fb",
224 | "metadata": {},
225 | "outputs": [
226 | {
227 | "data": {
228 | "text/plain": [
229 | "0 A very, very, very slow-moving, aimless movie ...\n",
230 | "1 Not sure who was more lost - the flat characte...\n",
231 | "2 Attempting artiness with black & white and cle...\n",
232 | "3 Very little music or anything to speak of. \n",
233 | "4 The best scene in the movie was when Gerardo i...\n",
234 | "Name: 0, dtype: object"
235 | ]
236 | },
237 | "execution_count": 7,
238 | "metadata": {},
239 | "output_type": "execute_result"
240 | }
241 | ],
242 | "source": [
243 | "review.head()"
244 | ]
245 | },
246 | {
247 | "cell_type": "code",
248 | "execution_count": 8,
249 | "id": "ea8e2821",
250 | "metadata": {},
251 | "outputs": [
252 | {
253 | "data": {
254 | "text/plain": [
255 | "0 0\n",
256 | "1 0\n",
257 | "2 0\n",
258 | "3 0\n",
259 | "4 1\n",
260 | "Name: 1, dtype: int64"
261 | ]
262 | },
263 | "execution_count": 8,
264 | "metadata": {},
265 | "output_type": "execute_result"
266 | }
267 | ],
268 | "source": [
269 | "label.head()"
270 | ]
271 | },
272 | {
273 | "cell_type": "markdown",
274 | "id": "bca895cb",
275 | "metadata": {},
276 | "source": [
277 | "## Pre Processing\n",
278 | "Now in this section we have to process the data by:\n",
279 | "1. Converting all the rows to lower case.\n",
280 | "2. Removing stop words like i, me , you, our, your etc\n",
281 | "3. Removing hyperlinks,numbers,punctuations etc."
282 | ]
283 | },
284 | {
285 | "cell_type": "markdown",
286 | "id": "d1d96f68",
287 | "metadata": {},
288 | "source": [
289 | "Now we import the nltk library. NLTK is a toolkit build for working with NLP in Python. It provides us various text processing libraries with a lot of test datasets."
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": 10,
295 | "id": "b3f0e127",
296 | "metadata": {},
297 | "outputs": [],
298 | "source": [
299 | "import nltk\n",
300 | "import re\n",
301 | "import string"
302 | ]
303 | },
304 | {
305 | "cell_type": "code",
306 | "execution_count": 11,
307 | "id": "2122a282",
308 | "metadata": {},
309 | "outputs": [
310 | {
311 | "name": "stderr",
312 | "output_type": "stream",
313 | "text": [
314 | "[nltk_data] Downloading package stopwords to C:\\Users\\IFEANYI\n",
315 | "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
316 | "[nltk_data] Unzipping corpora\\stopwords.zip.\n"
317 | ]
318 | }
319 | ],
320 | "source": [
321 | "nltk.download('stopwords')\n",
322 | "\n",
323 | "stop_words = nltk.corpus.stopwords.words('english')"
324 | ]
325 | },
326 | {
327 | "cell_type": "markdown",
328 | "id": "0be6f194",
329 | "metadata": {},
330 | "source": [
331 | "We download the stopwords we want to remove from the dataset."
332 | ]
333 | },
334 | {
335 | "cell_type": "code",
336 | "execution_count": 12,
337 | "id": "6a7613db",
338 | "metadata": {},
339 | "outputs": [
340 | {
341 | "name": "stderr",
342 | "output_type": "stream",
343 | "text": [
344 | "[nltk_data] Downloading package punkt to C:\\Users\\IFEANYI\n",
345 | "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
346 | "[nltk_data] Unzipping tokenizers\\punkt.zip.\n"
347 | ]
348 | }
349 | ],
350 | "source": [
351 | "nltk.download('punkt')\n",
352 | "\n",
353 | "from nltk.tokenize import word_tokenize"
354 | ]
355 | },
356 | {
357 | "cell_type": "code",
358 | "execution_count": 17,
359 | "id": "89adf866",
360 | "metadata": {},
361 | "outputs": [],
362 | "source": [
363 | "def pre_process(txt):\n",
364 | " lowered_text = txt.lower()\n",
365 | " \n",
366 | " removed_numbers = re.sub(r'\\d+','',lowered_text) # re. is for regular expressions. Substitutes digits with an empty string.\n",
367 | " \n",
368 | " removed_punctuation = removed_numbers.translate(str.maketrans('','',string.punctuation)) # This removes punctuation from the text and replaces it with an empty string\n",
369 | " \n",
370 | " # now we split the text to obtain tokens and then remove the stopwords.\n",
371 | " \n",
372 | " word_tokens = word_tokenize(removed_punctuation)\n",
373 | " \n",
374 | " processed_text = ''.join([word for word in word_tokens if word not in stop_words])\n",
375 | " \n",
376 | " return processed_text"
377 | ]
378 | },
379 | {
380 | "cell_type": "code",
381 | "execution_count": 18,
382 | "id": "2c88cb08",
383 | "metadata": {},
384 | "outputs": [
385 | {
386 | "data": {
387 | "text/plain": [
388 | "0 slowmovingaimlessmoviedistresseddriftingyoungman\n",
389 | "1 surelostflatcharactersaudiencenearlyhalfwalked\n",
390 | "2 attemptingartinessblackwhiteclevercameraangles...\n",
391 | "3 littlemusicanythingspeak\n",
392 | "4 bestscenemoviegerardotryingfindsongkeepsrunnin...\n",
393 | " ... \n",
394 | "743 gotboredwatchingjessicelangetakeclothes\n",
395 | "744 unfortunatelyvirtuefilmsproductionworklostregr...\n",
396 | "745 wordembarrassing\n",
397 | "746 exceptionallybad\n",
398 | "747 insultonesintelligencehugewastemoney\n",
399 | "Name: 0, Length: 748, dtype: object"
400 | ]
401 | },
402 | "execution_count": 18,
403 | "metadata": {},
404 | "output_type": "execute_result"
405 | }
406 | ],
407 | "source": [
408 | "processed = review.apply(pre_process) #.apply applies a function across a pandas dataframe.\n",
409 | "\n",
410 | "processed"
411 | ]
412 | },
413 | {
414 | "cell_type": "markdown",
415 | "id": "6300160a",
416 | "metadata": {},
417 | "source": [
418 | "We have now processed the text but we still need to tokenize it."
419 | ]
420 | },
421 | {
422 | "cell_type": "code",
423 | "execution_count": 19,
424 | "id": "974223c6",
425 | "metadata": {},
426 | "outputs": [
427 | {
428 | "data": {
429 | "text/plain": [
430 | "<748x748 sparse matrix of type ''\n",
431 | "\twith 754 stored elements in Compressed Sparse Row format>"
432 | ]
433 | },
434 | "execution_count": 19,
435 | "metadata": {},
436 | "output_type": "execute_result"
437 | }
438 | ],
439 | "source": [
440 | "from sklearn.feature_extraction.text import CountVectorizer\n",
441 | "\n",
442 | "vectorizer = CountVectorizer()\n",
443 | "\n",
444 | "input_data = vectorizer.fit_transform(processed)\n",
445 | "input_data"
446 | ]
447 | },
448 | {
449 | "cell_type": "markdown",
450 | "id": "ec802dea",
451 | "metadata": {},
452 | "source": [
453 | "We have now created our sparse matrix with number of reviews as rows(748) and all the words in the dataset as columns after removing the stopwords(748)"
454 | ]
455 | },
456 | {
457 | "cell_type": "code",
458 | "execution_count": 20,
459 | "id": "39d2013e",
460 | "metadata": {},
461 | "outputs": [
462 | {
463 | "name": "stdout",
464 | "output_type": "stream",
465 | "text": [
466 | " (0, 614)\t1\n",
467 | " (1, 648)\t1\n",
468 | " (2, 41)\t1\n",
469 | " (3, 373)\t1\n",
470 | " (4, 64)\t1\n",
471 | " (5, 560)\t1\n",
472 | " (6, 711)\t1\n",
473 | " (7, 575)\t1\n",
474 | " (8, 67)\t1\n",
475 | " (9, 380)\t1\n",
476 | " (10, 50)\t1\n",
477 | " (11, 440)\t1\n",
478 | " (12, 618)\t1\n",
479 | " (13, 116)\t1\n",
480 | " (14, 565)\t1\n",
481 | " (15, 44)\t1\n",
482 | " (16, 563)\t1\n",
483 | " (17, 313)\t1\n",
484 | " (18, 524)\t1\n",
485 | " (18, 686)\t1\n",
486 | " (19, 643)\t1\n",
487 | " (20, 236)\t1\n",
488 | " (21, 182)\t1\n",
489 | " (22, 488)\t1\n",
490 | " (23, 516)\t1\n",
491 | " :\t:\n",
492 | " (723, 632)\t1\n",
493 | " (724, 109)\t1\n",
494 | " (725, 324)\t1\n",
495 | " (726, 712)\t1\n",
496 | " (727, 716)\t1\n",
497 | " (728, 336)\t1\n",
498 | " (729, 339)\t1\n",
499 | " (730, 569)\t1\n",
500 | " (731, 595)\t1\n",
501 | " (732, 387)\t1\n",
502 | " (733, 633)\t1\n",
503 | " (734, 554)\t1\n",
504 | " (735, 444)\t1\n",
505 | " (736, 566)\t1\n",
506 | " (737, 36)\t1\n",
507 | " (738, 483)\t1\n",
508 | " (739, 205)\t1\n",
509 | " (740, 350)\t1\n",
510 | " (741, 376)\t1\n",
511 | " (742, 452)\t1\n",
512 | " (743, 268)\t1\n",
513 | " (744, 697)\t1\n",
514 | " (745, 730)\t1\n",
515 | " (746, 196)\t1\n",
516 | " (747, 325)\t1\n"
517 | ]
518 | }
519 | ],
520 | "source": [
521 | "print(input_data)"
522 | ]
523 | },
524 | {
525 | "cell_type": "markdown",
526 | "id": "7e59655a",
527 | "metadata": {},
528 | "source": [
529 | "Now we can feed the matrix to a machine learning model. In this case we'll use the Logistic Regression model since we are trying to classify it into positive or negative."
530 | ]
531 | },
532 | {
533 | "cell_type": "code",
534 | "execution_count": 27,
535 | "id": "be71817c",
536 | "metadata": {},
537 | "outputs": [
538 | {
539 | "data": {
540 | "text/plain": [
541 | "LogisticRegression()"
542 | ]
543 | },
544 | "execution_count": 27,
545 | "metadata": {},
546 | "output_type": "execute_result"
547 | }
548 | ],
549 | "source": [
550 | "from sklearn.linear_model import LogisticRegression\n",
551 | "\n",
552 | "model = LogisticRegression()\n",
553 | "model.fit(input_data, label)"
554 | ]
555 | },
556 | {
557 | "cell_type": "code",
558 | "execution_count": 35,
559 | "id": "5c6133a6",
560 | "metadata": {},
561 | "outputs": [
562 | {
563 | "name": "stdout",
564 | "output_type": "stream",
565 | "text": [
566 | "This is a Negative Sentiment Sentence.\n"
567 | ]
568 | }
569 | ],
570 | "source": [
571 | "def prediction_input(sentence):\n",
572 | " processed = pre_process(sentence)\n",
573 | " input_data = vectorizer.transform([processed])\n",
574 | " prediction = model.predict(input_data)\n",
575 | " \n",
576 | " if (prediction[0] == 1):\n",
577 | " print('This is a Positive Sentiment Sentence.')\n",
578 | " elif (prediction[0] == 0):\n",
579 | " print('This is a Negative Sentiment Sentence.')\n",
580 | "\n",
581 | "prediction_input('That movie was bad')"
582 | ]
583 | },
584 | {
585 | "cell_type": "code",
586 | "execution_count": 30,
587 | "id": "0eeae0a5",
588 | "metadata": {},
589 | "outputs": [
590 | {
591 | "name": "stdout",
592 | "output_type": "stream",
593 | "text": [
594 | "This is a Positive Sentiment Sentence.\n"
595 | ]
596 | }
597 | ],
598 | "source": [
599 | "prediction_input(review_input)"
600 | ]
601 | }
602 | ],
603 | "metadata": {
604 | "kernelspec": {
605 | "display_name": "Python 3 (ipykernel)",
606 | "language": "python",
607 | "name": "python3"
608 | },
609 | "language_info": {
610 | "codemirror_mode": {
611 | "name": "ipython",
612 | "version": 3
613 | },
614 | "file_extension": ".py",
615 | "mimetype": "text/x-python",
616 | "name": "python",
617 | "nbconvert_exporter": "python",
618 | "pygments_lexer": "ipython3",
619 | "version": "3.9.7"
620 | }
621 | },
622 | "nbformat": 4,
623 | "nbformat_minor": 5
624 | }
625 |
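
Both sentiment notebooks hinge on how pre_process glues tokens back together: joining with spaces keeps each word a separate token for CountVectorizer, whereas joining with an empty string collapses a whole review into one unusable token (which is what the mashed-together strings in the earlier outputs show). A small self-contained sketch of the corrected cleaning step:

import re
import string
import nltk
from nltk.tokenize import word_tokenize

nltk.download('stopwords', quiet=True)
nltk.download('punkt', quiet=True)
stop_words = set(nltk.corpus.stopwords.words('english'))

def pre_process(txt):
    txt = txt.lower()
    txt = re.sub(r'\d+', '', txt)  # drop digits
    txt = txt.translate(str.maketrans('', '', string.punctuation))  # drop punctuation
    tokens = word_tokenize(txt)
    return ' '.join(w for w in tokens if w not in stop_words)

print(pre_process('A very, very, very slow-moving, aimless movie.'))
# -> 'slowmoving aimless movie' (the hyphen is removed by the punctuation step)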
--------------------------------------------------------------------------------
/IMDB Movie Reviews Sentiment Analysis with NLP/imdb_sentiment_analyser_script.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/IMDB Movie Reviews Sentiment Analysis with NLP/imdb_sentiment_analyser_script.py
--------------------------------------------------------------------------------
/IMDB Movie Reviews Sentiment Analysis with NLP/imdb_sentiment_analyzer.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/IMDB Movie Reviews Sentiment Analysis with NLP/imdb_sentiment_analyzer.pkl
--------------------------------------------------------------------------------
/Image Recognition with Keras/class.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Image Recognition with Keras/class.png
--------------------------------------------------------------------------------
/Image Recognition with Keras/number.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Image Recognition with Keras/number.png
--------------------------------------------------------------------------------
/Iris Flower Classification/Predicted Classes Using OVR and OVO Approach.csv:
--------------------------------------------------------------------------------
1 | ,One Vs Rest Prediction,One Vs One Prediction,Species
2 | 0,Iris-setosa,Iris-setosa,Iris-setosa
3 | 1,Iris-virginica,Iris-virginica,Iris-virginica
4 | 2,Iris-versicolor,Iris-versicolor,Iris-versicolor
5 | 3,Iris-versicolor,Iris-versicolor,Iris-versicolor
6 | 4,Iris-setosa,Iris-setosa,Iris-setosa
7 | 5,Iris-virginica,Iris-versicolor,Iris-versicolor
8 | 6,Iris-setosa,Iris-setosa,Iris-setosa
9 | 7,Iris-setosa,Iris-setosa,Iris-setosa
10 | 8,Iris-virginica,Iris-virginica,Iris-virginica
11 | 9,Iris-versicolor,Iris-versicolor,Iris-versicolor
12 | 10,Iris-virginica,Iris-virginica,Iris-virginica
13 | 11,Iris-virginica,Iris-virginica,Iris-virginica
14 | 12,Iris-virginica,Iris-virginica,Iris-virginica
15 | 13,Iris-versicolor,Iris-versicolor,Iris-versicolor
16 | 14,Iris-setosa,Iris-setosa,Iris-setosa
17 | 15,Iris-setosa,Iris-setosa,Iris-setosa
18 | 16,Iris-setosa,Iris-setosa,Iris-setosa
19 | 17,Iris-versicolor,Iris-versicolor,Iris-versicolor
20 | 18,Iris-versicolor,Iris-versicolor,Iris-versicolor
21 | 19,Iris-virginica,Iris-virginica,Iris-virginica
22 | 20,Iris-setosa,Iris-setosa,Iris-setosa
23 | 21,Iris-virginica,Iris-virginica,Iris-virginica
24 | 22,Iris-versicolor,Iris-versicolor,Iris-versicolor
25 | 23,Iris-virginica,Iris-virginica,Iris-virginica
26 | 24,Iris-virginica,Iris-virginica,Iris-virginica
27 | 25,Iris-virginica,Iris-virginica,Iris-versicolor
28 | 26,Iris-versicolor,Iris-versicolor,Iris-versicolor
29 | 27,Iris-setosa,Iris-setosa,Iris-setosa
30 | 28,Iris-virginica,Iris-virginica,Iris-virginica
31 | 29,Iris-setosa,Iris-setosa,Iris-setosa
32 |
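
This CSV compares one-vs-rest and one-vs-one predictions against the true species on a 30-row test set. As a hypothetical sketch (the base estimator and split settings below are assumptions, not necessarily the notebook's exact choices), such a file can be produced with scikit-learn's multiclass wrappers:

import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.multiclass import OneVsOneClassifier, OneVsRestClassifier

df = pd.read_csv('iris.csv')
X, y = df.drop('Species', axis=1), df['Species']
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=1)

# Train one classifier per class (OVR) and one per class pair (OVO).
ovr = OneVsRestClassifier(LogisticRegression(max_iter=1000)).fit(X_train, y_train)
ovo = OneVsOneClassifier(LogisticRegression(max_iter=1000)).fit(X_train, y_train)

out = pd.DataFrame({
    'One Vs Rest Prediction': ovr.predict(X_test),
    'One Vs One Prediction': ovo.predict(X_test),
    'Species': y_test.to_numpy(),
})
out.to_csv('Predicted Classes Using OVR and OVO Approach.csv')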
--------------------------------------------------------------------------------
/Iris Flower Classification/heatmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower Classification/heatmap.png
--------------------------------------------------------------------------------
/Iris Flower Classification/heatmap2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower Classification/heatmap2.png
--------------------------------------------------------------------------------
/Iris Flower Classification/hh.py:
--------------------------------------------------------------------------------
1 | def fizzBuzz(n):
2 |     # classic FizzBuzz: count from 1 to n inclusive
3 |     for i in range(1, n + 1):
4 |         if i % 3 == 0 and i % 5 == 0:
5 |             print("FizzBuzz")
6 |         elif i % 3 == 0:
7 |             print("Fizz")
8 |         elif i % 5 == 0:
9 |             print("Buzz")
10 |         else:
11 |             print(i)
12 | x = int(input())
13 |
14 | fizzBuzz(x)
15 |
--------------------------------------------------------------------------------
/Iris Flower Classification/iris.csv:
--------------------------------------------------------------------------------
1 | SepalLength,SepalWidth,PetalLength,PetalWidth,Species
2 | 5.1,3.5,1.4,0.2,Iris-setosa
3 | 4.9,3,1.4,0.2,Iris-setosa
4 | 4.7,3.2,1.3,0.2,Iris-setosa
5 | 4.6,3.1,1.5,0.2,Iris-setosa
6 | 5,3.6,1.4,0.2,Iris-setosa
7 | 5.4,3.9,1.7,0.4,Iris-setosa
8 | 4.6,3.4,1.4,0.3,Iris-setosa
9 | 5,3.4,1.5,0.2,Iris-setosa
10 | 4.4,2.9,1.4,0.2,Iris-setosa
11 | 4.9,3.1,1.5,0.1,Iris-setosa
12 | 5.4,3.7,1.5,0.2,Iris-setosa
13 | 4.8,3.4,1.6,0.2,Iris-setosa
14 | 4.8,3,1.4,0.1,Iris-setosa
15 | 4.3,3,1.1,0.1,Iris-setosa
16 | 5.8,4,1.2,0.2,Iris-setosa
17 | 5.7,4.4,1.5,0.4,Iris-setosa
18 | 5.4,3.9,1.3,0.4,Iris-setosa
19 | 5.1,3.5,1.4,0.3,Iris-setosa
20 | 5.7,3.8,1.7,0.3,Iris-setosa
21 | 5.1,3.8,1.5,0.3,Iris-setosa
22 | 5.4,3.4,1.7,0.2,Iris-setosa
23 | 5.1,3.7,1.5,0.4,Iris-setosa
24 | 4.6,3.6,1,0.2,Iris-setosa
25 | 5.1,3.3,1.7,0.5,Iris-setosa
26 | 4.8,3.4,1.9,0.2,Iris-setosa
27 | 5,3,1.6,0.2,Iris-setosa
28 | 5,3.4,1.6,0.4,Iris-setosa
29 | 5.2,3.5,1.5,0.2,Iris-setosa
30 | 5.2,3.4,1.4,0.2,Iris-setosa
31 | 4.7,3.2,1.6,0.2,Iris-setosa
32 | 4.8,3.1,1.6,0.2,Iris-setosa
33 | 5.4,3.4,1.5,0.4,Iris-setosa
34 | 5.2,4.1,1.5,0.1,Iris-setosa
35 | 5.5,4.2,1.4,0.2,Iris-setosa
36 | 4.9,3.1,1.5,0.1,Iris-setosa
37 | 5,3.2,1.2,0.2,Iris-setosa
38 | 5.5,3.5,1.3,0.2,Iris-setosa
39 | 4.9,3.1,1.5,0.1,Iris-setosa
40 | 4.4,3,1.3,0.2,Iris-setosa
41 | 5.1,3.4,1.5,0.2,Iris-setosa
42 | 5,3.5,1.3,0.3,Iris-setosa
43 | 4.5,2.3,1.3,0.3,Iris-setosa
44 | 4.4,3.2,1.3,0.2,Iris-setosa
45 | 5,3.5,1.6,0.6,Iris-setosa
46 | 5.1,3.8,1.9,0.4,Iris-setosa
47 | 4.8,3,1.4,0.3,Iris-setosa
48 | 5.1,3.8,1.6,0.2,Iris-setosa
49 | 4.6,3.2,1.4,0.2,Iris-setosa
50 | 5.3,3.7,1.5,0.2,Iris-setosa
51 | 5,3.3,1.4,0.2,Iris-setosa
52 | 7,3.2,4.7,1.4,Iris-versicolor
53 | 6.4,3.2,4.5,1.5,Iris-versicolor
54 | 6.9,3.1,4.9,1.5,Iris-versicolor
55 | 5.5,2.3,4,1.3,Iris-versicolor
56 | 6.5,2.8,4.6,1.5,Iris-versicolor
57 | 5.7,2.8,4.5,1.3,Iris-versicolor
58 | 6.3,3.3,4.7,1.6,Iris-versicolor
59 | 4.9,2.4,3.3,1,Iris-versicolor
60 | 6.6,2.9,4.6,1.3,Iris-versicolor
61 | 5.2,2.7,3.9,1.4,Iris-versicolor
62 | 5,2,3.5,1,Iris-versicolor
63 | 5.9,3,4.2,1.5,Iris-versicolor
64 | 6,2.2,4,1,Iris-versicolor
65 | 6.1,2.9,4.7,1.4,Iris-versicolor
66 | 5.6,2.9,3.6,1.3,Iris-versicolor
67 | 6.7,3.1,4.4,1.4,Iris-versicolor
68 | 5.6,3,4.5,1.5,Iris-versicolor
69 | 5.8,2.7,4.1,1,Iris-versicolor
70 | 6.2,2.2,4.5,1.5,Iris-versicolor
71 | 5.6,2.5,3.9,1.1,Iris-versicolor
72 | 5.9,3.2,4.8,1.8,Iris-versicolor
73 | 6.1,2.8,4,1.3,Iris-versicolor
74 | 6.3,2.5,4.9,1.5,Iris-versicolor
75 | 6.1,2.8,4.7,1.2,Iris-versicolor
76 | 6.4,2.9,4.3,1.3,Iris-versicolor
77 | 6.6,3,4.4,1.4,Iris-versicolor
78 | 6.8,2.8,4.8,1.4,Iris-versicolor
79 | 6.7,3,5,1.7,Iris-versicolor
80 | 6,2.9,4.5,1.5,Iris-versicolor
81 | 5.7,2.6,3.5,1,Iris-versicolor
82 | 5.5,2.4,3.8,1.1,Iris-versicolor
83 | 5.5,2.4,3.7,1,Iris-versicolor
84 | 5.8,2.7,3.9,1.2,Iris-versicolor
85 | 6,2.7,5.1,1.6,Iris-versicolor
86 | 5.4,3,4.5,1.5,Iris-versicolor
87 | 6,3.4,4.5,1.6,Iris-versicolor
88 | 6.7,3.1,4.7,1.5,Iris-versicolor
89 | 6.3,2.3,4.4,1.3,Iris-versicolor
90 | 5.6,3,4.1,1.3,Iris-versicolor
91 | 5.5,2.5,4,1.3,Iris-versicolor
92 | 5.5,2.6,4.4,1.2,Iris-versicolor
93 | 6.1,3,4.6,1.4,Iris-versicolor
94 | 5.8,2.6,4,1.2,Iris-versicolor
95 | 5,2.3,3.3,1,Iris-versicolor
96 | 5.6,2.7,4.2,1.3,Iris-versicolor
97 | 5.7,3,4.2,1.2,Iris-versicolor
98 | 5.7,2.9,4.2,1.3,Iris-versicolor
99 | 6.2,2.9,4.3,1.3,Iris-versicolor
100 | 5.1,2.5,3,1.1,Iris-versicolor
101 | 5.7,2.8,4.1,1.3,Iris-versicolor
102 | 6.3,3.3,6,2.5,Iris-virginica
103 | 5.8,2.7,5.1,1.9,Iris-virginica
104 | 7.1,3,5.9,2.1,Iris-virginica
105 | 6.3,2.9,5.6,1.8,Iris-virginica
106 | 6.5,3,5.8,2.2,Iris-virginica
107 | 7.6,3,6.6,2.1,Iris-virginica
108 | 4.9,2.5,4.5,1.7,Iris-virginica
109 | 7.3,2.9,6.3,1.8,Iris-virginica
110 | 6.7,2.5,5.8,1.8,Iris-virginica
111 | 7.2,3.6,6.1,2.5,Iris-virginica
112 | 6.5,3.2,5.1,2,Iris-virginica
113 | 6.4,2.7,5.3,1.9,Iris-virginica
114 | 6.8,3,5.5,2.1,Iris-virginica
115 | 5.7,2.5,5,2,Iris-virginica
116 | 5.8,2.8,5.1,2.4,Iris-virginica
117 | 6.4,3.2,5.3,2.3,Iris-virginica
118 | 6.5,3,5.5,1.8,Iris-virginica
119 | 7.7,3.8,6.7,2.2,Iris-virginica
120 | 7.7,2.6,6.9,2.3,Iris-virginica
121 | 6,2.2,5,1.5,Iris-virginica
122 | 6.9,3.2,5.7,2.3,Iris-virginica
123 | 5.6,2.8,4.9,2,Iris-virginica
124 | 7.7,2.8,6.7,2,Iris-virginica
125 | 6.3,2.7,4.9,1.8,Iris-virginica
126 | 6.7,3.3,5.7,2.1,Iris-virginica
127 | 7.2,3.2,6,1.8,Iris-virginica
128 | 6.2,2.8,4.8,1.8,Iris-virginica
129 | 6.1,3,4.9,1.8,Iris-virginica
130 | 6.4,2.8,5.6,2.1,Iris-virginica
131 | 7.2,3,5.8,1.6,Iris-virginica
132 | 7.4,2.8,6.1,1.9,Iris-virginica
133 | 7.9,3.8,6.4,2,Iris-virginica
134 | 6.4,2.8,5.6,2.2,Iris-virginica
135 | 6.3,2.8,5.1,1.5,Iris-virginica
136 | 6.1,2.6,5.6,1.4,Iris-virginica
137 | 7.7,3,6.1,2.3,Iris-virginica
138 | 6.3,3.4,5.6,2.4,Iris-virginica
139 | 6.4,3.1,5.5,1.8,Iris-virginica
140 | 6,3,4.8,1.8,Iris-virginica
141 | 6.9,3.1,5.4,2.1,Iris-virginica
142 | 6.7,3.1,5.6,2.4,Iris-virginica
143 | 6.9,3.1,5.1,2.3,Iris-virginica
144 | 5.8,2.7,5.1,1.9,Iris-virginica
145 | 6.8,3.2,5.9,2.3,Iris-virginica
146 | 6.7,3.3,5.7,2.5,Iris-virginica
147 | 6.7,3,5.2,2.3,Iris-virginica
148 | 6.3,2.5,5,1.9,Iris-virginica
149 | 6.5,3,5.2,2,Iris-virginica
150 | 6.2,3.4,5.4,2.3,Iris-virginica
151 | 5.9,3,5.1,1.8,Iris-virginica
152 |
--------------------------------------------------------------------------------
/Iris Flower Classification/iris_prediction_script.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import joblib
3 | model = joblib.load('one_vs_one_classifier.pkl')
4 |
5 | print("This is a Python Script that Predicts Iris Flower Species Given the Following Features\n")
6 | a = float(input("Enter the flower's Sepal Length: "))
7 | b = float(input("Enter the flower's Sepal Width: "))
8 | c = float(input("Enter the flower's Petal Length: "))
9 | d = float(input("Enter the flower's Petal Width: "))
10 |
11 |
12 | features = np.array([[a, b, c, d]])  # order must match the training columns
13 |
14 | pred1 = model.predict(features)
15 | print(f"The model Species is {pred1}")
--------------------------------------------------------------------------------
/Iris Flower Classification/one_vs_one_classifier.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower Classification/one_vs_one_classifier.pkl
--------------------------------------------------------------------------------
/Iris Flower KMeans Classifier/iris2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower KMeans Classifier/iris2.png
--------------------------------------------------------------------------------
/Iris Flower KMeans Classifier/iris_classifier_kmeans.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower KMeans Classifier/iris_classifier_kmeans.pkl
--------------------------------------------------------------------------------
/Iris Flower KMeans Classifier/iris_kmeans.csv:
--------------------------------------------------------------------------------
1 | PetalLength,PetalWidth,Species
2 | 1.4,0.2,Iris-setosa
3 | 1.4,0.2,Iris-setosa
4 | 1.3,0.2,Iris-setosa
5 | 1.5,0.2,Iris-setosa
6 | 1.4,0.2,Iris-setosa
7 | 1.7,0.4,Iris-setosa
8 | 1.4,0.3,Iris-setosa
9 | 1.5,0.2,Iris-setosa
10 | 1.4,0.2,Iris-setosa
11 | 1.5,0.1,Iris-setosa
12 | 1.5,0.2,Iris-setosa
13 | 1.6,0.2,Iris-setosa
14 | 1.4,0.1,Iris-setosa
15 | 1.1,0.1,Iris-setosa
16 | 1.2,0.2,Iris-setosa
17 | 1.5,0.4,Iris-setosa
18 | 1.3,0.4,Iris-setosa
19 | 1.4,0.3,Iris-setosa
20 | 1.7,0.3,Iris-setosa
21 | 1.5,0.3,Iris-setosa
22 | 1.7,0.2,Iris-setosa
23 | 1.5,0.4,Iris-setosa
24 | 1,0.2,Iris-setosa
25 | 1.7,0.5,Iris-setosa
26 | 1.9,0.2,Iris-setosa
27 | 1.6,0.2,Iris-setosa
28 | 1.6,0.4,Iris-setosa
29 | 1.5,0.2,Iris-setosa
30 | 1.4,0.2,Iris-setosa
31 | 1.6,0.2,Iris-setosa
32 | 4.7,1.4,Iris-versicolor
33 | 4.5,1.5,Iris-versicolor
34 | 4.9,1.5,Iris-versicolor
35 | 4,1.3,Iris-versicolor
36 | 4.6,1.5,Iris-versicolor
37 | 4.5,1.3,Iris-versicolor
38 | 4.7,1.6,Iris-versicolor
39 | 3.3,1,Iris-versicolor
40 | 4.6,1.3,Iris-versicolor
41 | 3.9,1.4,Iris-versicolor
42 | 3.5,1,Iris-versicolor
43 | 4.2,1.5,Iris-versicolor
44 | 4,1,Iris-versicolor
45 | 4.7,1.4,Iris-versicolor
46 | 3.6,1.3,Iris-versicolor
47 | 4.4,1.4,Iris-versicolor
48 | 4.5,1.5,Iris-versicolor
49 | 4.1,1,Iris-versicolor
50 | 4.5,1.5,Iris-versicolor
51 | 3.9,1.1,Iris-versicolor
52 | 4.8,1.8,Iris-versicolor
53 | 4,1.3,Iris-versicolor
54 | 4.9,1.5,Iris-versicolor
55 | 4.7,1.2,Iris-versicolor
56 | 4.3,1.3,Iris-versicolor
57 | 4.4,1.4,Iris-versicolor
58 | 4.8,1.4,Iris-versicolor
59 | 5,1.7,Iris-versicolor
60 | 4.5,1.5,Iris-versicolor
61 | 3.5,1,Iris-versicolor
62 | 6,2.5,Iris-virginica
63 | 5.1,1.9,Iris-virginica
64 | 5.9,2.1,Iris-virginica
65 | 5.6,1.8,Iris-virginica
66 | 5.8,2.2,Iris-virginica
67 | 6.6,2.1,Iris-virginica
68 | 4.5,1.7,Iris-virginica
69 | 6.3,1.8,Iris-virginica
70 | 5.8,1.8,Iris-virginica
71 | 6.1,2.5,Iris-virginica
72 | 5.1,2,Iris-virginica
73 | 5.3,1.9,Iris-virginica
74 | 5.5,2.1,Iris-virginica
75 | 5,2,Iris-virginica
76 | 5.1,2.4,Iris-virginica
77 | 5.3,2.3,Iris-virginica
78 | 5.5,1.8,Iris-virginica
79 | 6.7,2.2,Iris-virginica
80 | 6.9,2.3,Iris-virginica
81 | 5,1.5,Iris-virginica
82 | 5.7,2.3,Iris-virginica
83 | 4.9,2,Iris-virginica
84 | 6.7,2,Iris-virginica
85 | 4.9,1.8,Iris-virginica
86 | 5.7,2.1,Iris-virginica
87 | 6,1.8,Iris-virginica
88 | 4.8,1.8,Iris-virginica
89 | 4.9,1.8,Iris-virginica
90 | 5.6,2.1,Iris-virginica
91 | 5.8,1.6,Iris-virginica
92 |
--------------------------------------------------------------------------------
/Iris Flower KMeans Classifier/k_elbow_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower KMeans Classifier/k_elbow_plot.png
--------------------------------------------------------------------------------
/Iris Flower KMeans Classifier/petal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower KMeans Classifier/petal.png
--------------------------------------------------------------------------------
/Iris Flower KMeans Classifier/petal2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris Flower KMeans Classifier/petal2.png
--------------------------------------------------------------------------------
/Iris KMeans Classifier/iris2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris KMeans Classifier/iris2.png
--------------------------------------------------------------------------------
/Iris KMeans Classifier/iris_classifier_kmeans.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris KMeans Classifier/iris_classifier_kmeans.pkl
--------------------------------------------------------------------------------
/Iris KMeans Classifier/iris_kmeans.csv:
--------------------------------------------------------------------------------
1 | PetalLength,PetalWidth,Species
2 | 1.4,0.2,Iris-setosa
3 | 1.4,0.2,Iris-setosa
4 | 1.3,0.2,Iris-setosa
5 | 1.5,0.2,Iris-setosa
6 | 1.4,0.2,Iris-setosa
7 | 1.7,0.4,Iris-setosa
8 | 1.4,0.3,Iris-setosa
9 | 1.5,0.2,Iris-setosa
10 | 1.4,0.2,Iris-setosa
11 | 1.5,0.1,Iris-setosa
12 | 1.5,0.2,Iris-setosa
13 | 1.6,0.2,Iris-setosa
14 | 1.4,0.1,Iris-setosa
15 | 1.1,0.1,Iris-setosa
16 | 1.2,0.2,Iris-setosa
17 | 1.5,0.4,Iris-setosa
18 | 1.3,0.4,Iris-setosa
19 | 1.4,0.3,Iris-setosa
20 | 1.7,0.3,Iris-setosa
21 | 1.5,0.3,Iris-setosa
22 | 1.7,0.2,Iris-setosa
23 | 1.5,0.4,Iris-setosa
24 | 1,0.2,Iris-setosa
25 | 1.7,0.5,Iris-setosa
26 | 1.9,0.2,Iris-setosa
27 | 1.6,0.2,Iris-setosa
28 | 1.6,0.4,Iris-setosa
29 | 1.5,0.2,Iris-setosa
30 | 1.4,0.2,Iris-setosa
31 | 1.6,0.2,Iris-setosa
32 | 4.7,1.4,Iris-versicolor
33 | 4.5,1.5,Iris-versicolor
34 | 4.9,1.5,Iris-versicolor
35 | 4,1.3,Iris-versicolor
36 | 4.6,1.5,Iris-versicolor
37 | 4.5,1.3,Iris-versicolor
38 | 4.7,1.6,Iris-versicolor
39 | 3.3,1,Iris-versicolor
40 | 4.6,1.3,Iris-versicolor
41 | 3.9,1.4,Iris-versicolor
42 | 3.5,1,Iris-versicolor
43 | 4.2,1.5,Iris-versicolor
44 | 4,1,Iris-versicolor
45 | 4.7,1.4,Iris-versicolor
46 | 3.6,1.3,Iris-versicolor
47 | 4.4,1.4,Iris-versicolor
48 | 4.5,1.5,Iris-versicolor
49 | 4.1,1,Iris-versicolor
50 | 4.5,1.5,Iris-versicolor
51 | 3.9,1.1,Iris-versicolor
52 | 4.8,1.8,Iris-versicolor
53 | 4,1.3,Iris-versicolor
54 | 4.9,1.5,Iris-versicolor
55 | 4.7,1.2,Iris-versicolor
56 | 4.3,1.3,Iris-versicolor
57 | 4.4,1.4,Iris-versicolor
58 | 4.8,1.4,Iris-versicolor
59 | 5,1.7,Iris-versicolor
60 | 4.5,1.5,Iris-versicolor
61 | 3.5,1,Iris-versicolor
62 | 6,2.5,Iris-virginica
63 | 5.1,1.9,Iris-virginica
64 | 5.9,2.1,Iris-virginica
65 | 5.6,1.8,Iris-virginica
66 | 5.8,2.2,Iris-virginica
67 | 6.6,2.1,Iris-virginica
68 | 4.5,1.7,Iris-virginica
69 | 6.3,1.8,Iris-virginica
70 | 5.8,1.8,Iris-virginica
71 | 6.1,2.5,Iris-virginica
72 | 5.1,2,Iris-virginica
73 | 5.3,1.9,Iris-virginica
74 | 5.5,2.1,Iris-virginica
75 | 5,2,Iris-virginica
76 | 5.1,2.4,Iris-virginica
77 | 5.3,2.3,Iris-virginica
78 | 5.5,1.8,Iris-virginica
79 | 6.7,2.2,Iris-virginica
80 | 6.9,2.3,Iris-virginica
81 | 5,1.5,Iris-virginica
82 | 5.7,2.3,Iris-virginica
83 | 4.9,2,Iris-virginica
84 | 6.7,2,Iris-virginica
85 | 4.9,1.8,Iris-virginica
86 | 5.7,2.1,Iris-virginica
87 | 6,1.8,Iris-virginica
88 | 4.8,1.8,Iris-virginica
89 | 4.9,1.8,Iris-virginica
90 | 5.6,2.1,Iris-virginica
91 | 5.8,1.6,Iris-virginica
92 |
--------------------------------------------------------------------------------
/Iris KMeans Classifier/k_elbow_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris KMeans Classifier/k_elbow_plot.png
--------------------------------------------------------------------------------
/Iris KMeans Classifier/petal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris KMeans Classifier/petal.png
--------------------------------------------------------------------------------
/Iris KMeans Classifier/petal2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Iris KMeans Classifier/petal2.png
--------------------------------------------------------------------------------
/KMeans Clustering/income.csv:
--------------------------------------------------------------------------------
1 | Name,Age,Income
2 | Rob,27,70000
3 | Michael,29,90000
4 | Mohan,29,61000
5 | Ismail,28,60000
6 | Kory,42,150000
7 | Gautam,39,155000
8 | David,41,160000
9 | Andrea,38,162000
10 | Brad,36,156000
11 | Angelina,35,130000
12 | Donald,37,137000
13 | Tom,26,45000
14 | Arnold,27,48000
15 | Jared,28,51000
16 | Stark,29,49500
17 | Ranbir,32,53000
18 | Dipika,40,65000
19 | Priyanka,41,63000
20 | Nick,43,64000
21 | Alia,39,80000
22 | Sid,41,82000
23 | Abdul,39,58000
24 |
--------------------------------------------------------------------------------
/KMeans Clustering/kmeans_classifier.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/KMeans Clustering/kmeans_classifier.pkl
--------------------------------------------------------------------------------
/KMeans Clustering/scatter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/KMeans Clustering/scatter.png
--------------------------------------------------------------------------------
/KMeans Clustering/scatter2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/KMeans Clustering/scatter2.png
--------------------------------------------------------------------------------
/KMeans Clustering/scatter3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/KMeans Clustering/scatter3.png
--------------------------------------------------------------------------------
/KMeans Clustering/scatter5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/KMeans Clustering/scatter5.png
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/Market Basket Association/005 Transactions.txt:
--------------------------------------------------------------------------------
1 | dataset = [['Milk', 'Egg', 'Bread', 'Butter'],
2 | ['Milk', 'Butter', 'Egg', 'Ketchup'],
3 | ['Bread', 'Butter', 'Ketchup'],
4 | ['Milk', 'Bread', 'Butter'],
5 | ['Cookies', 'Bread', 'Butter'],
6 | ['Milk', 'Bread', 'Butter', 'Cookies'],
7 | ['Milk', 'Cookies'],
8 | ['Milk', 'Bread', 'Butter'],
9 | ['Egg', 'Bread', 'Butter', 'Cookies'],
10 | ['Milk', 'Bread', 'Butter'],
11 | ['Milk', 'Bread', 'Cookies', 'Ketchup']]
--------------------------------------------------------------------------------
/Movie Recommendation System/ratings.csv:
--------------------------------------------------------------------------------
1 | ,Avengers,The_Incredibles,The_Lion_King,Dumbo,Frozen,Ponyo
2 | user 1,4,5,3,,2,1
3 | user 2,5,3,3,2,2,
4 | user 3,1,,,4,5,4
5 | user 4,,2,1,4,,3
6 | user 5,1,,2,3,3,4
7 |
--------------------------------------------------------------------------------
/Multi Fruit Classification/Fruit Predicted Classes Using OVR and OVO Approach.csv:
--------------------------------------------------------------------------------
1 | ,One Vs Rest Prediction,One Vs One Prediction,Class
2 | 0,SAFAVI,SAFAVI,SAFAVI
3 | 1,DOKOL,DOKOL,DOKOL
4 | 2,SAFAVI,SAFAVI,SAFAVI
5 | 3,DOKOL,DOKOL,DOKOL
6 | 4,SAFAVI,SOGAY,SAFAVI
7 | 5,DOKOL,DOKOL,DOKOL
8 | 6,ROTANA,ROTANA,ROTANA
9 | 7,ROTANA,BERHI,BERHI
10 | 8,DOKOL,DOKOL,DOKOL
11 | 9,SAFAVI,SAFAVI,SAFAVI
12 | 10,ROTANA,ROTANA,IRAQI
13 | 11,SAFAVI,SAFAVI,SAFAVI
14 | 12,DOKOL,DOKOL,DEGLET
15 | 13,ROTANA,ROTANA,ROTANA
16 | 14,SAFAVI,SAFAVI,SAFAVI
17 | 15,SAFAVI,SAFAVI,SAFAVI
18 | 16,SAFAVI,DEGLET,SOGAY
19 | 17,DEGLET,DEGLET,DEGLET
20 | 18,DOKOL,DOKOL,DOKOL
21 | 19,SAFAVI,ROTANA,ROTANA
22 | 20,ROTANA,ROTANA,IRAQI
23 | 21,SAFAVI,SOGAY,DEGLET
24 | 22,IRAQI,IRAQI,BERHI
25 | 23,ROTANA,ROTANA,ROTANA
26 | 24,ROTANA,ROTANA,ROTANA
27 | 25,ROTANA,ROTANA,ROTANA
28 | 26,DOKOL,DOKOL,DOKOL
29 | 27,DOKOL,DOKOL,DOKOL
30 | 28,SAFAVI,SAFAVI,SOGAY
31 | 29,ROTANA,SOGAY,DEGLET
32 | 30,SAFAVI,SAFAVI,SAFAVI
33 | 31,SAFAVI,SAFAVI,SAFAVI
34 | 32,DOKOL,DOKOL,SOGAY
35 | 33,ROTANA,ROTANA,ROTANA
36 | 34,SOGAY,SOGAY,DEGLET
37 | 35,DOKOL,DOKOL,DOKOL
38 | 36,ROTANA,SOGAY,SAFAVI
39 | 37,SAFAVI,SAFAVI,SAFAVI
40 | 38,DOKOL,DOKOL,DOKOL
41 | 39,DOKOL,DOKOL,DEGLET
42 | 40,DOKOL,DOKOL,DOKOL
43 | 41,DOKOL,DOKOL,DOKOL
44 | 42,ROTANA,ROTANA,ROTANA
45 | 43,ROTANA,ROTANA,ROTANA
46 | 44,SAFAVI,SAFAVI,SAFAVI
47 | 45,ROTANA,ROTANA,ROTANA
48 | 46,ROTANA,ROTANA,ROTANA
49 | 47,DOKOL,DOKOL,DOKOL
50 | 48,SAFAVI,ROTANA,ROTANA
51 | 49,DOKOL,DOKOL,DOKOL
52 | 50,DOKOL,DOKOL,DOKOL
53 | 51,DEGLET,DEGLET,SOGAY
54 | 52,SAFAVI,SAFAVI,SOGAY
55 | 53,DOKOL,DOKOL,DOKOL
56 | 54,DOKOL,DOKOL,DOKOL
57 | 55,DOKOL,DOKOL,DOKOL
58 | 56,IRAQI,IRAQI,IRAQI
59 | 57,DOKOL,DOKOL,DOKOL
60 | 58,ROTANA,ROTANA,SOGAY
61 | 59,SAFAVI,SAFAVI,SAFAVI
62 | 60,DEGLET,SOGAY,SOGAY
63 | 61,DOKOL,DOKOL,DOKOL
64 | 62,ROTANA,BERHI,BERHI
65 | 63,DOKOL,DOKOL,DOKOL
66 | 64,DEGLET,DEGLET,DEGLET
67 | 65,DOKOL,DOKOL,DOKOL
68 | 66,ROTANA,ROTANA,ROTANA
69 | 67,DOKOL,DOKOL,DOKOL
70 | 68,DOKOL,DOKOL,DOKOL
71 | 69,ROTANA,ROTANA,ROTANA
72 | 70,ROTANA,ROTANA,ROTANA
73 | 71,DOKOL,DOKOL,DOKOL
74 | 72,ROTANA,ROTANA,ROTANA
75 | 73,ROTANA,ROTANA,ROTANA
76 | 74,ROTANA,ROTANA,ROTANA
77 | 75,IRAQI,IRAQI,IRAQI
78 | 76,DOKOL,DOKOL,DOKOL
79 | 77,BERHI,BERHI,IRAQI
80 | 78,SAFAVI,SAFAVI,SAFAVI
81 | 79,DOKOL,DOKOL,DOKOL
82 | 80,DEGLET,DEGLET,SOGAY
83 | 81,DOKOL,DOKOL,DOKOL
84 | 82,ROTANA,ROTANA,ROTANA
85 | 83,DEGLET,SOGAY,SOGAY
86 | 84,ROTANA,ROTANA,ROTANA
87 | 85,BERHI,IRAQI,IRAQI
88 | 86,ROTANA,SOGAY,ROTANA
89 | 87,SAFAVI,SAFAVI,SAFAVI
90 | 88,SAFAVI,SAFAVI,SAFAVI
91 | 89,ROTANA,ROTANA,ROTANA
92 | 90,SAFAVI,ROTANA,SAFAVI
93 | 91,ROTANA,ROTANA,IRAQI
94 | 92,SAFAVI,SAFAVI,SOGAY
95 | 93,IRAQI,IRAQI,BERHI
96 | 94,SAFAVI,SAFAVI,SAFAVI
97 | 95,BERHI,BERHI,IRAQI
98 | 96,IRAQI,IRAQI,BERHI
99 | 97,ROTANA,ROTANA,IRAQI
100 | 98,DOKOL,DOKOL,DOKOL
101 | 99,SAFAVI,ROTANA,ROTANA
102 | 100,DOKOL,DOKOL,DOKOL
103 | 101,DOKOL,DOKOL,DOKOL
104 | 102,ROTANA,BERHI,BERHI
105 | 103,ROTANA,ROTANA,ROTANA
106 | 104,SAFAVI,SAFAVI,SAFAVI
107 | 105,SAFAVI,DEGLET,DEGLET
108 | 106,SAFAVI,SAFAVI,SAFAVI
109 | 107,IRAQI,IRAQI,IRAQI
110 | 108,ROTANA,ROTANA,ROTANA
111 | 109,SAFAVI,SAFAVI,SAFAVI
112 | 110,DEGLET,SOGAY,SOGAY
113 | 111,SOGAY,DEGLET,SOGAY
114 | 112,DEGLET,DEGLET,DEGLET
115 | 113,ROTANA,SOGAY,ROTANA
116 | 114,SAFAVI,SAFAVI,SAFAVI
117 | 115,SAFAVI,SAFAVI,SAFAVI
118 | 116,SAFAVI,SAFAVI,SAFAVI
119 | 117,DOKOL,DOKOL,DOKOL
120 | 118,SAFAVI,SAFAVI,SOGAY
121 | 119,SAFAVI,SAFAVI,SAFAVI
122 | 120,BERHI,BERHI,BERHI
123 | 121,DOKOL,DOKOL,DOKOL
124 | 122,SAFAVI,SAFAVI,SAFAVI
125 | 123,DOKOL,DOKOL,DOKOL
126 | 124,DOKOL,DOKOL,DOKOL
127 | 125,SAFAVI,SAFAVI,SAFAVI
128 | 126,ROTANA,SOGAY,SOGAY
129 | 127,BERHI,BERHI,BERHI
130 | 128,SAFAVI,DEGLET,DOKOL
131 | 129,IRAQI,IRAQI,ROTANA
132 | 130,DOKOL,DOKOL,DOKOL
133 | 131,DOKOL,DOKOL,DOKOL
134 | 132,ROTANA,ROTANA,SAFAVI
135 | 133,SAFAVI,SAFAVI,SAFAVI
136 | 134,DEGLET,DEGLET,DEGLET
137 | 135,BERHI,BERHI,BERHI
138 | 136,DEGLET,DEGLET,DEGLET
139 | 137,SAFAVI,SAFAVI,DEGLET
140 | 138,DOKOL,DOKOL,DOKOL
141 | 139,SOGAY,SOGAY,SOGAY
142 | 140,SAFAVI,SAFAVI,SAFAVI
143 | 141,SOGAY,DEGLET,SOGAY
144 | 142,BERHI,BERHI,BERHI
145 | 143,SAFAVI,SAFAVI,SAFAVI
146 | 144,BERHI,BERHI,BERHI
147 | 145,SAFAVI,SAFAVI,SAFAVI
148 | 146,ROTANA,ROTANA,ROTANA
149 | 147,DOKOL,DOKOL,DOKOL
150 | 148,DEGLET,DEGLET,DEGLET
151 | 149,ROTANA,ROTANA,ROTANA
152 | 150,SAFAVI,SAFAVI,SAFAVI
153 | 151,IRAQI,IRAQI,IRAQI
154 | 152,SAFAVI,SAFAVI,DEGLET
155 | 153,DOKOL,DEGLET,DEGLET
156 | 154,BERHI,IRAQI,IRAQI
157 | 155,SAFAVI,SAFAVI,SAFAVI
158 | 156,SOGAY,SOGAY,DEGLET
159 | 157,ROTANA,ROTANA,ROTANA
160 | 158,IRAQI,BERHI,IRAQI
161 | 159,SAFAVI,SAFAVI,SAFAVI
162 | 160,ROTANA,SOGAY,SOGAY
163 | 161,ROTANA,ROTANA,ROTANA
164 | 162,DEGLET,DEGLET,DEGLET
165 | 163,DOKOL,DOKOL,DOKOL
166 | 164,SAFAVI,SAFAVI,SAFAVI
167 | 165,ROTANA,SOGAY,ROTANA
168 | 166,BERHI,IRAQI,BERHI
169 | 167,ROTANA,SOGAY,SOGAY
170 | 168,DEGLET,DEGLET,DEGLET
171 | 169,DOKOL,DEGLET,DOKOL
172 | 170,SAFAVI,SAFAVI,SAFAVI
173 | 171,ROTANA,BERHI,IRAQI
174 | 172,ROTANA,ROTANA,ROTANA
175 | 173,SAFAVI,SAFAVI,SAFAVI
176 | 174,DEGLET,DEGLET,DEGLET
177 | 175,SAFAVI,SAFAVI,SAFAVI
178 | 176,DOKOL,DOKOL,DEGLET
179 | 177,IRAQI,IRAQI,BERHI
180 | 178,ROTANA,SOGAY,SAFAVI
181 | 179,ROTANA,ROTANA,SOGAY
182 |
--------------------------------------------------------------------------------
/Multi Fruit Classification/fruit.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Multi Fruit Classification/fruit.xlsx
--------------------------------------------------------------------------------
/Multi Fruit Classification/fruit_one_vs_one_classifier.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Multi Fruit Classification/fruit_one_vs_one_classifier.pkl
--------------------------------------------------------------------------------
/Multi Fruit Classification/heatmap3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Multi Fruit Classification/heatmap3.png
--------------------------------------------------------------------------------
/Multi Fruit Classification/heatmap4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Multi Fruit Classification/heatmap4.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Machine Learning Projects
2 | [](http://hits.dwyl.com/Nneji123/Machine-Learning-Course-Projects)
3 | [](https://www.python.org)
4 |
5 | 
6 |
7 | This is a Repository containing various projects I carried out while learning different Machine Learning Algorithms.
8 |
9 | ## Algorithms Used
10 | ### Supervised Machine Learning Algorithms
11 | Supervised learning is a type of machine learning in which machines learn from known datasets (sets of training examples) and then predict the output. A supervised learning agent needs to find the function that matches a given sample set. Supervised learning can further be classified into two categories of algorithms:
12 |
13 | 1. Classification
14 | 2. Regression
15 |
16 | - Linear Regression:
17 | Linear Regression is a supervised machine learning model that finds the best-fit line between the independent and dependent variables, i.e. it models the linear relationship between the dependent and independent variables.
18 |
19 | - Logistic Regression:
20 | Logistic regression is an example of supervised learning. It is used to calculate or predict the probability of a binary (yes/no) event occurring.
21 |
22 | - Decision Trees:
23 | This is a type of supervised machine learning (that is, you explain what the input is and what the corresponding output should be in the training data) where the data is continuously split according to a certain parameter. The tree can be explained by two entities, namely decision nodes and leaves.
24 |
25 | - Random Forest:
26 | Random forest is a Supervised Machine Learning Algorithm that is used widely in Classification and Regression problems. It builds decision trees on different samples and takes their majority vote for classification and average in case of regression.
27 |
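As a minimal sketch of how such a classifier is typically trained with scikit-learn (the toy data here is generated on the fly and stands in for any of the project datasets):

```
# Hypothetical sketch: train a random forest and score it on held-out data.
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=500, n_features=8, random_state=0)  # toy data

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)

clf = RandomForestClassifier(n_estimators=100)  # 100 trees vote on each prediction
clf.fit(X_train, y_train)
print(clf.score(X_test, y_test))                # mean accuracy on the test split
```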
28 | **Supervised Machine Learning Projects** in this repository include: home prices prediction, fish weight prediction, titanic survival prediction, iris flower prediction, salary prediction, diabetes prediction, and fruit classification.
29 |
30 | ### Unsupervised Machine Learning Algorithms
31 | Unsupervised learning is learning without supervision or training: the algorithms are trained with data which is neither labeled nor classified, and the agent needs to learn from patterns without corresponding output values.
32 |
33 | Unsupervised learning can be classified into two categories of algorithms:
34 | - Clustering
35 | - Association
36 |
37 | - Hierarchical Clustering:
38 | Hierarchical clustering is an algorithm which builds a hierarchy of clusters. It begins with every data point assigned to a cluster of its own; at each step, the two closest clusters are merged into one. The algorithm ends when there is only one cluster left.
39 |
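A minimal sketch of agglomerative hierarchical clustering with scipy, using a made-up toy dataset:

```
# Hypothetical sketch: build a cluster hierarchy bottom-up on toy 2-D points.
import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster

points = np.array([[1, 1], [1.2, 0.9], [5, 5], [5.1, 4.8], [9, 1]])

# 'ward' merges, at each step, the two clusters whose union increases variance least
Z = linkage(points, method='ward')

# Cut the hierarchy into 3 flat clusters
labels = fcluster(Z, t=3, criterion='maxclust')
print(labels)
```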
40 | - K-means Clustering:
41 | K-means is an iterative clustering algorithm that partitions the data points into k groups. Initially, the desired number of clusters is selected. A larger k means smaller groups with more granularity; a lower k means larger groups with less granularity.
42 |
43 | The output of the algorithm is a set of “labels”: each data point is assigned to one of the k groups. In k-means clustering, each group is defined by a centroid. The centroids are like the heart of the cluster; each captures the points closest to it and adds them to its cluster.
44 |
45 | Two concepts often mentioned alongside k-means are agglomerative clustering and the dendrogram, though strictly speaking both belong to hierarchical clustering rather than k-means.
49 |
50 |
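A minimal sketch of k-means with the elbow method for choosing k, on placeholder data:

```
# Hypothetical sketch: pick k via the elbow method, then fit k-means.
import numpy as np
from sklearn.cluster import KMeans

X = np.random.rand(100, 2)  # placeholder features; substitute your own data

# Inertia (sum of squared distances to the nearest centroid) for k = 1..9;
# plotting inertia against k shows an 'elbow' at a good choice of k
inertias = [KMeans(n_clusters=k, n_init=10).fit(X).inertia_ for k in range(1, 10)]

km = KMeans(n_clusters=3, n_init=10)   # suppose the elbow suggested k = 3
labels = km.fit_predict(X)             # cluster label for each point
print(km.cluster_centers_)             # one centroid per cluster
```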
51 | - K-Nearest Neighbours:
52 | K-nearest neighbours is the simplest of all machine learning classifiers. It differs from other machine learning techniques in that it doesn't produce a model; it is a simple algorithm which stores all available cases and classifies new instances based on a similarity measure.
53 |
54 | It works very well when a meaningful distance can be defined between examples. Prediction is slow when the training set is large, and the distance calculation is nontrivial.
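A minimal sketch of k-nearest-neighbour classification, using the iris data bundled with scikit-learn:

```
# Hypothetical sketch: classify by majority vote among the 5 nearest neighbours.
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)

# No model is really built; the classifier just stores the training cases
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X_train, y_train)
print(knn.score(X_test, y_test))  # fraction of test points classified correctly
```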
55 | - Principal Component Analysis:
56 | In case you want a lower-dimensional space, you select a basis for that space and keep only the most important directions of that basis. Each such direction is known as a principal component. The subset you select constitutes a new space which is small in size compared to the original space, while maintaining as much of the complexity of the data as possible.
57 |
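A minimal sketch of PCA with scikit-learn, projecting the 4-dimensional iris measurements onto 2 principal components:

```
# Hypothetical sketch: keep only the 2 leading principal components.
from sklearn.datasets import load_iris
from sklearn.decomposition import PCA

X, _ = load_iris(return_X_y=True)

pca = PCA(n_components=2)
X_reduced = pca.fit_transform(X)       # shape (150, 2)
print(pca.explained_variance_ratio_)   # variance retained by each component
```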
58 | - Association:
59 | Association rules allow you to establish associations amongst data objects inside large databases. This unsupervised technique is about discovering interesting relationships between variables. For example, people that buy a new home are likely to also buy new furniture.
60 |
61 | **Unsupervised Machine Learning Projects** in this repository include: Iris Flower KMeans Classifier, KMeans clustering, movie recommendation system, content-based recommender system, customer segmentation, and customer spend segmentation.
62 |
63 | ### Natural Language Processing
64 | Natural language processing is a subfield of computer science and artificial intelligence.
65 | NLP enables a computer system to understand and process human language such as English.
66 |
67 | NLP plays an important role in AI: without NLP, an AI agent cannot act on human instructions,
68 | but with the help of NLP, we can instruct an AI system in our own language. Today AI, and NLP
69 | with it, is all around us; we can easily ask Siri, Google or Cortana to help us in our language.
70 |
71 | Natural language processing application enables a user to communicate with the system in their own words directly.
72 |
73 | **NLP Projects** in this repository include: IMDB movie reviews sentiment analysis and spam detection.
74 |
75 | ### Deep Learning
76 | Deep learning is a machine learning technique that teaches computers to do what comes naturally to humans: learn by example. It is a key technology behind driverless cars, enabling them to recognize a stop sign or distinguish a pedestrian from a lamppost.
77 |
78 | **Deep Learning Projects** in this repository include: object recognition with Keras, image recognition with Keras, and digit recognition with Keras and TensorFlow.
79 |
80 |
81 | ## Projects
82 | ### Chatbot
83 | Problem Statement: Create a chatbot for Tesla company that can answer at least 5 questions related to their electric cars. Use Conditional Statements, Nested Ifs, and Loops in your python code, and be creative when forming your question and answers.
84 |
85 | In this project I used simple if-else statements and nested statements to create a chatbot script that takes user inputs and gives out various responses.
86 |
87 | ### Restaurant App
88 | Problem Statement: Create a function in python, that will print the steps involved in ordering food using a food delivery app. The function should contain one parameter that accepts the name of the food delivery app (Example: Ubereats, etc.).
89 |
90 | Only print basic steps in ordering food like the opening app, user login, select restaurant, select food, select delivery location, payment method, etc.
91 |
92 | The first output statement should be "Food delivery by (app name passed as an argument)", and then print the steps involved in ordering food.
93 |
94 | Also, inside the function ask the user to input the dish and restaurant he wants to order from, then print a statement at the end, specifying the input details as Ordering (dish name) from (restaurant name).
95 |
96 | ### Student Performance
97 | Problem Statement: Using the student_performance dataset, perform basic data exploration, data visualization, and data cleaning as described in your course project.
98 | Plot the histogram for gender, math_score and reading_score (Use boxplot to check for outliers for both math_score and reading_score).
99 | Note: Do not remove the outliers as all the scores are important for further data visualization.
100 |
101 | Then remove all other columns from the dataset except 'gender', 'test_preparation_course', 'math_score', 'reading_score' and 'writing_score'.
102 | Now check for any null or nan values in the dataset and perform data cleaning if required.
103 |
104 | Add one column to calculate the total score by adding the scores obtained in 'math_score', 'reading_score' and 'writing_score'.
105 | Add another column to calculate the average score by dividing the total score by 3.
106 |
107 | Now Perform some data visualization to find which gender has a higher average score, also find the average score of students who completed the test_preparation_course vs the students who did not complete it. (Hint: Use bar plot for both the visualizations).
108 |
109 | ### HomePrices Prediction
110 | Using the 'homeprices' dataset, predict prices of new homes based on area, bedrooms and age. Check for missing values and fill them in with the median value of that attribute.
111 |
112 | Train the model using linear regression and check the coefficients and intercept value to create the linear equation. Save the model into a .pkl file
113 |
114 | Finally predict the price of a home that has,
115 | 3000 sqr ft area, 3 bedrooms, 40 years old
116 | 2500 sqr ft area, 4 bedrooms, 5 years old
117 | (Cross check the values by manually calculating using the linear equation)
118 |
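A minimal sketch of one way to carry this out; the column names ('area', 'bedrooms', 'age', 'price') are assumed from the problem statement, not verified against the dataset:

```
# Hypothetical sketch; column names are assumptions, not checked against the CSV.
import pandas as pd
import joblib
from sklearn.linear_model import LinearRegression

df = pd.read_csv('homeprices.csv')
df = df.fillna(df.median(numeric_only=True))   # fill missing values with medians

X = df[['area', 'bedrooms', 'age']]            # assumed feature columns
y = df['price']                                # assumed target column

model = LinearRegression()
model.fit(X, y)
print(model.coef_, model.intercept_)           # terms of the linear equation

joblib.dump(model, 'homemodel.pkl')
print(model.predict([[3000, 3, 40], [2500, 4, 5]]))
```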
119 | ### Fish Weight Prediction
120 | Using the same dataset of fishes used in the class project, predict the width of the fishes using all the other attributes as independent variables.
121 | Check the correlation between the width and other attributes by using heatmap and pairplot. Also, check for outliers using boxplot and remove them if any.
122 |
123 | Use 70:30 ratio for training and testing the model then save your model as .pkl file.
124 | Compare the predicted data with the test data and calculate the R2 score to give your conclusions about the accuracy of the model.
125 |
126 | Also, predict the width of fishes with the following data:
127 | Weight: 300 vertical:25 diagonal:27 cross:30 height: 8
128 | Weight: 400 vertical:26 diagonal:28 cross:31 height: 9
129 | Weight: 500 vertical:27 diagonal:29 cross:32 height: 10
130 |
131 | ### Titanic Survival Prediction
132 | Problem Statement: From the given 'titanic' dataset, use the following columns to build a model to predict whether a person would survive or not:
133 | Pclass
134 | Gender
135 | Age
136 | Fare
137 |
138 | Use the label encoder code below to convert the string data in the Gender column into numbers (1=male, 0=female).
```
139 | from sklearn.preprocessing import LabelEncoder
140 | le_gender = LabelEncoder()
141 | df['gender'] = le_gender.fit_transform(df.Gender)
142 | df = df.drop('Gender', axis=1)
```
143 |
144 | Check for missing or null values and replace them with the mean.
145 |
146 | Train your model using the train_test_split function with a 75-25 ratio. Finally, use both decision tree and random forest (n_estimators=400) classifiers to predict the X_test data and compare the predictions with the y_test data using a confusion matrix. Give your conclusions about the accuracy of both classifiers (a sketch of this comparison follows below).
147 |
148 | Calculate the score of your model using the code:
149 | clf.score(X_test, y_test)
150 |
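A minimal sketch of the training-and-comparison step described above; the 'Survived' target column is an assumption based on the standard titanic dataset, not verified against the CSV:

```
# Hypothetical sketch; 'Survived' as the target column is an assumption.
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix

df = pd.read_csv('titanic.csv')[['Pclass', 'Gender', 'Age', 'Fare', 'Survived']]

le_gender = LabelEncoder()
df['gender'] = le_gender.fit_transform(df.Gender)   # 1=male, 0=female
df = df.drop('Gender', axis=1)
df = df.fillna(df.mean())                           # replace missing values with the mean

X = df.drop('Survived', axis=1)
y = df.Survived
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25)

for clf in (DecisionTreeClassifier(), RandomForestClassifier(n_estimators=400)):
    clf.fit(X_train, y_train)
    print(type(clf).__name__, clf.score(X_test, y_test))
    print(confusion_matrix(y_test, clf.predict(X_test)))
```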
151 | ### Iris Flower Classification
152 | Problem Statement: Using the same iris flower dataset, apply direct logistic regression with 70-30 split for training and testing data to predict the species of the flower.
153 |
154 | Check the accuracy of the model using confusion matrix and visualize using heatmap.
155 |
156 | Code to apply logistic regression
157 | ```
158 | from sklearn.linear_model import LogisticRegression
159 |
160 | model = LogisticRegression()
161 | ```
162 | ### Iris Flower Petal Length vs Width Clustering
163 | Problem Statement: Using the iris_petallenvspetalwidth.csv dataset, plot a graph between petal length and petal width to observe the data and assign a colour to each type of flower (virginica, setosa and versicolour) to better visualize the data.
164 |
165 | Note: Assign an int value to each type of flower and then change the column type to int using the following commands :
166 |
167 | ```
168 | df["Species"].replace({"Iris-setosa": "0", "Iris-versicolor": "1", "Iris-virginica": "2"}, inplace=True)
169 |
170 | convert_dict = {'Species': int}
171 |
172 | df = df.astype(convert_dict)
173 | ```
174 | Cross check the change in data type using the info() function.
175 |
176 | To assign colour to each type of flower use the following code while using scatter plot:
177 | ```
178 | pyplot.scatter(df.PetalLength, df.PetalWidth , c=df.Species, cmap='gist_rainbow')
179 | ```
180 | Find the optimal k value using the elbow curve and perform KMeans clustering on the data by taking the columns 'PetalLength' and 'PetalWidth' to train the model (drop the other columns).
181 |
182 | Compare the initial graph with the formed clustered graph and conclude your observations.
183 |
184 | ### Customer Segmentation
185 | Using the 'Customer_Segmentation' dataset, plot a bar graph between INCOME and SPEND to have a basic idea of the distribution of the dataset.
186 |
187 | Then plot a scatter plot of the same to visualize the data easily.
188 |
189 | Now, find the optimal k value using elbow method from the Sum of squared distance graph, and use the optimal k value to form clusters of the data.
190 |
191 | Finally conclude which cluster can be used as the target customer to sell more products to.
192 |
193 | Note: There could be more than 1 cluster where the target customers fall in.
194 |
195 | ### Market Basket Association
196 | Problem Statement: Use the following data taken from the table and perform market basket analysis using apriori algorithm to generate association rules.
197 |
198 | The minimum support should be 30% and confidence threshold should be 60%.
199 |
200 | List the association rules in descending order of lift to focus on the most important association rules.
201 |
202 | dataset = [['Eggs', 'Kidney Beans', 'Milk', 'Nutmeg', 'Onion', 'Yogurt'], ['Dill', 'Eggs', 'Kidney Beans', 'Nutmeg', 'Onion', 'Yogurt'], ['Apple', 'Eggs', 'Kidney Beans', 'Milk'], ['Corn', 'Kidney Beans', 'Milk', 'Yogurt'], ['Corn', 'Eggs', 'Ice cream', 'Kidney Beans', 'Onion'], ['Apple', 'Milk', 'Yogurt'], ['Eggs', 'Kidney Beans', 'Onion'], ['Corn', 'Dill', 'Kidney Beans', 'Nutmeg', 'Onion'], ['Apple', 'Eggs', 'Ice cream', 'Milk', 'Onion', 'Yogurt'], ['Ice cream'], ['Apple', 'Ice cream', 'Nutmeg']]
203 |
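A minimal sketch using the apriori implementation from mlxtend (assuming that is the library used in the class project):

```
# Hypothetical sketch with mlxtend; min_support=0.3 and confidence>=0.6 as required.
import pandas as pd
from mlxtend.preprocessing import TransactionEncoder
from mlxtend.frequent_patterns import apriori, association_rules

dataset = [
    ['Eggs', 'Kidney Beans', 'Milk', 'Nutmeg', 'Onion', 'Yogurt'],
    ['Dill', 'Eggs', 'Kidney Beans', 'Nutmeg', 'Onion', 'Yogurt'],
    # ... the remaining transactions from the problem statement above
]

# One-hot encode the transactions into a boolean DataFrame
te = TransactionEncoder()
df = pd.DataFrame(te.fit(dataset).transform(dataset), columns=te.columns_)

frequent = apriori(df, min_support=0.3, use_colnames=True)
rules = association_rules(frequent, metric='confidence', min_threshold=0.6)
print(rules.sort_values('lift', ascending=False))  # most important rules first
```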
204 | ### Movie Recommendation System
205 | Problem Statement: Use the given 'userRatings' matrix to recommend movies, similar to what was done in the class project. This time use the Pearson method to create the similarity matrix instead of cosine similarity.
206 |
207 | Use the code below for pearson method:
208 | ```
209 | corrMatrix = userRatings.corr(method='pearson')
210 | ```
211 | This will directly create the item-based similarity matrix.
212 |
213 | Then take 3 user inputs of movies that they have seen along with the rating for each.
214 |
215 | Finally recommend 2 new movies for them to watch.
216 |
217 | Note: Remember to include the rating threshold in your get_similarity function.
218 |
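A minimal sketch of one plausible shape for get_similarity; the class project's actual helper is not shown here, so the threshold logic below is an assumption:

```
# Hypothetical sketch of a get_similarity helper; the details are assumptions.
import pandas as pd

userRatings = pd.read_csv('userRatings.csv', index_col=0)
corrMatrix = userRatings.corr(method='pearson')

def get_similarity(movie_name, rating, threshold=2.5):
    # Weight the movie's similarity column by how far the rating is above neutral
    scores = corrMatrix[movie_name] * (rating - threshold)
    return scores.sort_values(ascending=False)

# Combine the scores for three watched movies, then recommend unseen titles
watched = [('Avengers', 5), ('Frozen', 2), ('Ponyo', 1)]
combined = sum(get_similarity(m, r) for m, r in watched)
print(combined.sort_values(ascending=False).head())
```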
219 | ### Spam Detection
220 | Problem Statement: Use the 'spam.csv' labelled dataset to detect if a message is spam or not.
221 |
222 | Use the same data pre-processing techniques learned in the class project to pre-process your data using a single function.
223 |
224 | Use CountVectorizer function to convert the processed data and train the model using logistic regression algorithm.
225 |
226 | Create a function and use the trained model to predict if a new message is classified as spam or not.
227 |
228 | Give one example each of 'a spam message' and 'not a spam message'.
229 |
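A minimal sketch of the vectorize-and-train step (the 'Category' and 'Message' column names match the notebook in this repository; the preprocessing here is simplified to CountVectorizer defaults):

```
# Hypothetical sketch; preprocessing is simplified relative to the class project.
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression

data = pd.read_csv('spam.csv')
y = (data['Category'] == 'spam').astype(int)    # 1 = spam, 0 = ham

vectorizer = CountVectorizer(stop_words='english', lowercase=True)
X = vectorizer.fit_transform(data['Message'])   # bag-of-words counts

model = LogisticRegression(max_iter=1000)
model.fit(X, y)

def predict_spam(message):
    return 'spam' if model.predict(vectorizer.transform([message]))[0] else 'not spam'

print(predict_spam('WINNER!! Claim your free prize now'))     # likely spam
print(predict_spam('Are we still meeting for lunch today?'))  # likely not spam
```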
230 | ### Image Classification with Keras
231 | Problem Statement: Build a neural network in Keras for image classification problems using the Keras fashion MNIST dataset.
232 |
233 | Use the code given below to get the dataset from keras
234 | ```
235 | fm = keras.datasets.fashion_mnist
236 |
237 | (X_train, y_train), (X_test, y_test) = fm.load_data()
238 | ```
239 | This consists of 60,000 28x28-pixel training images and 10,000 test images; each image is classified into one of 10 categories.
240 |
241 | Each image is 28 x 28 pixels in dimension.
242 |
243 | Make sure to normalize the training data before training the neural network
244 |
245 | Design and train your neural network with an optimal number of hidden layers and neurons in each hidden layer that can give you the best accuracy.
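A minimal sketch of one workable architecture (the layer sizes and epoch count are assumptions to tune, not the required answer):

```
# Hypothetical sketch; layer sizes and epochs are starting points to tune.
from tensorflow import keras

fm = keras.datasets.fashion_mnist
(X_train, y_train), (X_test, y_test) = fm.load_data()

# Normalize pixel values from 0-255 down to 0-1 before training
X_train, X_test = X_train / 255.0, X_test / 255.0

model = keras.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),    # 784 inputs per image
    keras.layers.Dense(128, activation='relu'),    # one hidden layer
    keras.layers.Dense(10, activation='softmax'),  # one output per category
])
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5)
model.evaluate(X_test, y_test)                     # accuracy on the test images
```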
246 |
247 | Evaluate the model to check its accuracy and
248 |
--------------------------------------------------------------------------------
/Salary Prediction/Salary Prediction.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 2,
6 | "id": "82b906d7",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import pandas as pd \n",
11 | "\n",
12 | "df = pd.read_csv('salaries.csv')"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": 3,
18 | "id": "6eb5df44",
19 | "metadata": {},
20 | "outputs": [
21 | {
22 | "data": {
23 | "text/html": [
24 | "\n",
25 | "\n",
38 | "
\n",
39 | " \n",
40 | " \n",
41 | " | \n",
42 | " experience | \n",
43 | " test_score_10 | \n",
44 | " interview_score_10 | \n",
45 | " salary | \n",
46 | "
\n",
47 | " \n",
48 | " \n",
49 | " \n",
50 | " 0 | \n",
51 | " NaN | \n",
52 | " 8.0 | \n",
53 | " 9 | \n",
54 | " 50000 | \n",
55 | "
\n",
56 | " \n",
57 | " 1 | \n",
58 | " NaN | \n",
59 | " 8.0 | \n",
60 | " 6 | \n",
61 | " 45000 | \n",
62 | "
\n",
63 | " \n",
64 | " 2 | \n",
65 | " five | \n",
66 | " 6.0 | \n",
67 | " 7 | \n",
68 | " 60000 | \n",
69 | "
\n",
70 | " \n",
71 | " 3 | \n",
72 | " two | \n",
73 | " 10.0 | \n",
74 | " 10 | \n",
75 | " 65000 | \n",
76 | "
\n",
77 | " \n",
78 | " 4 | \n",
79 | " seven | \n",
80 | " 9.0 | \n",
81 | " 6 | \n",
82 | " 70000 | \n",
83 | "
\n",
84 | " \n",
85 | "
\n",
86 | "
"
87 | ],
88 | "text/plain": [
89 | " experience test_score_10 interview_score_10 salary\n",
90 | "0 NaN 8.0 9 50000\n",
91 | "1 NaN 8.0 6 45000\n",
92 | "2 five 6.0 7 60000\n",
93 | "3 two 10.0 10 65000\n",
94 | "4 seven 9.0 6 70000"
95 | ]
96 | },
97 | "execution_count": 3,
98 | "metadata": {},
99 | "output_type": "execute_result"
100 | }
101 | ],
102 | "source": [
103 | "# Linear Regression with Multivariate Variables\n",
104 | "df.head()"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": 4,
110 | "id": "fa982854",
111 | "metadata": {},
112 | "outputs": [
113 | {
114 | "data": {
115 | "text/plain": [
116 | "(8, 4)"
117 | ]
118 | },
119 | "execution_count": 4,
120 | "metadata": {},
121 | "output_type": "execute_result"
122 | }
123 | ],
124 | "source": [
125 | "df.shape"
126 | ]
127 | },
128 | {
129 | "cell_type": "code",
130 | "execution_count": 5,
131 | "id": "8aed32ea",
132 | "metadata": {},
133 | "outputs": [
134 | {
135 | "name": "stdout",
136 | "output_type": "stream",
137 | "text": [
138 | "\n",
139 | "RangeIndex: 8 entries, 0 to 7\n",
140 | "Data columns (total 4 columns):\n",
141 | " # Column Non-Null Count Dtype \n",
142 | "--- ------ -------------- ----- \n",
143 | " 0 experience 6 non-null object \n",
144 | " 1 test_score_10 7 non-null float64\n",
145 | " 2 interview_score_10 8 non-null int64 \n",
146 | " 3 salary 8 non-null int64 \n",
147 | "dtypes: float64(1), int64(2), object(1)\n",
148 | "memory usage: 384.0+ bytes\n"
149 | ]
150 | }
151 | ],
152 | "source": [
153 | "df.info()"
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "execution_count": 6,
159 | "id": "e55c7f03",
160 | "metadata": {},
161 | "outputs": [
162 | {
163 | "data": {
164 | "text/plain": [
165 | "experience 2\n",
166 | "test_score_10 1\n",
167 | "interview_score_10 0\n",
168 | "salary 0\n",
169 | "dtype: int64"
170 | ]
171 | },
172 | "execution_count": 6,
173 | "metadata": {},
174 | "output_type": "execute_result"
175 | }
176 | ],
177 | "source": [
178 | "df.isnull().sum()"
179 | ]
180 | },
181 | {
182 | "cell_type": "code",
183 | "execution_count": 8,
184 | "id": "9a4b4384",
185 | "metadata": {},
186 | "outputs": [
187 | {
188 | "data": {
189 | "text/html": [
190 | "\n",
191 | "\n",
204 | "
\n",
205 | " \n",
206 | " \n",
207 | " | \n",
208 | " test_score_10 | \n",
209 | " interview_score_10 | \n",
210 | " salary | \n",
211 | "
\n",
212 | " \n",
213 | " \n",
214 | " \n",
215 | " count | \n",
216 | " 7.000000 | \n",
217 | " 8.000000 | \n",
218 | " 8.00000 | \n",
219 | "
\n",
220 | " \n",
221 | " mean | \n",
222 | " 7.857143 | \n",
223 | " 7.875000 | \n",
224 | " 63000.00000 | \n",
225 | "
\n",
226 | " \n",
227 | " std | \n",
228 | " 1.345185 | \n",
229 | " 1.642081 | \n",
230 | " 11501.55269 | \n",
231 | "
\n",
232 | " \n",
233 | " min | \n",
234 | " 6.000000 | \n",
235 | " 6.000000 | \n",
236 | " 45000.00000 | \n",
237 | "
\n",
238 | " \n",
239 | " 25% | \n",
240 | " 7.000000 | \n",
241 | " 6.750000 | \n",
242 | " 57500.00000 | \n",
243 | "
\n",
244 | " \n",
245 | " 50% | \n",
246 | " 8.000000 | \n",
247 | " 7.500000 | \n",
248 | " 63500.00000 | \n",
249 | "
\n",
250 | " \n",
251 | " 75% | \n",
252 | " 8.500000 | \n",
253 | " 9.250000 | \n",
254 | " 70500.00000 | \n",
255 | "
\n",
256 | " \n",
257 | " max | \n",
258 | " 10.000000 | \n",
259 | " 10.000000 | \n",
260 | " 80000.00000 | \n",
261 | "
\n",
262 | " \n",
263 | "
\n",
264 | "
"
265 | ],
266 | "text/plain": [
267 | " test_score_10 interview_score_10 salary\n",
268 | "count 7.000000 8.000000 8.00000\n",
269 | "mean 7.857143 7.875000 63000.00000\n",
270 | "std 1.345185 1.642081 11501.55269\n",
271 | "min 6.000000 6.000000 45000.00000\n",
272 | "25% 7.000000 6.750000 57500.00000\n",
273 | "50% 8.000000 7.500000 63500.00000\n",
274 | "75% 8.500000 9.250000 70500.00000\n",
275 | "max 10.000000 10.000000 80000.00000"
276 | ]
277 | },
278 | "execution_count": 8,
279 | "metadata": {},
280 | "output_type": "execute_result"
281 | }
282 | ],
283 | "source": [
284 | "df.describe()"
285 | ]
286 | },
287 | {
288 | "cell_type": "code",
289 | "execution_count": 9,
290 | "id": "de08e965",
291 | "metadata": {},
292 | "outputs": [],
293 | "source": [
294 | "df.experience = df.experience.fillna('zero')"
295 | ]
296 | },
297 | {
298 | "cell_type": "code",
299 | "execution_count": 11,
300 | "id": "c54ba31f",
301 | "metadata": {},
302 | "outputs": [
303 | {
304 | "name": "stderr",
305 | "output_type": "stream",
306 | "text": [
307 | "C:\\Users\\IFEANY~1\\AppData\\Local\\Temp/ipykernel_30028/308181716.py:1: FutureWarning: Dropping of nuisance columns in DataFrame reductions (with 'numeric_only=None') is deprecated; in a future version this will raise TypeError. Select only valid columns before calling the reduction.\n",
308 | " df=df.fillna(df.median())\n"
309 | ]
310 | }
311 | ],
312 | "source": [
313 | "df=df.fillna(df.median())"
314 | ]
315 | },
316 | {
317 | "cell_type": "code",
318 | "execution_count": 12,
319 | "id": "6c868069",
320 | "metadata": {},
321 | "outputs": [
322 | {
323 | "data": {
324 | "text/html": [
325 | "\n",
326 | "\n",
339 | "
\n",
340 | " \n",
341 | " \n",
342 | " | \n",
343 | " experience | \n",
344 | " test_score_10 | \n",
345 | " interview_score_10 | \n",
346 | " salary | \n",
347 | "
\n",
348 | " \n",
349 | " \n",
350 | " \n",
351 | " 0 | \n",
352 | " zero | \n",
353 | " 8.0 | \n",
354 | " 9 | \n",
355 | " 50000 | \n",
356 | "
\n",
357 | " \n",
358 | " 1 | \n",
359 | " zero | \n",
360 | " 8.0 | \n",
361 | " 6 | \n",
362 | " 45000 | \n",
363 | "
\n",
364 | " \n",
365 | " 2 | \n",
366 | " five | \n",
367 | " 6.0 | \n",
368 | " 7 | \n",
369 | " 60000 | \n",
370 | "
\n",
371 | " \n",
372 | " 3 | \n",
373 | " two | \n",
374 | " 10.0 | \n",
375 | " 10 | \n",
376 | " 65000 | \n",
377 | "
\n",
378 | " \n",
379 | " 4 | \n",
380 | " seven | \n",
381 | " 9.0 | \n",
382 | " 6 | \n",
383 | " 70000 | \n",
384 | "
\n",
385 | " \n",
386 | " 5 | \n",
387 | " three | \n",
388 | " 7.0 | \n",
389 | " 10 | \n",
390 | " 62000 | \n",
391 | "
\n",
392 | " \n",
393 | " 6 | \n",
394 | " ten | \n",
395 | " 8.0 | \n",
396 | " 7 | \n",
397 | " 72000 | \n",
398 | "
\n",
399 | " \n",
400 | " 7 | \n",
401 | " eleven | \n",
402 | " 7.0 | \n",
403 | " 8 | \n",
404 | " 80000 | \n",
405 | "
\n",
406 | " \n",
407 | "
\n",
408 | "
"
409 | ],
410 | "text/plain": [
411 | " experience test_score_10 interview_score_10 salary\n",
412 | "0 zero 8.0 9 50000\n",
413 | "1 zero 8.0 6 45000\n",
414 | "2 five 6.0 7 60000\n",
415 | "3 two 10.0 10 65000\n",
416 | "4 seven 9.0 6 70000\n",
417 | "5 three 7.0 10 62000\n",
418 | "6 ten 8.0 7 72000\n",
419 | "7 eleven 7.0 8 80000"
420 | ]
421 | },
422 | "execution_count": 12,
423 | "metadata": {},
424 | "output_type": "execute_result"
425 | }
426 | ],
427 | "source": [
428 | "df"
429 | ]
430 | },
431 | {
432 | "cell_type": "code",
433 | "execution_count": 13,
434 | "id": "b07b08cb",
435 | "metadata": {},
436 | "outputs": [
437 | {
438 | "data": {
439 | "text/plain": [
440 | "experience 0\n",
441 | "test_score_10 0\n",
442 | "interview_score_10 0\n",
443 | "salary 0\n",
444 | "dtype: int64"
445 | ]
446 | },
447 | "execution_count": 13,
448 | "metadata": {},
449 | "output_type": "execute_result"
450 | }
451 | ],
452 | "source": [
453 | "df.isnull().sum()"
454 | ]
455 | },
456 | {
457 | "cell_type": "code",
458 | "execution_count": 14,
459 | "id": "25573a74",
460 | "metadata": {},
461 | "outputs": [
462 | {
463 | "name": "stdout",
464 | "output_type": "stream",
465 | "text": [
466 | "Collecting word2number\n",
467 | " Downloading word2number-1.1.zip (9.7 kB)\n",
468 | "Building wheels for collected packages: word2number\n",
469 | " Building wheel for word2number (setup.py): started\n",
470 | " Building wheel for word2number (setup.py): finished with status 'done'\n",
471 | " Created wheel for word2number: filename=word2number-1.1-py3-none-any.whl size=5580 sha256=91388dd746a4833475c00a4c138f1353bc63eb77ed47db41427a3e38a8fc54bf\n",
472 | " Stored in directory: c:\\users\\ifeanyi pc\\appdata\\local\\pip\\cache\\wheels\\a0\\4a\\5b\\d2f2df5c344ddbecb8bea759872c207ea91d93f57fb54e816e\n",
473 | "Successfully built word2number\n",
474 | "Installing collected packages: word2number\n",
475 | "Successfully installed word2number-1.1\n"
476 | ]
477 | }
478 | ],
479 | "source": [
480 | "! pip install word2number"
481 | ]
482 | },
483 | {
484 | "cell_type": "code",
485 | "execution_count": 15,
486 | "id": "b6d1f48f",
487 | "metadata": {},
488 | "outputs": [],
489 | "source": [
490 | "from word2number import w2n\n",
491 | "\n",
492 | "df.experience = df.experience.apply(w2n.word_to_num)"
493 | ]
494 | },
495 | {
496 | "cell_type": "code",
497 | "execution_count": 18,
498 | "id": "0ac80fda",
499 | "metadata": {},
500 | "outputs": [
501 | {
502 | "data": {
503 | "text/html": [
504 | "\n",
505 | "\n",
518 | "
\n",
519 | " \n",
520 | " \n",
521 | " | \n",
522 | " experience | \n",
523 | " test_score_10 | \n",
524 | " interview_score_10 | \n",
525 | " salary | \n",
526 | "
\n",
527 | " \n",
528 | " \n",
529 | " \n",
530 | " 0 | \n",
531 | " 0 | \n",
532 | " 8.0 | \n",
533 | " 9 | \n",
534 | " 50000 | \n",
535 | "
\n",
536 | " \n",
537 | " 1 | \n",
538 | " 0 | \n",
539 | " 8.0 | \n",
540 | " 6 | \n",
541 | " 45000 | \n",
542 | "
\n",
543 | " \n",
544 | " 2 | \n",
545 | " 5 | \n",
546 | " 6.0 | \n",
547 | " 7 | \n",
548 | " 60000 | \n",
549 | "
\n",
550 | " \n",
551 | " 3 | \n",
552 | " 2 | \n",
553 | " 10.0 | \n",
554 | " 10 | \n",
555 | " 65000 | \n",
556 | "
\n",
557 | " \n",
558 | " 4 | \n",
559 | " 7 | \n",
560 | " 9.0 | \n",
561 | " 6 | \n",
562 | " 70000 | \n",
563 | "
\n",
564 | " \n",
565 | " 5 | \n",
566 | " 3 | \n",
567 | " 7.0 | \n",
568 | " 10 | \n",
569 | " 62000 | \n",
570 | "
\n",
571 | " \n",
572 | " 6 | \n",
573 | " 10 | \n",
574 | " 8.0 | \n",
575 | " 7 | \n",
576 | " 72000 | \n",
577 | "
\n",
578 | " \n",
579 | " 7 | \n",
580 | " 11 | \n",
581 | " 7.0 | \n",
582 | " 8 | \n",
583 | " 80000 | \n",
584 | "
\n",
585 | " \n",
586 | "
\n",
587 | "
"
588 | ],
589 | "text/plain": [
590 | " experience test_score_10 interview_score_10 salary\n",
591 | "0 0 8.0 9 50000\n",
592 | "1 0 8.0 6 45000\n",
593 | "2 5 6.0 7 60000\n",
594 | "3 2 10.0 10 65000\n",
595 | "4 7 9.0 6 70000\n",
596 | "5 3 7.0 10 62000\n",
597 | "6 10 8.0 7 72000\n",
598 | "7 11 7.0 8 80000"
599 | ]
600 | },
601 | "execution_count": 18,
602 | "metadata": {},
603 | "output_type": "execute_result"
604 | }
605 | ],
606 | "source": [
607 | "df"
608 | ]
609 | },
610 | {
611 | "cell_type": "code",
612 | "execution_count": 19,
613 | "id": "9ed4ff58",
614 | "metadata": {},
615 | "outputs": [],
616 | "source": [
617 | "from sklearn.linear_model import LinearRegression"
618 | ]
619 | },
620 | {
621 | "cell_type": "code",
622 | "execution_count": 20,
623 | "id": "ae396208",
624 | "metadata": {},
625 | "outputs": [],
626 | "source": [
627 | "X = df.drop('salary', axis = 1)\n",
628 | "y = df.salary"
629 | ]
630 | },
631 | {
632 | "cell_type": "code",
633 | "execution_count": 21,
634 | "id": "eecfe444",
635 | "metadata": {},
636 | "outputs": [
637 | {
638 | "data": {
639 | "text/plain": [
640 | "0 50000\n",
641 | "1 45000\n",
642 | "2 60000\n",
643 | "3 65000\n",
644 | "4 70000\n",
645 | "Name: salary, dtype: int64"
646 | ]
647 | },
648 | "execution_count": 21,
649 | "metadata": {},
650 | "output_type": "execute_result"
651 | }
652 | ],
653 | "source": [
654 | "X.head()\n",
655 | "y.head()"
656 | ]
657 | },
658 | {
659 | "cell_type": "code",
660 | "execution_count": 22,
661 | "id": "1fa698d1",
662 | "metadata": {},
663 | "outputs": [],
664 | "source": [
665 | "model = LinearRegression()"
666 | ]
667 | },
668 | {
669 | "cell_type": "code",
670 | "execution_count": 23,
671 | "id": "acc943c3",
672 | "metadata": {},
673 | "outputs": [
674 | {
675 | "data": {
676 | "text/plain": [
677 | "LinearRegression()"
678 | ]
679 | },
680 | "execution_count": 23,
681 | "metadata": {},
682 | "output_type": "execute_result"
683 | }
684 | ],
685 | "source": [
686 | "model.fit(X,y)"
687 | ]
688 | },
689 | {
690 | "cell_type": "code",
691 | "execution_count": 24,
692 | "id": "d124b0b9",
693 | "metadata": {},
694 | "outputs": [
695 | {
696 | "data": {
697 | "text/plain": [
698 | "array([2812.95487627, 1845.70596798, 2205.24017467])"
699 | ]
700 | },
701 | "execution_count": 24,
702 | "metadata": {},
703 | "output_type": "execute_result"
704 | }
705 | ],
706 | "source": [
707 | "model.coef_"
708 | ]
709 | },
710 | {
711 | "cell_type": "code",
712 | "execution_count": 25,
713 | "id": "fc874d48",
714 | "metadata": {},
715 | "outputs": [
716 | {
717 | "data": {
718 | "text/plain": [
719 | "17737.26346433771"
720 | ]
721 | },
722 | "execution_count": 25,
723 | "metadata": {},
724 | "output_type": "execute_result"
725 | }
726 | ],
727 | "source": [
728 | "model.intercept_"
729 | ]
730 | },
731 | {
732 | "cell_type": "code",
733 | "execution_count": 27,
734 | "id": "80669325",
735 | "metadata": {},
736 | "outputs": [
737 | {
738 | "data": {
739 | "text/plain": [
740 | "['salary_model.pkl']"
741 | ]
742 | },
743 | "execution_count": 27,
744 | "metadata": {},
745 | "output_type": "execute_result"
746 | }
747 | ],
748 | "source": [
749 | "import joblib\n",
750 | "\n",
751 | "joblib.dump(model, 'salary_model.pkl')"
752 | ]
753 | },
754 | {
755 | "cell_type": "code",
756 | "execution_count": null,
757 | "id": "84c24e83",
758 | "metadata": {},
759 | "outputs": [],
760 | "source": []
761 | }
762 | ],
763 | "metadata": {
764 | "kernelspec": {
765 | "display_name": "Python 3 (ipykernel)",
766 | "language": "python",
767 | "name": "python3"
768 | },
769 | "language_info": {
770 | "codemirror_mode": {
771 | "name": "ipython",
772 | "version": 3
773 | },
774 | "file_extension": ".py",
775 | "mimetype": "text/x-python",
776 | "name": "python",
777 | "nbconvert_exporter": "python",
778 | "pygments_lexer": "ipython3",
779 | "version": "3.9.7"
780 | }
781 | },
782 | "nbformat": 4,
783 | "nbformat_minor": 5
784 | }
785 |
--------------------------------------------------------------------------------
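The fitted coefficients and intercept printed in the notebook above fully determine the model, so a prediction can be reproduced by hand. A minimal sketch, assuming X's columns follow the order they appear in salaries.csv (experience, test_score_10, interview_score_10):

```python
# Reproduce a prediction from the coefficients and intercept shown above.
# Assumed feature order: experience, test_score_10, interview_score_10.
coef = [2812.95487627, 1845.70596798, 2205.24017467]
intercept = 17737.26346433771

x = [2, 10, 10]  # the 'two,10,10,65000' row from salaries.csv
salary = intercept + sum(c * v for c, v in zip(coef, x))
print(round(salary, 2))  # ~63872.63, close to the recorded 65000
```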
/Salary Prediction/salaries.csv:
--------------------------------------------------------------------------------
1 | experience,test_score_10,interview_score_10,salary
2 | ,8,9,50000
3 | ,8,6,45000
4 | five,6,7,60000
5 | two,10,10,65000
6 | seven,9,6,70000
7 | three,7,10,62000
8 | ten,,7,72000
9 | eleven,7,8,80000
10 |
--------------------------------------------------------------------------------
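The CSV above stores experience as words ('five', 'two', ...) with some blanks, and one test score is missing, so it cannot be fed to LinearRegression as-is. The notebook's cleaning cells are not shown in this excerpt; the following is one possible cleaning pass (the word2number package and the median fill are assumptions, not the notebook's confirmed choices):

```python
import pandas as pd
from word2number import w2n  # third-party helper: converts 'five' -> 5

df = pd.read_csv('salaries.csv')

# Treat blank experience as zero years, then turn the words into integers.
df['experience'] = df['experience'].fillna('zero').apply(w2n.word_to_num)

# Fill the one missing test score with the column median.
df['test_score_10'] = df['test_score_10'].fillna(df['test_score_10'].median())
```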
/Salary Prediction/salary_model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Salary Prediction/salary_model.pkl
--------------------------------------------------------------------------------
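For completeness, a short usage sketch for the model pickled by `joblib.dump` in the notebook above (the candidate's values are made up for illustration):

```python
import joblib
import numpy as np

model = joblib.load('salary_model.pkl')

# Assumed feature order: experience (years), test_score_10, interview_score_10.
candidate = np.array([[5, 8, 9]])
print(model.predict(candidate))  # predicted salary for this candidate
```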
/Spam Detection with NLP/Spam Detection with NLP.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "6025239e",
6 | "metadata": {},
7 | "source": [
8 | "# SPAM Detection with Natural Language Processing\n",
9 | "\n",
10 | "In this project I try to predict if an email should be classified as spam or not."
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 30,
16 | "id": "e1182bec",
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "import pandas as pd"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 31,
26 | "id": "0bf8d1f3",
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "data": {
31 | "text/html": [
32 | "\n",
33 | "\n",
46 | "
\n",
47 | " \n",
48 | " \n",
49 | " | \n",
50 | " Category | \n",
51 | " Message | \n",
52 | "
\n",
53 | " \n",
54 | " \n",
55 | " \n",
56 | " 0 | \n",
57 | " ham | \n",
58 | " Go until jurong point, crazy.. Available only ... | \n",
59 | "
\n",
60 | " \n",
61 | " 1 | \n",
62 | " ham | \n",
63 | " Ok lar... Joking wif u oni... | \n",
64 | "
\n",
65 | " \n",
66 | " 2 | \n",
67 | " spam | \n",
68 | " Free entry in 2 a wkly comp to win FA Cup fina... | \n",
69 | "
\n",
70 | " \n",
71 | " 3 | \n",
72 | " ham | \n",
73 | " U dun say so early hor... U c already then say... | \n",
74 | "
\n",
75 | " \n",
76 | " 4 | \n",
77 | " ham | \n",
78 | " Nah I don't think he goes to usf, he lives aro... | \n",
79 | "
\n",
80 | " \n",
81 | "
\n",
82 | "
"
83 | ],
84 | "text/plain": [
85 | " Category Message\n",
86 | "0 ham Go until jurong point, crazy.. Available only ...\n",
87 | "1 ham Ok lar... Joking wif u oni...\n",
88 | "2 spam Free entry in 2 a wkly comp to win FA Cup fina...\n",
89 | "3 ham U dun say so early hor... U c already then say...\n",
90 | "4 ham Nah I don't think he goes to usf, he lives aro..."
91 | ]
92 | },
93 | "execution_count": 31,
94 | "metadata": {},
95 | "output_type": "execute_result"
96 | }
97 | ],
98 | "source": [
99 | "data = pd.read_csv('spam.csv')\n",
100 | "\n",
101 | "data.head()"
102 | ]
103 | },
104 | {
105 | "cell_type": "code",
106 | "execution_count": 32,
107 | "id": "f7948e5b",
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "data.loc[data['Category']=='ham','Category'] = 0\n",
112 | "data.loc[data['Category']=='spam','Category'] = 1\n",
113 | "data['Category'] = data['Category'].astype(int)"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": 33,
119 | "id": "aa8b3663",
120 | "metadata": {},
121 | "outputs": [],
122 | "source": [
123 | "message = data['Message']\n",
124 | "\n",
125 | "category = data['Category']"
126 | ]
127 | },
128 | {
129 | "cell_type": "markdown",
130 | "id": "6c4205ac",
131 | "metadata": {},
132 | "source": [
133 | "We save the message column to a variable called **message** and the category to a variable called **category**."
134 | ]
135 | },
136 | {
137 | "cell_type": "code",
138 | "execution_count": 34,
139 | "id": "a24998fb",
140 | "metadata": {},
141 | "outputs": [
142 | {
143 | "data": {
144 | "text/plain": [
145 | "0 Go until jurong point, crazy.. Available only ...\n",
146 | "1 Ok lar... Joking wif u oni...\n",
147 | "2 Free entry in 2 a wkly comp to win FA Cup fina...\n",
148 | "3 U dun say so early hor... U c already then say...\n",
149 | "4 Nah I don't think he goes to usf, he lives aro...\n",
150 | "Name: Message, dtype: object"
151 | ]
152 | },
153 | "execution_count": 34,
154 | "metadata": {},
155 | "output_type": "execute_result"
156 | }
157 | ],
158 | "source": [
159 | "message.head()"
160 | ]
161 | },
162 | {
163 | "cell_type": "code",
164 | "execution_count": 35,
165 | "id": "ea8e2821",
166 | "metadata": {},
167 | "outputs": [
168 | {
169 | "data": {
170 | "text/plain": [
171 | "0 0\n",
172 | "1 0\n",
173 | "2 1\n",
174 | "3 0\n",
175 | "4 0\n",
176 | "Name: Category, dtype: int32"
177 | ]
178 | },
179 | "execution_count": 35,
180 | "metadata": {},
181 | "output_type": "execute_result"
182 | }
183 | ],
184 | "source": [
185 | "category.head()"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "id": "bca895cb",
191 | "metadata": {},
192 | "source": [
193 | "## Pre Processing\n",
194 | "Now in this section we have to process the data by:\n",
195 | "1. Converting all the rows to lower case.\n",
196 | "2. Removing stop words like i, me , you, our, your etc\n",
197 | "3. Removing hyperlinks,numbers,punctuations etc."
198 | ]
199 | },
200 | {
201 | "cell_type": "markdown",
202 | "id": "d1d96f68",
203 | "metadata": {},
204 | "source": [
205 | "Now we import the nltk library. NLTK is a toolkit build for working with NLP in Python. It provides us various text processing libraries with a lot of test datasets."
206 | ]
207 | },
208 | {
209 | "cell_type": "code",
210 | "execution_count": 36,
211 | "id": "b3f0e127",
212 | "metadata": {},
213 | "outputs": [],
214 | "source": [
215 | "import nltk\n",
216 | "import re\n",
217 | "import string"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": 37,
223 | "id": "2122a282",
224 | "metadata": {},
225 | "outputs": [
226 | {
227 | "name": "stderr",
228 | "output_type": "stream",
229 | "text": [
230 | "[nltk_data] Downloading package stopwords to C:\\Users\\IFEANYI\n",
231 | "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
232 | "[nltk_data] Package stopwords is already up-to-date!\n"
233 | ]
234 | }
235 | ],
236 | "source": [
237 | "nltk.download('stopwords')\n",
238 | "\n",
239 | "stop_words = nltk.corpus.stopwords.words('english')"
240 | ]
241 | },
242 | {
243 | "cell_type": "markdown",
244 | "id": "0be6f194",
245 | "metadata": {},
246 | "source": [
247 | "We download the stopwords we want to remove from the dataset."
248 | ]
249 | },
250 | {
251 | "cell_type": "code",
252 | "execution_count": 38,
253 | "id": "6a7613db",
254 | "metadata": {},
255 | "outputs": [
256 | {
257 | "name": "stderr",
258 | "output_type": "stream",
259 | "text": [
260 | "[nltk_data] Downloading package punkt to C:\\Users\\IFEANYI\n",
261 | "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
262 | "[nltk_data] Package punkt is already up-to-date!\n"
263 | ]
264 | }
265 | ],
266 | "source": [
267 | "nltk.download('punkt')\n",
268 | "\n",
269 | "from nltk.tokenize import word_tokenize"
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": 39,
275 | "id": "89adf866",
276 | "metadata": {},
277 | "outputs": [],
278 | "source": [
279 | "def pre_process(txt):\n",
280 | " lowered_text = txt.lower()\n",
281 | " \n",
282 | " removed_numbers = re.sub(r'\\d+','',lowered_text) # re. is for regular expressions. Substitutes digits with an empty string.\n",
283 | " \n",
284 | " removed_punctuation = removed_numbers.translate(str.maketrans('','',string.punctuation)) # This removes punctuation from the text and replaces it with an empty string\n",
285 | " \n",
286 | " # now we split the text to obtain tokens and then remove the stopwords.\n",
287 | " \n",
288 | " word_tokens = word_tokenize(removed_punctuation)\n",
289 | " \n",
290 | " processed_text = ''.join([word for word in word_tokens if word not in stop_words])\n",
291 | " \n",
292 | " return processed_text"
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": 40,
298 | "id": "2c88cb08",
299 | "metadata": {},
300 | "outputs": [
301 | {
302 | "data": {
303 | "text/plain": [
304 | "0 gojurongpointcrazyavailablebugisngreatworldlae...\n",
305 | "1 oklarjokingwifuoni\n",
306 | "2 freeentrywklycompwinfacupfinaltktsstmaytextfar...\n",
307 | "3 udunsayearlyhorucalreadysay\n",
308 | "4 nahdontthinkgoesusflivesaroundthough\n",
309 | " ... \n",
310 | "5567 ndtimetriedcontactuu£poundprizeclaimeasycallpp...\n",
311 | "5568 übgoingesplanadefrhome\n",
312 | "5569 pitymoodsoanysuggestions\n",
313 | "5570 guybitchingactedlikeidinterestedbuyingsomethin...\n",
314 | "5571 rofltruename\n",
315 | "Name: Message, Length: 5572, dtype: object"
316 | ]
317 | },
318 | "execution_count": 40,
319 | "metadata": {},
320 | "output_type": "execute_result"
321 | }
322 | ],
323 | "source": [
324 | "processed = message.apply(pre_process) #.apply applies a function across a pandas dataframe.\n",
325 | "\n",
326 | "processed"
327 | ]
328 | },
329 | {
330 | "cell_type": "markdown",
331 | "id": "6300160a",
332 | "metadata": {},
333 | "source": [
334 | "We have now processed the text but we still need to tokenize it."
335 | ]
336 | },
337 | {
338 | "cell_type": "code",
339 | "execution_count": 41,
340 | "id": "974223c6",
341 | "metadata": {},
342 | "outputs": [
343 | {
344 | "data": {
345 | "text/plain": [
346 | "<5572x5315 sparse matrix of type ''\n",
347 | "\twith 5965 stored elements in Compressed Sparse Row format>"
348 | ]
349 | },
350 | "execution_count": 41,
351 | "metadata": {},
352 | "output_type": "execute_result"
353 | }
354 | ],
355 | "source": [
356 | "from sklearn.feature_extraction.text import CountVectorizer\n",
357 | "\n",
358 | "vectorizer = CountVectorizer()\n",
359 | "\n",
360 | "input_data = vectorizer.fit_transform(processed)\n",
361 | "input_data"
362 | ]
363 | },
364 | {
365 | "cell_type": "markdown",
366 | "id": "ec802dea",
367 | "metadata": {},
368 | "source": [
369 | "We have now created our sparse matrix with number of reviews as rows(5572) and all the words in the dataset as columns after removing the stopwords(5965)"
370 | ]
371 | },
372 | {
373 | "cell_type": "code",
374 | "execution_count": 42,
375 | "id": "39d2013e",
376 | "metadata": {},
377 | "outputs": [
378 | {
379 | "name": "stdout",
380 | "output_type": "stream",
381 | "text": [
382 | " (0, 1304)\t1\n",
383 | " (1, 3121)\t1\n",
384 | " (2, 1130)\t1\n",
385 | " (3, 4467)\t1\n",
386 | " (4, 2804)\t1\n",
387 | " (5, 1142)\t1\n",
388 | " (5, 3497)\t1\n",
389 | " (6, 1015)\t1\n",
390 | " (7, 3278)\t1\n",
391 | " (8, 4922)\t1\n",
392 | " (8, 3436)\t1\n",
393 | " (9, 2729)\t1\n",
394 | " (10, 2065)\t1\n",
395 | " (11, 3829)\t1\n",
396 | " (12, 4594)\t1\n",
397 | " (12, 3435)\t1\n",
398 | " (13, 2178)\t1\n",
399 | " (14, 696)\t1\n",
400 | " (15, 5027)\t1\n",
401 | " (16, 2999)\t1\n",
402 | " (17, 971)\t1\n",
403 | " (18, 1087)\t1\n",
404 | " (18, 4080)\t1\n",
405 | " (18, 4079)\t1\n",
406 | " (19, 988)\t1\n",
407 | " :\t:\n",
408 | " (5548, 4416)\t1\n",
409 | " (5549, 2338)\t1\n",
410 | " (5550, 626)\t1\n",
411 | " (5551, 4870)\t1\n",
412 | " (5552, 3628)\t1\n",
413 | " (5553, 1519)\t1\n",
414 | " (5554, 4843)\t1\n",
415 | " (5555, 5118)\t1\n",
416 | " (5556, 5178)\t1\n",
417 | " (5557, 2648)\t1\n",
418 | " (5558, 3905)\t1\n",
419 | " (5559, 183)\t1\n",
420 | " (5560, 158)\t1\n",
421 | " (5561, 1229)\t1\n",
422 | " (5562, 3135)\t1\n",
423 | " (5563, 178)\t1\n",
424 | " (5564, 886)\t1\n",
425 | " (5565, 1962)\t1\n",
426 | " (5566, 3556)\t1\n",
427 | " (5567, 2817)\t1\n",
428 | " (5567, 3390)\t1\n",
429 | " (5568, 5273)\t1\n",
430 | " (5569, 3301)\t1\n",
431 | " (5570, 1484)\t1\n",
432 | " (5571, 3593)\t1\n"
433 | ]
434 | }
435 | ],
436 | "source": [
437 | "print(input_data)"
438 | ]
439 | },
440 | {
441 | "cell_type": "markdown",
442 | "id": "7e59655a",
443 | "metadata": {},
444 | "source": [
445 | "Now we can feed the matrix to a machine learning model. In this case we'll use the Logistic Regression model since we are trying to classify it into positive or negative."
446 | ]
447 | },
448 | {
449 | "cell_type": "code",
450 | "execution_count": 43,
451 | "id": "be71817c",
452 | "metadata": {},
453 | "outputs": [
454 | {
455 | "data": {
456 | "text/plain": [
457 | "LogisticRegression()"
458 | ]
459 | },
460 | "execution_count": 43,
461 | "metadata": {},
462 | "output_type": "execute_result"
463 | }
464 | ],
465 | "source": [
466 | "from sklearn.linear_model import LogisticRegression\n",
467 | "\n",
468 | "model = LogisticRegression()\n",
469 | "model.fit(input_data, category)"
470 | ]
471 | },
472 | {
473 | "cell_type": "code",
474 | "execution_count": 60,
475 | "id": "5c6133a6",
476 | "metadata": {},
477 | "outputs": [],
478 | "source": [
479 | "def prediction_input(sentence):\n",
480 | " processed = pre_process(sentence)\n",
481 | " input_data = vectorizer.transform([processed])\n",
482 | " prediction = model.predict(input_data)\n",
483 | " \n",
484 | " if (prediction[0] == 0):\n",
485 | " print('This is Spam.')\n",
486 | " else:\n",
487 | " print('This is not Spam.')"
488 | ]
489 | },
490 | {
491 | "cell_type": "code",
492 | "execution_count": 61,
493 | "id": "0eeae0a5",
494 | "metadata": {},
495 | "outputs": [
496 | {
497 | "name": "stdout",
498 | "output_type": "stream",
499 | "text": [
500 | "This is Spam.\n"
501 | ]
502 | }
503 | ],
504 | "source": [
505 | "prediction_input(\"This is meant to be today\")"
506 | ]
507 | },
508 | {
509 | "cell_type": "code",
510 | "execution_count": 62,
511 | "id": "5bd45e74",
512 | "metadata": {},
513 | "outputs": [
514 | {
515 | "name": "stdout",
516 | "output_type": "stream",
517 | "text": [
518 | "This is Spam.\n"
519 | ]
520 | }
521 | ],
522 | "source": [
523 | "prediction_input(\"Send and recieve emails\")"
524 | ]
525 | },
526 | {
527 | "cell_type": "code",
528 | "execution_count": null,
529 | "id": "a4e9be6d",
530 | "metadata": {},
531 | "outputs": [],
532 | "source": []
533 | }
534 | ],
535 | "metadata": {
536 | "kernelspec": {
537 | "display_name": "Python 3 (ipykernel)",
538 | "language": "python",
539 | "name": "python3"
540 | },
541 | "language_info": {
542 | "codemirror_mode": {
543 | "name": "ipython",
544 | "version": 3
545 | },
546 | "file_extension": ".py",
547 | "mimetype": "text/x-python",
548 | "name": "python",
549 | "nbconvert_exporter": "python",
550 | "pygments_lexer": "ipython3",
551 | "version": "3.9.7"
552 | }
553 | },
554 | "nbformat": 4,
555 | "nbformat_minor": 5
556 | }
557 |
--------------------------------------------------------------------------------
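The pipeline above can be condensed into a short, self-contained script. This is a sketch rather than the notebook's exact code: it keeps the ham=0/spam=1 encoding, lets CountVectorizer handle lowercasing and English stop words itself, and adds a held-out test split so the classifier can actually be evaluated:

```python
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

data = pd.read_csv('spam.csv')
y = (data['Category'] == 'spam').astype(int)  # ham=0, spam=1, as in the notebook

# CountVectorizer lowercases and drops punctuation-only tokens by default.
vectorizer = CountVectorizer(stop_words='english')
X = vectorizer.fit_transform(data['Message'])

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

model = LogisticRegression(max_iter=1000)
model.fit(X_train, y_train)
print('held-out accuracy:', model.score(X_test, y_test))

def is_spam(sentence):
    # Returns True when the model predicts class 1 (spam).
    return model.predict(vectorizer.transform([sentence]))[0] == 1
```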
/Titanic Prediction/heat3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Titanic Prediction/heat3.png
--------------------------------------------------------------------------------
/Titanic Prediction/heat4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Titanic Prediction/heat4.png
--------------------------------------------------------------------------------
/Titanic Prediction/titanic_predictor_script.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import joblib
3 | model = joblib.load('titanic_rf_model.pkl')
4 |
5 | print("This is a Python to Script that Predicts If Someone Survived the Titanic Crash or not\n")
6 | a = int(input('Passenger Class (enter 1, 2 or 3): '))
7 | b = int(input('Gender (enter 1 for Male and 0 for Female): '))
8 | c = int(input('Age: '))
9 | d = float(input('Fare Amount: '))
10 |
11 |
12 | features = np.array([[a,b,c,d]])
13 |
14 | pred1 = model.predict(features)
15 | if pred1[0] == 0:
16 |     print("This Person did not survive the Titanic Crash")
17 | else:
18 |     print("This Person Survived the Titanic Crash")
19 |
--------------------------------------------------------------------------------
/Titanic Prediction/titanic_rf_model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Titanic Prediction/titanic_rf_model.pkl
--------------------------------------------------------------------------------
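The same pickled random-forest model can also be used non-interactively. A sketch, assuming the feature order the script above uses (Pclass, Sex, Age, Fare) and made-up passenger values:

```python
import joblib
import numpy as np

model = joblib.load('titanic_rf_model.pkl')

# Columns: Pclass, Sex (1=male, 0=female), Age, Fare -- illustrative values only.
passengers = np.array([
    [1, 0, 29, 100.0],
    [3, 1, 40, 7.25],
])
for row, pred in zip(passengers, model.predict(passengers)):
    print(row, 'survived' if pred == 1 else 'did not survive')
```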
/Video Game Sales Prediction/apps/endpoints/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/apps/endpoints/__init__.py
--------------------------------------------------------------------------------
/Video Game Sales Prediction/apps/endpoints/admin.py:
--------------------------------------------------------------------------------
1 | from django.contrib import admin
2 |
3 | # Register your models here.
4 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/apps/endpoints/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class EndpointsConfig(AppConfig):
5 | default_auto_field = 'django.db.models.BigAutoField'
6 | name = 'endpoints'
7 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/apps/endpoints/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/apps/endpoints/migrations/__init__.py
--------------------------------------------------------------------------------
/Video Game Sales Prediction/apps/endpoints/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 | # Create your models here.
4 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/apps/endpoints/tests.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | # Create your tests here.
4 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/apps/endpoints/views.py:
--------------------------------------------------------------------------------
1 | from django.shortcuts import render
2 |
3 | # Create your views here.
4 |
--------------------------------------------------------------------------------
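views.py is still the startapp stub. A plausible direction for this endpoints app, sketched here and entirely hypothetical (the request schema and model path are not taken from the project), would be a view that loads videogame_sale_model.pkl once and returns predictions as JSON:

```python
import json

import joblib
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt

# Loaded once at import time; path is an assumption about the deployment layout.
model = joblib.load('videogame_sale_model.pkl')

@csrf_exempt
def predict(request):
    # Hypothetical schema: a JSON body like {"features": [[...], [...]]}.
    payload = json.loads(request.body)
    preds = model.predict(payload['features'])
    return JsonResponse({'predictions': preds.tolist()})
```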
/Video Game Sales Prediction/db.sqlite3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/db.sqlite3
--------------------------------------------------------------------------------
/Video Game Sales Prediction/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Django's command-line utility for administrative tasks."""
3 | import os
4 | import sys
5 |
6 |
7 | def main():
8 | """Run administrative tasks."""
9 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pycharmtut.settings')
10 | try:
11 | from django.core.management import execute_from_command_line
12 | except ImportError as exc:
13 | raise ImportError(
14 | "Couldn't import Django. Are you sure it's installed and "
15 | "available on your PYTHONPATH environment variable? Did you "
16 | "forget to activate a virtual environment?"
17 | ) from exc
18 | execute_from_command_line(sys.argv)
19 |
20 |
21 | if __name__ == '__main__':
22 | main()
23 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/pycharmtut/__init__.py
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/__pycache__/__init__.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/pycharmtut/__pycache__/__init__.cpython-310.pyc
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/__pycache__/settings.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/pycharmtut/__pycache__/settings.cpython-310.pyc
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/__pycache__/urls.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/pycharmtut/__pycache__/urls.cpython-310.pyc
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/__pycache__/wsgi.cpython-310.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/pycharmtut/__pycache__/wsgi.cpython-310.pyc
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/asgi.py:
--------------------------------------------------------------------------------
1 | """
2 | ASGI config for pycharmtut project.
3 |
4 | It exposes the ASGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.asgi import get_asgi_application
13 |
14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pycharmtut.settings')
15 |
16 | application = get_asgi_application()
17 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/pycharmtut/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for pycharmtut project.
3 |
4 | Generated by 'django-admin startproject' using Django 4.0.4.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/4.0/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/4.0/ref/settings/
11 | """
12 |
13 | from pathlib import Path
14 |
15 | # Build paths inside the project like this: BASE_DIR / 'subdir'.
16 | BASE_DIR = Path(__file__).resolve().parent.parent
17 |
18 |
19 | # Quick-start development settings - unsuitable for production
20 | # See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/
21 |
22 | # SECURITY WARNING: keep the secret key used in production secret!
23 | SECRET_KEY = 'django-insecure-&ub)@h&9kb^)fohg+$^kj&r)(7!qkvsb#yozlts%7vr7u*$bjy'
24 |
25 | # SECURITY WARNING: don't run with debug turned on in production!
26 | DEBUG = True
27 |
28 | ALLOWED_HOSTS = []
29 |
30 |
31 | # Application definition
32 |
33 | INSTALLED_APPS = [
34 | 'django.contrib.admin',
35 | 'django.contrib.auth',
36 | 'django.contrib.contenttypes',
37 | 'django.contrib.sessions',
38 | 'django.contrib.messages',
39 | 'django.contrib.staticfiles',
40 | ]
41 |
42 | MIDDLEWARE = [
43 | 'django.middleware.security.SecurityMiddleware',
44 | 'django.contrib.sessions.middleware.SessionMiddleware',
45 | 'django.middleware.common.CommonMiddleware',
46 | 'django.middleware.csrf.CsrfViewMiddleware',
47 | 'django.contrib.auth.middleware.AuthenticationMiddleware',
48 | 'django.contrib.messages.middleware.MessageMiddleware',
49 | 'django.middleware.clickjacking.XFrameOptionsMiddleware',
50 | ]
51 |
52 | ROOT_URLCONF = 'pycharmtut.urls'
53 |
54 | TEMPLATES = [
55 | {
56 | 'BACKEND': 'django.template.backends.django.DjangoTemplates',
57 | 'DIRS': [],
58 | 'APP_DIRS': True,
59 | 'OPTIONS': {
60 | 'context_processors': [
61 | 'django.template.context_processors.debug',
62 | 'django.template.context_processors.request',
63 | 'django.contrib.auth.context_processors.auth',
64 | 'django.contrib.messages.context_processors.messages',
65 | ],
66 | },
67 | },
68 | ]
69 |
70 | WSGI_APPLICATION = 'pycharmtut.wsgi.application'
71 |
72 |
73 | # Database
74 | # https://docs.djangoproject.com/en/4.0/ref/settings/#databases
75 |
76 | DATABASES = {
77 | 'default': {
78 | 'ENGINE': 'django.db.backends.sqlite3',
79 | 'NAME': BASE_DIR / 'db.sqlite3',
80 | }
81 | }
82 |
83 |
84 | # Password validation
85 | # https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
86 |
87 | AUTH_PASSWORD_VALIDATORS = [
88 | {
89 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
90 | },
91 | {
92 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
93 | },
94 | {
95 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
96 | },
97 | {
98 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
99 | },
100 | ]
101 |
102 |
103 | # Internationalization
104 | # https://docs.djangoproject.com/en/4.0/topics/i18n/
105 |
106 | LANGUAGE_CODE = 'en-us'
107 |
108 | TIME_ZONE = 'UTC'
109 |
110 | USE_I18N = True
111 |
112 | USE_TZ = True
113 |
114 |
115 | # Static files (CSS, JavaScript, Images)
116 | # https://docs.djangoproject.com/en/4.0/howto/static-files/
117 |
118 | STATIC_URL = 'static/'
119 |
120 | # Default primary key field type
121 | # https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
122 |
123 | DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
124 |
--------------------------------------------------------------------------------
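Note that the endpoints app under apps/ is not listed in INSTALLED_APPS above, so Django currently ignores it. One way to wire it in (a sketch; it assumes apps/ is placed on the import path, since EndpointsConfig declares name = 'endpoints'):

```python
# In settings.py: make apps/ importable, then register the app.
import sys
sys.path.insert(0, str(BASE_DIR / 'apps'))  # BASE_DIR is defined above in settings.py

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'endpoints',  # matches EndpointsConfig.name
]
```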
/Video Game Sales Prediction/pycharmtut/urls.py:
--------------------------------------------------------------------------------
1 | """pycharmtut URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/4.0/topics/http/urls/
5 | Examples:
6 | Function views
7 | 1. Add an import: from my_app import views
8 | 2. Add a URL to urlpatterns: path('', views.home, name='home')
9 | Class-based views
10 | 1. Add an import: from other_app.views import Home
11 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
12 | Including another URLconf
13 | 1. Import the include() function: from django.urls import include, path
14 | 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
15 | """
16 | from django.contrib import admin
17 | from django.urls import path
18 |
19 | urlpatterns = [
20 | path('admin/', admin.site.urls),
21 | ]
22 |
--------------------------------------------------------------------------------
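Following the "Including another URLconf" recipe from the docstring above, routing for the endpoints app would look something like this (a sketch; the endpoints app defines no urls.py yet, so `endpoints.urls` is hypothetical):

```python
from django.contrib import admin
from django.urls import include, path

urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include('endpoints.urls')),  # hypothetical endpoints/urls.py
]
```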
/Video Game Sales Prediction/pycharmtut/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for pycharmtut project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pycharmtut.settings')
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/Video Game Sales Prediction/videogame_sale_model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nneji123/Machine-Learning-Course-Projects/c22550fc2e69f734cb1714982edce897444c6a1a/Video Game Sales Prediction/videogame_sale_model.pkl
--------------------------------------------------------------------------------