├── .gitignore
├── Day-0
│   ├── Numpy-tutorial.ipynb
│   ├── c-fortan.jpg
│   ├── data.csv
│   └── readme.md
├── Day-1
│   ├── Pandas_tutorial_part_1.ipynb
│   ├── Pandas_tutorial_part_2.ipynb
│   ├── Pandas_tutorial_part_3.ipynb
│   ├── Pandas_tutorial_part_4.ipynb
│   └── readme.md
├── Day-10
│   ├── data
│   │   ├── age_data.csv
│   │   ├── developers_salary.csv
│   │   └── train.csv
│   ├── matplotlib_tutorial_part_5.ipynb
│   ├── matplotlib_tutorial_part_6.ipynb
│   ├── overview.md
│   ├── readme.md
│   └── titanic_data_analysis.ipynb
├── Day-11
│   ├── data
│   │   └── train.csv
│   ├── overview.md
│   ├── readme.md
│   └── titanic_data_analysis.ipynb
├── Day-12
│   ├── data
│   │   └── train.csv
│   ├── overview.md
│   ├── readme.md
│   └── titanic_data_analysis.ipynb
├── Day-13
│   ├── data
│   │   └── train.csv
│   ├── overview.md
│   ├── readme.md
│   └── titanic_data_analysis.ipynb
├── Day-14
│   ├── data
│   │   └── yt-200trending.csv
│   ├── matplotlib_tutorial_part_7.ipynb
│   └── readme.md
├── Day-15
│   ├── data
│   │   └── bitcoin_price.csv
│   ├── matplotlib_tutorial_part_8.ipynb
│   └── readme.md
├── Day-16
│   ├── L1 - Hello, Seaborn.ipynb
│   ├── data
│   │   ├── developers_salary.csv
│   │   ├── fifa.csv
│   │   └── random_data.csv
│   ├── data_gen.py
│   ├── dev_salaries.png
│   ├── matplotlib_tutorial_part_10.ipynb
│   ├── matplotlib_tutorial_part_9.ipynb
│   └── readme.md
├── Day-17
│   ├── Data_analysis_project_3.ipynb
│   ├── L2 - Line Charts.ipynb
│   ├── L3 - Bar Charts and Heatmaps.ipynb
│   ├── L4 - Scatter plots.ipynb
│   ├── catplot.png
│   ├── data
│   │   ├── candy.csv
│   │   ├── ign_scores.csv
│   │   ├── medical_examination.csv
│   │   └── museum_visitors.csv
│   ├── heatmap.png
│   └── readme.md
├── Day-18
│   ├── Box_plots.ipynb
│   ├── Data_analysis_project_4.ipynb
│   ├── L5 - Distributions (histograms & KDE).ipynb
│   ├── L6 - Choosing plot types and custom styles.ipynb
│   ├── L7 - Final project.ipynb
│   ├── box_plot_box.jpg
│   ├── box_plot_median.jpg
│   ├── choosing-plot-types-and-custom-styles.ipynb
│   ├── data
│   │   ├── cancer.csv
│   │   ├── drug200.csv
│   │   ├── fcc_forum_pageviews.csv
│   │   └── iris.csv
│   └── readme.md
├── Day-19
│   ├── House_price_prediction-v1.ipynb
│   ├── L2 - Explore_your_data.ipynb
│   ├── L3 - Your_first_machine_learning_model.ipynb
│   ├── L4 - Model_validation.ipynb
│   ├── L5 - Underfitting_and_overfitting.ipynb
│   ├── L6 - Random_forests.ipynb
│   ├── L7 - Machine_learning_competitions.ipynb
│   ├── data
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-2
│   ├── Data_analysis_project_1.ipynb
│   ├── Pandas_tutorial_part_5.ipynb
│   ├── Pandas_tutorial_part_6.ipynb
│   ├── Pandas_tutorial_part_7.ipynb
│   └── readme.md
├── Day-20
│   ├── House_price_prediction_2.ipynb
│   ├── House_price_prediction_3.ipynb
│   ├── L1 - Introduction.ipynb
│   ├── L2 - Missing_values.ipynb
│   ├── L3 - Categorical_variables.ipynb
│   ├── data
│   │   ├── L1-submission.csv
│   │   ├── L2-submission.csv
│   │   ├── L3-submission.csv
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── submission_2.csv
│   │   ├── submission_3.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-21
│   ├── House_price_prediction_4.ipynb
│   ├── House_price_prediction_5.ipynb
│   ├── House_price_prediction_6.ipynb
│   ├── L4 - Pipelines.ipynb
│   ├── L5 - Cross_validation.ipynb
│   ├── L6 - Xgboost.ipynb
│   ├── L7 - Data_leakage.ipynb
│   ├── data
│   │   ├── L4-submission.csv
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── submission_4.csv
│   │   ├── submission_5.csv
│   │   ├── submission_6.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-22
│   ├── House_price_prediction_7.ipynb
│   ├── House_price_prediction_8.ipynb
│   ├── data
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── submission_7.csv
│   │   ├── submission_8.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-23
│   ├── House_price_prediction_10.ipynb
│   ├── House_price_prediction_9.ipynb
│   ├── data
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── submission_10.csv
│   │   ├── submission_9.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-24
│   └── readme.md
├── Day-25
│   ├── House_price_prediction_11.ipynb
│   ├── L2 - Mutual_information.ipynb
│   ├── data
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── submission_11.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-26
│   ├── L3 - Creating_features.ipynb
│   └── readme.md
├── Day-27
│   ├── House_price_prediction_12.ipynb
│   ├── House_price_prediction_13.ipynb
│   ├── L4 - Clustering_with_k-means.ipynb
│   ├── data
│   │   ├── data_description.txt
│   │   ├── sample_submission.csv
│   │   ├── submission_12.csv
│   │   ├── submission_13.csv
│   │   ├── test.csv
│   │   └── train.csv
│   ├── pipeline.py
│   └── readme.md
├── Day-28
│   └── readme.md
├── Day-29
│   └── readme.md
├── Day-3
│   ├── Pandas_tutorial_part_8.ipynb
│   └── readme.md
├── Day-30
│   ├── Titanic_survival_competition_1.ipynb
│   ├── data
│   │   ├── gender_submission.csv
│   │   ├── submission_1.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-31
│   ├── Titanic_survival_competition_2.ipynb
│   ├── Titanic_survival_competition_3.ipynb
│   ├── Titanic_survival_competition_4.ipynb
│   ├── data
│   │   ├── gender_submission.csv
│   │   ├── submission_2.csv
│   │   ├── submission_3.csv
│   │   ├── submission_4.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-32
│   ├── GHW_data_science_project.ipynb
│   ├── Titanic_survival_competition_5.ipynb
│   ├── data
│   │   ├── Enrollment-in-school-2011-12-India.csv
│   │   ├── gender_submission.csv
│   │   ├── submission_5.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-33
│   ├── Titanic_survival_competition_6.ipynb
│   ├── Titanic_survival_competition_7.ipynb
│   ├── data
│   │   ├── gender_submission.csv
│   │   ├── submission_6.csv
│   │   ├── submission_7.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-34
│   ├── Titanic_survival_compeittion_8.ipynb
│   ├── Titanic_survival_competition_10.ipynb
│   ├── Titanic_survival_competition_9.ipynb
│   ├── data
│   │   ├── gender_submission.csv
│   │   ├── submission_10.csv
│   │   ├── submission_8.csv
│   │   ├── submission_9.csv
│   │   ├── test.csv
│   │   └── train.csv
│   └── readme.md
├── Day-35
│   └── readme.md
├── Day-36
│   └── readme.md
├── Day-37
│   └── readme.md
├── Day-38
│   └── readme.md
├── Day-39
│   └── readme.md
├── Day-4
│   ├── Pandas_tutorial_part_10.ipynb
│   ├── Pandas_tutorial_part_9.ipynb
│   └── readme.md
├── Day-40
│   └── readme.md
├── Day-41
│   └── readme.md
├── Day-42
│   └── readme.md
├── Day-43
│   └── readme.md
├── Day-44
│   └── readme.md
├── Day-45
│   └── readme.md
├── Day-46
│   └── readme.md
├── Day-47
│   └── readme.md
├── Day-48
│   └── readme.md
├── Day-49
│   └── readme.md
├── Day-5
│   ├── Data_analysis_project_2.ipynb
│   ├── Pandas_tutorial_part_11.ipynb
│   └── readme.md
├── Day-50
│   └── readme.md
├── Day-6
│   ├── Data_analysis_project_2.ipynb
│   └── readme.md
├── Day-7
│   ├── data
│   │   └── language_knows.csv
│   ├── matplotlib_tutorial_part_1.ipynb
│   ├── matplotlib_tutorial_part_2.ipynb
│   ├── plot.png
│   └── readme.md
├── Day-8
│   ├── data
│   │   └── language_popularity.csv
│   ├── matplotlib_tutorial_part_3.ipynb
│   ├── matplotlib_tutorial_part_4.ipynb
│   └── readme.md
├── Day-9
│   ├── data
│   │   └── medical_examination.csv
│   ├── data_analysis_project_3.ipynb
│   └── readme.md
└── readme.md
/.gitignore:
--------------------------------------------------------------------------------
1 | venv/
--------------------------------------------------------------------------------
/Day-0/c-fortan.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-0/c-fortan.jpg
--------------------------------------------------------------------------------
/Day-0/readme.md:
--------------------------------------------------------------------------------
1 | # Day 0 of [#100DaysofCode](https://twitter.com/Param3021/status/1531507810756067328?s=20&t=psaKuMNtRHbSmcQ7QQ0zuQ) in Python
2 |
3 | ## Task
4 | - Learn Numpy
5 |
6 | ### Resources
7 | - Numpy Crash Course by Patrick Loeber ([Video link](https://www.youtube.com/watch?v=9JUAPgtkKpI))
8 |
9 | ### Software used
10 | - Jupyter Notebook
11 | - Python 3.10.2
12 | - Numpy 1.22.4
13 |
14 | ### My notebook
15 | - [Numpy-tutorial.ipynb](./Numpy-tutorial.ipynb)
16 |
17 | ### Topics I have learnt
18 | 1. Numpy Intro
19 | 2. Array Basics
20 | 3. Dot product
21 | 4. Multi-dimensional arrays
22 | 5. Array Indexing
23 | 6. Array Slicing
24 | 7. Boolean Indexing
25 | 8. Reshape
26 | 9. Concatenation
27 | 10. Broadcasting
28 | 11. Functions and Axes
29 | 12. Datatypes
30 | 13. Copying
31 | 14. Generating arrays
32 | 15. Random numbers
33 | 16. Linear Algebra
34 | - Eigenvalues
35 | - Solving Linear Systems
36 | 17. Creating arrays
37 | - Conversion from Python data structures
38 | - Using numpy array creation functions like arange, zeros
39 | - Replicating, concatenating & modifying existing arrays
40 | - Reading arrays from CSV files
41 | - Using special library functions like random
42 |
43 | ### Conclusion:
44 | NumPy is a highly recommended library to learn. It is much faster than plain Python code and very useful for mathematical computation.
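45 | 
46 | ### Example
47 | A minimal sketch (mine, not taken from the tutorial notebook) of a few of the topics above: reshaping, broadcasting, boolean indexing and the dot product.
48 | 
49 | ```python
50 | import numpy as np
51 | 
52 | a = np.arange(6).reshape(2, 3)  # reshape a 1-D range into a 2x3 array
53 | b = a * 10                      # broadcasting: the scalar is applied element-wise
54 | print(b[b > 20])                # boolean indexing: [30 40 50]
55 | print(a @ a.T)                  # dot product of a with its transpose (2x2 result)
56 | ```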
--------------------------------------------------------------------------------
/Day-1/readme.md:
--------------------------------------------------------------------------------
1 | # Day 1 of [#100DaysofCode](https://twitter.com/Param3021/status/1531855558353080320?s=20&t=FPrzBL0UathYN2noQnodAg)
2 |
3 | ## Task
4 | - Learn Numpy
5 | - Learn Pandas
6 |
7 | ### Resources
8 | - Numpy Tutorial (Iterate numpy array using nditer | numpy nditer) by Codebasics YT channel: ([Video link](https://youtu.be/XawR6CjAYV4))
9 | - Python Pandas Tutorial Playlist by Corey Schafer YT channel: ([Playlist link](https://www.youtube.com/playlist?list=PL-osiE80TeTsWmV9i9c58mdDCSskIFdDS))
10 | - Stack Overflow Developer Survey 2021 ([link](https://insights.stackoverflow.com/survey))
11 |
12 | ### Software used
13 | - Jupyter Notebook
14 | - Python 3.10.2
15 | - Numpy 1.22.4
16 | - pandas 1.4.2
17 |
18 | ### My Notebooks
19 | - [Numpy-tutorial.ipynb](../Day-0/Numpy-tutorial.ipynb)
20 | - [Pandas_tutorial_part_1.ipynb](./Pandas_tutorial_part_1.ipynb)
21 | - [Pandas_tutorial_part_2.ipynb](./Pandas_tutorial_part_2.ipynb)
22 | - [Pandas_tutorial_part_3.ipynb](./Pandas_tutorial_part_3.ipynb)
23 | - [Pandas_tutorial_part_4.ipynb](./Pandas_tutorial_part_4.ipynb)
24 |
25 | ### Topics I have learnt
26 | 1. Splitting arrays in numpy
27 | 2. Iterating arrays efficiently using `np.nditer()`
28 | 3. Pandas Intro
29 | 4. DataFrame Basics
30 | 5. Series Basics
31 | 6. Reading Data from CSV file
32 | 7. Selecting rows & columns using loc & iloc
33 | 8. Custom Indexing
34 | 9. Sorting data (practised through exercises)
35 | 10. Conditional filtering on rows and columns
36 |
37 | ### Conclusion:
38 | Pandas is a huge library and great for data analysis. Many NumPy concepts carry over to pandas; slicing, for example, works the same way as in NumPy.
39 |
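40 | ### Example
41 | A minimal sketch of the selection and filtering topics above; the file name and column names here are made up for illustration.
42 | 
43 | ```python
44 | import pandas as pd
45 | 
46 | # Hypothetical survey data with columns "id", "language" and "age"
47 | df = pd.read_csv("survey.csv", index_col="id")  # custom index while reading
48 | 
49 | print(df.loc[1, "language"])   # label-based selection with loc
50 | print(df.iloc[0, 0])           # position-based selection with iloc
51 | print(df[df["age"] > 30])      # conditional filtering on rows
52 | ```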
--------------------------------------------------------------------------------
/Day-10/data/developers_salary.csv:
--------------------------------------------------------------------------------
1 | Age,All_Devs,Python,JavaScript
2 | 18,17784,20046,16446
3 | 19,16500,17100,16791
4 | 20,18012,20000,18942
5 | 21,20628,24744,21780
6 | 22,25206,30500,25704
7 | 23,30252,37732,29000
8 | 24,34368,41247,34372
9 | 25,38496,45372,37810
10 | 26,42000,48876,43515
11 | 27,46752,53850,46823
12 | 28,49320,57287,49293
13 | 29,53200,45000,53437
14 | 30,56000,50000,56373
15 | 31,62316,55000,62375
16 | 32,64928,70000,66674
17 | 33,67317,71496,68745
18 | 34,68748,75370,68746
19 | 35,73752,83640,74583
20 | 36,77232,84666,79000
21 | 37,78000,84392,78508
22 | 38,78508,78254,79996
23 | 39,79536,85000,80403
24 | 40,82488,87038,83820
25 | 41,88935,91991,88833
26 | 42,90000,100000,91660
27 | 43,90056,94796,87892
28 | 44,95000,97962,96243
29 | 45,90000,93302,90000
30 | 46,91633,99240,99313
31 | 47,91660,102736,91660
32 | 48,98150,112285,102264
33 | 49,98964,100771,100000
34 | 50,100000,104708,100000
35 | 51,98988,108423,91660
36 | 52,100000,101407,99240
37 | 53,108923,112542,108000
38 | 54,105000,122870,105000
39 | 55,103117,120000,104000
--------------------------------------------------------------------------------
/Day-10/overview.md:
--------------------------------------------------------------------------------
1 | Overview
2 | 
3 | The data has been split into two groups:
4 | 
5 | - training set (train.csv)
6 | - test set (test.csv)
7 | 
8 | The training set should be used to build your machine learning models. For the training set, we provide the outcome (also known as the “ground truth”) for each passenger. Your model will be based on “features” like passengers’ gender and class. You can also use feature engineering to create new features.
9 | 
10 | The test set should be used to see how well your model performs on unseen data. For the test set, we do not provide the ground truth for each passenger. It is your job to predict these outcomes. For each passenger in the test set, use the model you trained to predict whether or not they survived the sinking of the Titanic.
11 | 
12 | We also include gender_submission.csv, a set of predictions that assume all and only female passengers survive, as an example of what a submission file should look like.
13 | 
14 | Data Dictionary
15 | 
16 | | Variable | Definition | Key |
17 | |----------|------------|-----|
18 | | survival | Survival | 0 = No, 1 = Yes |
19 | | pclass | Ticket class | 1 = 1st, 2 = 2nd, 3 = 3rd |
20 | | sex | Sex | |
21 | | age | Age in years | |
22 | | sibsp | # of siblings / spouses aboard the Titanic | |
23 | | parch | # of parents / children aboard the Titanic | |
24 | | ticket | Ticket number | |
25 | | fare | Passenger fare | |
26 | | cabin | Cabin number | |
27 | | embarked | Port of Embarkation | C = Cherbourg, Q = Queenstown, S = Southampton |
28 | 
29 | Variable Notes
30 | 
31 | pclass: A proxy for socio-economic status (SES): 1st = Upper, 2nd = Middle, 3rd = Lower
32 | 
33 | age: Age is fractional if less than 1. If the age is estimated, it is in the form of xx.5.
34 | 
35 | sibsp: The dataset defines family relations in this way...
36 | Sibling = brother, sister, stepbrother, stepsister
37 | Spouse = husband, wife (mistresses and fiancés were ignored)
38 | 
39 | parch: The dataset defines family relations in this way...
40 | Parent = mother, father
41 | Child = daughter, son, stepdaughter, stepson
42 | Some children travelled only with a nanny, therefore parch=0 for them.
--------------------------------------------------------------------------------
/Day-10/readme.md:
--------------------------------------------------------------------------------
1 | # Day 10 of [#100DaysOfCode](https://twitter.com/Param3021/status/1535168003729686528)
2 |
3 | ## Task
4 | 1. Matplotlib
5 | 2. Data Analysis with Pandas & matplotlib
6 |
7 | ### Resources
8 | - Corey Schafer [Matplotlib Tutorial](https://www.youtube.com/playlist?list=PL-osiE80TeTvipOqomVEeZ1HRrcEvtZB_) Playlist
9 | - Kaggle [Titanic Dataset](https://www.kaggle.com/competitions/titanic/data?select=train.csv)
10 |
11 | ### Topics I have learnt
12 | 1. Matplotlib
13 |    - Histograms
14 |    - Filling areas in line plots
15 | 2. Did data cleaning on the Titanic dataset
16 |
17 | ### Software used
18 | - Jupyter Notebook
19 | - Python 3.10.2
20 | - Numpy 1.22.4
21 | - pandas 1.4.2
22 | - matplotlib 3.5.2
23 |
24 | ### My Notebooks
25 | - [matplotlib_tutorial_part_5.ipynb](./matplotlib_tutorial_part_5.ipynb)
26 | - [matplotlib_tutorial_part_6.ipynb](./matplotlib_tutorial_part_6.ipynb)
27 | - [titanic_data_analysis.ipynb](./titanic_data_analysis.ipynb)
28 |
29 | ### Conclusion:
30 | Today I learnt about histograms and how to fill areas in line plots. Filled areas make line plots look much more interesting! I also did some data cleaning on the Titanic dataset. Tomorrow I will do the data analysis, which seems interesting (I have many ideas).
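31 | 
32 | ### Example
33 | A minimal sketch of today's two plot types, using `data/developers_salary.csv` from this folder:
34 | 
35 | ```python
36 | import pandas as pd
37 | import matplotlib.pyplot as plt
38 | 
39 | df = pd.read_csv("./data/developers_salary.csv")
40 | 
41 | # Histogram of the all-developer salaries
42 | plt.hist(df["All_Devs"], bins=10, edgecolor="black")
43 | plt.show()
44 | 
45 | # Line plot with the area between the two lines filled in
46 | plt.plot(df["Age"], df["Python"], label="Python")
47 | plt.plot(df["Age"], df["All_Devs"], label="All Devs")
48 | plt.fill_between(df["Age"], df["Python"], df["All_Devs"], alpha=0.25)
49 | plt.legend()
50 | plt.show()
51 | ```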
--------------------------------------------------------------------------------
/Day-11/overview.md:
--------------------------------------------------------------------------------
1 | Overview
2 | 
3 | The data has been split into two groups:
4 | 
5 | - training set (train.csv)
6 | - test set (test.csv)
7 | 
8 | The training set should be used to build your machine learning models. For the training set, we provide the outcome (also known as the “ground truth”) for each passenger. Your model will be based on “features” like passengers’ gender and class. You can also use feature engineering to create new features.
9 | 
10 | The test set should be used to see how well your model performs on unseen data. For the test set, we do not provide the ground truth for each passenger. It is your job to predict these outcomes. For each passenger in the test set, use the model you trained to predict whether or not they survived the sinking of the Titanic.
11 | 
12 | We also include gender_submission.csv, a set of predictions that assume all and only female passengers survive, as an example of what a submission file should look like.
13 | 
14 | Data Dictionary
15 | 
16 | | Variable | Definition | Key |
17 | |----------|------------|-----|
18 | | survival | Survival | 0 = No, 1 = Yes |
19 | | pclass | Ticket class | 1 = 1st, 2 = 2nd, 3 = 3rd |
20 | | sex | Sex | |
21 | | age | Age in years | |
22 | | sibsp | # of siblings / spouses aboard the Titanic | |
23 | | parch | # of parents / children aboard the Titanic | |
24 | | ticket | Ticket number | |
25 | | fare | Passenger fare | |
26 | | cabin | Cabin number | |
27 | | embarked | Port of Embarkation | C = Cherbourg, Q = Queenstown, S = Southampton |
28 | 
29 | Variable Notes
30 | 
31 | pclass: A proxy for socio-economic status (SES): 1st = Upper, 2nd = Middle, 3rd = Lower
32 | 
33 | age: Age is fractional if less than 1. If the age is estimated, it is in the form of xx.5.
34 | 
35 | sibsp: The dataset defines family relations in this way...
36 | Sibling = brother, sister, stepbrother, stepsister
37 | Spouse = husband, wife (mistresses and fiancés were ignored)
38 | 
39 | parch: The dataset defines family relations in this way...
40 | Parent = mother, father
41 | Child = daughter, son, stepdaughter, stepson
42 | Some children travelled only with a nanny, therefore parch=0 for them.
--------------------------------------------------------------------------------
/Day-11/readme.md:
--------------------------------------------------------------------------------
1 | # Day 11 of [#100DaysOfCode](https://twitter.com/Param3021/status/1535654375548682242)
2 |
3 | ## Task
4 | 1. Data Analysis with Pandas & matplotlib
5 |
6 | ### Resources
7 | - Kaggle [Titanic Dataset](https://www.kaggle.com/competitions/titanic/data?select=train.csv)
8 |
9 | ### Topics I have learnt
10 | 1. Did data analysis on the Titanic dataset
11 |
12 | ### Software used
13 | - Jupyter Notebook
14 | - Python 3.10.2
15 | - Numpy 1.22.4
16 | - pandas 1.4.2
17 | - matplotlib 3.5.2
18 |
19 | ### My Notebooks
20 | - [titanic_data_analysis.ipynb](./titanic_data_analysis.ipynb)
21 |
22 | ### Conclusion:
23 | Today I did some analysis on the Titanic dataset from Kaggle.
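24 | 
25 | ### Example
26 | A small sketch of the kind of question explored (assuming the standard Kaggle Titanic column names `Sex`, `Pclass` and `Survived` in train.csv):
27 | 
28 | ```python
29 | import pandas as pd
30 | 
31 | df = pd.read_csv("./data/train.csv")
32 | 
33 | # Survival rate grouped by sex and ticket class
34 | print(df.groupby(["Sex", "Pclass"])["Survived"].mean())
35 | ```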
--------------------------------------------------------------------------------
/Day-12/overview.md:
--------------------------------------------------------------------------------
1 | Overview
2 | 
3 | The data has been split into two groups:
4 | 
5 | - training set (train.csv)
6 | - test set (test.csv)
7 | 
8 | The training set should be used to build your machine learning models. For the training set, we provide the outcome (also known as the “ground truth”) for each passenger. Your model will be based on “features” like passengers’ gender and class. You can also use feature engineering to create new features.
9 | 
10 | The test set should be used to see how well your model performs on unseen data. For the test set, we do not provide the ground truth for each passenger. It is your job to predict these outcomes. For each passenger in the test set, use the model you trained to predict whether or not they survived the sinking of the Titanic.
11 | 
12 | We also include gender_submission.csv, a set of predictions that assume all and only female passengers survive, as an example of what a submission file should look like.
13 | 
14 | Data Dictionary
15 | 
16 | | Variable | Definition | Key |
17 | |----------|------------|-----|
18 | | survival | Survival | 0 = No, 1 = Yes |
19 | | pclass | Ticket class | 1 = 1st, 2 = 2nd, 3 = 3rd |
20 | | sex | Sex | |
21 | | age | Age in years | |
22 | | sibsp | # of siblings / spouses aboard the Titanic | |
23 | | parch | # of parents / children aboard the Titanic | |
24 | | ticket | Ticket number | |
25 | | fare | Passenger fare | |
26 | | cabin | Cabin number | |
27 | | embarked | Port of Embarkation | C = Cherbourg, Q = Queenstown, S = Southampton |
28 | 
29 | Variable Notes
30 | 
31 | pclass: A proxy for socio-economic status (SES): 1st = Upper, 2nd = Middle, 3rd = Lower
32 | 
33 | age: Age is fractional if less than 1. If the age is estimated, it is in the form of xx.5.
34 | 
35 | sibsp: The dataset defines family relations in this way...
36 | Sibling = brother, sister, stepbrother, stepsister
37 | Spouse = husband, wife (mistresses and fiancés were ignored)
38 | 
39 | parch: The dataset defines family relations in this way...
40 | Parent = mother, father
41 | Child = daughter, son, stepdaughter, stepson
42 | Some children travelled only with a nanny, therefore parch=0 for them.
--------------------------------------------------------------------------------
/Day-12/readme.md:
--------------------------------------------------------------------------------
1 | # Day 12 of [#100DaysOfCode](https://twitter.com/Param3021/status/1535929391813115904)
2 |
3 | ## Task
4 | 1. Data Analysis with Pandas & matplotlib
5 |
6 | ### Resources
7 | - Kaggle [Titanic Dataset](https://www.kaggle.com/competitions/titanic/data?select=train.csv)
8 |
9 | ### Topics I have learnt
10 | 1. Did a little bit of data analysis
11 |
12 | ### Software used
13 | - Jupyter Notebook
14 | - Python 3.10.2
15 | - Numpy 1.22.4
16 | - pandas 1.4.2
17 | - matplotlib 3.5.2
18 |
19 | ### My Notebooks
20 | - [titanic_data_analysis.ipynb](./titanic_data_analysis.ipynb)
21 |
22 | ### Conclusion:
23 | Today I did some analysis on the Titanic dataset from Kaggle. Since day 11 I haven't done much, but from tomorrow I will be free, as my exams are over!
--------------------------------------------------------------------------------
/Day-13/overview.md:
--------------------------------------------------------------------------------
1 | Overview
2 | 
3 | The data has been split into two groups:
4 | 
5 | - training set (train.csv)
6 | - test set (test.csv)
7 | 
8 | The training set should be used to build your machine learning models. For the training set, we provide the outcome (also known as the “ground truth”) for each passenger. Your model will be based on “features” like passengers’ gender and class. You can also use feature engineering to create new features.
9 | 
10 | The test set should be used to see how well your model performs on unseen data. For the test set, we do not provide the ground truth for each passenger. It is your job to predict these outcomes. For each passenger in the test set, use the model you trained to predict whether or not they survived the sinking of the Titanic.
11 | 
12 | We also include gender_submission.csv, a set of predictions that assume all and only female passengers survive, as an example of what a submission file should look like.
13 | 
14 | Data Dictionary
15 | 
16 | | Variable | Definition | Key |
17 | |----------|------------|-----|
18 | | survival | Survival | 0 = No, 1 = Yes |
19 | | pclass | Ticket class | 1 = 1st, 2 = 2nd, 3 = 3rd |
20 | | sex | Sex | |
21 | | age | Age in years | |
22 | | sibsp | # of siblings / spouses aboard the Titanic | |
23 | | parch | # of parents / children aboard the Titanic | |
24 | | ticket | Ticket number | |
25 | | fare | Passenger fare | |
26 | | cabin | Cabin number | |
27 | | embarked | Port of Embarkation | C = Cherbourg, Q = Queenstown, S = Southampton |
28 | 
29 | Variable Notes
30 | 
31 | pclass: A proxy for socio-economic status (SES): 1st = Upper, 2nd = Middle, 3rd = Lower
32 | 
33 | age: Age is fractional if less than 1. If the age is estimated, it is in the form of xx.5.
34 | 
35 | sibsp: The dataset defines family relations in this way...
36 | Sibling = brother, sister, stepbrother, stepsister
37 | Spouse = husband, wife (mistresses and fiancés were ignored)
38 | 
39 | parch: The dataset defines family relations in this way...
40 | Parent = mother, father
41 | Child = daughter, son, stepdaughter, stepson
42 | Some children travelled only with a nanny, therefore parch=0 for them.
--------------------------------------------------------------------------------
/Day-13/readme.md:
--------------------------------------------------------------------------------
1 | # Day 13 of [#100DaysOfCode](https://twitter.com/Param3021/status/1536354694184206337)
2 |
3 | ## Task
4 | 1. Data Analysis with Pandas & matplotlib
5 |
6 | ### Resources
7 | - Kaggle [Titanic Dataset](https://www.kaggle.com/competitions/titanic/data?select=train.csv)
8 |
9 | ### Topics I have learnt
10 | 1. Did a bit of data analysis
11 |
12 | ### Software used
13 | - Jupyter Notebook
14 | - Python 3.10.2
15 | - Numpy 1.22.4
16 | - pandas 1.4.2
17 | - matplotlib 3.5.2
18 |
19 | ### My Notebooks
20 | - [titanic_data_analysis.ipynb](./titanic_data_analysis.ipynb)
21 |
22 | ### Conclusion:
23 | Today I did some analysis on the Titanic dataset from Kaggle. I didn't do much because I wasn't feeling well.
--------------------------------------------------------------------------------
/Day-14/data/yt-200trending.csv:
--------------------------------------------------------------------------------
1 | view_count,likes,ratio
2 | 8036001,324742,96.91
3 | 9378067,562589,98.19
4 | 2182066,273650,99.38
5 | 6525864,94698,96.25
6 | 9481284,582481,97.22
7 | 1853121,89903,97.46
8 | 2875684,183163,94.52
9 | 483827,4864,91.53
10 | 1677046,103227,97.52
11 | 289756,2387,92.95
12 | 2561907,237728,98.8
13 | 468390,25346,98.34
14 | 18977153,768968,98.73
15 | 365731,5997,93.29
16 | 680701,41543,97.99
17 | 5748289,225966,99.17
18 | 3575950,374937,97.69
19 | 865788,31806,98.3
20 | 5433739,389145,98.84
21 | 3643458,369667,97.88
22 | 247602,1516,89.18
23 | 300443,25429,99.49
24 | 313500,56891,98.35
25 | 3525217,92948,95.29
26 | 195072,23832,98.97
27 | 142697,20708,98.91
28 | 456783,2625,94.53
29 | 601565,38792,98.34
30 | 6021472,342044,97.54
31 | 940583,14292,97.7
32 | 446569,7557,97.15
33 | 767900,11091,97.14
34 | 5895810,98088,95.87
35 | 381910,45178,99.21
36 | 2468645,188315,98.73
37 | 407859,19407,98.77
38 | 846399,29308,95.93
39 | 872092,27298,94.85
40 | 1279718,98471,99.06
41 | 1068377,92634,98.89
42 | 4691951,164807,98.93
43 | 1091006,55346,98.53
44 | 891230,30612,88.39
45 | 720734,35647,98.11
46 | 1025214,19926,94.86
47 | 505146,3309,59.69
48 | 265430,2124,91.99
49 | 3651318,283911,98.64
50 | 1290212,201881,99.3
51 | 420393,5434,95.99
52 | 655107,21485,96.16
53 | 1010207,23720,95.85
54 | 777547,9167,94.46
55 | 686703,34001,98.54
56 | 1625877,62101,98.35
57 | 2107926,59334,97.3
58 | 1564214,81581,97.96
59 | 2277765,53425,89.82
60 | 1558609,95695,98.23
61 | 1689305,88050,95.43
62 | 3382856,74078,93.32
63 | 4835746,276098,94.3
64 | 248754,2041,90.75
65 | 687182,63309,97.61
66 | 751948,24359,98.3
67 | 737756,23093,82.35
68 | 964229,18898,86.34
69 | 973121,22810,97.6
70 | 575508,16975,94.75
71 | 1114419,35208,94.3
72 | 722956,21843,97.6
73 | 1560200,38185,96.52
74 | 281397,3706,91.53
75 | 1122525,28232,97.23
76 | 20650480,212862,91.88
77 | 225207,1524,84.76
78 | 598367,24260,94.51
79 | 2117363,162960,99.12
80 | 1233027,16400,88.81
81 | 2566897,112005,54.67
82 | 11907188,1234111,83.49
83 | 1477059,36018,98.75
84 | 292469,5656,92.71
85 | 466862,47754,98.96
86 | 1055798,46122,97.84
87 | 1278142,26021,97.37
88 | 1938747,16942,87.66
89 | 338563,8416,96.46
90 | 645274,17943,94.67
91 | 730110,26868,92.31
92 | 1521090,19761,86.6
93 | 1719425,79646,98.33
94 | 3028604,75484,97.22
95 | 1236239,55409,96.0
96 | 906642,14128,91.88
97 | 1257902,20899,92.93
98 | 1163635,30173,89.82
99 | 1413936,90918,97.87
100 | 709519,6013,95.14
101 | 628111,41450,97.03
102 | 2478832,143686,98.28
103 | 2524598,32486,93.66
104 | 821547,18708,97.31
105 | 3016943,38294,95.76
106 | 743575,20181,89.7
107 | 919626,22114,95.84
108 | 2536083,538376,99.6
109 | 959442,13220,95.94
110 | 2044159,41080,92.48
111 | 1554417,67165,93.0
112 | 2181022,180132,98.19
113 | 1010899,13696,97.57
114 | 2620663,72681,96.68
115 | 5732609,189529,97.16
116 | 1187273,73120,99.24
117 | 1594532,85661,97.01
118 | 8403016,294629,96.97
119 | 5972754,133474,96.6
120 | 6189511,267690,99.03
121 | 1042734,23761,91.61
122 | 9476773,417402,97.8
123 | 8040754,789213,98.73
124 | 2724624,88968,91.74
125 | 1085592,27288,98.51
126 | 3393417,219213,95.68
127 | 16396012,208578,79.21
128 | 3226905,19814,91.77
129 | 6276301,286642,98.15
130 | 647094,19753,89.98
131 | 8081040,477122,98.81
132 | 886934,29360,98.46
133 | 1228396,29893,98.2
134 | 697471,6452,94.85
135 | 1605670,78364,96.63
136 | 2056991,121925,98.44
137 | 397981,6185,58.36
138 | 2760289,106828,97.14
139 | 3655043,54069,89.65
140 | 10662064,320959,97.89
141 | 3105500,108620,96.6
142 | 2238691,48825,96.77
143 | 1153518,25832,96.44
144 | 686228,24882,96.57
145 | 7523411,614901,98.87
146 | 2641916,49354,95.78
147 | 11657853,233343,97.82
148 | 5932061,172195,95.91
149 | 6313988,323119,98.18
150 | 2850316,218273,98.14
151 | 2620142,36637,93.99
152 | 854120,54821,98.05
153 | 13799864,317613,96.07
154 | 906841,35315,98.09
155 | 689607,20658,98.58
156 | 441729,14901,99.0
157 | 797800,14327,95.41
158 | 1682016,75706,98.17
159 | 1426251,57965,98.73
160 | 2268534,91796,97.75
161 | 750032,39406,98.19
162 | 4272799,26229,98.03
163 | 2449662,80825,97.54
164 | 5988592,512483,99.4
165 | 3662227,75552,97.46
166 | 725964,42700,98.98
167 | 1647440,111190,98.85
168 | 985104,12721,96.5
169 | 1665692,23961,92.37
170 | 2051794,81790,96.64
171 | 4112883,116481,93.46
172 | 33297045,1293427,99.07
173 | 1517628,19931,96.25
174 | 1675692,18803,72.76
175 | 3626738,173591,98.44
176 | 1169663,7766,92.99
177 | 446959,4923,89.48
178 | 6995153,195994,96.69
179 | 519706,18975,98.94
180 | 4373224,169228,93.01
181 | 4024087,73080,97.71
182 | 731349,42205,98.52
183 | 94366013,4539630,97.66
184 | 2458132,34337,95.52
185 | 1812670,17476,94.43
186 | 2028445,158178,97.94
187 | 1335703,12622,94.14
188 | 938717,17120,97.26
189 | 2926955,42554,97.73
190 | 4018930,32919,82.1
191 | 6439402,81148,51.58
192 | 5665790,166892,96.95
193 | 899728,28115,96.49
194 | 2792057,206926,96.99
195 | 12839663,722491,97.84
196 | 5694139,146797,98.19
197 | 1069693,3970,90.66
198 | 590760,70454,99.18
199 | 319347,1208,92.5
200 | 27594927,1351963,96.4
201 | 26993425,437561,97.42
--------------------------------------------------------------------------------
/Day-14/readme.md:
--------------------------------------------------------------------------------
1 | # Day 14 of [#100DaysOfCode](https://twitter.com/Param3021/status/1537358511981887489)
2 |
3 | ## Task
4 | 1. Explore Matplotlib library
5 |
6 | ### Resources
7 | - Corey Schafer [Matplotlib Tutorial](https://www.youtube.com/playlist?list=PL-osiE80TeTvipOqomVEeZ1HRrcEvtZB_) Playlist
8 |
9 | ### Topics I have learnt
10 | 1. How to make scatter plots
11 | 2. How to apply colormaps to plots
12 |
13 | ### Software used
14 | - Jupyter Notebook
15 | - Python 3.10.2
16 | - Numpy 1.22.4
17 | - pandas 1.4.2
18 | - matplotlib 3.5.2
19 |
20 | ### My Notebooks
21 | - [matplotlib_tutorial_part_7.ipynb](./matplotlib_tutorial_part_7.ipynb)
22 |
23 | ### Conclusion:
24 | Today I learned how to draw scatter plots, how to apply colormaps, and when to use which kind of plot.
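25 | 
26 | ### Example
27 | A minimal sketch of a scatter plot with a colormap, using the `view_count`, `likes` and `ratio` columns of `data/yt-200trending.csv`:
28 | 
29 | ```python
30 | import pandas as pd
31 | import matplotlib.pyplot as plt
32 | 
33 | df = pd.read_csv("./data/yt-200trending.csv")
34 | 
35 | # Color each video by its like/dislike ratio; log scales tame the outliers
36 | plt.scatter(df["view_count"], df["likes"], c=df["ratio"], cmap="summer",
37 |             edgecolor="black", linewidth=1, alpha=0.75)
38 | plt.xscale("log")
39 | plt.yscale("log")
40 | plt.colorbar(label="Like/Dislike Ratio")
41 | plt.xlabel("View Count")
42 | plt.ylabel("Total Likes")
43 | plt.show()
44 | ```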
--------------------------------------------------------------------------------
/Day-15/data/bitcoin_price.csv:
--------------------------------------------------------------------------------
1 | Date,Open,High,Low,Close,Adj Close,Volume
2 | 2019-05-18,7266.080078,8281.660156,7257.259766,8193.139648,8193.139648,723011166
3 | 2019-05-19,8193.139648,8193.139648,7591.850098,7998.290039,7998.290039,637617163
4 | 2019-05-20,7998.290039,8102.319824,7807.770020,7947.930176,7947.930176,357803946
5 | 2019-05-21,7947.930176,8033.759766,7533.660156,7626.890137,7626.890137,424501866
6 | 2019-05-22,7626.890137,7971.259766,7478.740234,7876.500000,7876.500000,386766321
7 | 2019-05-23,7876.500000,8165.450195,7801.569824,7996.399902,7996.399902,413162746
8 | 2019-05-24,7996.399902,8140.819824,7948.680176,8059.129883,8059.129883,179206342
9 | 2019-05-25,8059.129883,8779.000000,7894.529785,8726.230469,8726.230469,483663699
10 | 2019-05-26,8726.230469,8931.530273,8668.459961,8785.169922,8785.169922,507164714
11 | 2019-05-27,8785.169922,8818.709961,8562.200195,8718.849609,8718.849609,360752199
12 | 2019-05-28,8718.849609,8760.480469,8444.099609,8664.559570,8664.559570,380343928
13 | 2019-05-29,8664.559570,9065.889648,8027.209961,8276.250000,8276.250000,815525590
14 | 2019-05-30,8276.250000,8570.780273,8116.000000,8560.080078,8560.080078,500141087
15 | 2019-05-31,8550.629883,8576.339844,8459.650391,8504.980469,8504.980469,69915456
--------------------------------------------------------------------------------
/Day-15/readme.md:
--------------------------------------------------------------------------------
1 | # Day 15 of [#100DaysOfCode](https://twitter.com/Param3021/status/1537738644928495616)
2 |
3 | ## Task
4 | 1. Subplots & Plotting live data in Matplotlib (50%)
5 | 2. Linear Algebra (Determinant) (not done)
6 |
7 | ### Resources
8 | - Corey Schafer [Matplotlib Tutorial](https://www.youtube.com/playlist?list=PL-osiE80TeTvipOqomVEeZ1HRrcEvtZB_) Playlist
9 |
10 | ### Topics I have learnt
11 | 1. Learned how to plot Time Series data
12 |
13 | ### Software used
14 | - Jupyter Notebook
15 | - Python 3.10.2
16 | - Numpy 1.22.4
17 | - pandas 1.4.2
18 | - matplotlib 3.5.2
19 |
20 | ### My Notebooks
21 | - [matplotlib_tutorial_part_8.ipynb](./matplotlib_tutorial_part_8.ipynb)
22 |
23 | ### Conclusion:
24 | Today I learned how to plot time-series data in matplotlib using `plt.plot_date()`, and how to format dates on an axis using `matplotlib.dates.DateFormatter()`.
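25 | 
26 | ### Example
27 | A minimal sketch of plotting the closing prices from `data/bitcoin_price.csv` as a time series:
28 | 
29 | ```python
30 | import pandas as pd
31 | import matplotlib.pyplot as plt
32 | from matplotlib import dates as mpl_dates
33 | 
34 | df = pd.read_csv("./data/bitcoin_price.csv", parse_dates=["Date"])
35 | 
36 | plt.plot_date(df["Date"], df["Close"], linestyle="solid")
37 | plt.gcf().autofmt_xdate()  # slant the date labels so they don't overlap
38 | plt.gca().xaxis.set_major_formatter(mpl_dates.DateFormatter("%b %d"))  # e.g. "May 18"
39 | plt.title("Bitcoin Prices")
40 | plt.show()
41 | ```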
--------------------------------------------------------------------------------
/Day-16/data/developers_salary.csv:
--------------------------------------------------------------------------------
1 | Age,All_Devs,Python,JavaScript
2 | 18,17784,20046,16446
3 | 19,16500,17100,16791
4 | 20,18012,20000,18942
5 | 21,20628,24744,21780
6 | 22,25206,30500,25704
7 | 23,30252,37732,29000
8 | 24,34368,41247,34372
9 | 25,38496,45372,37810
10 | 26,42000,48876,43515
11 | 27,46752,53850,46823
12 | 28,49320,57287,49293
13 | 29,53200,45000,53437
14 | 30,56000,50000,56373
15 | 31,62316,55000,62375
16 | 32,64928,70000,66674
17 | 33,67317,71496,68745
18 | 34,68748,75370,68746
19 | 35,73752,83640,74583
20 | 36,77232,84666,79000
21 | 37,78000,84392,78508
22 | 38,78508,78254,79996
23 | 39,79536,85000,80403
24 | 40,82488,87038,83820
25 | 41,88935,91991,88833
26 | 42,90000,100000,91660
27 | 43,90056,94796,87892
28 | 44,95000,97962,96243
29 | 45,90000,93302,90000
30 | 46,91633,99240,99313
31 | 47,91660,102736,91660
32 | 48,98150,112285,102264
33 | 49,98964,100771,100000
34 | 50,100000,104708,100000
35 | 51,98988,108423,91660
36 | 52,100000,101407,99240
37 | 53,108923,112542,108000
38 | 54,105000,122870,105000
39 | 55,103117,120000,104000
--------------------------------------------------------------------------------
/Day-16/data/random_data.csv:
--------------------------------------------------------------------------------
1 | x_value,total_1,total_2
2 | 0,1000,1000
3 | 1,1004,996
4 | 2,1010,995
5 | 3,1018,992
6 | 4,1017,988
7 | 5,1011,994
8 | 6,1014,998
9 | 7,1015,996
10 | 8,1014,997
11 | 9,1016,994
12 | 10,1023,994
13 | 11,1019,998
14 | 12,1014,993
15 | 13,1014,995
16 | 14,1010,1000
17 | 15,1017,1005
18 | 16,1021,1005
19 | 17,1029,1002
20 | 18,1032,1000
21 | 19,1030,1006
22 | 20,1028,1004
23 | 21,1032,1003
24 | 22,1031,1000
25 | 23,1037,1005
26 | 24,1034,1006
27 | 25,1036,1002
28 | 26,1041,1002
29 | 27,1038,1000
30 | 28,1032,1001
31 | 29,1031,1003
32 | 30,1031,1005
33 | 31,1027,1008
34 | 32,1021,1007
35 | 33,1021,1007
36 | 34,1016,1012
37 | 35,1021,1014
38 | 36,1017,1018
39 | 37,1020,1020
40 | 38,1017,1025
41 | 39,1025,1028
42 | 40,1033,1024
43 | 41,1035,1029
44 | 42,1030,1033
45 | 43,1025,1038
46 | 44,1022,1042
47 | 45,1021,1038
48 | 46,1025,1041
49 | 47,1019,1040
50 | 48,1027,1044
51 | 49,1030,1041
52 | 50,1029,1038
53 | 51,1024,1041
54 | 52,1032,1042
55 | 53,1029,1037
56 | 54,1032,1041
57 | 55,1033,1046
58 | 56,1030,1052
59 | 57,1033,1052
60 | 58,1030,1049
61 | 59,1033,1052
62 | 60,1034,1058
63 | 61,1038,1056
64 | 62,1039,1059
65 | 63,1045,1060
66 | 64,1048,1055
67 | 65,1055,1060
68 | 66,1058,1065
69 | 67,1061,1069
70 | 68,1068,1071
71 | 69,1065,1070
72 | 70,1073,1067
73 | 71,1073,1066
74 | 72,1073,1066
75 | 73,1079,1061
76 | 74,1081,1058
77 | 75,1089,1059
78 | 76,1086,1063
79 | 77,1080,1061
80 | 78,1081,1061
81 | 79,1082,1056
82 | 80,1076,1056
83 | 81,1077,1055
84 | 82,1078,1053
85 | 83,1081,1048
86 | 84,1079,1046
87 | 85,1081,1043
88 | 86,1087,1042
89 | 87,1093,1046
90 | 88,1090,1047
91 | 89,1096,1051
92 | 90,1101,1054
93 | 91,1102,1053
94 |
--------------------------------------------------------------------------------
/Day-16/data_gen.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import random
3 | import time
4 | 
5 | x_value = 0
6 | total_1 = 1000
7 | total_2 = 1000
8 | 
9 | fieldnames = ["x_value", "total_1", "total_2"]
10 | 
11 | # Create the CSV file and write the header row once
12 | with open('./data/random_data.csv', 'w', newline="") as csv_file:
13 |     csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
14 |     csv_writer.writeheader()
15 | 
16 | # Append one new row every second; the live-plotting notebook re-reads
17 | # the growing file on each animation frame
18 | while True:
19 | 
20 |     with open('./data/random_data.csv', 'a', newline="") as csv_file:
21 |         csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
22 | 
23 |         info = {
24 |             "x_value": x_value,
25 |             "total_1": total_1,
26 |             "total_2": total_2
27 |         }
28 | 
29 |         csv_writer.writerow(info)
30 |         print(x_value, total_1, total_2)
31 | 
32 |         # Random walk: each series drifts by a small random step
33 |         x_value += 1
34 |         total_1 = total_1 + random.randint(-6, 8)
35 |         total_2 = total_2 + random.randint(-5, 6)
36 | 
37 |     time.sleep(1)
--------------------------------------------------------------------------------
/Day-16/dev_salaries.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-16/dev_salaries.png
--------------------------------------------------------------------------------
/Day-16/readme.md:
--------------------------------------------------------------------------------
1 | # Day 16 of [#100DaysOfCode](https://twitter.com/Param3021/status/1538466881937756160)
2 |
3 | ## Task
4 | 1. Explore Seaborn
5 |
6 | ### Resources
7 | - Corey Schafer [Matplotlib Tutorial](https://www.youtube.com/playlist?list=PL-osiE80TeTvipOqomVEeZ1HRrcEvtZB_) Playlist
8 | - Kaggle [Data Visualization Course](https://www.kaggle.com/learn/data-visualization)
9 |   - Lesson 1: [Hello Seaborn](https://www.kaggle.com/code/alexisbcook/hello-seaborn)
10 |   - [My Notebook](https://www.kaggle.com/param302/exercise-hello-seaborn)
11 |
12 | ### Topics I have learnt
13 | 1. Plotting Live data in real-time in matplotlib
14 | 2. Subplots in matplotlib
15 | 3. Seaborn basics
16 | 4. Line plots in Seaborn
17 |
18 | ### Software used
19 | - Jupyter Notebook
20 | - Python 3.10.2
21 | - Numpy 1.22.4
22 | - pandas 1.4.2
23 | - matplotlib 3.5.2
24 | - seaborn 0.11.2
25 |
26 | ### My Notebooks
27 | - [matplotlib_tutorial_part_9.ipynb](./matplotlib_tutorial_part_9.ipynb)
28 | - [matplotlib_tutorial_part_10.ipynb](./matplotlib_tutorial_part_10.ipynb)
29 | - [L1 - Hello, Seaborn.ipynb](./L1%20-%20Hello,%20Seaborn.ipynb)
30 |
31 | ### Conclusion:
32 | Today I learned how to plot live data in real time and how to create subplots in matplotlib, along with the basics of seaborn and its line plots.
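33 | 
34 | ### Example
35 | A minimal sketch of the live plot: run `data_gen.py` in another terminal so `data/random_data.csv` keeps growing, then animate:
36 | 
37 | ```python
38 | import pandas as pd
39 | import matplotlib.pyplot as plt
40 | from matplotlib.animation import FuncAnimation
41 | 
42 | def animate(i):
43 |     # Re-read the growing CSV and redraw the whole figure each frame
44 |     data = pd.read_csv("./data/random_data.csv")
45 |     plt.cla()
46 |     plt.plot(data["x_value"], data["total_1"], label="Total 1")
47 |     plt.plot(data["x_value"], data["total_2"], label="Total 2")
48 |     plt.legend(loc="upper left")
49 | 
50 | ani = FuncAnimation(plt.gcf(), animate, interval=1000)  # refresh every second
51 | plt.show()
52 | ```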
--------------------------------------------------------------------------------
/Day-17/catplot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-17/catplot.png
--------------------------------------------------------------------------------
/Day-17/data/candy.csv:
--------------------------------------------------------------------------------
1 | id,competitorname,chocolate,fruity,caramel,peanutyalmondy,nougat,crispedricewafer,hard,bar,pluribus,sugarpercent,pricepercent,winpercent
2 | 0,100 Grand,Yes,No,Yes,No,No,Yes,No,Yes,No,0.73199999,0.86000001,66.971725
3 | 1,3 Musketeers,Yes,No,No,No,Yes,No,No,Yes,No,0.60399997,0.51099998,67.602936
4 | 2,Air Heads,No,Yes,No,No,No,No,No,No,No,0.90600002,0.51099998,52.341465
5 | 3,Almond Joy,Yes,No,No,Yes,No,No,No,Yes,No,0.465,0.76700002,50.347546
6 | 4,Baby Ruth,Yes,No,Yes,Yes,Yes,No,No,Yes,No,0.60399997,0.76700002,56.914547
7 | 5,Boston Baked Beans,No,No,No,Yes,No,No,No,No,Yes,0.31299999,0.51099998,23.417824
8 | 6,Candy Corn,No,No,No,No,No,No,No,No,Yes,0.90600002,0.32499999,38.010963000000004
9 | 7,Caramel Apple Pops,No,Yes,Yes,No,No,No,No,No,No,0.60399997,0.32499999,34.517681
10 | 8,Charleston Chew,Yes,No,No,No,Yes,No,No,Yes,No,0.60399997,0.51099998,38.975037
11 | 9,Chewey Lemonhead Fruit Mix,No,Yes,No,No,No,No,No,No,Yes,0.73199999,0.51099998,36.017628
12 | 10,Chiclets,No,Yes,No,No,No,No,No,No,Yes,0.046,0.32499999,24.524988
13 | 11,Dots,No,Yes,No,No,No,No,No,No,Yes,0.73199999,0.51099998,42.272076
14 | 12,Dum Dums,No,Yes,No,No,No,No,Yes,No,No,0.73199999,0.034000002,39.460556
15 | 13,Fruit Chews,No,Yes,No,No,No,No,No,No,Yes,0.127,0.034000002,43.088924
16 | 14,Fun Dip,No,Yes,No,No,No,No,Yes,No,No,0.73199999,0.32499999,39.185505
17 | 15,Gobstopper,No,Yes,No,No,No,No,Yes,No,Yes,0.90600002,0.45300001,46.783348
18 | 16,Haribo Gold Bears,No,Yes,No,No,No,No,No,No,Yes,0.465,0.465,57.11974
19 | 17,Haribo Happy Cola,No,No,No,No,No,No,No,No,Yes,0.465,0.465,34.158958
20 | 18,Haribo Sour Bears,No,Yes,No,No,No,No,No,No,Yes,0.465,0.465,51.41243
21 | 19,Haribo Twin Snakes,No,Yes,No,No,No,No,No,No,Yes,0.465,0.465,42.178771999999995
22 | 20,Hershey's Kisses,Yes,No,No,No,No,No,No,No,Yes,0.127,0.093000002,55.375454000000005
23 | 21,Hershey's Krackel,Yes,No,No,No,No,Yes,No,Yes,No,0.43000001,0.91799998,62.28448100000001
24 | 22,Hershey's Milk Chocolate,Yes,No,No,No,No,No,No,Yes,No,0.43000001,0.91799998,56.490501
25 | 23,Hershey's Special Dark,Yes,No,No,No,No,No,No,Yes,No,0.43000001,0.91799998,59.236121999999995
26 | 24,Jawbusters,No,Yes,No,No,No,No,Yes,No,Yes,0.093000002,0.51099998,28.127439000000003
27 | 25,Junior Mints,Yes,No,No,No,No,No,No,No,Yes,0.19699999999999998,0.51099998,57.21925
28 | 26,Kit Kat,Yes,No,No,No,No,Yes,No,Yes,No,0.31299999,0.51099998,76.7686
29 | 27,Laffy Taffy,No,Yes,No,No,No,No,No,No,No,0.22,0.11599999999999999,41.389557
30 | 28,Lemonhead,No,Yes,No,No,No,No,Yes,No,No,0.046,0.10400000000000001,39.141056
31 | 29,Lifesavers big ring gummies,No,Yes,No,No,No,No,No,No,No,0.26699999,0.27900001,52.911392000000006
32 | 30,Peanut butter M&M's,Yes,No,No,Yes,No,No,No,No,Yes,0.82499999,0.65100002,71.46505
33 | 31,M&M's,Yes,No,No,No,No,No,No,No,Yes,0.82499999,0.65100002,66.574585
34 | 32,Mike & Ike,No,Yes,No,No,No,No,No,No,Yes,0.87199998,0.32499999,46.411716
35 | 33,Milk Duds,Yes,No,Yes,No,No,No,No,No,Yes,0.30199999,0.51099998,55.064071999999996
36 | 34,Milky Way,Yes,No,Yes,No,Yes,No,No,Yes,No,0.60399997,0.65100002,73.099556
37 | 35,Milky Way Midnight,Yes,No,Yes,No,Yes,No,No,Yes,No,0.73199999,0.44100001,60.800701000000004
38 | 36,Milky Way Simply Caramel,Yes,No,Yes,No,No,No,No,Yes,No,0.96499997,0.86000001,64.35334
39 | 37,Mounds,Yes,No,No,No,No,No,No,Yes,No,0.31299999,0.86000001,47.829754
40 | 38,Mr Good Bar,Yes,No,No,Yes,No,No,No,Yes,No,0.31299999,0.91799998,54.526451
41 | 39,Nerds,No,Yes,No,No,No,No,Yes,No,Yes,0.84799999,0.32499999,55.354046
42 | 40,Nestle Butterfinger,Yes,No,No,Yes,No,No,No,Yes,No,0.60399997,0.76700002,70.735641
43 | 41,Nestle Crunch,Yes,No,No,No,No,Yes,No,Yes,No,0.31299999,0.76700002,66.47068
44 | 42,Nik L Nip,No,Yes,No,No,No,No,No,No,Yes,0.19699999999999998,0.97600001,22.445341
45 | 43,Now & Later,No,Yes,No,No,No,No,No,No,Yes,0.22,0.32499999,39.4468
46 | 44,Payday,No,No,No,Yes,Yes,No,No,Yes,No,0.465,0.76700002,46.296597
47 | 45,Peanut M&Ms,Yes,No,No,Yes,No,No,No,No,Yes,0.59299999,0.65100002,69.483788
48 | 46,Pixie Sticks,No,No,No,No,No,No,No,No,Yes,0.093000002,0.023,37.722336
49 | 47,Pop Rocks,No,Yes,No,No,No,No,Yes,No,Yes,0.60399997,0.83700001,41.265511
50 | 48,Red vines,No,Yes,No,No,No,No,No,No,Yes,0.58099997,0.11599999999999999,37.348521999999996
51 | 49,Reese's Miniatures,Yes,No,No,Yes,No,No,No,No,No,0.034000002,0.27900001,81.86625699999999
52 | 50,Reese's Peanut Butter cup,Yes,No,No,Yes,No,No,No,No,No,0.72000003,0.65100002,84.18029
53 | 51,Reese's pieces,Yes,No,No,Yes,No,No,No,No,Yes,0.40599999,0.65100002,73.43499
54 | 52,Reese's stuffed with pieces,Yes,No,No,Yes,No,No,No,No,No,0.98799998,0.65100002,72.887901
55 | 53,Ring pop,No,Yes,No,No,No,No,Yes,No,No,0.73199999,0.96499997,35.290756
56 | 54,Rolo,Yes,No,Yes,No,No,No,No,No,Yes,0.86000001,0.86000001,65.716286
57 | 55,Root Beer Barrels,No,No,No,No,No,No,Yes,No,Yes,0.73199999,0.06899999799999999,29.703691
58 | 56,Runts,No,Yes,No,No,No,No,Yes,No,Yes,0.87199998,0.27900001,42.849144
59 | 57,Sixlets,Yes,No,No,No,No,No,No,No,Yes,0.22,0.081,34.722
60 | 58,Skittles original,No,Yes,No,No,No,No,No,No,Yes,0.94099998,0.22,63.08514
61 | 59,Skittles wildberry,No,Yes,No,No,No,No,No,No,Yes,0.94099998,0.22,55.103694999999995
62 | 60,Nestle Smarties,Yes,No,No,No,No,No,No,No,Yes,0.26699999,0.97600001,37.887188
63 | 61,Smarties candy,No,Yes,No,No,No,No,Yes,No,Yes,0.26699999,0.11599999999999999,45.995827
64 | 62,Snickers,Yes,No,Yes,Yes,Yes,No,No,Yes,No,0.546,0.65100002,76.67378199999999
65 | 63,Snickers Crisper,Yes,No,Yes,Yes,No,Yes,No,Yes,No,0.60399997,0.65100002,59.529251
66 | 64,Sour Patch Kids,No,Yes,No,No,No,No,No,No,Yes,0.06899999799999999,0.11599999999999999,59.863997999999995
67 | 65,Sour Patch Tricksters,No,Yes,No,No,No,No,No,No,Yes,0.06899999799999999,0.11599999999999999,52.825947
68 | 66,Starburst,No,Yes,No,No,No,No,No,No,Yes,0.15099999,0.22,67.037628
69 | 67,Strawberry bon bons,No,Yes,No,No,No,No,Yes,No,Yes,0.56900001,0.057999998,34.578990999999995
70 | 68,Sugar Babies,No,No,Yes,No,No,No,No,No,Yes,0.96499997,0.76700002,33.43755
71 | 69,Sugar Daddy,No,No,Yes,No,No,No,No,No,No,0.41800001,0.32499999,32.230995
72 | 70,Super Bubble,No,Yes,No,No,No,No,No,No,No,0.162,0.11599999999999999,27.303865000000002
73 | 71,Swedish Fish,No,Yes,No,No,No,No,No,No,Yes,0.60399997,0.755,54.861111
74 | 72,Tootsie Pop,Yes,Yes,No,No,No,No,Yes,No,No,0.60399997,0.32499999,48.982651000000004
75 | 73,Tootsie Roll Juniors,Yes,No,No,No,No,No,No,No,No,0.31299999,0.51099998,43.068897
76 | 74,Tootsie Roll Midgies,Yes,No,No,No,No,No,No,No,Yes,0.17399999,0.011000000000000001,45.736748
77 | 75,Tootsie Roll Snack Bars,Yes,No,No,No,No,No,No,Yes,No,0.465,0.32499999,49.653503
78 | 76,Trolli Sour Bites,No,Yes,No,No,No,No,No,No,Yes,0.31299999,0.255,47.173229
79 | 77,Twix,Yes,No,Yes,No,No,Yes,No,Yes,No,0.546,0.90600002,81.642914
80 | 78,Twizzlers,No,Yes,No,No,No,No,No,No,No,0.22,0.11599999999999999,45.466282
81 | 79,Warheads,No,Yes,No,No,No,No,Yes,No,No,0.093000002,0.11599999999999999,39.011897999999995
82 | 80,Welch's Fruit Snacks,No,Yes,No,No,No,No,No,No,Yes,0.31299999,0.31299999,44.375519
83 | 81,Werther's Original Caramel,No,No,Yes,No,No,No,Yes,No,No,0.18600000000000003,0.26699999,41.904308
84 | 82,Whoppers,Yes,No,No,No,No,Yes,No,No,Yes,0.87199998,0.84799999,49.524113
85 |
--------------------------------------------------------------------------------
/Day-17/data/ign_scores.csv:
--------------------------------------------------------------------------------
1 | Platform,Action,"Action, Adventure",Adventure,Fighting,Platformer,Puzzle,RPG,Racing,Shooter,Simulation,Sports,Strategy
2 | Dreamcast,6.8828571428571435,7.51111111111111,6.281818181818181,8.2,8.34,8.088888888888889,7.7,7.0424999999999995,7.616666666666668,7.628571428571428,7.272222222222222,6.433333333333333
3 | Game Boy Advance,6.373076923076922,7.507692307692309,6.057142857142857,6.226315789473684,6.970588235294118,6.5321428571428575,7.542857142857143,6.6571428571428575,6.444444444444445,6.928571428571429,6.694444444444444,7.175000000000001
4 | Game Boy Color,6.2727272727272725,8.166666666666666,5.3076923076923075,4.5,6.352941176470588,6.583333333333333,7.285714285714286,5.897435897435898,4.5,5.9,5.790697674418604,7.4
5 | GameCube,6.5325842696629195,7.608333333333334,6.753846153846154,7.422222222222222,6.665714285714286,6.133333333333333,7.890909090909091,6.85263157894737,6.981818181818183,8.028571428571428,7.4813186813186805,7.116666666666667
6 | Nintendo 3DS,6.670833333333333,7.4818181818181815,7.414285714285713,6.614285714285714,7.50344827586207,7.999999999999999,7.71923076923077,6.9,7.033333333333332,7.7,6.388888888888889,7.900000000000001
7 | Nintendo 64,6.649056603773586,8.25,7.0,5.6812499999999995,6.889655172413795,7.461538461538462,6.050000000000001,6.939622641509434,8.042857142857143,5.675,6.967857142857146,6.9
8 | Nintendo DS,5.90360824742268,7.239999999999999,6.259803921568627,6.32,6.839999999999999,6.604615384615385,7.222619047619048,6.038636363636365,6.965217391304347,5.874358974358975,5.936666666666668,6.644736842105263
9 | Nintendo DSi,6.827027027027027,8.5,6.090909090909091,7.5,7.25,6.810526315789474,7.166666666666667,6.5636363636363635,6.5,5.195652173913044,5.644444444444444,6.566666666666667
10 | PC,6.805790645879736,7.334745762711864,7.13679775280899,7.166666666666667,7.4109375,6.924705882352943,7.759930313588847,7.032417582417583,7.0848780487804905,7.104888888888888,6.9024242424242415,7.310207336523128
11 | PlayStation,6.016406249999999,7.933333333333334,6.31372549019608,6.553731343283583,6.57906976744186,6.757894736842106,7.909999999999999,6.773387096774193,6.4239999999999995,6.918181818181818,6.751219512195119,6.496875
12 | PlayStation 2,6.467361111111114,7.249999999999999,6.315151515151516,7.306349206349208,7.06842105263158,6.354545454545455,7.473076923076923,6.585064935064935,6.6416666666666675,7.152631578947367,7.197826086956523,7.23888888888889
13 | PlayStation 3,6.853819444444444,7.306153846153846,6.820987654320987,7.710937500000001,7.735714285714285,7.3500000000000005,7.436111111111111,6.978571428571429,7.219553072625698,7.142857142857143,7.485815602836878,7.355172413793104
14 | PlayStation 4,7.55,7.8352941176470585,7.388571428571429,7.279999999999999,8.39090909090909,7.3999999999999995,7.944,7.589999999999999,7.804444444444443,9.25,7.4300000000000015,6.566666666666666
15 | PlayStation Portable,6.467796610169487,7.0,6.938095238095237,6.822222222222222,7.194736842105263,6.726666666666666,6.817777777777777,6.401960784313726,7.071052631578948,6.761538461538462,6.956790123456789,6.550000000000001
16 | PlayStation Vita,7.173076923076923,6.133333333333334,8.057142857142859,7.527272727272727,8.56875,8.25,7.3375,6.3,7.659999999999999,5.725,7.130000000000001,8.9
17 | Wii,6.262717770034842,7.294642857142857,6.23404255319149,6.733333333333333,7.054255319148935,6.426984126984126,7.410344827586206,5.011666666666668,6.47979797979798,6.327027027027026,5.9669014084507035,6.9750000000000005
18 | Wireless,7.041698841698844,7.3125,6.972413793103448,6.739999999999999,7.509090909090909,7.360550458715593,8.26,6.898305084745762,6.906779661016946,7.802857142857143,7.417699115044249,7.5428571428571445
19 | Xbox,6.819512195121949,7.4790322580645165,6.821428571428571,7.02962962962963,7.3034482758620705,5.125,8.277777777777779,7.021590909090909,7.485416666666668,7.155555555555556,7.884397163120564,7.3133333333333335
20 | Xbox 360,6.719047619047618,7.137837837837838,6.857352941176469,7.552238805970149,7.559574468085106,7.141025641025641,7.6499999999999995,6.996153846153845,7.338152610441768,7.325,7.31785714285714,7.112244897959185
21 | Xbox One,7.702857142857144,7.566666666666667,7.254545454545454,7.171428571428571,6.7333333333333325,8.1,8.291666666666666,8.163636363636364,8.019999999999998,7.733333333333333,7.331818181818182,8.5
22 | iPhone,6.865445026178009,7.764285714285714,7.745833333333334,6.0875,7.471929824561404,7.810784313725489,7.185185185185185,7.315789473684211,6.995588235294116,7.328571428571428,7.1521739130434785,7.534920634920634
23 |
--------------------------------------------------------------------------------
/Day-17/data/museum_visitors.csv:
--------------------------------------------------------------------------------
1 | Date,Avila Adobe,Firehouse Museum,Chinese American Museum,America Tropical Interpretive Center
2 | 2014-01-01,24778,4486,1581,6602
3 | 2014-02-01,18976,4172,1785,5029
4 | 2014-03-01,25231,7082,3229,8129
5 | 2014-04-01,26989,6756,2129,2824
6 | 2014-05-01,36883,10858,3676,10694
7 | 2014-06-01,29487,5751,2121,11036
8 | 2014-07-01,32378,5406,2239,13490
9 | 2014-08-01,37680,8619,1769,9139
10 | 2014-09-01,28473,61192,1073,5661
11 | 2014-10-01,27995,6488,1979,7356
12 | 2014-11-01,25691,4189,2404,9773
13 | 2014-12-01,18754,4339,1319,7184
14 | 2015-01-01,20438,3858,1823,6250
15 | 2015-02-01,15578,3742,1558,5907
16 | 2015-03-01,21297,5390,2336,9884
17 | 2015-04-01,26670,7000,3057,7254
18 | 2015-05-01,34383,12528,4009,13207
19 | 2015-06-01,30569,5377,2544,11102
20 | 2015-07-01,41242,6111,3057,11072
21 | 2015-08-01,30700,5383,2415,12096
22 | 2015-09-01,20967,5746,1398,6608
23 | 2015-10-01,29764,8882,2237,12524
24 | 2015-11-01,24483,6848,2850,6677
25 | 2015-12-01,21426,4502,2075,5967
26 | 2016-01-01,19659,4377,2150,6587
27 | 2016-02-01,17378,3675,2547,6955
28 | 2016-03-01,20322,6334,7702,9979
29 | 2016-04-01,24521,6509,3485,5113
30 | 2016-05-01,31728,9315,4565,7757
31 | 2016-06-01,23696,4918,3034,6751
32 | 2016-07-01,31689,5602,2776,12188
33 | 2016-08-01,25987,5181,2153,6245
34 | 2016-09-01,22897,4736,1754,6977
35 | 2016-10-01,25040,6145,2607,6282
36 | 2016-11-01,17760,3930,2248,6365
37 | 2016-12-01,20107,3911,2263,5223
38 | 2017-01-01,18792,3551,2531,4929
39 | 2017-02-01,14035,3491,2139,4493
40 | 2017-03-01,20680,5273,5966,6271
41 | 2017-04-01,25234,5320,3273,6685
42 | 2017-05-01,30029,8733,5585,7556
43 | 2017-06-01,22169,4862,2946,6818
44 | 2017-07-01,30831,5242,2939,8675
45 | 2017-08-01,27009,5379,1784,6212
46 | 2017-09-01,23403,4484,1704,4607
47 | 2017-10-01,22164,4822,2419,6097
48 | 2017-11-01,17629,4613,2685,6757
49 | 2017-12-01,18339,4448,2768,6780
50 | 2018-01-01,16265,3910,2477,5837
51 | 2018-02-01,14718,3306,2710,5705
52 | 2018-03-01,19001,5225,4529,4909
53 | 2018-04-01,18966,4535,2750,4650
54 | 2018-05-01,25173,8013,4960,5626
55 | 2018-06-01,22171,4635,2518,4694
56 | 2018-07-01,23136,4191,2620,4718
57 | 2018-08-01,20815,4866,2409,3891
58 | 2018-09-01,21020,4956,2146,3180
59 | 2018-10-01,19280,4622,2364,3775
60 | 2018-11-01,17163,4082,2385,4562
61 |
--------------------------------------------------------------------------------
/Day-17/heatmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-17/heatmap.png
--------------------------------------------------------------------------------
/Day-17/readme.md:
--------------------------------------------------------------------------------
1 | # Day 17 of [#100DaysOfCode](https://twitter.com/Param3021/status/1538797665042980866)
2 |
3 | ## Task
4 | 1. Data Visualization on Seaborn (80%)
5 | 2. Linear Algebra (Determinants) (not done)
6 |
7 | ### Resources
8 | - Kaggle [Data Visualization Course](https://www.kaggle.com/learn/data-visualization)
9 |   - Chapter 2: [Line Charts](https://www.kaggle.com/code/alexisbcook/line-charts) - [My Notebook](https://www.kaggle.com/param302/exercise-hello-seaborn)
10 |   - Chapter 3: [Bar Charts and Heatmaps](https://www.kaggle.com/code/alexisbcook/bar-charts-and-heatmaps) - [My Notebook](https://www.kaggle.com/param302/exercise-bar-charts-and-heatmaps)
11 |   - Chapter 4: [Scatter Plots](https://www.kaggle.com/code/alexisbcook/scatter-plots) - [My Notebook](https://www.kaggle.com/param302/exercise-scatter-plots)
12 | - FreeCodeCamp Data Analysis with Python project 3: [Medical Data Visualizer](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/medical-data-visualizer)
13 |
14 | ### Topics I have learnt
15 | 1. Seaborn
16 | - Line charts
17 | - Bar charts
18 | - Heatmaps
19 | - Scatter plots
20 | 2. Data Analysis project
21 |
22 | ### Software used
23 | - Jupyter Notebook
24 | - Python 3.10.2
25 | - Numpy 1.22.4
26 | - pandas 1.4.2
27 | - matplotlib 3.5.2
28 | - seaborn 0.11.2
29 |
30 | ### My Notebooks
31 | - [L2 - Line Charts.ipynb](./L2%20-%20Line%20Charts.ipynb)
32 | - [L3 - Bar Charts and Heatmaps.ipynb](./L3%20-%20Bar%20Charts%20and%20Heatmaps.ipynb)
33 | - [L4 - Scatter plots.ipynb](./L4%20-%20Scatter%20plots.ipynb)
34 |
35 |
36 | ### Conclusion:
37 | Today I learned how to plot line charts, bar charts, heatmaps, and scatter plots in Seaborn. Also completed FCC's Medical Data Visualizer project.
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/Day-18/box_plot_box.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-18/box_plot_box.jpg
--------------------------------------------------------------------------------
/Day-18/box_plot_median.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-18/box_plot_median.jpg
--------------------------------------------------------------------------------
/Day-18/data/drug200.csv:
--------------------------------------------------------------------------------
1 | Age,Sex,BP,Cholesterol,Na_to_K,Drug
2 | 23,F,HIGH,HIGH,25.355,DrugY
3 | 47,M,LOW,HIGH,13.093,drugC
4 | 47,M,LOW,HIGH,10.114,drugC
5 | 28,F,NORMAL,HIGH,7.798,drugX
6 | 61,F,LOW,HIGH,18.043,DrugY
7 | 22,F,NORMAL,HIGH,8.607,drugX
8 | 49,F,NORMAL,HIGH,16.275,DrugY
9 | 41,M,LOW,HIGH,11.037,drugC
10 | 60,M,NORMAL,HIGH,15.171,DrugY
11 | 43,M,LOW,NORMAL,19.368,DrugY
12 | 47,F,LOW,HIGH,11.767,drugC
13 | 34,F,HIGH,NORMAL,19.199,DrugY
14 | 43,M,LOW,HIGH,15.376,DrugY
15 | 74,F,LOW,HIGH,20.942,DrugY
16 | 50,F,NORMAL,HIGH,12.703,drugX
17 | 16,F,HIGH,NORMAL,15.516,DrugY
18 | 69,M,LOW,NORMAL,11.455,drugX
19 | 43,M,HIGH,HIGH,13.972,drugA
20 | 23,M,LOW,HIGH,7.298,drugC
21 | 32,F,HIGH,NORMAL,25.974,DrugY
22 | 57,M,LOW,NORMAL,19.128,DrugY
23 | 63,M,NORMAL,HIGH,25.917,DrugY
24 | 47,M,LOW,NORMAL,30.568,DrugY
25 | 48,F,LOW,HIGH,15.036,DrugY
26 | 33,F,LOW,HIGH,33.486,DrugY
27 | 28,F,HIGH,NORMAL,18.809,DrugY
28 | 31,M,HIGH,HIGH,30.366,DrugY
29 | 49,F,NORMAL,NORMAL,9.381,drugX
30 | 39,F,LOW,NORMAL,22.697,DrugY
31 | 45,M,LOW,HIGH,17.951,DrugY
32 | 18,F,NORMAL,NORMAL,8.75,drugX
33 | 74,M,HIGH,HIGH,9.567,drugB
34 | 49,M,LOW,NORMAL,11.014,drugX
35 | 65,F,HIGH,NORMAL,31.876,DrugY
36 | 53,M,NORMAL,HIGH,14.133,drugX
37 | 46,M,NORMAL,NORMAL,7.285,drugX
38 | 32,M,HIGH,NORMAL,9.445,drugA
39 | 39,M,LOW,NORMAL,13.938,drugX
40 | 39,F,NORMAL,NORMAL,9.709,drugX
41 | 15,M,NORMAL,HIGH,9.084,drugX
42 | 73,F,NORMAL,HIGH,19.221,DrugY
43 | 58,F,HIGH,NORMAL,14.239,drugB
44 | 50,M,NORMAL,NORMAL,15.79,DrugY
45 | 23,M,NORMAL,HIGH,12.26,drugX
46 | 50,F,NORMAL,NORMAL,12.295,drugX
47 | 66,F,NORMAL,NORMAL,8.107,drugX
48 | 37,F,HIGH,HIGH,13.091,drugA
49 | 68,M,LOW,HIGH,10.291,drugC
50 | 23,M,NORMAL,HIGH,31.686,DrugY
51 | 28,F,LOW,HIGH,19.796,DrugY
52 | 58,F,HIGH,HIGH,19.416,DrugY
53 | 67,M,NORMAL,NORMAL,10.898,drugX
54 | 62,M,LOW,NORMAL,27.183,DrugY
55 | 24,F,HIGH,NORMAL,18.457,DrugY
56 | 68,F,HIGH,NORMAL,10.189,drugB
57 | 26,F,LOW,HIGH,14.16,drugC
58 | 65,M,HIGH,NORMAL,11.34,drugB
59 | 40,M,HIGH,HIGH,27.826,DrugY
60 | 60,M,NORMAL,NORMAL,10.091,drugX
61 | 34,M,HIGH,HIGH,18.703,DrugY
62 | 38,F,LOW,NORMAL,29.875,DrugY
63 | 24,M,HIGH,NORMAL,9.475,drugA
64 | 67,M,LOW,NORMAL,20.693,DrugY
65 | 45,M,LOW,NORMAL,8.37,drugX
66 | 60,F,HIGH,HIGH,13.303,drugB
67 | 68,F,NORMAL,NORMAL,27.05,DrugY
68 | 29,M,HIGH,HIGH,12.856,drugA
69 | 17,M,NORMAL,NORMAL,10.832,drugX
70 | 54,M,NORMAL,HIGH,24.658,DrugY
71 | 18,F,HIGH,NORMAL,24.276,DrugY
72 | 70,M,HIGH,HIGH,13.967,drugB
73 | 28,F,NORMAL,HIGH,19.675,DrugY
74 | 24,F,NORMAL,HIGH,10.605,drugX
75 | 41,F,NORMAL,NORMAL,22.905,DrugY
76 | 31,M,HIGH,NORMAL,17.069,DrugY
77 | 26,M,LOW,NORMAL,20.909,DrugY
78 | 36,F,HIGH,HIGH,11.198,drugA
79 | 26,F,HIGH,NORMAL,19.161,DrugY
80 | 19,F,HIGH,HIGH,13.313,drugA
81 | 32,F,LOW,NORMAL,10.84,drugX
82 | 60,M,HIGH,HIGH,13.934,drugB
83 | 64,M,NORMAL,HIGH,7.761,drugX
84 | 32,F,LOW,HIGH,9.712,drugC
85 | 38,F,HIGH,NORMAL,11.326,drugA
86 | 47,F,LOW,HIGH,10.067,drugC
87 | 59,M,HIGH,HIGH,13.935,drugB
88 | 51,F,NORMAL,HIGH,13.597,drugX
89 | 69,M,LOW,HIGH,15.478,DrugY
90 | 37,F,HIGH,NORMAL,23.091,DrugY
91 | 50,F,NORMAL,NORMAL,17.211,DrugY
92 | 62,M,NORMAL,HIGH,16.594,DrugY
93 | 41,M,HIGH,NORMAL,15.156,DrugY
94 | 29,F,HIGH,HIGH,29.45,DrugY
95 | 42,F,LOW,NORMAL,29.271,DrugY
96 | 56,M,LOW,HIGH,15.015,DrugY
97 | 36,M,LOW,NORMAL,11.424,drugX
98 | 58,F,LOW,HIGH,38.247,DrugY
99 | 56,F,HIGH,HIGH,25.395,DrugY
100 | 20,M,HIGH,NORMAL,35.639,DrugY
101 | 15,F,HIGH,NORMAL,16.725,DrugY
102 | 31,M,HIGH,NORMAL,11.871,drugA
103 | 45,F,HIGH,HIGH,12.854,drugA
104 | 28,F,LOW,HIGH,13.127,drugC
105 | 56,M,NORMAL,HIGH,8.966,drugX
106 | 22,M,HIGH,NORMAL,28.294,DrugY
107 | 37,M,LOW,NORMAL,8.968,drugX
108 | 22,M,NORMAL,HIGH,11.953,drugX
109 | 42,M,LOW,HIGH,20.013,DrugY
110 | 72,M,HIGH,NORMAL,9.677,drugB
111 | 23,M,NORMAL,HIGH,16.85,DrugY
112 | 50,M,HIGH,HIGH,7.49,drugA
113 | 47,F,NORMAL,NORMAL,6.683,drugX
114 | 35,M,LOW,NORMAL,9.17,drugX
115 | 65,F,LOW,NORMAL,13.769,drugX
116 | 20,F,NORMAL,NORMAL,9.281,drugX
117 | 51,M,HIGH,HIGH,18.295,DrugY
118 | 67,M,NORMAL,NORMAL,9.514,drugX
119 | 40,F,NORMAL,HIGH,10.103,drugX
120 | 32,F,HIGH,NORMAL,10.292,drugA
121 | 61,F,HIGH,HIGH,25.475,DrugY
122 | 28,M,NORMAL,HIGH,27.064,DrugY
123 | 15,M,HIGH,NORMAL,17.206,DrugY
124 | 34,M,NORMAL,HIGH,22.456,DrugY
125 | 36,F,NORMAL,HIGH,16.753,DrugY
126 | 53,F,HIGH,NORMAL,12.495,drugB
127 | 19,F,HIGH,NORMAL,25.969,DrugY
128 | 66,M,HIGH,HIGH,16.347,DrugY
129 | 35,M,NORMAL,NORMAL,7.845,drugX
130 | 47,M,LOW,NORMAL,33.542,DrugY
131 | 32,F,NORMAL,HIGH,7.477,drugX
132 | 70,F,NORMAL,HIGH,20.489,DrugY
133 | 52,M,LOW,NORMAL,32.922,DrugY
134 | 49,M,LOW,NORMAL,13.598,drugX
135 | 24,M,NORMAL,HIGH,25.786,DrugY
136 | 42,F,HIGH,HIGH,21.036,DrugY
137 | 74,M,LOW,NORMAL,11.939,drugX
138 | 55,F,HIGH,HIGH,10.977,drugB
139 | 35,F,HIGH,HIGH,12.894,drugA
140 | 51,M,HIGH,NORMAL,11.343,drugB
141 | 69,F,NORMAL,HIGH,10.065,drugX
142 | 49,M,HIGH,NORMAL,6.269,drugA
143 | 64,F,LOW,NORMAL,25.741,DrugY
144 | 60,M,HIGH,NORMAL,8.621,drugB
145 | 74,M,HIGH,NORMAL,15.436,DrugY
146 | 39,M,HIGH,HIGH,9.664,drugA
147 | 61,M,NORMAL,HIGH,9.443,drugX
148 | 37,F,LOW,NORMAL,12.006,drugX
149 | 26,F,HIGH,NORMAL,12.307,drugA
150 | 61,F,LOW,NORMAL,7.34,drugX
151 | 22,M,LOW,HIGH,8.151,drugC
152 | 49,M,HIGH,NORMAL,8.7,drugA
153 | 68,M,HIGH,HIGH,11.009,drugB
154 | 55,M,NORMAL,NORMAL,7.261,drugX
155 | 72,F,LOW,NORMAL,14.642,drugX
156 | 37,M,LOW,NORMAL,16.724,DrugY
157 | 49,M,LOW,HIGH,10.537,drugC
158 | 31,M,HIGH,NORMAL,11.227,drugA
159 | 53,M,LOW,HIGH,22.963,DrugY
160 | 59,F,LOW,HIGH,10.444,drugC
161 | 34,F,LOW,NORMAL,12.923,drugX
162 | 30,F,NORMAL,HIGH,10.443,drugX
163 | 57,F,HIGH,NORMAL,9.945,drugB
164 | 43,M,NORMAL,NORMAL,12.859,drugX
165 | 21,F,HIGH,NORMAL,28.632,DrugY
166 | 16,M,HIGH,NORMAL,19.007,DrugY
167 | 38,M,LOW,HIGH,18.295,DrugY
168 | 58,F,LOW,HIGH,26.645,DrugY
169 | 57,F,NORMAL,HIGH,14.216,drugX
170 | 51,F,LOW,NORMAL,23.003,DrugY
171 | 20,F,HIGH,HIGH,11.262,drugA
172 | 28,F,NORMAL,HIGH,12.879,drugX
173 | 45,M,LOW,NORMAL,10.017,drugX
174 | 39,F,NORMAL,NORMAL,17.225,DrugY
175 | 41,F,LOW,NORMAL,18.739,DrugY
176 | 42,M,HIGH,NORMAL,12.766,drugA
177 | 73,F,HIGH,HIGH,18.348,DrugY
178 | 48,M,HIGH,NORMAL,10.446,drugA
179 | 25,M,NORMAL,HIGH,19.011,DrugY
180 | 39,M,NORMAL,HIGH,15.969,DrugY
181 | 67,F,NORMAL,HIGH,15.891,DrugY
182 | 22,F,HIGH,NORMAL,22.818,DrugY
183 | 59,F,NORMAL,HIGH,13.884,drugX
184 | 20,F,LOW,NORMAL,11.686,drugX
185 | 36,F,HIGH,NORMAL,15.49,DrugY
186 | 18,F,HIGH,HIGH,37.188,DrugY
187 | 57,F,NORMAL,NORMAL,25.893,DrugY
188 | 70,M,HIGH,HIGH,9.849,drugB
189 | 47,M,HIGH,HIGH,10.403,drugA
190 | 65,M,HIGH,NORMAL,34.997,DrugY
191 | 64,M,HIGH,NORMAL,20.932,DrugY
192 | 58,M,HIGH,HIGH,18.991,DrugY
193 | 23,M,HIGH,HIGH,8.011,drugA
194 | 72,M,LOW,HIGH,16.31,DrugY
195 | 72,M,LOW,HIGH,6.769,drugC
196 | 46,F,HIGH,HIGH,34.686,DrugY
197 | 56,F,LOW,HIGH,11.567,drugC
198 | 16,M,LOW,HIGH,12.006,drugC
199 | 52,M,NORMAL,HIGH,9.894,drugX
200 | 23,M,NORMAL,NORMAL,14.02,drugX
201 | 40,F,LOW,NORMAL,11.349,drugX
202 |
--------------------------------------------------------------------------------
/Day-18/data/iris.csv:
--------------------------------------------------------------------------------
1 | Id,Sepal Length (cm),Sepal Width (cm),Petal Length (cm),Petal Width (cm),Species
2 | 1,5.1,3.5,1.4,0.2,Iris-setosa
3 | 2,4.9,3.0,1.4,0.2,Iris-setosa
4 | 3,4.7,3.2,1.3,0.2,Iris-setosa
5 | 4,4.6,3.1,1.5,0.2,Iris-setosa
6 | 5,5.0,3.6,1.4,0.2,Iris-setosa
7 | 6,5.4,3.9,1.7,0.4,Iris-setosa
8 | 7,4.6,3.4,1.4,0.3,Iris-setosa
9 | 8,5.0,3.4,1.5,0.2,Iris-setosa
10 | 9,4.4,2.9,1.4,0.2,Iris-setosa
11 | 10,4.9,3.1,1.5,0.1,Iris-setosa
12 | 11,5.4,3.7,1.5,0.2,Iris-setosa
13 | 12,4.8,3.4,1.6,0.2,Iris-setosa
14 | 13,4.8,3.0,1.4,0.1,Iris-setosa
15 | 14,4.3,3.0,1.1,0.1,Iris-setosa
16 | 15,5.8,4.0,1.2,0.2,Iris-setosa
17 | 16,5.7,4.4,1.5,0.4,Iris-setosa
18 | 17,5.4,3.9,1.3,0.4,Iris-setosa
19 | 18,5.1,3.5,1.4,0.3,Iris-setosa
20 | 19,5.7,3.8,1.7,0.3,Iris-setosa
21 | 20,5.1,3.8,1.5,0.3,Iris-setosa
22 | 21,5.4,3.4,1.7,0.2,Iris-setosa
23 | 22,5.1,3.7,1.5,0.4,Iris-setosa
24 | 23,4.6,3.6,1.0,0.2,Iris-setosa
25 | 24,5.1,3.3,1.7,0.5,Iris-setosa
26 | 25,4.8,3.4,1.9,0.2,Iris-setosa
27 | 26,5.0,3.0,1.6,0.2,Iris-setosa
28 | 27,5.0,3.4,1.6,0.4,Iris-setosa
29 | 28,5.2,3.5,1.5,0.2,Iris-setosa
30 | 29,5.2,3.4,1.4,0.2,Iris-setosa
31 | 30,4.7,3.2,1.6,0.2,Iris-setosa
32 | 31,4.8,3.1,1.6,0.2,Iris-setosa
33 | 32,5.4,3.4,1.5,0.4,Iris-setosa
34 | 33,5.2,4.1,1.5,0.1,Iris-setosa
35 | 34,5.5,4.2,1.4,0.2,Iris-setosa
36 | 35,4.9,3.1,1.5,0.1,Iris-setosa
37 | 36,5.0,3.2,1.2,0.2,Iris-setosa
38 | 37,5.5,3.5,1.3,0.2,Iris-setosa
39 | 38,4.9,3.1,1.5,0.1,Iris-setosa
40 | 39,4.4,3.0,1.3,0.2,Iris-setosa
41 | 40,5.1,3.4,1.5,0.2,Iris-setosa
42 | 41,5.0,3.5,1.3,0.3,Iris-setosa
43 | 42,4.5,2.3,1.3,0.3,Iris-setosa
44 | 43,4.4,3.2,1.3,0.2,Iris-setosa
45 | 44,5.0,3.5,1.6,0.6,Iris-setosa
46 | 45,5.1,3.8,1.9,0.4,Iris-setosa
47 | 46,4.8,3.0,1.4,0.3,Iris-setosa
48 | 47,5.1,3.8,1.6,0.2,Iris-setosa
49 | 48,4.6,3.2,1.4,0.2,Iris-setosa
50 | 49,5.3,3.7,1.5,0.2,Iris-setosa
51 | 50,5.0,3.3,1.4,0.2,Iris-setosa
52 | 51,7.0,3.2,4.7,1.4,Iris-versicolor
53 | 52,6.4,3.2,4.5,1.5,Iris-versicolor
54 | 53,6.9,3.1,4.9,1.5,Iris-versicolor
55 | 54,5.5,2.3,4.0,1.3,Iris-versicolor
56 | 55,6.5,2.8,4.6,1.5,Iris-versicolor
57 | 56,5.7,2.8,4.5,1.3,Iris-versicolor
58 | 57,6.3,3.3,4.7,1.6,Iris-versicolor
59 | 58,4.9,2.4,3.3,1.0,Iris-versicolor
60 | 59,6.6,2.9,4.6,1.3,Iris-versicolor
61 | 60,5.2,2.7,3.9,1.4,Iris-versicolor
62 | 61,5.0,2.0,3.5,1.0,Iris-versicolor
63 | 62,5.9,3.0,4.2,1.5,Iris-versicolor
64 | 63,6.0,2.2,4.0,1.0,Iris-versicolor
65 | 64,6.1,2.9,4.7,1.4,Iris-versicolor
66 | 65,5.6,2.9,3.6,1.3,Iris-versicolor
67 | 66,6.7,3.1,4.4,1.4,Iris-versicolor
68 | 67,5.6,3.0,4.5,1.5,Iris-versicolor
69 | 68,5.8,2.7,4.1,1.0,Iris-versicolor
70 | 69,6.2,2.2,4.5,1.5,Iris-versicolor
71 | 70,5.6,2.5,3.9,1.1,Iris-versicolor
72 | 71,5.9,3.2,4.8,1.8,Iris-versicolor
73 | 72,6.1,2.8,4.0,1.3,Iris-versicolor
74 | 73,6.3,2.5,4.9,1.5,Iris-versicolor
75 | 74,6.1,2.8,4.7,1.2,Iris-versicolor
76 | 75,6.4,2.9,4.3,1.3,Iris-versicolor
77 | 76,6.6,3.0,4.4,1.4,Iris-versicolor
78 | 77,6.8,2.8,4.8,1.4,Iris-versicolor
79 | 78,6.7,3.0,5.0,1.7,Iris-versicolor
80 | 79,6.0,2.9,4.5,1.5,Iris-versicolor
81 | 80,5.7,2.6,3.5,1.0,Iris-versicolor
82 | 81,5.5,2.4,3.8,1.1,Iris-versicolor
83 | 82,5.5,2.4,3.7,1.0,Iris-versicolor
84 | 83,5.8,2.7,3.9,1.2,Iris-versicolor
85 | 84,6.0,2.7,5.1,1.6,Iris-versicolor
86 | 85,5.4,3.0,4.5,1.5,Iris-versicolor
87 | 86,6.0,3.4,4.5,1.6,Iris-versicolor
88 | 87,6.7,3.1,4.7,1.5,Iris-versicolor
89 | 88,6.3,2.3,4.4,1.3,Iris-versicolor
90 | 89,5.6,3.0,4.1,1.3,Iris-versicolor
91 | 90,5.5,2.5,4.0,1.3,Iris-versicolor
92 | 91,5.5,2.6,4.4,1.2,Iris-versicolor
93 | 92,6.1,3.0,4.6,1.4,Iris-versicolor
94 | 93,5.8,2.6,4.0,1.2,Iris-versicolor
95 | 94,5.0,2.3,3.3,1.0,Iris-versicolor
96 | 95,5.6,2.7,4.2,1.3,Iris-versicolor
97 | 96,5.7,3.0,4.2,1.2,Iris-versicolor
98 | 97,5.7,2.9,4.2,1.3,Iris-versicolor
99 | 98,6.2,2.9,4.3,1.3,Iris-versicolor
100 | 99,5.1,2.5,3.0,1.1,Iris-versicolor
101 | 100,5.7,2.8,4.1,1.3,Iris-versicolor
102 | 101,6.3,3.3,6.0,2.5,Iris-virginica
103 | 102,5.8,2.7,5.1,1.9,Iris-virginica
104 | 103,7.1,3.0,5.9,2.1,Iris-virginica
105 | 104,6.3,2.9,5.6,1.8,Iris-virginica
106 | 105,6.5,3.0,5.8,2.2,Iris-virginica
107 | 106,7.6,3.0,6.6,2.1,Iris-virginica
108 | 107,4.9,2.5,4.5,1.7,Iris-virginica
109 | 108,7.3,2.9,6.3,1.8,Iris-virginica
110 | 109,6.7,2.5,5.8,1.8,Iris-virginica
111 | 110,7.2,3.6,6.1,2.5,Iris-virginica
112 | 111,6.5,3.2,5.1,2.0,Iris-virginica
113 | 112,6.4,2.7,5.3,1.9,Iris-virginica
114 | 113,6.8,3.0,5.5,2.1,Iris-virginica
115 | 114,5.7,2.5,5.0,2.0,Iris-virginica
116 | 115,5.8,2.8,5.1,2.4,Iris-virginica
117 | 116,6.4,3.2,5.3,2.3,Iris-virginica
118 | 117,6.5,3.0,5.5,1.8,Iris-virginica
119 | 118,7.7,3.8,6.7,2.2,Iris-virginica
120 | 119,7.7,2.6,6.9,2.3,Iris-virginica
121 | 120,6.0,2.2,5.0,1.5,Iris-virginica
122 | 121,6.9,3.2,5.7,2.3,Iris-virginica
123 | 122,5.6,2.8,4.9,2.0,Iris-virginica
124 | 123,7.7,2.8,6.7,2.0,Iris-virginica
125 | 124,6.3,2.7,4.9,1.8,Iris-virginica
126 | 125,6.7,3.3,5.7,2.1,Iris-virginica
127 | 126,7.2,3.2,6.0,1.8,Iris-virginica
128 | 127,6.2,2.8,4.8,1.8,Iris-virginica
129 | 128,6.1,3.0,4.9,1.8,Iris-virginica
130 | 129,6.4,2.8,5.6,2.1,Iris-virginica
131 | 130,7.2,3.0,5.8,1.6,Iris-virginica
132 | 131,7.4,2.8,6.1,1.9,Iris-virginica
133 | 132,7.9,3.8,6.4,2.0,Iris-virginica
134 | 133,6.4,2.8,5.6,2.2,Iris-virginica
135 | 134,6.3,2.8,5.1,1.5,Iris-virginica
136 | 135,6.1,2.6,5.6,1.4,Iris-virginica
137 | 136,7.7,3.0,6.1,2.3,Iris-virginica
138 | 137,6.3,3.4,5.6,2.4,Iris-virginica
139 | 138,6.4,3.1,5.5,1.8,Iris-virginica
140 | 139,6.0,3.0,4.8,1.8,Iris-virginica
141 | 140,6.9,3.1,5.4,2.1,Iris-virginica
142 | 141,6.7,3.1,5.6,2.4,Iris-virginica
143 | 142,6.9,3.1,5.1,2.3,Iris-virginica
144 | 143,5.8,2.7,5.1,1.9,Iris-virginica
145 | 144,6.8,3.2,5.9,2.3,Iris-virginica
146 | 145,6.7,3.3,5.7,2.5,Iris-virginica
147 | 146,6.7,3.0,5.2,2.3,Iris-virginica
148 | 147,6.3,2.5,5.0,1.9,Iris-virginica
149 | 148,6.5,3.0,5.2,2.0,Iris-virginica
150 | 149,6.2,3.4,5.4,2.3,Iris-virginica
151 | 150,5.9,3.0,5.1,1.8,Iris-virginica
152 |
--------------------------------------------------------------------------------
/Day-18/readme.md:
--------------------------------------------------------------------------------
1 | # Day 18 of [#100DaysOfCode](https://twitter.com/Param3021/status/1539153638094159872)
2 |
3 | ## Task
4 | 1. Data Visualization on Seaborn
5 | 2. Data analysis
6 |
7 | # Resources
8 | - kaggle [Data Visualization Course](https://www.kaggle.com/learn/data-visualization)
9 | - - Chapter 5: [Distributions (Histograms & KDE plots)](https://www.kaggle.com/code/alexisbcook/distributions) - [My Notebook](https://www.kaggle.com/param302/exercise-distributions)
10 | - - Chapter 6: [Choosing plot types and custom styles](https://www.kaggle.com/code/alexisbcook/choosing-plot-types-and-custom-styles) - [My Notebook](https://www.kaggle.com/param302/exercise-choosing-plot-types-and-custom-styles)
11 | - - Chapter 7: [Final Project](https://www.kaggle.com/code/alexisbcook/final-project) - [My Notebook](https://www.kaggle.com/code/param302/exercise-final-project)
12 | - Box Plots in Seaborn by @akshay_pachaar [Tweet](https://twitter.com/akshay_pachaar/status/1538823345625395207), StatQuest with Josh Starmer - [Boxplots are Awesome](https://www.youtube.com/watch?v=fHLhBnmwUM0) Video and Seaborn [boxplot](https://seaborn.pydata.org/generated/seaborn.boxplot.html) docs
13 | - FreeCodeCamp Data Analysis with Python project 4 [Page View Time Series Analyzer](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/page-view-time-series-visualizer)
14 |
15 | ### Topics I have learnt
16 | 1. Seaborn
17 | - Histograms
18 | - KDE plots
19 | - Custom plot styles in Seaborn & choosing the right plot for the situation
20 | 2. Data Analysis
21 | - Kaggle's Data Visualization course Final project upon Drug Classification
22 | - FreeCodeCamp's Data Analysis with Python project 4
23 |
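A minimal sketch of the histogram / KDE calls and a style change, using the `iris.csv` in this folder's `data/` directory:

```python
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

iris = pd.read_csv("./data/iris.csv")

sns.set_style("darkgrid")                                      # one of seaborn's built-in styles
sns.histplot(data=iris, x="Petal Length (cm)", hue="Species")  # histogram per species
plt.figure()
sns.kdeplot(data=iris, x="Petal Length (cm)", hue="Species", shade=True)  # smoothed KDE
plt.show()
```
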
24 | ### Software used
25 | - Jupyter Notebook
26 | - Python 3.10.2
27 | - Numpy 1.22.4
28 | - pandas 1.4.2
29 | - matplotlib 3.5.2
30 | - seaborn 0.11.2
31 |
32 | ### My Notebooks
33 | - [L5 - Distributions (histograms & KDE).ipynb](./L5%20-%20Distributions%20(histograms%20&%20KDE).ipynb)
34 | - [L6 - Choosing plot types and custom styles.ipynb](./L6%20-%20Choosing%20plot%20types%20and%20custom%20styles.ipynb)
35 | - [L7 - Final project](./L7%20-%20Final%20project.ipynb)
36 |
37 |
38 | ### Conclusion:
39 | Today I learned how to plot histograms and KDE plots, how to choose the right plot based on the data, and how to change Seaborn styles. Also did data analysis on FCC's Page View Time Series Visualizer project and some on the Drug Classification dataset.
40 |
--------------------------------------------------------------------------------
/Day-19/readme.md:
--------------------------------------------------------------------------------
1 | # Day 19 of [#100DaysOfCode](https://twitter.com/Param3021/status/1539545151580041216)
2 |
3 | ## Task
4 | 1. Intro to Machine Learning Course
5 | 2. Intermediate Machine Learning Course (not done)
6 |
7 | # Resources
8 | - Kaggle's [Intro to Machine Learning](https://www.kaggle.com/learn/intro-to-machine-learning) course
9 | - - Lesson 1: [How Models Work](https://www.kaggle.com/code/dansbecker/how-models-work)
10 | - - Lesson 2: [Basic Data Exploration](https://www.kaggle.com/code/dansbecker/basic-data-exploration) - [My Notebook](https://www.kaggle.com/param302/exercise-explore-your-data)
11 | - - Lesson 3: [Your First Machine Learning Model](https://www.kaggle.com/code/dansbecker/your-first-machine-learning-model) - [My Notebook](https://www.kaggle.com/param302/exercise-your-first-machine-learning-model)
12 | - - Lesson 4: [Model Validation](https://www.kaggle.com/code/dansbecker/model-validation) - [My Notebook](https://www.kaggle.com/param302/exercise-model-validation)
13 | - - Lesson 5: [Underfitting and Overfitting](https://www.kaggle.com/code/dansbecker/underfitting-and-overfitting) - [My Notebook](https://www.kaggle.com/param302/exercise-underfitting-and-overfitting)
14 | - - Lesson 6: [Random Forests](https://www.kaggle.com/code/dansbecker/random-forests) - [My Notebook](https://www.kaggle.com/param302/exercise-random-forests)
15 | - - Lesson 7: [Machine Learning Competition](https://www.kaggle.com/code/alexisbcook/machine-learning-competitions) - [My Notebook](https://www.kaggle.com/param302/exercise-machine-learning-competitions)
16 |
17 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
18 | - - [My Notebook](https://www.kaggle.com/param302/house-price-prediction-v1)
19 |
20 | ### Topics I have learnt
21 | 1. Intro to Machine Learning Course
22 | - How Models Work
23 | - Exploring data
24 | - Decision Trees (DecisionTreeRegressor)
25 | - Random Forest (RandomForestRegressor)
26 | - Overfitting and Underfitting data
27 | - Model Validation using MAE (mean absolute error)
28 | 2. Also did house price prediction
29 |
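A minimal sketch of the train/validate/MAE workflow from the course, assuming the competition's `train.csv` in `data/` (the feature column names are assumptions, picked from that dataset):

```python
import pandas as pd
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_absolute_error

data = pd.read_csv("./data/train.csv", index_col="Id")
X = data[["LotArea", "YearBuilt", "1stFlrSF", "2ndFlrSF", "FullBath"]]
y = data["SalePrice"]
train_X, val_X, train_y, val_y = train_test_split(X, y, random_state=0)

model = RandomForestRegressor(random_state=0)  # swap in DecisionTreeRegressor to compare
model.fit(train_X, train_y)
print("Validation MAE:", mean_absolute_error(val_y, model.predict(val_X)))
```
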
30 | ### Software used
31 | - Jupyter Notebook
32 | - Python 3.10.2
33 | - Numpy 1.22.4
34 | - pandas 1.4.2
35 | - scikit-learn 1.1.1
36 |
37 | ### My Notebooks
38 | - [L2 - Explore_your_data.ipynb](./L2%20-%20Explore_your_data.ipynb)
39 | - [L3 - Your_first_machine_learning_model.ipynb](./L3%20-%20Your_first_machine_learning_model.ipynb)
40 | - [L4 - Model_validation.ipynb](./L4%20-%20Model_validation.ipynb)
41 | - [L5 - Underfitting_and_overfitting.ipynb](./L5%20-%20Underfitting_and_overfitting.ipynb)
42 | - [L6 - Random_forests.ipynb](./L6%20-%20Random_forests.ipynb)
43 | - [L7 - Machine_learning_competitions.ipynb](./L7%20-%20Machine_learning_competitions.ipynb)
44 | - [House_price_prediction-v1.ipynb](./House_price_prediction-v1.ipynb)
45 |
46 |
47 | ### Conclusion:
48 | Today I learned how models work - DecisionTreeRegressor, RandomForestRegressor, overfitting & underfitting data and model validation using MAE. Also did house price prediction.
49 | Actually, I completed both the Intro to ML and Intermediate ML courses a year ago, but I forgot almost everything, so I'm doing them again, seriously this time.
50 | I didn't get to the Intermediate ML course today; instead, I did some house price prediction with what I have learnt.
51 |
--------------------------------------------------------------------------------
/Day-2/Data_analysis_project_1.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# FreeCodeCamp Data Analysis with Python Course\n",
8 | "### Project 1: [Mean-Variance-Standard Deviation Calculator](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/mean-variance-standard-deviation-calculator)"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "import numpy as np"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 2,
23 | "metadata": {},
24 | "outputs": [],
25 | "source": [
26 | "\"\"\"Answer format:\n",
27 | "{\n",
28 | " 'mean': [axis1, axis2, flattened],\n",
29 | " 'variance': [axis1, axis2, flattened],\n",
30 | " 'standard deviation': [axis1, axis2, flattened],\n",
31 | " 'max': [axis1, axis2, flattened],\n",
32 | " 'min': [axis1, axis2, flattened],\n",
33 | " 'sum': [axis1, axis2, flattened]\n",
34 | "}\"\"\"\n",
35 | "def calculate(arr):\n",
36 | " \"\"\"Convert array into 3x3 matrix.\n",
37 | " Calculate mean, variance, standard deviation, max, min, sum \n",
38 | " based on whole matrix, axis=0 (rows) & axis=1 (columns)\n",
39 | " \"\"\"\n",
40 | " # reshaping arr into matrix\n",
41 | " if len(arr) != 9:\n",
42 | " raise ValueError(\"List must contain nine numbers.\")\n",
43 | " \n",
44 | " matrix = np.array(arr).reshape(3, 3)\n",
45 | " # mean\n",
46 | " mean = np.mean(matrix)\n",
47 | " mean_0 = list(np.mean(matrix, axis=0))\n",
48 | " mean_1 = list(np.mean(matrix, axis=1))\n",
49 | " # variance\n",
50 | " variance = np.var(matrix)\n",
51 | " variance_0 = list(np.var(matrix, axis=0))\n",
52 | " variance_1 = list(np.var(matrix, axis=1))\n",
53 | " # standard deviation\n",
54 | " std = np.std(matrix)\n",
55 | " std_0 = list(np.std(matrix, axis=0))\n",
56 | " std_1 = list(np.std(matrix, axis=1))\n",
57 | " # max\n",
58 | " arr_max = matrix.max()\n",
59 | " max_0 = list(matrix.max(axis=0))\n",
60 | " max_1 = list(matrix.max(axis=1))\n",
61 | " # min\n",
62 | " arr_min = matrix.min()\n",
63 | " min_0 = list(matrix.min(axis=0))\n",
64 | " min_1 = list(matrix.min(axis=1))\n",
65 | " # sum\n",
66 | " arr_sum = matrix.sum()\n",
67 | " sum_0 = list(matrix.sum(axis=0))\n",
68 | " sum_1 = list(matrix.sum(axis=1))\n",
69 | "\n",
70 | " calculations = {\n",
71 | " \"mean\" : [mean_0, mean_1, mean],\n",
72 | " \"variance\" : [variance_0, variance_1, variance],\n",
73 | " \"standard deviation\" : [std_0, std_1, std],\n",
74 | " \"max\" : [max_0, max_1, arr_max],\n",
75 | " \"min\" : [min_0, min_1, arr_min],\n",
76 | " \"sum\" : [sum_0, sum_1, arr_sum]\n",
77 | " }\n",
78 | " \n",
79 | " return calculations"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": 3,
85 | "metadata": {},
86 | "outputs": [
87 | {
88 | "data": {
89 | "text/plain": [
90 | "{'mean': [[3.0, 4.0, 5.0], [1.0, 4.0, 7.0], 4.0],\n",
91 | " 'variance': [[6.0, 6.0, 6.0],\n",
92 | " [0.6666666666666666, 0.6666666666666666, 0.6666666666666666],\n",
93 | " 6.666666666666667],\n",
94 | " 'standard deviation': [[2.449489742783178,\n",
95 | " 2.449489742783178,\n",
96 | " 2.449489742783178],\n",
97 | " [0.816496580927726, 0.816496580927726, 0.816496580927726],\n",
98 | " 2.581988897471611],\n",
99 | " 'max': [[6, 7, 8], [2, 5, 8], 8],\n",
100 | " 'min': [[0, 1, 2], [0, 3, 6], 0],\n",
101 | " 'sum': [[9, 12, 15], [3, 12, 21], 36]}"
102 | ]
103 | },
104 | "execution_count": 3,
105 | "metadata": {},
106 | "output_type": "execute_result"
107 | }
108 | ],
109 | "source": [
110 | "\"\"\"Answer:\n",
111 | "{\n",
112 | " 'mean': [[3.0, 4.0, 5.0], [1.0, 4.0, 7.0], 4.0],\n",
113 | " 'variance': [[6.0, 6.0, 6.0], [0.6666666666666666, 0.6666666666666666, 0.6666666666666666], 6.666666666666667],\n",
114 | " 'standard deviation': [[2.449489742783178, 2.449489742783178, 2.449489742783178], [0.816496580927726, 0.816496580927726, 0.816496580927726], 2.581988897471611],\n",
115 | " 'max': [[6, 7, 8], [2, 5, 8], 8],\n",
116 | " 'min': [[0, 1, 2], [0, 3, 6], 0],\n",
117 | " 'sum': [[9, 12, 15], [3, 12, 21], 36]\n",
118 | "}\"\"\"\n",
119 | "calculate([0,1,2,3,4,5,6,7,8])"
120 | ]
121 | }
122 | ],
123 | "metadata": {
124 | "interpreter": {
125 | "hash": "d88aeb6bf73c3e2735560d04f72b234e9e70c0b4c2e9aa8924f896649b343841"
126 | },
127 | "kernelspec": {
128 | "display_name": "Python 3.10.2 64-bit",
129 | "language": "python",
130 | "name": "python3"
131 | },
132 | "language_info": {
133 | "codemirror_mode": {
134 | "name": "ipython",
135 | "version": 3
136 | },
137 | "file_extension": ".py",
138 | "mimetype": "text/x-python",
139 | "name": "python",
140 | "nbconvert_exporter": "python",
141 | "pygments_lexer": "ipython3",
142 | "version": "3.10.2"
143 | },
144 | "orig_nbformat": 4
145 | },
146 | "nbformat": 4,
147 | "nbformat_minor": 2
148 | }
149 |
--------------------------------------------------------------------------------
/Day-2/readme.md:
--------------------------------------------------------------------------------
1 | # Day 2 of [#100DaysOfCode](https://twitter.com/Param3021/status/1532218403754496002?s=20&t=nDDm68WQNwG12Y2JdK2pxw)
2 |
3 | ## Task
4 | - Learn Pandas
5 | - Learn Linear Algebra (not done)
6 |
7 | # Resources
8 | - Python Pandas Tutorial Playlist by Corey Schafer YT channel: ([Playlist link](https://www.youtube.com/playlist?list=PL-osiE80TeTsWmV9i9c58mdDCSskIFdDS))
9 | - Stack Overflow Developer Survey 2021 ([link](https://insights.stackoverflow.com/survey))
10 | - FreeCodeCamp Data Analysis with Python Project 1 [Mean-Variance-Standard Deviation Calculator](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/mean-variance-standard-deviation-calculator)
11 |
12 | ### Software used
13 | - Jupyter Notebook
14 | - Python 3.10.2
15 | - Numpy 1.22.4
16 | - pandas 1.4.2
17 |
18 | ### My Notebooks
19 | - [Pandas_tutorial_part_5.ipynb](./Pandas_tutorial_part_5.ipynb)
20 | - [Pandas_tutorial_part_6.ipynb](./Pandas_tutorial_part_6.ipynb)
21 | - [Pandas_tutorial_part_7.ipynb](./Pandas_tutorial_part_7.ipynb)
22 | - [Data_analysis_project_1.ipynb](./Data_analysis_project_1.ipynb)
23 |
24 | ### Topics I have learnt
25 | 1. Updating Rows & Columns
26 | 2. Adding Rows & Columns
27 | 3. Sorting Data
28 | 4. Data analysis project on *Mean-Variance-Standard Deviation Calculator*
29 |
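A minimal sketch of adding/updating rows & columns and sorting (the toy DataFrame is made up for illustration):

```python
import pandas as pd

df = pd.DataFrame({"first": ["Corey", "Jane"], "last": ["Schafer", "Doe"]})

df["full_name"] = df["first"] + " " + df["last"]    # add a column
df.loc[len(df)] = ["John", "Smith", "John Smith"]   # add a row
df.loc[1, "last"] = "Roe"                           # update a value
df.sort_values(by=["last", "first"], inplace=True)  # sort rows
print(df)
```
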
30 | ### Conclusion:
31 | Today I learnt how to add, modify and sort data in pandas. Also did a data analysis project on "Mean-Variance-Standard Deviation Calculator" from FreeCodeCamp, but didn't start mathematics.
32 |
--------------------------------------------------------------------------------
/Day-20/readme.md:
--------------------------------------------------------------------------------
1 | # Day 20 of [#100DaysOfCode](https://twitter.com/Param3021/status/1539895073093353472)
2 |
3 | ## Task
4 | 1. Intermediate Machine Learning Course (40% done)
5 | 2. House price prediction
6 |
7 | # Resources
8 | - Kaggle's [Intermediate Machine Learning](https://www.kaggle.com/learn/intermediate-machine-learning) course
9 | - - Lesson 1: [Introduction](https://www.kaggle.com/code/alexisbcook/introduction) - [My Notebook](https://www.kaggle.com/code/param302/exercise-introduction/)
10 | - - Lesson 2: [Missing Values](https://www.kaggle.com/code/alexisbcook/missing-values) - [My Notebook](https://www.kaggle.com/code/param302/exercise-missing-values)
11 | - - Lesson 3: [Categorical Variables](https://www.kaggle.com/code/alexisbcook/categorical-variables) - [My Notebook](https://www.kaggle.com/code/param302/exercise-categorical-variables)
12 |
13 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
14 | - - [My Notebook 1](./House_price_prediction_2.ipynb)
15 | - - [My Notebook 2](./House_price_prediction_3.ipynb)
16 | - - [My Notebook 3](https://www.kaggle.com/code/param302/exercise-categorical-variables)
17 |
18 | ### Topics I have learnt
19 | 1. Intermediate Machine Learning Course
20 | - Handling Missing values in data
21 | - - by dropping columns
22 | - - by imputing the mean values of the columns
23 | - - by imputing the mean values and adding an indicator column marking which values were imputed
24 | - Handling Categorical columns in data
25 | - - by dropping categorical columns
26 | - - changing them to numbers using **Ordinal Encoder**
27 | - - creating a numerical column for every unique value using **One Hot Encoding** (see the sketch below)
28 | 2. Also House price prediction
29 | - One by dropping missing columns using `DecisionTreeRegressor`.
30 | - One by dropping missing columns using `RandomForestRegressor`.
31 | - One by Imputing missing values, doing Ordinal Encoding using `RandomForestRegressor`.
32 |
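A minimal sketch of these approaches, assuming the competition's `train.csv` (not the exact code from my notebooks):

```python
import pandas as pd
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import OrdinalEncoder

X = pd.read_csv("./data/train.csv", index_col="Id").drop(columns="SalePrice")

# 1) drop columns that contain missing values
X_dropped = X.dropna(axis=1)

# 2) impute numeric columns with the column mean; add_indicator=True appends
#    True/False columns marking which values were imputed
num_X = X.select_dtypes(exclude="object")
num_imputed = SimpleImputer(strategy="mean", add_indicator=True).fit_transform(num_X)

# 3) turn categorical columns into numbers (missing values filled first)
cat_X = X.select_dtypes("object").fillna("missing")
cat_encoded = OrdinalEncoder().fit_transform(cat_X)
```
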
33 | ### Software used
34 | - Jupyter Notebook
35 | - Python 3.10.2
36 | - Numpy 1.22.4
37 | - pandas 1.4.2
38 | - scikit-learn 1.1.1
39 |
40 | ### My Notebooks
41 | - [L1 - Introduction.ipynb](./L1%20-%20Introduction.ipynb)
42 | - [L2 - Missing_values.ipynb](./L2%20-%20Missing_values.ipynb)
43 | - [L3 - Categorical_variables.ipynb](./L3%20-%20Categorical_variables.ipynb)
44 | - [House_price_prediction_2.ipynb](./House_price_prediction_2.ipynb)
45 | - [House_price_prediction_3.ipynb](./House_price_prediction_3.ipynb)
46 | - [House_price_prediction_4.ipynb](https://www.kaggle.com/code/param302/exercise-categorical-variables)
47 |
48 | ### Conclusion:
49 | Today I learned how to handle missing values using SimpleImputer and categorical columns using One Hot Encoding & Ordinal Encoding. Also did house price prediction using `DecisionTreeRegressor` and `RandomForestRegressor`, applying the imputer & ordinal encoding too. Today was great!!!
50 |
--------------------------------------------------------------------------------
/Day-21/readme.md:
--------------------------------------------------------------------------------
1 | # Day 21 of [#100DaysOfCode](https://twitter.com/Param3021/status/1540276365777580032)
2 |
3 | ## Task
4 | 1. Intermediate Machine Learning Course
5 | 2. House price prediction
6 |
7 | # Resources
8 | - Kaggle's [Intermediate Machine Learning](https://www.kaggle.com/learn/intermediate-machine-learning) course
9 | - - Lesson 4: [Pipelines](https://www.kaggle.com/code/alexisbcook/pipelines) - [My Notebook](https://www.kaggle.com/code/param302/exercise-pipelines)
10 | - - Lesson 5: [Cross Validation](https://www.kaggle.com/code/alexisbcook/cross-validation) - [My Notebook](https://www.kaggle.com/code/param302/exercise-cross-validation)
11 | - - Lesson 6: [XGBoost](https://www.kaggle.com/code/alexisbcook/xgboost) - [My Notebook](https://www.kaggle.com/code/param302/exercise-xgboost)
12 | - - Lesson 7: [Data Leakage](https://www.kaggle.com/code/alexisbcook/data-leakage) - [My Notebook](https://www.kaggle.com/code/param302/exercise-data-leakage)
13 |
14 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
15 | - - [My Notebook 1](./House_price_prediction_4.ipynb)
16 | - - [My Notebook 2](https://www.kaggle.com/code/param302/house-price-prediction-5)
17 | - - [My Notebook 3](https://www.kaggle.com/code/param302/house-price-prediction-6)
18 |
19 | ### Topics I have learnt
20 | 1. Intermediate Machine Learning Course
21 | - Using Pipelines to write structural code
22 | - Cross validation using `cross_val_score` (useful when data is limited; takes more time than a single `train_test_split`)
23 | - XGBoost (Extreme Gradient Boosting) using `XGBRegressor`
24 | - Data Leakage, how to handle data leakage
25 | 2. House price prediction
26 | - One by Imputing missing values, doing Ordinal Encoding using `RandomForestRegressor`.
27 | - One mostly same as above but used more features
28 | - One using `Cross Validation` and `Pipelines` with `RandomForestRegressor`.
29 |
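A minimal sketch of a pipeline scored with cross validation, assuming the competition's `train.csv` and numeric features only:

```python
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.impute import SimpleImputer
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import cross_val_score

data = pd.read_csv("./data/train.csv", index_col="Id")
X = data.select_dtypes(exclude="object").drop(columns="SalePrice")
y = data["SalePrice"]

pipe = Pipeline(steps=[
    ("imputer", SimpleImputer(strategy="mean")),
    ("model", RandomForestRegressor(n_estimators=100, random_state=0)),
])
# 5-fold CV; sklearn reports negated MAE, so flip the sign
scores = -cross_val_score(pipe, X, y, cv=5, scoring="neg_mean_absolute_error")
print("Average MAE:", scores.mean())
```
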
30 | ### Software used
31 | - Jupyter Notebook
32 | - Python 3.10.2
33 | - Numpy 1.22.4
34 | - pandas 1.4.2
35 | - Matplotlib 3.5.2
36 | - Seaborn 0.11.2
37 | - scikit-learn 1.1.1
38 | - XGBoost 1.6.1
39 |
40 | ### My Notebooks
41 | - [L4 - Pipelines.ipynb](./L4%20-%20Pipelines.ipynb)
42 | - [L5 - Cross_validation.ipynb](./L5%20-%20Cross_validation.ipynb)
43 | - [L6 - Xgboost.ipynb](./L6%20-%20Xgboost.ipynb)
44 | - [L7 - Data_leakage.ipynb](./L7%20-%20Data_leakage.ipynb)
45 | - [House_price_prediction_4.ipynb](./House_price_prediction_4.ipynb)
46 | - [House_price_prediction_5.ipynb](./House_price_prediction_5.ipynb)
47 | - [House_price_prediction_6.ipynb](./House_price_prediction_6.ipynb)
48 |
49 | ### Conclusion:
50 | Today I learned how to use pipelines to write cleaner code, cross validation using cross_val_score, XGBRegressor, and how to handle data (target) leakage and train-test contamination. Also did house price prediction using cross validation.
51 |
--------------------------------------------------------------------------------
/Day-22/readme.md:
--------------------------------------------------------------------------------
1 | # Day 22 of [#100DaysOfCode](https://twitter.com/Param3021/status/1540665095495520256)
2 |
3 | ## Task
4 | 1. Linear Algebra
5 | 2. Statistics for ML (not done)
6 | 3. House price prediction
7 |
8 | # Resources
9 | - 3 Blue 1 Brown [Linear Algebra](https://www.youtube.com/playlist?list=PLZHQObOWTQDPD3MizzM2xVFitgF8hE_ab) playlist
10 | - Linear Algebra notes [link](https://hacobe.github.io/notes/linear_algebra_3blue1brown.html)
11 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
12 | - - [My Notebook 1](https://www.kaggle.com/code/param302/house-price-prediction-7)
13 | - - [My Notebook 2](https://www.kaggle.com/code/param302/house-price-prediction-8)
14 |
15 | ### Topics I have learnt
16 | 1. Linear Algebra
17 | - 3-d Linear Transformations
18 | - Determinants
19 | 2. House price prediction
20 | - One with 50 features using `XGBRegressor`
21 | - One with data cleaning (removed outliers) & 41 features using `XGBRegressor`
22 |
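A tiny worked example of the determinant as a volume-scaling factor (NumPy; the matrix is made up for illustration):

```python
import numpy as np

# a diagonal matrix scales x, y, z by 2, 3, 4 respectively,
# so it scales volumes by 2 * 3 * 4 = 24
A = np.array([[2, 0, 0],
              [0, 3, 0],
              [0, 0, 4]])
print(np.linalg.det(A))   # ~24.0
```
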
23 | ### Software used
24 | - Jupyter Notebook
25 | - Python 3.10.2
26 | - Numpy 1.22.4
27 | - pandas 1.4.2
28 | - Matplotlib 3.5.2
29 | - Seaborn 0.11.2
30 | - scikit-learn 1.1.1
31 | - XGBoost 1.6.1
32 |
33 | ### My Notebooks
34 | - [House_price_prediction_7.ipynb](./House_price_prediction_7.ipynb)
35 | - [House_price_prediction_8.ipynb](./House_price_prediction_8.ipynb)
36 |
37 | ### Conclusion:
38 | Today I learned about 3-d linear transformations and determinants. Also did house price prediction using XGBRegressor.
39 |
--------------------------------------------------------------------------------
/Day-23/readme.md:
--------------------------------------------------------------------------------
1 | # Day 23 of [#100DaysOfCode](https://twitter.com/Param3021/status/1541008865059565568)
2 |
3 | ## Task
4 | 1. Statistics for ML
5 | 2. House price prediction
6 |
7 | # Resources
8 | - StatQuest with Josh Starmer [Machine Learning](https://www.youtube.com/playlist?list=PLblh5JKOoLUICTaGLRoHQDuF_7q2GfuJF) Playlist
9 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
10 | - - [My Notebook 1](https://www.kaggle.com/code/param302/house-price-prediction-9)
11 | - - [My Notebook 2](https://www.kaggle.com/code/param302/house-price-prediction-10)
12 |
13 | ### Topics I have learnt
14 | 1. Statistics for ML
15 | - ML Intro
16 | - Cross Validation
17 | - The Confusion Matrix
18 | - Sensitivity and Specificity
19 | 2. House price prediction
20 | - Used `XGBRegressor`, cross validation & `OrdinalEncoder`
21 |
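A small worked example of sensitivity and specificity from a confusion matrix (the labels are made up for illustration):

```python
from sklearn.metrics import confusion_matrix

y_true = [1, 0, 1, 1, 0, 0, 1, 0]
y_pred = [1, 0, 0, 1, 0, 1, 1, 0]

# sklearn orders the 2x2 matrix as [[tn, fp], [fn, tp]]
tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
sensitivity = tp / (tp + fn)   # true positive rate: 3 / 4 = 0.75
specificity = tn / (tn + fp)   # true negative rate: 3 / 4 = 0.75
```
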
22 |
23 | ### Software used
24 | - Jupyter Notebook
25 | - Python 3.10.2
26 | - Numpy 1.22.4
27 | - pandas 1.4.2
28 | - Matplotlib 3.5.2
29 | - Seaborn 0.11.2
30 | - scikit-learn 1.1.1
31 | - XGBoost 1.6.1
32 |
33 | ### My Notebooks
34 | - [House_price_prediction_9.ipynb](./House_price_prediction_9.ipynb)
35 | - [House_price_prediction_10.ipynb](./House_price_prediction_10.ipynb)
36 |
37 | ### Conclusion:
38 | Today I learned about the Confusion Matrix, Sensitivity and Specificity in Statistics. Also did house price prediction.
39 |
--------------------------------------------------------------------------------
/Day-24/readme.md:
--------------------------------------------------------------------------------
1 | # Day 24 of [#100DaysOfCode](https://twitter.com/Param3021/status/1541402854761541632)
2 |
3 | ## Task
4 | 1. Linear Algebra
5 | 2. Statistics for ML
6 | 3. EDA on house price prediction challenge. (not done)
7 |
8 | # Resources
9 | - 3Blue1Brown [Linear Algebra](https://www.youtube.com/playlist?list=PLZHQObOWTQDPD3MizzM2xVFitgF8hE_ab)
10 | - StatQuest with Josh Starmer [Machine Learning](https://www.youtube.com/playlist?list=PLblh5JKOoLUICTaGLRoHQDuF_7q2GfuJF) Playlist
11 |
12 | ### Topics I have learnt
13 | 1. Linear Algebra
14 | - Rank
15 | - Null space
16 | - Column space
17 | - Inverse matrices
18 | 2. Statistics for ML
19 | - Bias & Variance
20 |
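A tiny NumPy illustration of rank and inverse matrices (the matrices are made up for illustration):

```python
import numpy as np

A = np.array([[2.0, 1.0],
              [4.0, 2.0]])        # second row is 2x the first
print(np.linalg.matrix_rank(A))   # 1 -- the columns only span a line

B = np.array([[2.0, 1.0],
              [1.0, 3.0]])
print(np.linalg.inv(B))           # inverse exists since det(B) = 5 != 0
```
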
21 | ### Conclusion:
22 | Today I didn't do EDA, but I learned about inverse matrices, column space, rank and null space in Linear Algebra, and bias & variance in Statistics.
--------------------------------------------------------------------------------
/Day-25/readme.md:
--------------------------------------------------------------------------------
1 | # Day 25 of [#100DaysOfCode](https://twitter.com/Param3021/status/1541716679163789312)
2 |
3 | ## Task
4 | 1. Feature Engineering Course
5 | 2. House price prediction
6 |
7 | # Resources
8 | - Kaggle's [Feature Engineering Course](https://www.kaggle.com/learn/feature-engineering)
9 | - - Lesson 1: [What is Feature Engineering](https://www.kaggle.com/code/ryanholbrook/what-is-feature-engineering)
10 | - - Lesson 2: [Mutual Information](https://www.kaggle.com/code/ryanholbrook/mutual-information) - [My Notebook](https://www.kaggle.com/param302/exercise-mutual-information)
11 |
12 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
13 | - - [My Notebook 1](https://www.kaggle.com/param302/house-price-prediction-11)
14 |
15 |
16 | ### Topics I have learnt
17 | 1. Feature Engineering
18 | - What is Feature Engineering
19 | - How is it useful for making Models
20 | - Mutual Information (measures every kind of relationship between a feature and the target)
21 | 2. Did house price prediction
22 | - With `XGBRegressor` & Mutual Information (the 50 columns with the highest MI)
23 |
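A minimal sketch of computing mutual information scores with scikit-learn (the `./data/train.csv` path is hypothetical here; numeric columns only, with missing values filled for simplicity):

```python
import pandas as pd
from sklearn.feature_selection import mutual_info_regression

data = pd.read_csv("./data/train.csv", index_col="Id")
X = data.select_dtypes(exclude="object").drop(columns="SalePrice").fillna(0)
y = data["SalePrice"]

mi = pd.Series(mutual_info_regression(X, y, random_state=0), index=X.columns)
print(mi.sort_values(ascending=False).head(10))   # features most related to the target
```
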
24 | ### Software used
25 | - Jupyter Notebook
26 | - Python 3.10.2
27 | - Numpy 1.22.4
28 | - pandas 1.4.2
29 | - Matplotlib 3.5.2
30 | - Seaborn 0.11.2
31 | - scikit-learn 1.1.1
32 | - XGBoost 1.6.1
33 |
34 | ### My Notebooks
35 | - [L2 - Mutual Information](./L2%20-%20Mutual_information.ipynb)
36 | - [House_price_prediction_11.ipynb](./House_price_prediction_11.ipynb)
37 |
38 | ### Conclusion:
39 | Today I learned what Feature Engineering is, how to make data better for ML models, and Mutual Information. Also did house price prediction using the 50 columns with the highest MI.
40 |
--------------------------------------------------------------------------------
/Day-26/readme.md:
--------------------------------------------------------------------------------
1 | # Day 26 of [#100DaysOfCode](https://twitter.com/Param3021/status/1542076146296459265)
2 |
3 | ## Task
4 | 1. Feature Engineering Course
5 | 2. House price prediction (not done)
6 |
7 | # Resources
8 | - Kaggle's [Feature Engineering Course](https://www.kaggle.com/learn/feature-engineering)
9 | - - Lesson 3: [Creating Features](https://www.kaggle.com/code/ryanholbrook/creating-features) - [My Notebook](https://www.kaggle.com/code/param302/exercise-creating-features/)
10 |
11 | ### Topics I have learnt
12 | 1. Feature Engineering
13 | - Creating Features
14 | - - creating new features from existing ones (ratios, counts, splits, grouped transforms); see the sketch below
15 |
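A minimal sketch of two of these feature types, a ratio and a grouped transform (toy data; the column names are assumptions loosely modelled on the course exercise):

```python
import pandas as pd

df = pd.DataFrame({
    "GrLivArea":    [1500, 2000, 1200],
    "LotArea":      [6000, 8000, 5000],
    "Neighborhood": ["A", "B", "A"],
})

df["LivLotRatio"] = df["GrLivArea"] / df["LotArea"]   # ratio feature
df["MedNhbdArea"] = (df.groupby("Neighborhood")["GrLivArea"]
                       .transform("median"))          # grouped transform
```
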
16 | ### Software used
17 | - Jupyter Notebook
18 | - Python 3.10.2
19 | - Numpy 1.22.4
20 | - pandas 1.4.2
21 | - Matplotlib 3.5.2
22 | - Seaborn 0.11.2
23 | - scikit-learn 1.1.1
24 | - XGBoost 1.6.1
25 |
26 | ### My Notebooks
27 | - [L3 - Creating_features.ipynb](./L3%20-%20Creating_features.ipynb)
28 |
29 | ### Conclusion:
30 | Today I learned how to create new features based on old ones to help the model, like creating ratios, count features, splitting features into sub-features, and grouped transforms. Didn't do house price prediction.
31 |
--------------------------------------------------------------------------------
/Day-27/pipeline.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | from xgboost import XGBRegressor
3 | from sklearn.pipeline import Pipeline
4 | from sklearn.impute import SimpleImputer
5 | from sklearn.compose import ColumnTransformer
6 | from sklearn.preprocessing import OrdinalEncoder, OneHotEncoder
7 |
8 |
9 | class CreatePipeline:
10 | """Create Pipeline
11 | methods:
12 | pipeline: Create Final Pipeline
13 |
14 | create_model: Create the provided model
15 |
16 | numerical_transformer: Transform numerical cols
17 |
18 | categorical_transformer: Transform categorical cols \
19 | OneHotEncoding / OrdinalEncoding
20 |
21 | data_preprocessor: Preprocess the data using ColumnTransformer
22 | """
23 |
24 | def pipeline(self, *, preprocessor, model, verbose=False):
25 | """Creates pipeline
26 | params:
27 | preprocessor
28 | model
29 | """
30 | steps = [("preprocessor", preprocessor),
31 | ("model", model)]
32 | return Pipeline(steps=steps, verbose=verbose)
33 |
34 |
35 | def numerical_transformer(self, *, strategy="mean", **params):
36 | """Transform numerical columns using `SimpleImputer`.
37 | params:
38 | strategy: "mean" | "median" | "most_frequent" | "constant"
39 | **params: extra keyword args for SimpleImputer"""
40 |
41 | transformer = SimpleImputer(strategy=strategy, **params)
42 | return transformer
43 |
44 |
45 | def categorical_transformer(self, *,
46 | imp_strategy="most_frequent",
47 | encoder_type="Ordinal",
48 | imp_params={}, encoder_params={}):
49 | """Transform categorical columns by making Pipeline
50 | `SimpleImputer` | `OneHotEncoder` | `OrdinalEncoder`.
51 | args:
52 | imp_strategy: strategy for imputer values can be
53 | "most_frequent" | "constant"
54 | encoder_type: encoder type,
55 | "Ordinal" | "OneHot"
56 | kwargs:
57 | imp_params: keyword args for `SimpleImputer`.
58 | encoder_params: keyword args for the encoder.
59 | """
60 | if encoder_type not in ("Ordinal", "OneHot"):
61 | raise ValueError(f"Inappropriate value for encoder_type passed: {encoder_type}. "
62 | "Takes one of 'Ordinal' | 'OneHot'.")
63 |
64 | encoder = OrdinalEncoder if encoder_type=="Ordinal" else OneHotEncoder
65 | transformer = Pipeline(steps=[
66 | ("imputer", SimpleImputer(strategy=imp_strategy, **imp_params)),
67 | (encoder_type, encoder(**encoder_params))
68 | ])
69 | return transformer
70 |
71 |
72 | def data_preprocessor(self, *, transformers):
73 | """Preprocess the data using `ColumnTransformer`.
74 | Pass the exact list of transformers
75 | to be passed to `ColumnTransformer`;
76 | each tuple consists of: (transformer_name,
77 | transformer,
78 | list_of_columns)."""
79 | preprocessor = ColumnTransformer(transformers=transformers)
80 | return preprocessor
81 |
82 |
83 | def create_model(self, *, model, random_state=0, n_estimators=1000, **kwargs):
84 | """Creates the model.
85 | **kwargs: keyword args for model."""
86 | my_model = model(random_state=random_state, n_estimators=n_estimators, **kwargs)
87 | return my_model
88 |
89 |
90 | if __name__ == "__main__":
91 | # Loading data
92 | house_data = pd.read_csv("./data/train.csv", index_col="Id")
93 | test_data = pd.read_csv("./data/test.csv", index_col="Id")
94 | X = house_data.drop(columns="SalePrice")
95 | Y = house_data["SalePrice"]
96 | num_cols = X.select_dtypes(exclude="object").columns
97 | cat_cols = X.select_dtypes("object").columns
98 | print("Data loaded and ready")
99 |
100 | print("Creating Pipeline")
101 | # Creating Pipeline class
102 | cp = CreatePipeline()
103 | num_transformer = cp.numerical_transformer()
104 | cat_transformer = cp.categorical_transformer(
105 | encoder_params={
106 | "handle_unknown":"use_encoded_value",
107 | "unknown_value":-1
108 | })
109 | print("Preprocessing data")
110 | # preprocessor
111 | preprocessor = cp.data_preprocessor(
112 | transformers=[("num", num_transformer, num_cols),
113 | ("cat", cat_transformer, cat_cols)
114 | ])
115 |
116 | print("Creating model (XGBRegressor)")
117 | # Creating model (XGBRegressor)
118 | model = cp.create_model(model=XGBRegressor, n_estimators=500, learning_rate=0.05)
119 | pipeline = cp.pipeline(preprocessor=preprocessor, model=model)
120 |
121 | print("Training my model")
122 | pipeline.fit(X, Y)
123 |
124 | print("Predictions are:")
125 | test_preds = pipeline.predict(test_data)
126 | print(test_preds)
--------------------------------------------------------------------------------
/Day-27/readme.md:
--------------------------------------------------------------------------------
1 | # Day 27 of [#100DaysOfCode](https://twitter.com/Param3021/status/1542445802865696770)
2 |
3 | ## Task
4 | 1. Clustering with K-means
5 | 2. PCA (only video)
6 | 3. House price prediction
7 |
8 | ---
9 |
10 | ## Created `CreatePipeline` for creating pipeline and making model [🔗](./pipeline.py)
11 | ```python
12 | class CreatePipeline:
13 | """Create Pipeline
14 | methods:
15 | pipeline: Create Final Pipeline
16 |
17 | create_model: Create the provided model
18 |
19 | numerical_transformer: Transform numerical cols
20 |
21 | categorical_transformer: Transform categorical cols \
22 | OneHotEncoding / OrdinalEncoding
23 |
24 | data_preprocessor: Preprocess the data using ColumnTransformer
25 | """
26 |
27 | def pipeline(self, *, preprocessor, model, verbose=False):
28 | """Creates pipeline
29 | params:
30 | preprocessor
31 | model
32 | """
33 | steps = [("preprocessor", preprocessor),
34 | ("model", model)]
35 | return Pipeline(steps=steps, verbose=verbose)
36 |
37 |
38 | def numerical_transformer(self, *, strategy="mean", **params):
39 | """Transform numerical columns using `SimpleImputer`.
40 | params:
41 | strategy: "mean" | "median" | "most_frequent" | "constant"
42 | **params: extra keyword args for SimpleImputer"""
43 |
44 | transformer = SimpleImputer(strategy=strategy, **params)
45 | return transformer
46 |
47 |
48 | def categorical_transformer(self, *,
49 | imp_strategy="most_frequent",
50 | encoder_type="Ordinal",
51 | imp_params={}, encoder_params={}):
52 | """Transform categorical columns by making Pipeline
53 | `SimpleImputer` | `OneHotEncoder` | `OrdinalEncoder`.
54 | args:
55 | imp_strategy: strategy for imputer values can be
56 | "most_frequent" | "constant"
57 | encoder_type: encoder type,
58 | "Ordinal" | "OneHot"
59 | kwargs:
60 | imp_params: keyword args for `SimpleImputer`.
61 | encoder_params: keyword args for the encoder.
62 | """
63 | if encoder_type not in ("Ordinal", "OneHot"):
64 | raise ValueError(f"Inappropriate value for encoder_type passed: {encoder_type}. "
65 | "Takes one of 'Ordinal' | 'OneHot'.")
66 |
67 | encoder = OrdinalEncoder if encoder_type=="Ordinal" else OneHotEncoder
68 | transformer = Pipeline(steps=[
69 | ("imputer", SimpleImputer(strategy=imp_strategy, **imp_params)),
70 | (encoder_type, encoder(**encoder_params))
71 | ])
72 | return transformer
73 |
74 |
75 | def data_preprocessor(self, *, transformers):
76 | """Preprocess the data using `ColumnTransformer`.
77 | Pass the exact list of transformers
78 | to be passed to `ColumnTransformer`;
79 | each tuple consists of: (transformer_name,
80 | transformer,
81 | list_of_columns)."""
82 | preprocessor = ColumnTransformer(transformers=transformers)
83 | return preprocessor
84 |
85 |
86 | def create_model(self, *, model, random_state=0, n_estimators=1000, **kwargs):
87 | """Creates the model.
88 | **kwargs: keyword args for model."""
89 | my_model = model(random_state=random_state, n_estimators=n_estimators, **kwargs)
90 | return my_model
91 | ```
92 | - It creates the transformers, the preprocessor, the model and the final pipeline, so that the whole process, from data preprocessing (imputing / encoding) to modelling, is done in one place.
93 | - Link of python file [🔗](./pipeline.py)
94 |
95 | ---
96 |
97 | # Resources
98 | - Kaggle's [Feature Engineering Course](https://www.kaggle.com/learn/feature-engineering)
99 | - - Lesson 4: [Clustering with K-means](https://www.kaggle.com/code/ryanholbrook/clustering-with-k-means) - [My Notebook](https://www.kaggle.com/code/param302/exercise-clustering-with-k-means/)
100 |
101 | - Kaggle [House price prediction Challenge](https://www.kaggle.com/competitions/home-data-for-ml-course/)
102 | - - [My Notebook 1](https://www.kaggle.com/param302/house-price-prediction-12)
103 | - - [My Notebook 2](https://www.kaggle.com/param302/house-price-prediction-13)
104 |
105 | ### Topics I have learnt
106 | 1. Clustering with K-means
107 | 2. PCA
108 | 3. House price prediction
109 | - One with Mutual Information and used `XGBRegressor` (Score: 14900.48264)
110 | - One with creating new features and used `XGBRegressor` (Score: 15078.56818)
111 |
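A minimal sketch of K-means cluster labels used as a new feature (toy data made up for illustration):

```python
import pandas as pd
from sklearn.cluster import KMeans

X = pd.DataFrame({"LotArea":   [6000, 8000, 9000, 12000, 12500],
                  "GrLivArea": [1200, 1500, 1700,  2400,  2600]})

kmeans = KMeans(n_clusters=2, n_init=10, random_state=0)
X["Cluster"] = kmeans.fit_predict(X)   # cluster label becomes a new feature
print(X)
```
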
112 | ### Software used
113 | - Jupyter Notebook
114 | - Python 3.10.2
115 | - Numpy 1.22.4
116 | - pandas 1.4.2
117 | - Matplotlib 3.5.2
118 | - Seaborn 0.11.2
119 | - scikit-learn 1.1.1
120 | - XGBoost 1.6.1
121 |
122 | ### My Notebooks
123 | - [House_price_prediction_12.ipynb](./House_price_prediction_12.ipynb)
124 | - [House_price_prediction_13.ipynb](./House_price_prediction_13.ipynb)
125 | - [L4 - Clustering_with_K-means.ipynb](./L4%20-%20Clustering_with_k-means.ipynb)
126 |
127 | ### Conclusion:
128 | Today I learned about K-means clustering and PCA. Also did house price prediction with `XGBRegressor` and feature engineering.
129 |
--------------------------------------------------------------------------------
/Day-28/readme.md:
--------------------------------------------------------------------------------
1 | # Day 28 of [#100DaysOfCode](https://twitter.com/Param3021/status/1542790975499538432)
2 |
3 | ## Task
4 | 1. Feature Engineering
5 | 2. Statistics
6 |
7 | # Resources
8 | - Kaggle's [Feature Engineering Course](https://www.kaggle.com/learn/feature-engineering)
9 | - - Lesson 5: [PCA](https://www.kaggle.com/code/ryanholbrook/principal-component-analysis)
10 | - StatQuest with Josh Starmer [PCA](https://www.youtube.com/watch?v=FgakZw6K1QQ) YT video
11 |
12 | ### Topics I have learnt
13 | 1. PCA
14 |
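A minimal PCA sketch with scikit-learn (random data, just to show the API):

```python
import numpy as np
from sklearn.decomposition import PCA

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 5))          # hypothetical feature matrix

pca = PCA(n_components=2)
X_2d = pca.fit_transform(X)            # project onto the first 2 components
print(pca.explained_variance_ratio_)   # share of variance each component explains
```
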
15 | ### Software used
16 | - Jupyter Notebook
17 | - Python 3.10.2
18 | - Numpy 1.22.4
19 | - pandas 1.4.2
20 | - Matplotlib 3.5.2
21 | - Seaborn 0.11.2
22 | - scikit-learn 1.1.1
23 |
24 |
25 | ### Conclusion:
26 | Today I revised Linear Algebra and learned about PCA. Didn't do much; I think I should learn stats for ML.
--------------------------------------------------------------------------------
/Day-29/readme.md:
--------------------------------------------------------------------------------
1 | # Day 29 of [#100DaysOfCode](https://twitter.com/Param3021/status/1543136477663883265)
2 |
3 | ## Task
4 | 1. Linear Algebra
5 | 2. Statistics
6 |
7 | # Resources
8 | - StatQuest with Josh Starmer [Entropy](https://www.youtube.com/watch?v=YtebGVx-Fxw) YT video
9 | - 3Blue1Brown [Inverse Matrices](https://www.youtube.com/watch?v=uQhTuRlWMxw&t=330s) YT Video
10 |
11 | ### Topics I have learnt
12 | 1. Linear Algebra
13 | - Inverse matrices
14 | 2. Statistics
15 | - Entropy
16 |
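A tiny worked example of (Shannon) entropy:

```python
import numpy as np

p = np.array([0.5, 0.25, 0.25])   # hypothetical class probabilities
print(-np.sum(p * np.log2(p)))    # 1.5 bits
```
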
17 | ### Conclusion:
18 | Today I learnt inverse matrices and entropy. Days are going lazy..
--------------------------------------------------------------------------------
/Day-3/readme.md:
--------------------------------------------------------------------------------
1 | # Day 3 of [#100DaysOfCode](https://twitter.com/Param3021/status/1532678152941862912)
2 |
3 | ## Task
4 | 1. Learn Pandas
5 | - Grouping
6 | - Aggregating
7 | - Cleaning data (not done)
8 |
9 | # Resources
10 | - Python Pandas Tutorial Playlist by Corey Schafer YT channel: ([Playlist link](https://www.youtube.com/playlist?list=PL-osiE80TeTsWmV9i9c58mdDCSskIFdDS))
11 | - Stack Overflow Developer Survey 2021 ([link](https://insights.stackoverflow.com/survey))
12 |
13 | ### Software used
14 | - Jupyter Notebook
15 | - Python 3.10.2
16 | - Numpy 1.22.4
17 | - pandas 1.4.2
18 |
19 | ### My Notebooks
20 | - [Pandas_tutorial_part_8.ipynb](./Pandas_tutorial_part_8.ipynb)
21 |
22 | ### Topics I have learnt
23 | 1. Grouping data in pandas
24 | 2. How to use Aggregate functions on data
25 |
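A minimal groupby/aggregate sketch (toy data made up for illustration):

```python
import pandas as pd

df = pd.DataFrame({"Country": ["India", "India", "USA", "USA"],
                   "Salary":  [50_000, 70_000, 120_000, 90_000]})

print(df.groupby("Country")["Salary"].agg(["mean", "median", "count"]))
```
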
26 | ### Conclusion:
27 | Today I learnt how to group data using `groupby()` and how to use aggregate functions on it. Today I am not well; hope tomorrow will be good :) .
28 |
--------------------------------------------------------------------------------
/Day-30/data/gender_submission.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,1
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,1
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,1
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,0
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,1
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,1
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,1
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-30/data/submission_1.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,0
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,1
49 | 939,0
50 | 940,1
51 | 941,0
52 | 942,1
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,1
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,1
76 | 966,1
77 | 967,1
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,1
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,1
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,1
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,1
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,1
147 | 1037,0
148 | 1038,1
149 | 1039,0
150 | 1040,1
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,1
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,1
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,1
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,1
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,1
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,1
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,1
245 | 1135,0
246 | 1136,0
247 | 1137,1
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,1
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,1
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,1
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,1
290 | 1180,0
291 | 1181,0
292 | 1182,1
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,1
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,1
309 | 1199,1
310 | 1200,1
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,1
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,1
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,1
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,1
406 | 1296,1
407 | 1297,0
408 | 1298,0
409 | 1299,1
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-30/readme.md:
--------------------------------------------------------------------------------
1 | # Day 30 of [#100DaysOfCode](https://twitter.com/Param3021/status/1543909519679705091)
2 |
3 | ## Task
4 | 1. Titanic dataset challenge
5 | 2. Maths with ML (not done)
6 |
7 | # Resources
8 | - Kaggle [Titanic - Machine Learning from Disaster](https://www.kaggle.com/competitions/titanic/)
9 | - - [My Notebook](https://www.kaggle.com/code/param302/titanic-survival-competition-1)
10 |
11 | ### Topics I have learnt
12 | 1. Titanic dataset challenge
13 | - Used `XGBClassifier` with cross-validation (a minimal sketch follows this list)
14 |
15 | 2. Also participated in MLH Global Hack Week INIT 2023 [🔗](https://ghw.mlh.io/)
16 | - Did 3 challenges
17 | - And made **Wikipedia Searcher** [🔗](https://github.com/AnantLuthra/wikipedia_searcher) with [Anant Luthra](https://github.com/AnantLuthra)
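
A minimal sketch of the setup named above — `XGBClassifier` scored with 5-fold cross-validation. The feature columns and hyperparameter values are illustrative assumptions, not the exact ones from the competition notebook:

```python
# Sketch only: feature list and hyperparameters are assumptions,
# not the exact setup used in the competition notebook.
import pandas as pd
from sklearn.model_selection import cross_val_score
from xgboost import XGBClassifier

train = pd.read_csv("data/train.csv")

# A few numeric columns keep the sketch self-contained.
features = ["Pclass", "SibSp", "Parch", "Fare"]
X = train[features].fillna(0)
y = train["Survived"]

model = XGBClassifier(n_estimators=200, learning_rate=0.05, max_depth=4)

# 5-fold cross-validation gives a more stable accuracy estimate
# than a single train/validation split.
scores = cross_val_score(model, X, y, cv=5, scoring="accuracy")
print(f"CV accuracy: {scores.mean():.3f} +/- {scores.std():.3f}")
```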
18 |
19 | ### Software used
20 | - Jupyter Notebook
21 | - Python 3.10.2
22 | - Numpy 1.22.4
23 | - pandas 1.4.2
24 | - Matplotlib 3.5.2
25 | - Seaborn 0.11.2
26 | - scikit-learn 1.1.1
27 |
28 | ### My Notebooks
29 | - [Titanic_survival_competition_1.ipynb](./Titanic_survival_competition_1.ipynb)
30 |
31 | ### Conclusion:
32 | Today I did a little bit of prediction on the Titanic dataset using `RandomForestClassifier` and did hyperparameter tuning on it with cross-validation.
--------------------------------------------------------------------------------
/Day-31/data/gender_submission.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,1
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,1
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,1
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,0
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,1
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,1
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,1
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-31/data/submission_2.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,0
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,0
35 | 925,0
36 | 926,0
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,1
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,0
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,0
90 | 980,1
91 | 981,1
92 | 982,0
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,0
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,0
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,1
199 | 1089,0
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,0
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,0
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,0
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,0
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-31/data/submission_3.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,0
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,0
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,1
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,1
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,1
92 | 982,0
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,1
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,1
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,1
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,1
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,1
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,0
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,1
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,1
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,1
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,0
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,1
237 | 1127,0
238 | 1128,0
239 | 1129,1
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,1
284 | 1174,1
285 | 1175,0
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,1
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,0
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,1
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,0
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,0
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-31/data/submission_4.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,0
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,0
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,1
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,0
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,0
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,1
83 | 973,0
84 | 974,1
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,1
92 | 982,0
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,1
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,1
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,1
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,1
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,0
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,1
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,0
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,1
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,0
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,1
197 | 1087,0
198 | 1088,1
199 | 1089,0
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,1
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,1
208 | 1098,0
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,1
237 | 1127,0
238 | 1128,0
239 | 1129,1
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,1
284 | 1174,1
285 | 1175,0
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,1
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,0
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,1
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,0
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,0
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,0
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,0
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-31/readme.md:
--------------------------------------------------------------------------------
1 | # Day 31 of [#100DaysOfCode](https://twitter.com/Param3021/status/1544261582565568512)
2 |
3 | ## Task
4 | 1. Titanic dataset prediction challenge
5 |
6 | # Resources
7 | - Kaggle [Titanic - Machine Learning from Disaster](https://www.kaggle.com/competitions/titanic/)
8 | - - [My Notebook 1](https://www.kaggle.com/code/param302/titanic-survival-competition-2)
9 | - - [My Notebook 2](https://www.kaggle.com/code/param302/titanic-survival-competition-3)
10 |
11 |
12 | ### Topics I have learnt
13 | 1. Titanic dataset prediction
14 | - One with `RandomForestClassifier` and one-hot encoding
15 | - One with `XGBClassifier` and one-hot encoding
16 | - One the same as above, but with outliers removed from the data (a minimal sketch follows this list)
17 |
18 | 2. Also participated in MLH Global Hack Week INIT 2023 [🔗](https://ghw.mlh.io/)
19 | - Did 1 challenge
20 | - And tried to make a Discord bot, but failed
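
A minimal sketch of the approach in point 1 — one-hot encoding wired into a pipeline, plus a simple IQR rule for the outlier removal. The column choices, the `ColumnTransformer` wiring, and the 1.5 × IQR cutoff are assumptions for illustration, not the notebooks' exact preprocessing:

```python
# Sketch only: columns, pipeline wiring, and the IQR cutoff are
# illustrative assumptions, not the notebooks' exact preprocessing.
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import OneHotEncoder

train = pd.read_csv("data/train.csv")

# Drop rows whose Fare falls outside 1.5 * IQR — one common
# rule of thumb for removing outliers before fitting.
q1, q3 = train["Fare"].quantile([0.25, 0.75])
iqr = q3 - q1
train = train[train["Fare"].between(q1 - 1.5 * iqr, q3 + 1.5 * iqr)]

num_cols = ["Pclass", "SibSp", "Parch", "Fare"]
cat_cols = ["Sex", "Embarked"]
X = train[num_cols + cat_cols].fillna({"Embarked": "S"})
y = train["Survived"]

# One-hot encode the categorical columns; numeric ones pass through.
preprocess = ColumnTransformer(
    [("onehot", OneHotEncoder(handle_unknown="ignore"), cat_cols)],
    remainder="passthrough",
)
pipe = Pipeline([
    ("prep", preprocess),
    ("model", RandomForestClassifier(n_estimators=300, random_state=0)),
])
print(cross_val_score(pipe, X, y, cv=5).mean())
```

Swapping `RandomForestClassifier` for `XGBClassifier` in the last pipeline step reproduces the second variant.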
21 |
22 | ### Software used
23 | - Jupyter Notebook
24 | - Python 3.10.2
25 | - Numpy 1.22.4
26 | - pandas 1.4.2
27 | - Matplotlib 3.5.2
28 | - Seaborn 0.11.2
29 | - scikit-learn 1.1.1
30 | - XGBoost 1.6.1
31 |
32 | ### My Notebooks
33 | - [Titanic_survival_competition_2.ipynb](./Titanic_survival_competition_2.ipynb)
34 | - [Titanic_survival_competition_3.ipynb](./Titanic_survival_competition_3.ipynb)
35 | - [Titanic_survival_competition_4.ipynb](./Titanic_survival_competition_4.ipynb)
36 |
37 | ### Conclusion:
38 | Today I did the Titanic survival competition prediction 3 times; one attempt scored better than the others. I used `XGBClassifier` and `RandomForestClassifier`.
--------------------------------------------------------------------------------
/Day-32/data/gender_submission.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,1
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,1
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,1
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,0
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,1
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,1
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,1
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-32/data/submission_5.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,1
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,1
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,1
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,1
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,0
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,0
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,1
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,1
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,1
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,1
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,0
204 | 1094,1
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,1
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,0
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,1
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,1
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,1
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,0
284 | 1174,1
285 | 1175,0
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,1
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,1
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,1
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,0
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,1
408 | 1298,0
409 | 1299,1
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-32/readme.md:
--------------------------------------------------------------------------------
1 | # Day 32 of [#100DaysOfCode](https://twitter.com/ossia/status/1544733677300613121)
2 |
3 | ## Task
4 | 1. Titanic dataset prediction challenge
5 | 2. Data Science project challenge in GHW
6 |
7 | # Resources
8 | - Kaggle [Titanic - Machine Learning from Disaster](https://www.kaggle.com/competitions/titanic/)
9 | - - [My Notebook 1](https://www.kaggle.com/code/param302/titanic-survival-competition-5)
10 |
11 | - MLH - [Global Hack Week](https://ghw.mlh.io/) INIT 2023 hackathon
12 | - - [Data Science project](https://github.com/Param302/MLH-GHW-2023/tree/master/Data%20Science%20project)
13 |
14 | ### Topics I have learnt
15 | 1. Titanic dataset prediction
16 | - Used `XGBClassifier` and did feature engineering using mutual information (a minimal sketch follows this list)
17 |
18 | 2. Also participated in MLH Global Hack Week INIT 2023 [🔗](https://ghw.mlh.io/)
19 | - Did 4 challenges
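
A minimal sketch of feature selection with mutual information feeding `XGBClassifier`. The encoding, feature set, and top-3 cutoff are assumptions for illustration, not the notebook's exact choices:

```python
# Sketch only: encoding, feature set, and the top-3 cutoff are
# illustrative assumptions, not the notebook's exact choices.
import pandas as pd
from sklearn.feature_selection import mutual_info_classif
from xgboost import XGBClassifier

train = pd.read_csv("data/train.csv")
X = train[["Pclass", "SibSp", "Parch", "Fare"]].fillna(0)
X["Sex"] = (train["Sex"] == "female").astype(int)  # crude encoding
y = train["Survived"]

# Mutual information scores how much knowing a feature reduces
# uncertainty about the target, including non-linear dependence.
mi = pd.Series(mutual_info_classif(X, y, random_state=0), index=X.columns)
top = mi.sort_values(ascending=False).head(3).index.tolist()

# Train on the highest-scoring features only.
model = XGBClassifier(n_estimators=200, max_depth=4)
model.fit(X[top], y)
```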
20 |
21 | ### Software used
22 | - Jupyter Notebook
23 | - Python 3.10.2
24 | - Numpy 1.22.4
25 | - pandas 1.4.2
26 | - Matplotlib 3.5.2
27 | - Seaborn 0.11.2
28 | - scikit-learn 1.1.1
29 | - XGBoost 1.6.1
30 |
31 | ### My Notebooks
32 | - [Titanic_survival_competition_5.ipynb](./Titanic_survival_competition_5.ipynb)
33 | - [GHW_data_science_project.ipynb](./GHW_data_science_project.ipynb)
34 |
35 | ### Conclusion:
36 | Today I did the Titanic survival competition and participated in the Global Hack Week hackathon, completing 4 challenges.
--------------------------------------------------------------------------------
/Day-33/data/gender_submission.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,1
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,1
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,1
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,0
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,1
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,1
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,1
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-33/data/submission_6.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,0
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,0
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,1
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,0
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,0
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,1
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,0
90 | 980,1
91 | 981,1
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,1
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,0
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,1
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,1
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,1
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,1
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,1
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,0
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,1
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,1
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,0
284 | 1174,1
285 | 1175,0
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,1
309 | 1199,1
310 | 1200,1
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,1
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,1
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,0
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,1
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,1
420 |
--------------------------------------------------------------------------------
/Day-33/data/submission_7.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,1
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,1
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,1
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,1
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,0
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,1
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,0
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,1
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,1
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,1
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,1
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,1
199 | 1089,1
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,0
204 | 1094,1
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,1
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,0
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,1
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,0
284 | 1174,1
285 | 1175,0
286 | 1176,0
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,0
295 | 1185,1
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,1
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,1
311 | 1201,0
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,1
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,0
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,1
408 | 1298,0
409 | 1299,1
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-33/readme.md:
--------------------------------------------------------------------------------
1 | # Day 33 of [#100DaysOfCode](https://twitter.com/Param3021/status/1545020574178902016)
2 |
3 | ## Task
4 | 1. Titanic dataset prediction challenge
5 | 2. Data Science project challenge in GHW
6 |
7 | # Resources
8 | - Kaggle [Titanic - Machine Learning from Disaster](https://www.kaggle.com/competitions/titanic/)
9 | - - [My Notebook 1](https://www.kaggle.com/code/param302/titanic-survival-competition-6)
10 | - - [My Notebook 2](https://www.kaggle.com/code/param302/titanic-survival-competition-7)
11 |
12 | - MLH - [Global Hack Week](https://ghw.mlh.io/) INIT 2023 hackathon
13 | - - [Create an Animation](https://github.com/Param302/MLH-GHW-2023/tree/master/Day-4%20CSS)
14 |
15 | ### Topics I have learnt
16 | 1. Titanic dataset prediction
17 | - Used `XGBClassifier` and did `Feature Engineering` using Mutual Information and by creating new features (see the sketch below).
18 | - Same as above, but with fewer features.
19 |
20 | 2. Also participated in MLH Global Hack Week INIT 2023 [🔗](https://ghw.mlh.io/)
21 | - Did 1 challenge: created an Aquarium animation
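
A minimal sketch of the mutual-information ranking idea behind the feature engineering; this is not the original notebook, and the `data/train.csv` path and column subset are assumptions:

```python
# Rank candidate features by mutual information with the target.
import pandas as pd
from sklearn.feature_selection import mutual_info_classif

train = pd.read_csv("data/train.csv")
X = train[["Pclass", "SibSp", "Parch", "Fare"]].fillna(0)  # numeric features only
y = train["Survived"]

mi = pd.Series(mutual_info_classif(X, y, random_state=0), index=X.columns)
print(mi.sort_values(ascending=False))  # keep the most informative features
```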
22 |
23 | ### Software used
24 | - Jupyter Notebook
25 | - Python 3.10.2
26 | - Numpy 1.22.4
27 | - pandas 1.4.2
28 | - Matplotlib 3.5.2
29 | - Seaborn 0.11.2
30 | - scikit-learn 1.1.1
31 | - XGBoost 1.6.1
32 | - HTML5
33 | - CSS3
34 |
35 | ### My Notebooks
36 | - [Titanic_survival_competition_6.ipynb](./Titanic_survival_competition_6.ipynb)
37 | - [Titanic_survival_competition_7.ipynb](./Titanic_survival_competition_7.ipynb)
38 | - [GHW - Create an Animation](https://github.com/Param302/MLH-GHW-2023/tree/master/Day-4%20CSS)
39 |
40 | ### Conclusion:
41 | Today I did the Titanic survival competition & scored better by creating new features. Also participated in the Global Hack Week hackathon & completed the Create an Animation challenge.
--------------------------------------------------------------------------------
/Day-34/data/gender_submission.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,1
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,1
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,1
36 | 926,0
37 | 927,0
38 | 928,1
39 | 929,1
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,1
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,0
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,1
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,1
141 | 1031,0
142 | 1032,1
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,1
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,1
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,0
199 | 1089,1
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,1
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,1
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,1
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,1
368 | 1258,0
369 | 1259,1
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,1
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-34/data/submission_10.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,1
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,0
20 | 910,0
21 | 911,1
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,0
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,0
37 | 927,0
38 | 928,0
39 | 929,0
40 | 930,0
41 | 931,0
42 | 932,0
43 | 933,0
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,0
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,0
64 | 954,0
65 | 955,1
66 | 956,1
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,0
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,0
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,0
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,0
90 | 980,1
91 | 981,1
92 | 982,1
93 | 983,0
94 | 984,1
95 | 985,0
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,0
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,0
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,0
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,0
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,0
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,0
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,0
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,0
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,0
160 | 1050,0
161 | 1051,1
162 | 1052,1
163 | 1053,0
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,0
174 | 1064,0
175 | 1065,0
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,0
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,0
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,0
195 | 1085,0
196 | 1086,0
197 | 1087,0
198 | 1088,1
199 | 1089,0
200 | 1090,0
201 | 1091,0
202 | 1092,1
203 | 1093,0
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,1
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,0
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,0
236 | 1126,1
237 | 1127,0
238 | 1128,0
239 | 1129,0
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,1
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,1
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,0
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,0
269 | 1159,0
270 | 1160,0
271 | 1161,0
272 | 1162,0
273 | 1163,0
274 | 1164,1
275 | 1165,1
276 | 1166,0
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,0
284 | 1174,1
285 | 1175,1
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,0
291 | 1181,0
292 | 1182,0
293 | 1183,1
294 | 1184,0
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,0
310 | 1200,0
311 | 1201,1
312 | 1202,0
313 | 1203,0
314 | 1204,0
315 | 1205,1
316 | 1206,1
317 | 1207,1
318 | 1208,1
319 | 1209,0
320 | 1210,0
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,0
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,0
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,0
339 | 1229,0
340 | 1230,0
341 | 1231,0
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,0
348 | 1238,0
349 | 1239,1
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,0
360 | 1250,0
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,0
382 | 1272,0
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,0
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,0
419 | 1309,0
420 |
--------------------------------------------------------------------------------
/Day-34/data/submission_8.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,0
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,1
20 | 910,1
21 | 911,0
22 | 912,1
23 | 913,0
24 | 914,1
25 | 915,1
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,1
30 | 920,1
31 | 921,1
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,1
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,1
50 | 940,1
51 | 941,1
52 | 942,0
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,1
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,0
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,1
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,1
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,1
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,1
92 | 982,1
93 | 983,1
94 | 984,1
95 | 985,1
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,0
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,1
105 | 995,0
106 | 996,1
107 | 997,0
108 | 998,0
109 | 999,1
110 | 1000,1
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,1
121 | 1011,1
122 | 1012,1
123 | 1013,1
124 | 1014,1
125 | 1015,0
126 | 1016,1
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,1
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,1
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,1
151 | 1041,0
152 | 1042,1
153 | 1043,1
154 | 1044,0
155 | 1045,0
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,0
160 | 1050,1
161 | 1051,1
162 | 1052,1
163 | 1053,1
164 | 1054,1
165 | 1055,1
166 | 1056,0
167 | 1057,0
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,1
174 | 1064,0
175 | 1065,1
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,1
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,1
184 | 1074,1
185 | 1075,1
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,1
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,1
195 | 1085,0
196 | 1086,1
197 | 1087,0
198 | 1088,1
199 | 1089,0
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,1
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,0
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,0
213 | 1103,1
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,1
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,1
236 | 1126,1
237 | 1127,0
238 | 1128,0
239 | 1129,1
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,1
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,1
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,1
274 | 1164,1
275 | 1165,1
276 | 1166,1
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,1
284 | 1174,1
285 | 1175,0
286 | 1176,0
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,1
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,1
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,1
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,1
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,1
314 | 1204,0
315 | 1205,0
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,1
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,1
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,1
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,1
339 | 1229,0
340 | 1230,0
341 | 1231,1
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,0
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,1
357 | 1247,0
358 | 1248,1
359 | 1249,1
360 | 1250,1
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,1
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,1
382 | 1272,1
383 | 1273,0
384 | 1274,1
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,0
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,1
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,1
419 | 1309,1
420 |
--------------------------------------------------------------------------------
/Day-34/data/submission_9.csv:
--------------------------------------------------------------------------------
1 | PassengerId,Survived
2 | 892,0
3 | 893,0
4 | 894,0
5 | 895,0
6 | 896,1
7 | 897,0
8 | 898,0
9 | 899,0
10 | 900,1
11 | 901,0
12 | 902,0
13 | 903,0
14 | 904,1
15 | 905,0
16 | 906,1
17 | 907,1
18 | 908,0
19 | 909,1
20 | 910,1
21 | 911,0
22 | 912,0
23 | 913,0
24 | 914,1
25 | 915,0
26 | 916,1
27 | 917,0
28 | 918,1
29 | 919,0
30 | 920,1
31 | 921,0
32 | 922,0
33 | 923,0
34 | 924,1
35 | 925,0
36 | 926,1
37 | 927,0
38 | 928,1
39 | 929,0
40 | 930,0
41 | 931,1
42 | 932,0
43 | 933,1
44 | 934,0
45 | 935,1
46 | 936,1
47 | 937,0
48 | 938,0
49 | 939,1
50 | 940,1
51 | 941,1
52 | 942,1
53 | 943,0
54 | 944,1
55 | 945,1
56 | 946,0
57 | 947,0
58 | 948,0
59 | 949,0
60 | 950,0
61 | 951,1
62 | 952,0
63 | 953,1
64 | 954,0
65 | 955,1
66 | 956,0
67 | 957,1
68 | 958,1
69 | 959,0
70 | 960,0
71 | 961,1
72 | 962,1
73 | 963,0
74 | 964,0
75 | 965,0
76 | 966,1
77 | 967,0
78 | 968,1
79 | 969,1
80 | 970,0
81 | 971,1
82 | 972,1
83 | 973,0
84 | 974,0
85 | 975,0
86 | 976,0
87 | 977,0
88 | 978,1
89 | 979,1
90 | 980,1
91 | 981,1
92 | 982,1
93 | 983,1
94 | 984,1
95 | 985,1
96 | 986,0
97 | 987,0
98 | 988,1
99 | 989,0
100 | 990,1
101 | 991,0
102 | 992,1
103 | 993,0
104 | 994,1
105 | 995,0
106 | 996,0
107 | 997,0
108 | 998,0
109 | 999,1
110 | 1000,0
111 | 1001,0
112 | 1002,0
113 | 1003,1
114 | 1004,1
115 | 1005,1
116 | 1006,1
117 | 1007,0
118 | 1008,0
119 | 1009,1
120 | 1010,1
121 | 1011,1
122 | 1012,1
123 | 1013,0
124 | 1014,1
125 | 1015,0
126 | 1016,1
127 | 1017,1
128 | 1018,0
129 | 1019,1
130 | 1020,0
131 | 1021,0
132 | 1022,0
133 | 1023,0
134 | 1024,0
135 | 1025,0
136 | 1026,0
137 | 1027,0
138 | 1028,1
139 | 1029,0
140 | 1030,0
141 | 1031,0
142 | 1032,0
143 | 1033,1
144 | 1034,0
145 | 1035,0
146 | 1036,1
147 | 1037,0
148 | 1038,0
149 | 1039,0
150 | 1040,1
151 | 1041,0
152 | 1042,1
153 | 1043,0
154 | 1044,0
155 | 1045,1
156 | 1046,0
157 | 1047,0
158 | 1048,1
159 | 1049,1
160 | 1050,1
161 | 1051,0
162 | 1052,1
163 | 1053,1
164 | 1054,1
165 | 1055,0
166 | 1056,0
167 | 1057,1
168 | 1058,0
169 | 1059,0
170 | 1060,1
171 | 1061,0
172 | 1062,0
173 | 1063,1
174 | 1064,0
175 | 1065,1
176 | 1066,0
177 | 1067,1
178 | 1068,1
179 | 1069,0
180 | 1070,1
181 | 1071,1
182 | 1072,0
183 | 1073,0
184 | 1074,1
185 | 1075,1
186 | 1076,1
187 | 1077,0
188 | 1078,1
189 | 1079,1
190 | 1080,0
191 | 1081,0
192 | 1082,0
193 | 1083,0
194 | 1084,1
195 | 1085,0
196 | 1086,1
197 | 1087,0
198 | 1088,1
199 | 1089,0
200 | 1090,0
201 | 1091,1
202 | 1092,1
203 | 1093,1
204 | 1094,0
205 | 1095,1
206 | 1096,0
207 | 1097,0
208 | 1098,0
209 | 1099,0
210 | 1100,1
211 | 1101,0
212 | 1102,1
213 | 1103,0
214 | 1104,0
215 | 1105,1
216 | 1106,0
217 | 1107,0
218 | 1108,1
219 | 1109,0
220 | 1110,1
221 | 1111,1
222 | 1112,1
223 | 1113,0
224 | 1114,1
225 | 1115,0
226 | 1116,1
227 | 1117,1
228 | 1118,0
229 | 1119,1
230 | 1120,0
231 | 1121,0
232 | 1122,0
233 | 1123,1
234 | 1124,0
235 | 1125,1
236 | 1126,0
237 | 1127,0
238 | 1128,0
239 | 1129,1
240 | 1130,1
241 | 1131,1
242 | 1132,1
243 | 1133,1
244 | 1134,0
245 | 1135,0
246 | 1136,0
247 | 1137,0
248 | 1138,1
249 | 1139,0
250 | 1140,1
251 | 1141,0
252 | 1142,1
253 | 1143,0
254 | 1144,0
255 | 1145,0
256 | 1146,0
257 | 1147,0
258 | 1148,1
259 | 1149,0
260 | 1150,1
261 | 1151,0
262 | 1152,0
263 | 1153,0
264 | 1154,1
265 | 1155,1
266 | 1156,0
267 | 1157,0
268 | 1158,1
269 | 1159,0
270 | 1160,1
271 | 1161,0
272 | 1162,0
273 | 1163,1
274 | 1164,1
275 | 1165,1
276 | 1166,1
277 | 1167,1
278 | 1168,0
279 | 1169,0
280 | 1170,0
281 | 1171,0
282 | 1172,0
283 | 1173,1
284 | 1174,1
285 | 1175,0
286 | 1176,1
287 | 1177,0
288 | 1178,0
289 | 1179,0
290 | 1180,1
291 | 1181,0
292 | 1182,0
293 | 1183,0
294 | 1184,1
295 | 1185,0
296 | 1186,0
297 | 1187,0
298 | 1188,1
299 | 1189,0
300 | 1190,0
301 | 1191,0
302 | 1192,0
303 | 1193,0
304 | 1194,0
305 | 1195,0
306 | 1196,1
307 | 1197,1
308 | 1198,0
309 | 1199,1
310 | 1200,0
311 | 1201,0
312 | 1202,0
313 | 1203,1
314 | 1204,0
315 | 1205,0
316 | 1206,1
317 | 1207,1
318 | 1208,0
319 | 1209,0
320 | 1210,1
321 | 1211,0
322 | 1212,0
323 | 1213,0
324 | 1214,0
325 | 1215,1
326 | 1216,1
327 | 1217,0
328 | 1218,1
329 | 1219,0
330 | 1220,0
331 | 1221,0
332 | 1222,1
333 | 1223,0
334 | 1224,1
335 | 1225,1
336 | 1226,0
337 | 1227,0
338 | 1228,1
339 | 1229,0
340 | 1230,0
341 | 1231,1
342 | 1232,0
343 | 1233,0
344 | 1234,0
345 | 1235,1
346 | 1236,0
347 | 1237,1
348 | 1238,0
349 | 1239,0
350 | 1240,0
351 | 1241,1
352 | 1242,1
353 | 1243,0
354 | 1244,0
355 | 1245,0
356 | 1246,0
357 | 1247,0
358 | 1248,1
359 | 1249,1
360 | 1250,1
361 | 1251,1
362 | 1252,0
363 | 1253,1
364 | 1254,1
365 | 1255,0
366 | 1256,1
367 | 1257,0
368 | 1258,0
369 | 1259,0
370 | 1260,1
371 | 1261,0
372 | 1262,0
373 | 1263,1
374 | 1264,0
375 | 1265,0
376 | 1266,1
377 | 1267,1
378 | 1268,0
379 | 1269,0
380 | 1270,0
381 | 1271,1
382 | 1272,1
383 | 1273,0
384 | 1274,0
385 | 1275,1
386 | 1276,0
387 | 1277,1
388 | 1278,0
389 | 1279,0
390 | 1280,0
391 | 1281,0
392 | 1282,1
393 | 1283,1
394 | 1284,0
395 | 1285,0
396 | 1286,0
397 | 1287,1
398 | 1288,0
399 | 1289,1
400 | 1290,0
401 | 1291,0
402 | 1292,1
403 | 1293,0
404 | 1294,1
405 | 1295,0
406 | 1296,0
407 | 1297,0
408 | 1298,0
409 | 1299,0
410 | 1300,1
411 | 1301,1
412 | 1302,1
413 | 1303,1
414 | 1304,1
415 | 1305,0
416 | 1306,1
417 | 1307,0
418 | 1308,1
419 | 1309,1
420 |
--------------------------------------------------------------------------------
/Day-34/readme.md:
--------------------------------------------------------------------------------
1 | # Day 34 of [#100DaysOfCode](https://twitter.com/Param3021/status/1545334652915032064)
2 |
3 | ## Task
4 | 1. Titanic dataset prediction challenge
5 | 2. Global Hack Week challenges
6 |
7 | # Resources
8 | - Kaggle [Titanic - Machine Learning from Disaster](https://www.kaggle.com/competitions/titanic/)
9 | - - [My Notebook 1](https://www.kaggle.com/code/param302/titanic-survival-competition-8)
10 | - - [My Notebook 2](https://www.kaggle.com/code/param302/titanic-survival-competition-9)
11 | - - [My Notebook 3](https://www.kaggle.com/code/param302/titanic-survival-competition-10)
12 |
13 | - MLH - [Global Hack Week](https://ghw.mlh.io/) INIT 2023 hackathon
14 | - - [2022-Goal-sticker](https://github.com/Param302/MLH-GHW-2023/blob/master/2022-goal-sticker.png)
15 | - - [Bob Ross MS PAINT](https://github.com/Param302/MLH-GHW-2023/blob/master/Paint.png)
16 | - - [Code in Python](https://github.com/Param302/MLH-GHW-2023/blob/master/leap_year.py)
17 | - - [A project that uses Dataset](https://github.com/Param302/MLH-GHW-2023/tree/master/Day-5%20Titanic_dataset)
18 | - - [Show off Github Activity](https://github.com/Param302/MLH-GHW-2023/blob/master/github-activity.jpg)
19 | - - [Random Number Generator](https://github.com/Param302/MLH-GHW-2023/blob/master/generate_random.c)
20 |
21 | ### Topics I have learnt
22 | 1. Titanic dataset prediction
23 | - 1st with `XGBClassifier`, doing `Feature Engineering` using Mutual Information and by creating new features.
24 | - 2nd same as above, but with more columns added.
25 | - 3rd with `RandomForestClassifier`, doing `Feature Engineering` by creating new features & Mutual Information (see the sketch below).
26 |
27 | 2. Also participated in MLH Global Hack Week INIT 2023 [🔗](https://ghw.mlh.io/)
28 | - Did 6 challenges
29 | - Created random number generator in `C`.
30 | - Made a leap year program in `Python`.
31 | - and much more....
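
A rough sketch of a `RandomForestClassifier` baseline of the kind described above; the file paths and feature columns are assumptions, and the real notebooks add engineered features on top:

```python
# Fit a random-forest baseline and write a Kaggle submission file.
import pandas as pd
from sklearn.ensemble import RandomForestClassifier

train = pd.read_csv("data/train.csv")  # assumed paths
test = pd.read_csv("data/test.csv")

features = ["Pclass", "SibSp", "Parch"]  # simple numeric features
model = RandomForestClassifier(n_estimators=100, random_state=0)
model.fit(train[features], train["Survived"])

submission = pd.DataFrame({"PassengerId": test["PassengerId"],
                           "Survived": model.predict(test[features])})
submission.to_csv("data/submission.csv", index=False)
```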
32 |
33 | ### Software used
34 | - Jupyter Notebook
35 | - Python 3.10.2
36 | - Numpy 1.22.4
37 | - pandas 1.4.2
38 | - Matplotlib 3.5.2
39 | - Seaborn 0.11.2
40 | - scikit-learn 1.1.1
41 | - XGBoost 1.6.1
42 | - HTML5
43 | - CSS3
44 | - C
45 |
46 | ### My Notebooks
47 | - [Titanic_survival_competition_8.ipynb](./Titanic_survival_competition_8.ipynb)
48 | - [Titanic_survival_competition_9.ipynb](./Titanic_survival_competition_9.ipynb)
49 | - [Titanic_survival_competition_10.ipynb](./Titanic_survival_competition_10.ipynb)
50 | - MLH - [Global Hack Week](https://ghw.mlh.io/) INIT 2023 hackathon
51 | - - [2022-Goal-sticker](https://github.com/Param302/MLH-GHW-2023/blob/master/2022-goal-sticker.png)
52 | - - [Bob Ross MS PAINT](https://github.com/Param302/MLH-GHW-2023/blob/master/Paint.png)
53 | - - [Code in Python](https://github.com/Param302/MLH-GHW-2023/blob/master/leap_year.py)
54 | - - [A project that uses Dataset](https://github.com/Param302/MLH-GHW-2023/tree/master/Day-5%20Titanic_dataset)
55 | - - [Show off Github Activity](https://github.com/Param302/MLH-GHW-2023/blob/master/github-activity.jpg)
56 | - - [Random Number Generator](https://github.com/Param302/MLH-GHW-2023/blob/master/generate_random.c)
57 |
58 | ### Conclusion:
59 | Today I did the Titanic survival competition & scored much better by creating new features. Also participated in the Global Hack Week hackathon & completed 6 challenges.
--------------------------------------------------------------------------------
/Day-35/readme.md:
--------------------------------------------------------------------------------
1 | # Day 35 of [#100DaysOfCode](https://twitter.com/Param3021/status/1546079960171159553)
2 |
3 | ## Task
4 | 1. Learn more about Regression / Classification models
5 | 2. Revise Mathematics
6 | 3. Global Hack Week Challenge
7 |
8 | ## Resources
9 | - StatQuest with Josh Starmer
10 | - - [Regression Trees](https://www.youtube.com/watch?v=g9c66TUylZ4) YT Video
11 | - - [Decision and Classification Trees](https://www.youtube.com/watch?v=_L39rN6gz7Y) YT Video
12 |
13 | - MLH - [Global Hack Week](https://ghw.mlh.io/) INIT 2023 hackathon
14 |
15 | ### Topics I have learnt
16 | 1. Decision Trees
17 | - Regression Trees
18 | - Classification Trees (both sketched below)
19 |
20 | 2. Also participated in MLH Global Hack Week INIT 2023 [🔗](https://ghw.mlh.io/)
21 | - Did 2 challenges (1 yesterday)
22 | - Designed a mascot for my guild **Young Devs** [🔗](https://twitter.com/Param3021/status/1545857632631951360)
23 | - Made a [Demo Video](https://youtu.be/ltYkbAh_8FU) on [Wikipedia Searcher App](https://github.com/AnantLuthra/wikipedia_searcher) with [Anant Luthra](https://github.com/AnantLuthra)
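
A tiny sketch of the difference between the two tree types covered in the videos, on toy data:

```python
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor

X = [[1], [2], [3], [4]]
clf = DecisionTreeClassifier(max_depth=2).fit(X, [0, 0, 1, 1])         # predicts labels
reg = DecisionTreeRegressor(max_depth=2).fit(X, [1.0, 1.5, 3.2, 4.8])  # predicts numbers
print(clf.predict([[2.5]]), reg.predict([[2.5]]))
```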
24 |
25 |
26 | ### Conclusion:
27 | Today I learned about more parameters in the Decision Tree Regressor / Classifier. Didn't revise maths. Also made a [Demo Video](https://youtu.be/ltYkbAh_8FU) with [Anant Luthra](https://github.com/AnantLuthra) on the [Wikipedia Searcher App](https://github.com/AnantLuthra/wikipedia_searcher) for the Global Hack Week hackathon.
--------------------------------------------------------------------------------
/Day-36/readme.md:
--------------------------------------------------------------------------------
1 | # Day 36 of [#100DaysOfCode](https://twitter.com/Param3021/status/1546597597540139008)
2 |
3 | ## Task
4 | 1. Revise mathematics
5 | 2. Machine Learning Specialization Course by [Deeplearning.ai](https://www.deeplearning.ai/) on Coursera [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
6 |
7 | ## Resources
8 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
9 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
10 |
11 | ### Topics I have learnt
12 | 1. Overview of Machine Learning in Week 1 of Course 1 (Supervised Machine Learning)
13 | 2. Revised mathematics
14 |
15 | ### My notebooks
16 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
17 |
18 | ### Conclusion:
19 | Today I started the Machine Learning specialization course by Andrew Ng sir on Coursera, and revised mathematics.
--------------------------------------------------------------------------------
/Day-37/readme.md:
--------------------------------------------------------------------------------
1 | # Day 37 of [#100DaysOfCode](https://twitter.com/Param3021/status/1546790848788336640)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Supervised vs Unsupervised learning
6 | 2. Regression model (not started)
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Supervised learning
14 | 2. Unsupervised learning
15 | 3. Regression
16 | 4. Classification
17 | 5. Clustering
18 | 6. Practice quiz on Supervised vs Unsupervised learning
19 |
20 | ### Software used:
21 | - Jupyter Notebook
22 | - Python 3.10.2
23 |
24 | ### My notebooks
25 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
26 |
27 | ### Conclusion:
28 | Today I learned about Supervised and Unsupervised learning in ML, and the definitions of Regression, Classification and Clustering. I have also made a repository with all my notes and codes for the Machine Learning specialization course.
29 |
--------------------------------------------------------------------------------
/Day-38/readme.md:
--------------------------------------------------------------------------------
1 | # Day 38 of [#100DaysOfCode](https://twitter.com/Param3021/status/1547126471931887616)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Regression model in ML course
6 | 2. Mathematics for ML book (not done)
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Regression model in ML course (definitions)
14 | - Linear Regression model
15 | - Notations in Machine learning
16 |
17 | ### Software used:
18 | - Jupyter Notebook
19 | - Python 3.10.2
20 |
21 | ### My notebooks
22 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
23 |
24 | ### Conclusion:
25 | Today I learned about the Linear Regression model and some notations used in machine learning. Didn't do much today.
--------------------------------------------------------------------------------
/Day-39/readme.md:
--------------------------------------------------------------------------------
1 | # Day 39 of [#100DaysOfCode](https://twitter.com/Param3021/status/1547520010939412481)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Linear Regression
6 | 2. Cost function
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Regression Model
14 | - Linear Regression
15 | - What a cost function is
16 | - Why we use a cost function
17 | - Formula of the cost function
18 | - Code for linear regression (cost function sketched below)
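
A minimal sketch of the squared-error cost function from the course, for a single-feature model f(x) = w*x + b (toy numbers):

```python
import numpy as np

def compute_cost(x, y, w, b):
    """J(w, b) = (1 / 2m) * sum((w*x + b - y)^2)"""
    m = x.shape[0]
    return np.sum((w * x + b - y) ** 2) / (2 * m)

x = np.array([1.0, 2.0])                 # feature values
y = np.array([300.0, 500.0])             # targets
print(compute_cost(x, y, 200.0, 100.0))  # 0.0: this (w, b) fits perfectly
```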
19 |
20 | ### Software used:
21 | - Jupyter Notebook
22 | - Python 3.10.2
23 |
24 | ### My notebooks
25 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
26 |
27 | ### Conclusion:
28 | Today I learned about Linear Regression, what a cost function is, why we use it, and its formula. Also wrote the code for linear regression in Python.
--------------------------------------------------------------------------------
/Day-4/readme.md:
--------------------------------------------------------------------------------
1 | # Day 4 of [#100DaysOfCode](https://twitter.com/Param3021/status/1533012411774803968)
2 |
3 | ## Task
4 | 1. Learn Pandas
5 | - Cleaning data & Handling missing values
6 | - Time Series data
7 | - Reading/Writing data to different formats (not done)
8 |
9 | # Resources
10 | - Python Pandas Tutorial Playlist by Corey Schafer YT channel: ([Playlist link](https://www.youtube.com/playlist?list=PL-osiE80TeTsWmV9i9c58mdDCSskIFdDS))
11 | - Stack Overflow Developer Survey 2021 ([link](https://insights.stackoverflow.com/survey))
12 | - Ethereum Historical Dataset on Kaggle ([link](https://www.kaggle.com/datasets/prasoonkottarathil/ethereum-historical-dataset?select=ETH_1H.csv))
13 |
14 | ### Topics I have learnt
15 | 1. Cleaning data & Handling missing values
16 | 2. Working with Dates & Time Series Data (interesting !!!; sketched below)
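
A small sketch of both ideas; the column names follow the Ethereum dataset linked above, so treat them as assumptions:

```python
import pandas as pd

# Load with a proper DatetimeIndex for time-series work.
df = pd.read_csv("ETH_1H.csv", parse_dates=["Date"], index_col="Date")
df = df.dropna(subset=["Close"])               # drop rows missing the price
df["Volume"] = df["Volume"].fillna(0)          # fill remaining gaps with a default
weekly_high = df["High"].resample("W").max()   # time-series resampling
```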
17 |
18 | ### Software used
19 | - Jupyter Notebook
20 | - Python 3.10.2
21 | - Numpy 1.22.4
22 | - pandas 1.4.2
23 |
24 | ### My Notebooks
25 | - [Pandas_tutorial_part_9.ipynb](Pandas_tutorial_part_9.ipynb)
26 | - [Pandas_tutorial_part_10.ipynb](Pandas_tutorial_part_10.ipynb)
27 |
28 | ### Conclusion:
29 | Today I learnt how to clean a dataset by handling NaN (missing) values, how to work with dates, and did some time series analysis. I think I am being lazy.
30 |
--------------------------------------------------------------------------------
/Day-40/readme.md:
--------------------------------------------------------------------------------
1 | # Day 40 of [#100DaysOfCode](https://twitter.com/Param3021/status/1547887157993680896)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Cost function
6 | 2. Gradient Descent (not done)
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Cost function
14 | - How cost function works
15 | - How the weights `w` and `b` affect the linear regression model
16 | - 2D/3D contour plot (visualization of the cost function; sketched below)
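
A rough sketch, on toy data, of the contour visualization of J(w, b):

```python
import numpy as np
import matplotlib.pyplot as plt

x = np.array([1.0, 2.0, 3.0])
y = np.array([2.0, 4.0, 6.0])   # the true line is w=2, b=0

W, B = np.meshgrid(np.linspace(0, 4, 100), np.linspace(-2, 2, 100))
J = np.zeros_like(W)
for xi, yi in zip(x, y):        # cost at every (w, b) on the grid
    J += (W * xi + B - yi) ** 2
J /= 2 * len(x)

plt.contour(W, B, J, levels=20)  # a bowl centred near (2, 0)
plt.xlabel("w"); plt.ylabel("b")
plt.show()
```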
17 |
18 | ### Software used:
19 | - Jupyter Notebook
20 | - Python 3.10.2
21 |
22 | ### My notebooks
23 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
24 |
25 | ### Conclusion:
26 | Today I completed the Regression model part of Week 1 and learned about the cost function, and how the weights w and b affect the linear regression function. Also learned about the 3D contour plot, it's 🔥.
--------------------------------------------------------------------------------
/Day-41/readme.md:
--------------------------------------------------------------------------------
1 | # Day 41 of [#100DaysOfCode](https://twitter.com/Param3021/status/1548234590695079936)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Gradient Descent
6 |
7 | ## Resources
8 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
9 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
10 |
11 | ### Topics I have learnt
12 | 1. Gradient Descent
13 | - What is gradient descent
14 | - What is the use of gradient descent
15 | - Working of gradient descent
16 |
17 | ### Software used:
18 | - Jupyter Notebook
19 | - Python 3.10.2
20 |
21 | ### My notebooks
22 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
23 |
24 | ### Conclusion:
25 | Today I learned what Gradient Descent is and how it works.
26 | It's really great: it finds the parameters which make the cost function's value as small as possible.
--------------------------------------------------------------------------------
/Day-42/readme.md:
--------------------------------------------------------------------------------
1 | # Day 42 of [#100DaysOfCode](https://twitter.com/Param3021/status/1548595765290754048)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Gradient Descent in ML
6 |
7 | ## Resources
8 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
9 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
10 |
11 | ### Topics I have learnt
12 | 1. Gradient Descent
13 | - Algorithm
14 | - Working of Gradient Descent
15 | - How the *derivative* and *learning rate* affect the *coefficients*
16 | - How *derivatives* are calculated
17 | - Gradient Descent in action (one update step is sketched below)
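
A minimal sketch of one batch gradient-descent update for single-feature linear regression, on toy numbers:

```python
import numpy as np

def gradients(x, y, w, b):
    """Partial derivatives of J(w, b): returns dJ/dw, dJ/db."""
    err = w * x + b - y
    return (err * x).mean(), err.mean()

x = np.array([1.0, 2.0, 3.0]); y = np.array([2.0, 4.0, 6.0])
w, b, alpha = 0.0, 0.0, 0.1            # alpha is the learning rate
dw, db = gradients(x, y, w, b)
w, b = w - alpha * dw, b - alpha * db  # one gradient-descent step
```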
18 |
19 | ### Software used:
20 | - Jupyter Notebook
21 | - Python 3.10.2
22 |
23 | ### My notebooks
24 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
25 |
26 | ### Conclusion:
27 | Finally! Today I completed Week 1 of the Supervised Machine Learning course, and learned about the Batch Gradient Descent algorithm in Linear Regression.
--------------------------------------------------------------------------------
/Day-43/readme.md:
--------------------------------------------------------------------------------
1 | # Day 43 of [#100DaysOfCode](https://twitter.com/Param3021/status/1548964394042011648)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Linear Regression with multiple variables
6 | 2. Vectorization (not done)
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Linear regression with multiple features
14 | - Formula of linear regression with multiple features using vectors (see the sketch below)
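
The formula is f(x) = w · x + b with vectors w and x; a one-line NumPy sketch with illustrative values:

```python
import numpy as np

w = np.array([0.5, -1.2, 3.0])  # one weight per feature
x = np.array([2.0, 1.0, 4.0])   # one example with three features
b = 7.0
f_x = np.dot(w, x) + b          # f(x) = w . x + b
```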
15 |
16 | ### My notebooks
17 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
18 |
19 | ### Conclusion:
20 | Today I learned about linear regression with multiple variables and its formula, quite interesting! It uses vectors for multiple features and their weights. Haven't started Vectorization yet.
--------------------------------------------------------------------------------
/Day-44/readme.md:
--------------------------------------------------------------------------------
1 | # Day 44 of [#100DaysOfCode](https://twitter.com/Param3021/status/1549295897414356992)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Vectorization
6 | 2. Gradient Descent for multiple linear regression
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Vectorization (a loop vs. vectorized comparison is sketched below)
14 | 2. Gradient Descent formula
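
A small sketch comparing an explicit Python loop with the vectorized dot product; exact timings are machine-dependent:

```python
import time
import numpy as np

a = np.random.rand(1_000_000)
b = np.random.rand(1_000_000)

t0 = time.perf_counter()
s = sum(a[i] * b[i] for i in range(len(a)))  # explicit Python loop
t1 = time.perf_counter()
d = np.dot(a, b)                             # vectorized dot product
t2 = time.perf_counter()
print(f"loop: {t1 - t0:.4f}s  np.dot: {t2 - t1:.6f}s")
```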
15 |
16 | ### My notebooks
17 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
18 |
19 | ### Conclusion:
20 | Today I learned what vectorization is and how vectorized code is faster than loops. Also, the formula for gradient descent in multiple linear regression.
--------------------------------------------------------------------------------
/Day-45/readme.md:
--------------------------------------------------------------------------------
1 | # Day 45 of [#100DaysOfCode](https://twitter.com/Param3021/status/1549677064823140353)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Feature Scaling
6 | 2. Checking gradient descent for convergence (not done)
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Feature Scaling (all three methods sketched below)
14 | - Max normalization
15 | - Mean normalization
16 | - Z-score normalization
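
A quick sketch of all three methods on a toy feature:

```python
import numpy as np

x = np.array([300.0, 500.0, 2000.0])
x_max  = x / x.max()                           # max normalization
x_mean = (x - x.mean()) / (x.max() - x.min())  # mean normalization
x_z    = (x - x.mean()) / x.std()              # z-score normalization
```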
17 |
18 | ### My notebooks
19 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
20 |
21 | ### Conclusion:
22 | Today I learned what Feature Scaling is and why to do it, plus some feature scaling methods: dividing by the max, mean normalization, and standardisation (z-score).
--------------------------------------------------------------------------------
/Day-46/readme.md:
--------------------------------------------------------------------------------
1 | # Day 46 of [#100DaysOfCode](https://twitter.com/Param3021/status/1550023758597193729)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Check Gradient Descent for convergence
6 | 2. Choose the best learning rate
7 | 3. Simple Linear Regression code
8 |
9 | ## Resources
10 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
11 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
12 |
13 | ### Topics I have learnt
14 | 1. Checking Gradient Descent for convergence
15 | 2. Choosing the best learning rate
16 | 3. Also wrote Linear Regression code (see the sketch below)
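
A minimal sketch of the convergence check: record J(w, b) every iteration and make sure the learning curve decreases and flattens (toy data):

```python
import numpy as np

x = np.array([1.0, 2.0, 3.0]); y = np.array([2.0, 4.0, 6.0])
w = b = 0.0
alpha = 0.1                                # the learning rate being tuned
history = []
for _ in range(200):
    err = w * x + b - y
    w -= alpha * (err * x).mean()
    b -= alpha * err.mean()
    history.append((err ** 2).mean() / 2)  # cost before this update
print(history[0], history[-1])             # should drop towards ~0
```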
17 |
18 | ### My notebooks
19 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
20 |
21 | ### Conclusion:
22 | Today I learned how to check that gradient descent has converged using the learning curve, and how to choose the best learning rate. Also wrote code for simple linear regression.
--------------------------------------------------------------------------------
/Day-47/readme.md:
--------------------------------------------------------------------------------
1 | # Day 47 of [#100DaysOfCode](https://twitter.com/Param3021/status/1550394454120747008)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Feature Engineering
6 | 2. Polynomial Regression
7 |
8 | ## Resources
9 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
10 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
11 |
12 | ### Topics I have learnt
13 | 1. Feature Engineering & Polynomial Regression
14 | - Creating polynomial features so a linear model can fit non-linear curves (see the sketch below)
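
A small sketch of the feature-engineering trick: add powers of x so a linear model can fit a curve (illustrative data):

```python
import numpy as np

x = np.arange(1.0, 6.0)
y = x ** 2                # non-linear target
X = np.c_[x, x**2, x**3]  # engineered polynomial features
# Linear regression on X (with feature scaling) can now fit the curve.
```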
15 |
16 | ### My notebooks
17 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
18 |
19 | ### Conclusion:
20 | Today I learned about Feature Engineering and polynomial regression, which makes non-linear curves that fit non-linear data well.
--------------------------------------------------------------------------------
/Day-48/readme.md:
--------------------------------------------------------------------------------
1 | # Day 48 of [#100DaysOfCode](https://twitter.com/Param3021/status/1550780252980654080)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Linear Regression code with Gradient Descent in Python
6 |
7 | ## Resources
8 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
9 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
10 |
11 | ### Topics I have learnt
12 | 1. Implemented Linear Regression with Gradient Descent in Python (Week 2 programming assignment)
13 |
14 |
15 | ### My notebooks
16 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
17 |
18 | ### Conclusion:
19 | Today I did the programming assignment on linear regression, and with this, Week 2 is completed 🥳.
20 |
21 | So far, my pace is good!
22 |
23 | Tomorrow, I will use all the things which I learned till now and make linear / polynomial regression model.
--------------------------------------------------------------------------------
/Day-49/readme.md:
--------------------------------------------------------------------------------
1 | # Day 49 of [#100DaysOfCode](https://twitter.com/Param3021/status/1548964394042011648)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Code for Linear / Polynomial regression in Python.
6 |
7 | ## Resources
8 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
9 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
10 |
11 | ### Topics I have learnt
12 | 1. Linear regression code in Python
13 |
14 | ### My notebooks
15 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
16 |
17 | ### Conclusion:
18 | Today I wrote code for Multiple Linear Regression, including its cost function and gradient descent.
19 | Took so much time, but it worked successfully!
20 | Also did some feature engineering and feature scaling on the data.
21 | Didn't do the Polynomial regression code.
--------------------------------------------------------------------------------
/Day-5/readme.md:
--------------------------------------------------------------------------------
1 | # Day 5 of [#100DaysOfCode](https://twitter.com/Param3021/status/1533373019036721152)
2 |
3 | ## Task
4 | 1. Learn Pandas
5 | - Reading/Writing with different formats like json, excel, csv, sql etc...
6 | - Data Analysis Project (EDA) (70%)
7 |
8 | # Resources
9 | - Python Pandas Tutorial Playlist by Corey Schafer YT channel: ([Playlist link](https://www.youtube.com/playlist?list=PL-osiE80TeTsWmV9i9c58mdDCSskIFdDS))
10 | - Stack Overflow Developer Survey 2021 ([link](https://insights.stackoverflow.com/survey))
11 | - FreeCodeCamp [Demographic Data Analyzer](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/demographic-data-analyzer)
12 |
13 | ### Topics I have learnt
14 | 1. Reading/Writing data in the following formats (sketched below):
15 | - CSV/TSV
16 | - JSON
17 | - EXCEL
18 | - SQL
19 | 2. FreeCodeCamp Demographic Data Analyzer project (70% done)
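
A minimal sketch of the same read/write round-trips; the file name and connection URL are placeholders:

```python
import pandas as pd
from sqlalchemy import create_engine

df = pd.read_csv("survey_results_public.csv")     # CSV in
df.to_json("survey.json", orient="records")       # JSON out
df.to_excel("survey.xlsx", index=False)           # Excel out (needs openpyxl)

engine = create_engine("mysql+pymysql://user:password@localhost/dbname")
df.to_sql("survey", engine, if_exists="replace")  # SQL out
```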
20 |
21 | ### Software used
22 | - Jupyter Notebook
23 | - Python 3.10.2
24 | - Numpy 1.22.4
25 | - pandas 1.4.2
26 | - SQLAlchemy 1.4.37
27 | - PyMySQL 1.0.2
28 |
29 | ### My Notebooks
30 | - [Pandas_tutorial_part_11.ipynb](Pandas_tutorial_part_11.ipynb)
31 | - [Data_analysis_project_2.ipynb](Data_analysis_project_2.ipynb)
32 |
33 | ### Conclusion:
34 | Today I learnt how to read and write data in different formats like csv, json, excel & sql. Also did EDA on FreeCodeCamp's Demographic Data Analyzer (70% done). Both things are so interesting.
--------------------------------------------------------------------------------
/Day-50/readme.md:
--------------------------------------------------------------------------------
1 | # Day 50 of [#100DaysOfCode](https://twitter.com/Param3021/status/1551513162750767104)
2 | I have created a repository having all my notes and codes of Machine learning specialization course. [🔗](https://github.com/Param302/ML-specialization-notes)
3 |
4 | ## Task
5 | 1. Week 3 - Classification
6 |
7 | ## Resources
8 | - Machine Learning Specialization Course [🔗](https://www.deeplearning.ai/courses/machine-learning-specialization/)
9 | - - Course 1: Supervised Machine Learning: Regression and Classification [🔗](https://www.coursera.org/learn/machine-learning?specialization=machine-learning-introduction)
10 |
11 | ### Topics I have learnt
12 | 1. Classification
13 | 2. Logistic Regression
14 | 3. Sigmoid function (sketched below)
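
A one-function sketch of the sigmoid used by logistic regression:

```python
import numpy as np

def sigmoid(z):
    """g(z) = 1 / (1 + e^(-z)); squashes any real z into (0, 1)."""
    return 1 / (1 + np.exp(-z))

print(sigmoid(0))   # 0.5, the decision boundary
print(sigmoid(10))  # ~1.0, a confident positive prediction
```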
15 |
16 | ### My notebooks
17 | - All my notes for this course are in my ML specialization notes repository. [🔗](https://github.com/Param302/ML-specialization-notes)
18 |
19 | ### Conclusion:
20 | Today I learnt about Classification and Logistic Regression, and its sigmoid function.
--------------------------------------------------------------------------------
/Day-6/readme.md:
--------------------------------------------------------------------------------
1 | # Day 6 of [#100DaysOfCode](https://twitter.com/Param3021/status/1533737876563046400)
2 |
3 | ## Task
4 | 1. Linear Algebra
5 | 2. Data Analysis With Pandas
6 |
8 | ## Resources
8 | - 3Blue1Brown [Linear Algebra](https://www.youtube.com/playlist?list=PLZHQObOWTQDPD3MizzM2xVFitgF8hE_ab)
9 | - FreeCodeCamp [Demographic Data Analyzer](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/demographic-data-analyzer)
10 |
11 | ### Topics I have learnt
12 | 1. Linear Algebra
13 | - - Vectors: addition & scalar multiplication
14 | - - Linear Combinations, span, basis vectors
15 |
16 | 2. EDA on FreeCodeCamp's Demographic Data Analyzer.
17 |
18 | ### Software used
19 | - Jupyter Notebook
20 | - Python 3.10.2
21 | - Numpy 1.22.4
22 | - pandas 1.4.2
23 |
24 | ### My Notebooks
25 | - [Data_analysis_project_2.ipynb](Data_analysis_project_2.ipynb)
26 |
27 | ### Conclusion:
28 | Today I learnt the basics of Vectors in Linear Algebra, completed FreeCodeCamp's Demographic Data Analyzer project, and did some pandas practice on DataCamp.
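29 | 
30 | Today's vector ideas, sketched in NumPy (toy vectors of my own choosing):
31 | 
32 | ```python
33 | import numpy as np
34 | 
35 | v = np.array([2.0, 1.0])
36 | w = np.array([-1.0, 3.0])
37 | 
38 | print(v + w)  # vector addition       -> [1. 4.]
39 | print(3 * v)  # scalar multiplication -> [6. 3.]
40 | 
41 | # Every 2-D vector is a linear combination of the basis vectors i-hat and j-hat;
42 | # their span is the whole plane.
43 | i_hat = np.array([1.0, 0.0])
44 | j_hat = np.array([0.0, 1.0])
45 | print(2 * i_hat + 1 * j_hat)  # rebuilds v -> [2. 1.]
46 | ```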
--------------------------------------------------------------------------------
/Day-7/plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Param302/100DaysOfCode-Python/44000a5ec5173c65c18810db68e11812baa2f0c4/Day-7/plot.png
--------------------------------------------------------------------------------
/Day-7/readme.md:
--------------------------------------------------------------------------------
1 | # Day 7 of [#100DaysOfCode](https://twitter.com/Param3021/status/1534075090111184897)
2 |
3 | ## Task
4 | 1. Matplotlib Library
5 | 2. Matrices in Linear Algebra (not done)
6 | 3. EDA with Pandas & matplotlib (not done)
7 |
8 | ## Resources
9 | - Corey Schafer [Matplotlib Tutorial](https://www.youtube.com/playlist?list=PL-osiE80TeTvipOqomVEeZ1HRrcEvtZB_) Playlist
10 |
11 |
12 | ### Topics I have learnt
13 | 1. Matplotlib library
14 | - - Basics (title, labels, ticks, legends, grid)
15 | - - Line plots
16 | - - Bar plots (horizontal & vertical)
17 | - - Format line plots & bar plots
18 | - - Use different styles in plots
19 | - - Plotting multiple data
20 | - - Saving the figure in different formats
21 |
22 | ### Software used
23 | - Jupyter Notebook
24 | - Python 3.10.2
25 | - Numpy 1.22.4
26 | - pandas 1.4.2
27 | - matplotlib 3.5.2
28 |
29 | ### My Notebooks
30 | - [matplotlib_tutorial_part_1.ipynb](./matplotlib_tutorial_part_1.ipynb)
31 | - [matplotlib_tutorial_part_2.ipynb](./matplotlib_tutorial_part_2.ipynb)
32 |
33 | ### Conclusion:
34 | Today I learnt the basics of matplotlib: how to plot line & bar graphs, how to format them, and how to change plot styles.
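35 | 
36 | All of today's basics exercised in one tiny script (the salary numbers are made up):
37 | 
38 | ```python
39 | import matplotlib.pyplot as plt
40 | 
41 | ages = [25, 26, 27, 28, 29]
42 | py_salaries = [45000, 48000, 53000, 57000, 63000]
43 | js_salaries = [43000, 46000, 51000, 55000, 60000]
44 | 
45 | plt.style.use("fivethirtyeight")                # one of the built-in styles
46 | plt.bar(ages, js_salaries, label="JavaScript")  # vertical bar plot
47 | plt.plot(ages, py_salaries, label="Python",
48 |          color="k", linestyle="--", marker="o") # formatted line plot
49 | plt.title("Median Salary by Age")
50 | plt.xlabel("Age")
51 | plt.ylabel("Salary (USD)")
52 | plt.legend()
53 | plt.grid(True)
54 | plt.tight_layout()
55 | plt.savefig("plot.png")  # .svg, .pdf etc. also work
56 | plt.show()
57 | ```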
--------------------------------------------------------------------------------
/Day-8/data/language_popularity.csv:
--------------------------------------------------------------------------------
1 | Language,Popularity
2 | C++,20057
3 | HTML/CSS,46259
4 | JavaScript,53587
5 | Objective-C,2310
6 | PHP,18130
7 | Swift,4204
8 | Python,39792
9 | Assembly,4632
10 | C,17329
11 | R,4185
12 | Rust,5799
13 | TypeScript,24909
14 | Bash/Shell,22385
15 | SQL,38835
16 | C#,22984
17 | Java,29162
18 | Node.js,27975
19 | PowerShell,8871
20 | Ruby,5569
21 | Perl,2028
22 | Matlab,3846
23 | Kotlin,6866
24 | Julia,1068
25 | Haskell,1749
26 | Delphi,1731
27 | Go,7879
28 | Scala,2148
29 | Dart,4965
30 | VBA,3847
31 | Groovy,2479
32 | Clojure,1552
33 | APL,536
34 | LISP,1096
35 | F#,804
36 | Elixir,1438
37 | Erlang,651
38 | Crystal,466
39 | COBOL,437
40 |
--------------------------------------------------------------------------------
/Day-8/readme.md:
--------------------------------------------------------------------------------
1 | # Day 8 of [#100DaysOfCode](https://twitter.com/Param3021/status/1534450016978628608)
2 |
3 | ## Task
4 | 1. Matplotlib Library
5 | 2. EDA with Pandas & matplotlib (not done)
6 |
7 | ## Resources
8 | - Corey Schafer [Matplotlib Tutorial](https://www.youtube.com/playlist?list=PL-osiE80TeTvipOqomVEeZ1HRrcEvtZB_) Playlist
9 |
10 |
11 | ### Topics I have learnt
12 | 1. Matplotlib library
13 | - - Pie chart
14 | - - Stack plot
15 |
16 | ### Software used
17 | - Jupyter Notebook
18 | - Python 3.10.2
19 | - Numpy 1.22.4
20 | - pandas 1.4.2
21 | - matplotlib 3.5.2
22 |
23 | ### My Notebooks
24 | - [matplotlib_tutorial_part_3.ipynb](./matplotlib_tutorial_part_3.ipynb)
25 | - [matplotlib_tutorial_part_4.ipynb](./matplotlib_tutorial_part_4.ipynb)
26 |
27 | ### Conclusion:
28 | Today I learnt how to plot pie charts and stack plots.
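29 | 
30 | Both plot types in a minimal sketch, reusing today's language-popularity data for the pie chart (the stack-plot series are made up):
31 | 
32 | ```python
33 | import pandas as pd
34 | import matplotlib.pyplot as plt
35 | 
36 | # Pie chart: five most popular languages from the survey data
37 | df = pd.read_csv("./data/language_popularity.csv")
38 | top5 = df.nlargest(5, "Popularity")
39 | plt.pie(top5["Popularity"], labels=top5["Language"],
40 |         autopct="%1.1f%%", wedgeprops={"edgecolor": "black"})
41 | plt.title("Top 5 Languages by Popularity")
42 | plt.show()
43 | 
44 | # Stack plot: areas stacked on a shared x-axis
45 | minutes = [1, 2, 3, 4, 5]
46 | p1, p2, p3 = [1, 2, 3, 3, 4], [1, 1, 1, 2, 2], [1, 1, 2, 2, 3]
47 | plt.stackplot(minutes, p1, p2, p3, labels=["P1", "P2", "P3"])
48 | plt.legend(loc="upper left")
49 | plt.title("Points Scored per Minute")
50 | plt.show()
51 | ```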
--------------------------------------------------------------------------------
/Day-9/data_analysis_project_3.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# FreeCodeCamp Data Analysis with Python Course\n",
8 | "### Project 2: [Medical Data Visualizer](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/medical-data-visualizer)"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 4,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "import pandas as pd\n",
18 | "# import seaborn as sns\n",
19 | "import matplotlib.pyplot as plt\n",
20 | "import numpy as np"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": 6,
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "# Import data\n",
30 | "df = pd.read_csv(\"./data/medical_examination.csv\")\n",
31 | "\n",
32 | "# Add 'overweight' column\n",
33 | "bmi = df[\"weight\"] / (df[\"height\"] / 100)**2\n",
34 | "df['overweight'] = bmi.apply(lambda x: 1 if x>25 else 0)\n",
35 | "\n",
36 | "# Normalize data by making 0 always good and 1 always bad. If the value of 'cholesterol' or 'gluc' is 1, make the value 0. If the value is more than 1, make the value 1.\n",
37 | "df[\"cholesterol\"] = df[\"cholesterol\"].apply(lambda x: 1 if x==1 else 1)\n",
38 | "df[\"gluc\"] = df[\"gluc\"].apply(lambda x: 1 if x==1 else 1)\n",
39 | "\n",
40 | "# Draw Categorical Plot\n",
41 | "def draw_cat_plot():\n",
42 | " # Create DataFrame for cat plot using `pd.melt` using just the values from 'cholesterol', 'gluc', 'smoke', 'alco', 'active', and 'overweight'.\n",
43 | " df_cat = None\n",
44 | "\n",
45 | " # Group and reformat the data to split it by 'cardio'. Show the counts of each feature. You will have to rename one of the columns for the catplot to work correctly.\n",
46 | " df_cat = None\n",
47 | "\n",
48 | " # Draw the catplot with 'sns.catplot()'\n",
49 | "\n",
50 | " # Do not modify the next two lines\n",
51 | " fig.savefig('catplot.png')\n",
52 | " return fig\n",
53 | "\n",
54 | "\n",
55 | "# Draw Heat Map\n",
56 | "def draw_heat_map():\n",
57 | " # Clean the data\n",
58 | " df_heat = None\n",
59 | "\n",
60 | " # Calculate the correlation matrix\n",
61 | " corr = None\n",
62 | "\n",
63 | " # Generate a mask for the upper triangle\n",
64 | " mask = None\n",
65 | "\n",
66 | " # Set up the matplotlib figure\n",
67 | " fig, ax = None\n",
68 | "\n",
69 | " # Draw the heatmap with 'sns.heatmap()'\n",
70 | "\n",
71 | " # Do not modify the next two lines\n",
72 | " fig.savefig('heatmap.png')\n",
73 | " return fig\n"
74 | ]
75 | }
76 | ],
77 | "metadata": {
78 | "interpreter": {
79 | "hash": "7e2642ab4b747f3fe33eee77a3cca2e0983db70ec81f3eda854dde93e34ee41b"
80 | },
81 | "kernelspec": {
82 | "display_name": "Python 3.10.2 ('venv': venv)",
83 | "language": "python",
84 | "name": "python3"
85 | },
86 | "language_info": {
87 | "codemirror_mode": {
88 | "name": "ipython",
89 | "version": 3
90 | },
91 | "file_extension": ".py",
92 | "mimetype": "text/x-python",
93 | "name": "python",
94 | "nbconvert_exporter": "python",
95 | "pygments_lexer": "ipython3",
96 | "version": "3.10.2"
97 | },
98 | "orig_nbformat": 4
99 | },
100 | "nbformat": 4,
101 | "nbformat_minor": 2
102 | }
103 |
--------------------------------------------------------------------------------
/Day-9/readme.md:
--------------------------------------------------------------------------------
1 | # Day 9 of [#100DaysOfCode](https://twitter.com/Param3021/status/1534808911970451456)
2 |
3 | ## Task
4 | 1. Linear Algebra (Matrices)
5 | 2. Data Analysis with Pandas & matplotlib
6 |
7 | ## Resources
8 | - 3Blue1Brown [Linear Algebra](https://www.youtube.com/playlist?list=PLZHQObOWTQDPD3MizzM2xVFitgF8hE_ab) playlist
9 | - Linear Algebra notes [link](https://hacobe.github.io/notes/linear_algebra_3blue1brown.html)
10 | - FreeCodeCamp Data Analysis with Python project 3 [Medical Data Visualizer](https://www.freecodecamp.org/learn/data-analysis-with-python/data-analysis-with-python-projects/medical-data-visualizer)
11 |
12 | ### Topics I have learnt
13 | 1. Linear Algebra - Matrices
14 | - - Linear Transformation
15 | - - Composition of matrices (matrix multiplication)
16 | 2. Data Analysis of Medical Data Visualizer (50% done)
17 | 3. All exercises of FreeCodeCamp's Data Analysis with Python course
18 |
19 | ### Software used
20 | - Jupyter Notebook
21 | - Python 3.10.2
22 | - Numpy 1.22.4
23 | - pandas 1.4.2
24 | - matplotlib 3.5.2
25 |
26 | ### My Notebooks
27 | - [data_analysis_project_3.ipynb](./data_analysis_project_3.ipynb)
28 |
29 | ### Conclusion:
30 | Today I learnt about Matrices, Linear Transformations & composition of matrices (matrix multiplication); linear transformations are a must-know for really understanding matrices. Also did 50% of FreeCodeCamp's Medical Data Visualizer project and all exercises of the Data Analysis with Python course.
31 |
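32 | A linear transformation is pinned down by where it sends the basis vectors, and composing two transformations is just matrix multiplication. A small NumPy sketch (example matrices of my own):
33 | 
34 | ```python
35 | import numpy as np
36 | 
37 | # The columns of a matrix are the images of i-hat and j-hat.
38 | shear = np.array([[1, 1],
39 |                   [0, 1]])      # i-hat stays put, j-hat lands on (1, 1)
40 | rotate90 = np.array([[0, -1],
41 |                      [1,  0]])  # 90-degree counter-clockwise rotation
42 | 
43 | v = np.array([2, 3])
44 | print(shear @ v)     # transformed vector -> [5 3]
45 | 
46 | # Composition applies right-to-left: first shear, then rotate
47 | composed = rotate90 @ shear
48 | print(composed @ v)  # same as rotate90 @ (shear @ v) -> [-3  5]
49 | ```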
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # [100 Days of Code in Python](https://twitter.com/Param3021/status/1531507810756067328?s=20&t=psaKuMNtRHbSmcQ7QQ0zuQ)
2 | I have started 100 Days of Code to document my coding journey, stay consistent, and keep myself accountable.
3 |
4 | I am learning Machine Learning, starting from scratch.
5 |
6 | ## Daily Tasks
7 | 1. Day - 0 Numpy [🔗](./Day-0/)
8 | 2. Day - 1 Numpy, Pandas [🔗](./Day-1/)
9 | 3. Day - 2 Pandas, Data Analysis project [🔗](./Day-2/)
10 | 4. Day - 3 Pandas (Grouping & Aggregating Data) [🔗](./Day-3/)
11 | 5. Day - 4 Pandas (Cleaning data & Time Series Analysis) [🔗](./Day-4/)
12 | 6. Day - 5 Pandas (Working with different data formats) & Data Analysis project [🔗](./Day-5/)
13 | 7. Day - 6 Linear Algebra (Vectors Basics) & Data Analysis project [🔗](./Day-6/)
14 | 8. Day - 7 Matplotlib library [🔗](./Day-7/)
15 | 9. Day - 8 Matplotlib Library [🔗](./Day-8/)
16 | 10. Day - 9 Linear Algebra (Matrices, Linear Transformations) & Data Analysis project [🔗](./Day-9/)
17 | 11. Day - 10 Matplotlib & Data analysis on Titanic Dataset [🔗](./Day-10/)
18 | 12. Day - 11 Data analysis on Titanic Dataset [🔗](./Day-11/)
19 | 13. Day - 12 Data analysis on Titanic Dataset [🔗](./Day-12/)
20 | 14. Day - 13 Data analysis on Titanic Dataset [🔗](./Day-13/)
21 | 15. Day - 14 Matplotlib Library [🔗](./Day-14/)
22 | 16. Day - 15 Matplotlib Library [🔗](./Day-15/)
23 | 17. Day - 16 Matplotlib & Seaborn Library [🔗](./Day-16/)
24 | 18. Day - 17 Seaborn Library & Data Analysis project [🔗](./Day-17/)
25 | 19. Day - 18 Seaborn Library & Data Analysis project [🔗](./Day-18/)
26 | 20. Day - 19 Intro to Machine Learning & House price prediction [🔗](./Day-19/)
27 | 21. Day - 20 Intermediate Machine Learning & House price prediction [🔗](./Day-20/)
28 | 22. Day - 21 Intermediate Machine Learning & House price prediction [🔗](./Day-21/)
29 | 23. Day - 22 Linear Algebra & House price prediction [🔗](./Day-22/)
30 | 24. Day - 23 Statistics & House price prediction [🔗](./Day-23/)
31 | 25. Day - 24 Linear Algebra & Statistics [🔗](./Day-24/)
32 | 26. Day - 25 Feature Engineering & House price prediction [🔗](./Day-25/)
33 | 27. Day - 26 Feature Engineering [🔗](./Day-26/)
34 | 28. Day - 27 Feature Engineering & House price prediction [🔗](./Day-27/)
35 | 29. Day - 28 Linear Algebra & PCA [🔗](./Day-28/)
36 | 30. Day - 29 Linear Algebra & Statistics [🔗](./Day-29/)
37 | 31. Day - 30 Titanic dataset prediction & GHW hackathon [🔗](./Day-30/)
38 | 32. Day - 31 Titanic dataset prediction & GHW hackathon [🔗](./Day-31/)
39 | 33. Day - 32 Titanic dataset prediction & GHW hackathon [🔗](./Day-32/)
40 | 34. Day - 33 Titanic dataset prediction & GHW hackathon [🔗](./Day-33/)
41 | 35. Day - 34 Titanic dataset prediction & GHW hackathon [🔗](./Day-34/)
42 | 36. Day - 35 Regression & Classification Random Forest [🔗](./Day-35/)
43 | 37. Day - 36 Started ML Specialization course & Revised Mathematics [🔗](./Day-36/)
44 | 38. Day - 37 Supervised vs unsupervised learning and Regression model [🔗](./Day-37/)
45 | 39. Day - 38 Linear Regression and Notations in ML [🔗](./Day-38/)
46 | 40. Day - 39 Linear Regression and its Cost function [🔗](./Day-39/)
47 | 41. Day - 40 Working of Cost function [🔗](./Day-40/)
48 | 42. Day - 41 Gradient Descent [🔗](./Day-41/)
49 | 43. Day - 42 Completed Week 1 of ML course & Gradient Descent [🔗](./Day-42/)
50 | 44. Day - 43 Linear regression with multiple variables [🔗](./Day-43/)
51 | 45. Day - 44 Vectorization and Gradient Descent [🔗](./Day-44/)
52 | 46. Day - 45 Feature Scaling [🔗](./Day-45/)
53 | 47. Day - 46 Gradient Descent and Simple Linear Regression code [🔗](./Day-46/)
54 | 48. Day - 47 Feature Engineering and Polynomial Regression [🔗](./Day-47/)
55 | 49. Day - 48 (Week 2 assignment) Linear Regression with Gradient Descent code [🔗](./Day-48/)
56 | 50. Day - 49 Linear Regression code in Python [🔗](./Day-49/)
57 | 51. Day - 50 Logistic Regression and Sigmoid function [🔗](./Day-50/)
--------------------------------------------------------------------------------