├── .gitattributes
├── .gitignore
├── Gradient Descent
│   ├── GD from Scratch
│   │   ├── .md
│   │   ├── BatchGradientDescent.ipynb
│   │   ├── MiniBatchGradientDescent.ipynb
│   │   ├── StochasticGradientDescent.ipynb
│   │   ├── bgd.gif
│   │   ├── bgd_pred.gif
│   │   ├── output.png
│   │   ├── sdg.gif
│   │   └── sgd_animation.gif
│   └── readme.md
├── LICENSE
├── PINN
│   ├── PINN.ipynb
│   └── ReadME.md
├── README.md
├── clustering
│   └── clustering_examples.ipynb
├── decision_trees
│   ├── Images
│   │   ├── 1.png
│   │   ├── 2.png
│   │   ├── 3.png
│   │   ├── 4.png
│   │   ├── 5.png
│   │   ├── 6.png
│   │   └── 7.png
│   ├── decision_trees_examples.ipynb
│   ├── decision_trees_implementation (1).pdf
│   ├── decision_trees_implementation.ipynb
│   └── heart.csv
├── dimensionality_reduction
│   ├── dimensionality_reduction_examples.ipynb
│   └── dimensionality_reduction_implementation.ipynb
├── linear_regression
│   ├── README.md
│   ├── multiple_regression
│   │   ├── images
│   │   │   ├── loss_history.png
│   │   │   ├── ml_regression_training_comparison.gif
│   │   │   └── output.png
│   │   ├── multiple_regression_scratch.ipynb
│   │   └── multiple_regression_sklearn.ipynb
│   ├── polynomial_regression
│   │   ├── images
│   │   │   ├── R2_Scores_Comparison.png
│   │   │   ├── R2_Scores_Comparison_for_Gradient_Descent.png
│   │   │   ├── cost_function.png
│   │   │   ├── header.png
│   │   │   ├── header_animated_complex.gif
│   │   │   ├── plot_data.png
│   │   │   ├── predictions_Comparison_for_Gradient_Descent.png
│   │   │   ├── predictions_comparison.png
│   │   │   ├── time_Comparison.png
│   │   │   └── time_Comparison_for_Gradient_Descent.png
│   │   └── poly_regession_from_scratch.ipynb
│   └── simple_regression
│       ├── images
│       │   ├── data_generation.gif
│       │   ├── final_prediction.png
│       │   ├── generated_data.png
│       │   ├── learning_process.gif
│       │   ├── loss_history.png
│       │   ├── parameter_evolution.png
│       │   ├── predictions.png
│       │   └── train_test_comparison.png
│       ├── linear_regression_scratch.ipynb
│       ├── linear_regression_sklearn.ipynb
│       ├── linear_regression_with_regul.ipynb
│       └── manually_linear_regession.ipynb
├── logistic_regression
│   ├── logistic_regression_reg.ipynb
│   └── logistic_regression_scratch.ipynb
├── naive_bayes
│   ├── naive_bayes_examples.ipynb
│   └── naive_bayes_implementation.ipynb
├── neural_networks
│   ├── NN-from-scratch.ipynb
│   ├── animation.gif
│   ├── neural_networks_implementation.ipynb
│   ├── output.png
│   ├── output1.png
│   └── output2.png
├── requirements.txt
└── svm
    └── svm_implementation.ipynb
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 | # Allow .gitkeep files to track empty folders
162 | !.gitkeep
163 |
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/.md
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/MiniBatchGradientDescent.ipynb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/MiniBatchGradientDescent.ipynb
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/bgd.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/bgd.gif
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/bgd_pred.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/bgd_pred.gif
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/output.png
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/sdg.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/sdg.gif
--------------------------------------------------------------------------------
/Gradient Descent/GD from Scratch/sgd_animation.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/Gradient Descent/GD from Scratch/sgd_animation.gif
--------------------------------------------------------------------------------
/Gradient Descent/readme.md:
--------------------------------------------------------------------------------
1 | # Understanding Gradient Descent Algorithms
2 |
3 | ## Introduction
4 | Gradient Descent is a powerful optimization algorithm widely used in machine learning to minimize cost functions. It works by iteratively adjusting parameters in the direction of steepest descent of the loss surface.
5 |
6 | ## Types of Gradient Descent
7 |
8 | ### 1. Batch Gradient Descent (BGD)
9 | - Computes gradient using entire dataset
10 | - Characteristics:
11 | - Most stable convergence
12 | - Computationally expensive
13 | - High memory requirements
14 | - Formula: θ = θ - α * ∇J(θ)
15 | - Best for: Small to medium datasets
16 |
17 | ### 2. Stochastic Gradient Descent (SGD)
18 | - Updates parameters using single training example
19 | - Characteristics:
20 | - Fast computation
21 | - High variance in parameter updates
22 | - Less likely to get stuck in local minima
23 | - Formula: θ = θ - α * ∇J(θ; x(i); y(i))
24 | - Best for: Large datasets, online learning
25 |
26 | ### 3. Mini-batch Gradient Descent
27 | - Uses small batches (typically 32-256 samples)
28 | - Characteristics:
29 | - Balanced computation speed
30 | - Moderate parameter update variance
31 | - Good parallelization capability
32 | - Formula: θ = θ - α * ∇J(θ; x(i:i+n); y(i:i+n))
33 | - Best for: Most practical applications
34 |
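To make the three variants concrete, here is a minimal NumPy sketch (illustrative, not this repository's notebook code) in which a single `batch_size` parameter selects between batch, stochastic, and mini-batch gradient descent for linear regression:

```python
import numpy as np

def gradient_descent(X, y, lr=0.01, epochs=100, batch_size=None, seed=0):
    """batch_size=None -> BGD, batch_size=1 -> SGD, otherwise mini-batch GD."""
    rng = np.random.default_rng(seed)
    n, d = X.shape
    theta = np.zeros(d)
    batch_size = n if batch_size is None else batch_size
    for _ in range(epochs):
        idx = rng.permutation(n)                  # reshuffle every epoch
        for start in range(0, n, batch_size):
            batch = idx[start:start + batch_size]
            Xb, yb = X[batch], y[batch]
            grad = 2 / len(batch) * Xb.T @ (Xb @ theta - yb)  # gradient of MSE
            theta -= lr * grad                    # θ = θ - α * ∇J(θ)
    return theta

# Usage: recover w ≈ [2, -3] from synthetic data with each variant
rng = np.random.default_rng(0)
X = rng.normal(size=(200, 2))
y = X @ np.array([2.0, -3.0]) + 0.1 * rng.normal(size=200)
for bs in (None, 1, 32):
    print(bs, gradient_descent(X, y, batch_size=bs))
```
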
35 | ## Mathematical Foundation
36 |
37 | ### Core Equations
38 | ```
39 | # Basic update rule
40 | θ(t+1) = θ(t) - α * ∇J(θ(t))
41 |
42 | # Learning rate scheduling
43 | α(t) = α₀ / (1 + kt)
44 |
45 | Where:
46 | - θ: Model parameters
47 | - α: Learning rate
48 | - ∇J: Gradient of cost function
49 | - t: Current iteration
50 | - k: Decay rate
51 | ```
52 |
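The decay schedule above translates directly to code (a one-line sketch):

```python
def decayed_lr(alpha0, k, t):
    # α(t) = α₀ / (1 + k·t): the learning rate shrinks as iteration t grows
    return alpha0 / (1 + k * t)
```
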
53 | ## Advanced Variations
54 |
55 | ### 1. Momentum
56 | - Adds velocity term to updates
57 | - v(t) = βv(t-1) + (1-β)∇J(θ)
58 | - θ = θ - αv(t)
59 |
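As a sketch, a single EMA-style momentum step exactly as written above:

```python
def momentum_step(theta, v, grad, lr=0.01, beta=0.9):
    # v(t) = βv(t-1) + (1-β)∇J(θ);  θ = θ - αv(t)
    v = beta * v + (1 - beta) * grad
    return theta - lr * v, v
```
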
60 | ### 2. Adam
61 | - Combines momentum and RMSprop
62 | - Adaptive learning rates
63 | - State-of-the-art performance
64 |
65 | ### 3. RMSprop
66 | - Adaptive learning rates
67 | - Handles non-stationary objectives
68 | - Good for RNNs
69 |
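A minimal sketch of the Adam update described above, combining the momentum-style first moment with an RMSprop-style second moment (standard default hyperparameters; `t` counts from 1):

```python
import numpy as np

def adam_step(theta, m, v, grad, t, lr=0.001, b1=0.9, b2=0.999, eps=1e-8):
    m = b1 * m + (1 - b1) * grad             # first moment (momentum)
    v = b2 * v + (1 - b2) * grad ** 2        # second moment (RMSprop)
    m_hat = m / (1 - b1 ** t)                # bias correction
    v_hat = v / (1 - b2 ** t)
    theta = theta - lr * m_hat / (np.sqrt(v_hat) + eps)
    return theta, m, v
```
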
70 | ## Optimization Tips
71 |
72 | ### Learning Rate Selection
73 | - Start with α = 0.1 or 0.01
74 | - Monitor loss curve
75 | - Use learning rate schedules
76 | - Consider adaptive methods
77 |
78 | ### Batch Size Guidelines
79 | - Small (32-64): Better generalization
80 | - Large (128-256): Faster training
81 | - Very Large (512+): Distributed training
82 |
83 | ## Performance Comparison
84 |
85 | | Aspect | BGD | SGD | Mini-batch |
86 | |:-------|:----|:----|:-----------|
87 | | Computation | O(n) | O(1) | O(b) |
88 | | Memory | High | Minimal | Moderate |
89 | | Convergence | Deterministic | Stochastic | Semi-stochastic |
90 | | Parallelization | Limited | Poor | Excellent |
91 |
92 | ## Common Challenges
93 | 1. Vanishing/Exploding gradients
94 | 2. Saddle points
95 | 3. Poor conditioning
96 | 4. Local minima
97 |
98 | ## Best Practices
99 | - Normalize input data
100 | - Monitor gradient norms
101 | - Use gradient clipping (see the sketch below)
102 | - Implement early stopping
103 | - Cross-validate hyperparameters
104 |
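For the gradient-clipping practice above, a minimal norm-clipping sketch:

```python
import numpy as np

def clip_gradient(grad, max_norm=1.0):
    # Rescale the gradient so its L2 norm never exceeds max_norm
    norm = np.linalg.norm(grad)
    return grad if norm <= max_norm else grad * (max_norm / norm)
```
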
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Dark Coder
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/PINN/ReadME.md:
--------------------------------------------------------------------------------
1 | # Physics-Informed Neural Network (PINN) vs. Traditional Neural Network (ANN)
2 |
3 | ## 📌 Introduction
4 |
5 | In this experiment, we compare **Physics-Informed Neural Networks (PINNs)** and **Artificial Neural Networks (ANNs)** for solving the **1D Heat Equation**:
6 |
7 | $$
8 | \frac{\partial u}{\partial t} = \alpha \frac{\partial^2 u}{\partial x^2}
9 | $$
10 |
11 | where $u(x, t)$ represents the heat distribution, and $\alpha$ is the diffusion coefficient.
12 |
13 | ### 🔹 What is PINN?
14 | PINNs use **both data and physics constraints** (e.g., differential equations) to improve learning.
15 |
16 | ### 🔹 What is ANN?
17 | A traditional ANN learns purely from **data without any physics knowledge**.
18 |
19 | ### 📌 Goal
20 | - Train both **PINN and ANN** on noisy data.
21 | - Compare their ability to **recover the underlying solution**.
22 |
23 | ---
24 | ## 📌 Implementation
25 |
26 | ### 🔹 **Step 1: Define PINN and ANN Models**
27 |
28 | ```python
29 | import torch
30 | import torch.nn as nn
31 | import torch.optim as optim
32 | import numpy as np
33 | import matplotlib.pyplot as plt
34 |
35 | # Define the PINN network
36 | class PINN(nn.Module):
37 | def __init__(self):
38 | super(PINN, self).__init__()
39 | self.net = nn.Sequential(
40 | nn.Linear(2, 32), nn.Tanh(),
41 | nn.Linear(32, 32), nn.Tanh(),
42 | nn.Linear(32, 32), nn.Tanh(),
43 | nn.Linear(32, 1)
44 | )
45 |
46 | def forward(self, x, t):
47 | input_tensor = torch.cat((x, t), dim=1)
48 | return self.net(input_tensor)
49 |
50 | # Define the traditional ANN network
51 | class ANN(nn.Module):
52 | def __init__(self):
53 | super(ANN, self).__init__()
54 | self.net = nn.Sequential(
55 | nn.Linear(2, 32), nn.ReLU(),
56 | nn.Linear(32, 32), nn.ReLU(),
57 | nn.Linear(32, 32), nn.ReLU(),
58 | nn.Linear(32, 1)
59 | )
60 |
61 | def forward(self, x, t):
62 | input_tensor = torch.cat((x, t), dim=1)
63 | return self.net(input_tensor)
64 | ```
65 |
66 | ---
67 | ### 🔹 **Step 2: Define the Physics Loss for PINN**
68 | ```python
69 | def physics_loss(model, x, t, alpha=0.01):
70 |     x = x.clone().detach().requires_grad_(True)  # clone so the shared training tensors are not mutated
71 |     t = t.clone().detach().requires_grad_(True)
72 |
73 | u = model(x, t)
74 | u_t = torch.autograd.grad(u, t, grad_outputs=torch.ones_like(u), create_graph=True)[0]
75 | u_x = torch.autograd.grad(u, x, grad_outputs=torch.ones_like(u), create_graph=True)[0]
76 | u_xx = torch.autograd.grad(u_x, x, grad_outputs=torch.ones_like(u_x), create_graph=True)[0]
77 |
78 | residual = u_t - alpha * u_xx # Heat equation residual
79 | return torch.mean(residual**2)
80 | ```
81 |
82 | ---
83 | ### 🔹 **Step 3: Generate Noisy Training Data**
84 | ```python
85 | N = 1000 # Number of training points
86 | x_train = torch.rand((N, 1)) * 2 - 1 # x in [-1, 1]
87 | t_train = torch.rand((N, 1)) * 2 - 1 # t in [-1, 1]
88 |
89 | noise_level = 0.1
90 | u_exact = torch.sin(torch.pi * x_train) # True function
91 | u_noisy = u_exact + noise_level * torch.randn_like(u_exact) # Noisy data
92 | ```
93 |
94 | ---
95 | ### 🔹 **Step 4: Train Both Models**
96 | ```python
97 | pinn_model = PINN()
98 | ann_model = ANN()
99 | optimizer_pinn = optim.Adam(pinn_model.parameters(), lr=0.01)
100 | optimizer_ann = optim.Adam(ann_model.parameters(), lr=0.01)
101 |
102 | epochs = 5000
103 | for epoch in range(epochs):
104 | optimizer_pinn.zero_grad()
105 | u_pred = pinn_model(x_train, torch.zeros_like(t_train))
106 | loss_data = torch.mean((u_pred - u_noisy) ** 2)
107 | loss_physics = physics_loss(pinn_model, x_train, t_train)
108 | loss = loss_data + loss_physics
109 | loss.backward()
110 | optimizer_pinn.step()
111 |
112 | optimizer_ann.zero_grad()
113 | u_ann_pred = ann_model(x_train, torch.zeros_like(t_train))
114 | loss_ann = torch.mean((u_ann_pred - u_noisy) ** 2)
115 | loss_ann.backward()
116 | optimizer_ann.step()
117 |
118 | if epoch % 500 == 0:
119 | print(f"Epoch {epoch}, PINN Loss: {loss.item():.6f}, ANN Loss: {loss_ann.item():.6f}")
120 | ```
121 |
122 | ---
123 | ### 🔹 **Step 5: Visualizing Results**
124 | ```python
125 | x_test = torch.linspace(-1, 1, 100).view(-1, 1)
126 | t_test = torch.zeros_like(x_test)
127 |
128 | u_true = torch.sin(torch.pi * x_test).detach().numpy()
129 | u_noisy_sample = u_noisy[:100].detach().numpy()
130 | u_pinn_pred = pinn_model(x_test, t_test).detach().numpy()
131 | u_ann_pred = ann_model(x_test, t_test).detach().numpy()
132 |
133 | plt.figure(figsize=(10, 5))
134 | plt.plot(x_test, u_true, label="True Solution", linestyle="dashed", color="blue")
135 | plt.scatter(x_train[:100], u_noisy_sample, label="Noisy Data", color="gray", alpha=0.5)
136 | plt.plot(x_test, u_pinn_pred, label="PINN Prediction", color="red")
137 | plt.plot(x_test, u_ann_pred, label="ANN Prediction", color="green")
138 | plt.xlabel("x")
139 | plt.ylabel("u(x, 0)")
140 | plt.title("Comparison of PINN, ANN, and Noisy Data")
141 | plt.legend()
142 | plt.grid()
143 | plt.show()
144 | ```
145 |
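To back the qualitative comparison below with a number, a minimal check reusing the arrays computed above (exact values vary with the random seed):

```python
mse_pinn = np.mean((u_pinn_pred - u_true) ** 2)
mse_ann = np.mean((u_ann_pred - u_true) ** 2)
print(f"PINN MSE vs true solution: {mse_pinn:.6f}")
print(f"ANN  MSE vs true solution: {mse_ann:.6f}")
```
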
146 | ---
147 | ## 📌 Results and Observations
148 | ✅ **PINN learns a smooth solution**, despite noisy data.
149 | ✅ **ANN overfits noise**, failing to recover the true function.
150 | ✅ **PINN generalizes better**, as it incorporates physical laws.
151 |
152 | | Model | Uses Physics? | Handles Noisy Data? | Generalization |
153 | |--------|--------------|----------------|--------------|
154 | | **PINN** | ✅ Yes | ✅ Robust | ✅ Excellent |
155 | | **ANN** | ❌ No | ❌ Overfits | ❌ Poor |
156 |
157 | ---
158 | ## 📌 Future Work
159 | ✅ Extend PINN to **2D Heat Equation**
160 | ✅ Apply PINNs to **Navier-Stokes (fluid dynamics)**
161 | ✅ Experiment with **real-world physics problems**
162 |
163 |
164 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ML-Algorithms-From-Scratch
2 |
3 | A comprehensive collection of machine learning algorithms implemented both from scratch and using popular libraries. Each implementation includes detailed explanations, mathematical concepts, and practical examples.
4 |
5 | ## 🎯 Project Goal
6 | This repository aims to provide clear, well-documented implementations of machine learning algorithms to help understand their inner workings. Each algorithm is implemented twice:
7 | 1. From scratch using NumPy (to understand the core concepts)
8 | 2. Using popular libraries like scikit-learn (for practical applications)
9 |
10 |
11 | ## 🗂️ Algorithms Included
12 | - Linear Regression
13 | - Methods:
14 | - Gradient Descent
15 | - Normal Equation
16 | - Simple Linear Regression
17 | - Multiple Linear Regression
18 | - Polynomial Regression
19 | - Gradient Descent
20 | - Batch Gradient Descent
21 |   - Stochastic Gradient Descent
  - Mini-Batch Gradient Descent
22 | - Neural Networks
23 | - Neural Network from Scratch
24 | - Decision Tree
25 | - PINN (Physics-Informed Neural Network)
26 |
27 |
28 | - More algorithms coming soon:
29 | - Logistic Regression
30 | - Support Vector Machines
31 | - K-means Clustering
32 | - Naive Bayes
33 | - Dimensionality Reduction
34 |
35 | ## 📚 Features
36 | - Detailed Jupyter notebooks with step-by-step explanations
37 | - Mathematical concepts and formulas
38 | - Visualizations of algorithm behavior
39 | - Performance comparisons
40 | - Real-world examples and use cases
41 | - Comprehensive documentation
42 |
43 | ## 🛠️ Technologies Used
44 | - Python 3.8+
45 | - NumPy
46 | - Matplotlib
47 | - scikit-learn
48 | - Jupyter Notebook
49 |
50 | ## 🚀 Getting Started
51 | 1. Clone the repository (see the command below)
52 | 2. Install dependencies:
53 | ```bash
54 | pip install -r requirements.txt
55 | ```
56 | 3. Navigate to any algorithm folder
57 | 4. Open the Jupyter notebooks to see implementations
58 |
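For step 1, using this repository's URL:

```bash
git clone https://github.com/codewithdark-git/ML-Algorithms-From-Scratch.git
cd ML-Algorithms-From-Scratch
```
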
59 | ## 📖 Learning Path
60 | Each algorithm folder contains:
61 | - Theoretical explanation
62 | - Step-by-step implementation
63 | - Visualization of results
64 | - Practical examples
65 | - Performance evaluation
66 |
67 | ## 🤝 Contributing
68 | Contributions are welcome! Feel free to:
69 | - Add new algorithms
70 | - Improve existing implementations
71 | - Add more examples
72 | - Enhance documentation
73 |
74 | ## 📝 License
75 | This project is licensed under the MIT License - see the LICENSE file for details.
76 |
--------------------------------------------------------------------------------
/clustering/clustering_examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------
/decision_trees/Images/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/1.png
--------------------------------------------------------------------------------
/decision_trees/Images/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/2.png
--------------------------------------------------------------------------------
/decision_trees/Images/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/3.png
--------------------------------------------------------------------------------
/decision_trees/Images/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/4.png
--------------------------------------------------------------------------------
/decision_trees/Images/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/5.png
--------------------------------------------------------------------------------
/decision_trees/Images/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/6.png
--------------------------------------------------------------------------------
/decision_trees/Images/7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/Images/7.png
--------------------------------------------------------------------------------
/decision_trees/decision_trees_examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from sklearn.tree import DecisionTreeClassifier\n",
10 | "from sklearn.preprocessing import LabelEncoder\n",
11 | "from sklearn.metrics import accuracy_score\n",
12 | "from sklearn.datasets import load_iris\n",
13 | "\n",
14 | "# Load the dataset\n"
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 2,
20 | "metadata": {},
21 | "outputs": [],
22 | "source": [
23 | "X,y = load_iris(return_X_y=True)\n"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 5,
29 | "metadata": {},
30 | "outputs": [],
31 | "source": [
32 | "from sklearn.model_selection import train_test_split\n",
33 | "\n",
34 | "\n",
35 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 7,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "data": {
45 | "text/plain": [
46 | "1.0"
47 | ]
48 | },
49 | "execution_count": 7,
50 | "metadata": {},
51 | "output_type": "execute_result"
52 | }
53 | ],
54 | "source": [
55 | "tree = DecisionTreeClassifier()\n",
56 | "\n",
57 | "tree.fit(X_train, y_train)\n",
58 | "\n",
59 | "predictions = tree.predict(X_test)\n",
60 | "\n",
61 | "accuracy = accuracy_score(y_test, predictions)\n",
62 | "\n",
63 | "accuracy"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": 8,
69 | "metadata": {},
70 | "outputs": [
71 | {
72 | "data": {
301 | "text/plain": [
302 | " age sex cp trestbps chol fbs restecg thalach exang oldpeak \\\n",
303 | "0 52 1 0 125 212 0 1 168 0 1.0 \n",
304 | "1 53 1 0 140 203 1 0 155 1 3.1 \n",
305 | "2 70 1 0 145 174 0 1 125 1 2.6 \n",
306 | "3 61 1 0 148 203 0 1 161 0 0.0 \n",
307 | "4 62 0 0 138 294 1 1 106 0 1.9 \n",
308 | "... ... ... .. ... ... ... ... ... ... ... \n",
309 | "1020 59 1 1 140 221 0 1 164 1 0.0 \n",
310 | "1021 60 1 0 125 258 0 0 141 1 2.8 \n",
311 | "1022 47 1 0 110 275 0 0 118 1 1.0 \n",
312 | "1023 50 0 0 110 254 0 0 159 0 0.0 \n",
313 | "1024 54 1 0 120 188 0 1 113 0 1.4 \n",
314 | "\n",
315 | " slope ca thal target \n",
316 | "0 2 2 3 0 \n",
317 | "1 0 0 3 0 \n",
318 | "2 0 0 3 0 \n",
319 | "3 2 1 3 0 \n",
320 | "4 1 3 2 0 \n",
321 | "... ... .. ... ... \n",
322 | "1020 2 0 2 1 \n",
323 | "1021 1 1 3 0 \n",
324 | "1022 1 1 2 0 \n",
325 | "1023 2 0 2 1 \n",
326 | "1024 1 1 3 0 \n",
327 | "\n",
328 | "[1025 rows x 14 columns]"
329 | ]
330 | },
331 | "execution_count": 8,
332 | "metadata": {},
333 | "output_type": "execute_result"
334 | }
335 | ],
336 | "source": [
337 | "import pandas as pd\n",
338 | "import numpy as np\n",
339 | "\n",
340 | "df = pd.read_csv('heart.csv')\n",
341 | "df"
342 | ]
343 | },
344 | {
345 | "cell_type": "code",
346 | "execution_count": 9,
347 | "metadata": {},
348 | "outputs": [],
349 | "source": [
350 | "X = df.iloc[:, 0:-1]\n",
351 | "y = df.iloc[:, -1]"
352 | ]
353 | },
354 | {
355 | "cell_type": "code",
356 | "execution_count": 10,
357 | "metadata": {},
358 | "outputs": [],
359 | "source": [
360 | "from sklearn.model_selection import train_test_split\n",
361 | "\n",
362 | "\n",
363 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
364 | ]
365 | },
366 | {
367 | "cell_type": "code",
368 | "execution_count": 11,
369 | "metadata": {},
370 | "outputs": [],
371 | "source": [
372 | "from sklearn.preprocessing import LabelEncoder\n",
373 | "\n",
374 | "le = LabelEncoder()\n",
375 | "for column in X_train.columns:\n",
376 | " if X_train[column].dtype == 'object': # Encode only non-numerical columns\n",
377 | " X_train[column] = le.fit_transform(X_train[column])\n",
378 | " X_test[column] = le.transform(X_test[column])\n",
379 | "\n",
380 | "# Convert to NumPy arrays for compatibility\n",
381 | "X_train = np.array(X_train)\n",
382 | "X_test = np.array(X_test)"
383 | ]
384 | },
385 | {
386 | "cell_type": "code",
387 | "execution_count": 14,
388 | "metadata": {},
389 | "outputs": [
390 | {
391 | "data": {
392 | "text/plain": [
393 | "0.9853658536585366"
394 | ]
395 | },
396 | "execution_count": 14,
397 | "metadata": {},
398 | "output_type": "execute_result"
399 | }
400 | ],
401 |    "source": [
    "tree = DecisionTreeClassifier()\n",
    "\n",
    "tree.fit(X_train, y_train)\n",
    "\n",
    "predictions = tree.predict(X_test)\n",
    "\n",
    "accuracy_score(y_test, predictions)"
   ]
402 | },
403 | {
404 | "cell_type": "code",
405 | "execution_count": null,
406 | "metadata": {},
407 | "outputs": [],
408 | "source": []
409 | }
410 | ],
411 | "metadata": {
412 | "kernelspec": {
413 | "display_name": "Python 3",
414 | "language": "python",
415 | "name": "python3"
416 | },
417 | "language_info": {
418 | "codemirror_mode": {
419 | "name": "ipython",
420 | "version": 3
421 | },
422 | "file_extension": ".py",
423 | "mimetype": "text/x-python",
424 | "name": "python",
425 | "nbconvert_exporter": "python",
426 | "pygments_lexer": "ipython3",
427 | "version": "3.12.4"
428 | }
429 | },
430 | "nbformat": 4,
431 | "nbformat_minor": 4
432 | }
433 |
--------------------------------------------------------------------------------
/decision_trees/decision_trees_implementation (1).pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/decision_trees/decision_trees_implementation (1).pdf
--------------------------------------------------------------------------------
/decision_trees/decision_trees_implementation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 |     "# Decision Tree from Scratch: A Comprehensive Guide \n",
8 | "\n",
9 | "## 🌐 **Follow Me on Social Media:** \n",
10 | "\n",
11 | "- [](https://linkedin.com/in/codewithdark) \n",
12 | "- [](https://github.com/codewithdark-git) \n",
13 | "- [](https://kaggle.com/codewithdark)\n",
14 | "- [](https://github.com/codewithdark-git/ML-Algorithms-From-Scratch/blob/88da3d72945397d913a1cafbc8d4280bd80538c7/decision_trees/decision_trees_implementation.ipynb)\n",
15 | "- [](https://gist.github.com/codewithdark-git)\n",
16 | "\n",
17 | "\n",
18 | "Decision trees are one of the most popular and interpretable algorithms in machine learning, commonly used for both classification and regression tasks. They work by recursively splitting the dataset based on feature thresholds, creating a tree-like structure where each internal node represents a decision based on a feature, and each leaf node corresponds to an output label or value. \n",
19 | "\n",
20 | "The main advantages of decision trees are their simplicity, ease of visualization, and ability to handle both numerical and categorical data. However, when implemented from scratch, careful handling of split criteria, stopping conditions, and data preprocessing is required to ensure the model performs optimally.\n",
21 | "\n",
22 | "This document provides an in-depth exploration of implementing a decision tree from scratch in Python. It covers key concepts such as splitting data, calculating information gain using metrics like Gini Impurity and Entropy, and building the tree structure recursively. Additionally, the implementation accounts for various parameters like maximum tree depth and minimum samples required for a split to prevent overfitting.\n",
23 | "\n",
24 | "By understanding and building a decision tree from the ground up, we gain valuable insights into the mechanics of tree-based algorithms and lay a strong foundation for extending these concepts to advanced methods like Random Forests and Gradient Boosted Trees.\n",
25 | "\n"
26 | ]
27 | },
28 | {
29 | "cell_type": "markdown",
30 | "metadata": {},
31 | "source": [
32 | ""
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": 1,
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "import numpy as np\n",
42 | "import pandas as pd"
43 | ]
44 | },
45 | {
46 | "cell_type": "markdown",
47 | "metadata": {},
48 | "source": [
49 | "### **1. Gini Impurity Formula**\n",
50 | "\n",
51 | "$$\n",
52 | "Gini(y) = 1 - \\sum_{i=1}^{k} p_i^2\n",
53 | "$$\n",
54 | "\n",
55 | "- **Explanation**:\n",
56 | " - $( p_i )$ is the proportion of class $( i )$ in the dataset.\n",
57 | " - $( k )$ is the total number of classes.\n",
58 | "\n",
59 | " - `np.unique(y, return_counts=True)` calculates the unique classes and their counts.\n",
60 | " - `probabilities = counts / len(y)` computes $( p_i )$ for each class.\n",
61 | " - `1 - np.sum(probabilities**2)` computes $( Gini(y) )$.\n"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 2,
67 | "metadata": {},
68 | "outputs": [],
69 | "source": [
70 | "def gini_impurity(y):\n",
71 | " classes, counts = np.unique(y, return_counts=True)\n",
72 | " probabilities = counts / len(y)\n",
73 | " return 1 - np.sum(probabilities**2)\n"
74 | ]
75 | },
76 | {
77 | "cell_type": "markdown",
78 | "metadata": {},
79 | "source": [
80 | "\n",
81 | "### **2. Entropy Formula**\n",
82 | "\n",
83 | "$$\n",
84 | "Entropy(y) = - \\sum_{i=1}^{k} p_i \\log_2(p_i)\n",
85 | "$$\n",
86 | "\n",
87 | "- **Explanation**:\n",
88 | " - $( p_i )$ is the proportion of class $( i )$ in the dataset.\n",
89 | " - Entropy measures the \"impurity\" or \"uncertainty\" in the dataset.\n",
90 | "\n",
91 | " - $( \\log_2(p_i) )$ is calculated using `np.log2(probabilities + 1e-9)`.\n",
92 | " - Adding $( 1e-9 )$ prevents numerical errors when $( p_i = 0 )$."
93 | ]
94 | },
95 | {
96 | "cell_type": "markdown",
97 | "metadata": {},
98 | "source": [
99 | ""
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": 3,
105 | "metadata": {},
106 | "outputs": [],
107 | "source": [
108 | "def entropy(y):\n",
109 | " classes, counts = np.unique(y, return_counts=True)\n",
110 | " probabilities = counts / len(y)\n",
111 | " return -np.sum(probabilities * np.log2(probabilities + 1e-9)) # Add small value to avoid log(0)\n"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 4,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "def split_data(X, y, feature_index, threshold):\n",
121 | " left_indices = X[:, feature_index] <= threshold\n",
122 | " right_indices = X[:, feature_index] > threshold\n",
123 | " return X[left_indices], X[right_indices], y[left_indices], y[right_indices]\n"
124 | ]
125 | },
126 | {
127 | "cell_type": "markdown",
128 | "metadata": {},
129 | "source": [
130 | "### **3. Information Gain Formula**\n",
131 | "\n",
132 | "\n",
133 | "$$\n",
134 | "Gain = Impurity_{parent} - \\left( \\frac{n_{left}}{n} \\cdot Impurity_{left} + \\frac{n_{right}}{n} \\cdot Impurity_{right} \\right)\n",
135 | "$$\n",
136 | "\n",
137 | "- **Explanation**:\n",
138 | " - $( Impurity_{parent})$: Gini or Entropy of the parent node.\n",
139 | " - $( Impurity_{left})$: Gini or Entropy of the left child node.\n",
140 | " - $( Impurity_{right})$: Gini or Entropy of the right child node.\n",
141 | " - $( n_{left}, n_{right})$: Number of samples in the left and right child nodes.\n",
142 | " - $( n )$: Total number of samples in the parent node.\n",
143 | "\n",
144 | " - The parent impurity is calculated as `impurity_function(y)`.\n",
145 | " - Weighted child impurity is calculated using the proportions $( \\frac{n_{left}}{n} )$ and $( \\frac{n_{right}}{n} )$.\n",
146 | "\n"
147 | ]
148 | },
149 | {
150 | "cell_type": "markdown",
151 | "metadata": {},
152 | "source": [
153 | "\n",
154 | ""
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": 5,
160 | "metadata": {},
161 | "outputs": [],
162 | "source": [
163 | "def information_gain(y, y_left, y_right, impurity_function=gini_impurity):\n",
164 | " parent_impurity = impurity_function(y)\n",
165 | " n = len(y)\n",
166 | " n_left, n_right = len(y_left), len(y_right)\n",
167 | " \n",
168 | " # Weighted impurity of children\n",
169 | " child_impurity = (n_left / n) * impurity_function(y_left) + (n_right / n) * impurity_function(y_right)\n",
170 | " \n",
171 | " return parent_impurity - child_impurity\n"
172 | ]
173 | },
174 | {
175 | "cell_type": "markdown",
176 | "metadata": {},
177 | "source": [
178 | "The `Node` class represents a single node in the decision tree. Each node can either be:\n",
179 | "\n",
180 | "1. **An internal node**: Contains information about a feature index and threshold used for splitting the data, along with pointers to its left and right child nodes.\n",
181 | "2. **A leaf node**: Contains a classification value when further splits are no longer possible or desirable.\n",
182 | "\n",
183 | "Here’s an explanation of each parameter in the `Node` class:\n",
184 | "\n",
185 | "---\n",
186 | "\n",
187 | "### **1. `feature_index`**\n",
188 | "- **Purpose**:\n",
189 | " - Stores the index of the feature used for splitting at this node.\n",
190 | " - Example: If `feature_index = 2`, it means this node splits based on the third feature in the dataset.\n",
191 | "\n",
192 | "- **Type**: Integer or `None`.\n",
193 | "- **Usage**:\n",
194 | " - Used only in internal nodes. It is `None` for leaf nodes.\n",
195 | "\n",
196 | "---\n",
197 | "\n",
198 | "### **2. `threshold`**\n",
199 | "- **Purpose**:\n",
200 | " - Stores the threshold value for the feature used to split the data at this node.\n",
201 | " - Example: If `threshold = 5.5`, this node splits data into:\n",
202 | " - Left child: Samples where the feature value is ≤ 5.5.\n",
203 | " - Right child: Samples where the feature value is > 5.5.\n",
204 | "\n",
205 | "- **Type**: Float or `None`.\n",
206 | "- **Usage**:\n",
207 | " - Used only in internal nodes. It is `None` for leaf nodes.\n",
208 | "\n",
209 | "---\n",
210 | "\n",
211 | "### **3. `left`**\n",
212 | "- **Purpose**:\n",
213 | " - A reference to the left child node.\n",
214 | " - Represents the subset of data that satisfies the condition `feature_value <= threshold`.\n",
215 | "\n",
216 | "- **Type**: Instance of `Node` or `None`.\n",
217 | "- **Usage**:\n",
218 | " - Points to the left child in the decision tree structure.\n",
219 | "\n",
220 | "---\n",
221 | "\n",
222 | "### **4. `right`**\n",
223 | "- **Purpose**:\n",
224 | " - A reference to the right child node.\n",
225 | " - Represents the subset of data that satisfies the condition `feature_value > threshold`.\n",
226 | "\n",
227 | "- **Type**: Instance of `Node` or `None`.\n",
228 | "- **Usage**:\n",
229 | " - Points to the right child in the decision tree structure.\n",
230 | "\n",
231 | "---\n",
232 | "\n",
233 | "### **5. `value`**\n",
234 | "- **Purpose**:\n",
235 | " - Stores the value of the prediction (or class label) at a **leaf node**.\n",
236 | " - For classification tasks:\n",
237 | " - It’s the most common label in the data at this node.\n",
238 | " - For regression tasks:\n",
239 | " - It’s the mean or another aggregation metric of the target values at this node.\n",
240 | "\n",
241 | "- **Type**: Depends on the task:\n",
242 | " - For classification: Integer (class label).\n",
243 | " - For regression: Float (predicted value).\n",
244 | " - It is `None` for internal nodes.\n",
245 | "\n",
246 | "---\n",
247 | "\n",
248 | "### **Node Behavior**\n",
249 | "- **Internal Nodes**:\n",
250 | " - Contain `feature_index`, `threshold`, `left`, and `right`.\n",
251 | " - Example:\n",
252 | " ```python\n",
253 | " Node(feature_index=1, threshold=2.5, left=left_node, right=right_node)\n",
254 | " ```\n",
255 | "\n",
256 | "- **Leaf Nodes**:\n",
257 | " - Contain `value` and no references to children.\n",
258 | " - Example:\n",
259 | " ```python\n",
260 | " Node(value=0)\n",
261 | " ```\n",
262 | "\n",
263 | "---\n",
264 | "\n",
265 | "### **Example Usage**\n",
266 | "\n",
267 | "#### **Internal Node Example**:\n",
268 | "An internal node that splits based on the feature at index 2 with a threshold of 3.5:\n",
269 | "```python\n",
270 | "node = Node(feature_index=2, threshold=3.5, left=left_child, right=right_child)\n",
271 | "```\n",
272 | "\n",
273 | "#### **Leaf Node Example**:\n",
274 | "A leaf node that predicts class `1`:\n",
275 | "```python\n",
276 | "leaf = Node(value=1)\n",
277 | "```\n",
278 | "\n",
279 | "---\n",
280 | "\n",
281 | "### **Integration with `DecisionTree`**\n",
282 | "\n",
283 | "- When building the tree (`_build_tree` method):\n",
284 | " - Internal nodes are created with `feature_index`, `threshold`, and pointers to child nodes.\n",
285 | " - Leaf nodes are created with `value` when the stopping criteria are met."
286 | ]
287 | },
288 | {
289 | "cell_type": "markdown",
290 | "metadata": {},
291 | "source": [
292 | ""
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": 6,
298 | "metadata": {},
299 | "outputs": [],
300 | "source": [
301 | "class Node:\n",
302 | " def __init__(self, feature_index=None, threshold=None, left=None, right=None, value=None):\n",
303 | " self.feature_index = feature_index # Index of feature to split\n",
304 | " self.threshold = threshold # Threshold for splitting\n",
305 | " self.left = left # Left child\n",
306 | " self.right = right # Right child\n",
307 | " self.value = value # Leaf node value (for classification)\n"
308 | ]
309 | },
310 | {
311 | "cell_type": "markdown",
312 | "metadata": {},
313 | "source": [
314 | "### **1. `__init__(self, max_depth=5, min_samples_split=2)`**\n",
315 | "\n",
316 | "- **Purpose**:\n",
317 | " Initializes the decision tree with two hyperparameters:\n",
318 | " - `max_depth`: The maximum depth the tree is allowed to grow.\n",
319 | " - `min_samples_split`: The minimum number of samples required to split a node.\n",
320 | "\n",
321 | "- **Attributes**:\n",
322 | " - `self.root`: The root node of the decision tree, initialized as `None`.\n",
323 | "\n",
324 | "---\n",
325 | "\n",
326 | "### **2. `_build_tree(self, X, y, depth)`**\n",
327 | "\n",
328 | "- **Purpose**:\n",
329 | " Recursively builds the decision tree.\n",
330 | "\n",
331 | "- **Steps**:\n",
332 | " 1. **Check Stopping Criteria**:\n",
333 | " - Stops growing the tree if:\n",
334 | " - Maximum depth is reached.\n",
335 | " - Number of samples is less than `min_samples_split`.\n",
336 | " - All samples belong to the same class.\n",
337 | "\n",
338 | " 2. **Find the Best Split**:\n",
339 | " - Loops over all features and possible thresholds.\n",
340 | " - Uses `information_gain` to evaluate each split.\n",
341 | " - Tracks the best feature and threshold.\n",
342 | "\n",
343 | " 3. **No Valid Split**:\n",
344 | " - If no split improves the gain, create a **leaf node** with the most common label in `y`.\n",
345 | "\n",
346 | " 4. **Split Data and Recur**:\n",
347 | " - Splits data into `X_left` and `X_right`.\n",
348 | " - Recursively calls `_build_tree` to build the left and right children.\n",
349 | "\n",
350 | "- **Returns**:\n",
351 | " A `Node` object (either a leaf node or an internal node).\n",
352 | "\n",
353 | "---\n",
354 | "\n",
355 | "### **3. `_most_common_label(self, y)`**\n",
356 | "\n",
357 | "- **Purpose**:\n",
358 | " Finds the most common class in the given labels `y`.\n",
359 | "\n",
360 | "- **Steps**:\n",
361 | " - Uses `np.unique` to count occurrences of each class.\n",
362 | " - Returns the class with the highest count using `np.argmax`.\n",
363 | "\n",
364 | "- **Returns**:\n",
365 | " The majority class in `y`.\n",
366 | "\n",
367 | "---\n",
368 | "\n",
369 | "### **4. `fit(self, X, y)`**\n",
370 | "\n",
371 | "- **Purpose**:\n",
372 | " Fits (or trains) the decision tree on the training data.\n",
373 | "\n",
374 | "- **Steps**:\n",
375 | " - Calls `_build_tree` with the training data `X`, labels `y`, and an initial depth of `0`.\n",
376 | " - Stores the resulting tree in `self.root`.\n",
377 | "\n",
378 | "---\n",
379 | "\n",
380 | "### **5. `_predict(self, x, node)`**\n",
381 | "\n",
382 | "- **Purpose**:\n",
383 | " Predicts the class for a single sample `x` by traversing the tree.\n",
384 | "\n",
385 | "- **Steps**:\n",
386 | " - If the current `node` is a **leaf node**, return its value.\n",
387 | " - If `x[node.feature_index] <= node.threshold`, recurse into the left child.\n",
388 | " - Otherwise, recurse into the right child.\n",
389 | "\n",
390 | "- **Returns**:\n",
391 | " The predicted class for the input sample `x`.\n",
392 | "\n",
393 | "---\n",
394 | "\n",
395 | "### **6. `predict(self, X)`**\n",
396 | "\n",
397 | "- **Purpose**:\n",
398 | " Predicts the class for all samples in the dataset `X`.\n",
399 | "\n",
400 | "- **Steps**:\n",
401 | " - Loops through each sample in `X` and calls `_predict` for it.\n",
402 | " - Collects predictions into a NumPy array.\n",
403 | "\n",
404 | "- **Returns**:\n",
405 | " A NumPy array of predictions for all samples.\n",
406 | "\n",
407 | "---\n",
408 | "\n",
409 | "### **7. `_count_nodes(self, node, counts)`**\n",
410 | "\n",
411 | "- **Purpose**:\n",
412 | " Recursively counts the number of nodes in the tree, including:\n",
413 | " - Root node\n",
414 | " - Internal nodes\n",
415 | " - Leaf nodes\n",
416 | "\n",
417 | "- **Steps**:\n",
418 | " - If the node is a **leaf node** (its value is not `None`), increment the `leaves` count.\n",
419 | " - Otherwise, increment the `internal_nodes` count.\n",
420 | " - Recurse into the left and right children.\n",
421 | "\n",
422 | "---\n",
423 | "\n",
424 | "### **8. `count_nodes(self)`**\n",
425 | "\n",
426 | "- **Purpose**:\n",
427 | " Provides a summary of the number of different types of nodes in the tree.\n",
428 | "\n",
429 | "- **Steps**:\n",
430 | " - Initializes a dictionary `counts` with:\n",
431 | " - `root`: 1 (always one root).\n",
432 | " - `internal_nodes`: 0.\n",
433 | " - `leaves`: 0.\n",
434 |     "  - Calls `_count_nodes` starting from the root node, then removes the root's own tally so it is not double-counted.\n",
435 | "\n",
436 | "- **Returns**:\n",
437 | " A dictionary containing the counts of root, internal, and leaf nodes.\n",
438 | "\n",
439 | "---\n",
440 | "\n",
441 | "### **9. `_print_tree(self, node, depth=0)`**\n",
442 | "\n",
443 | "- **Purpose**:\n",
444 | " Recursively prints the structure of the decision tree.\n",
445 | "\n",
446 | "- **Steps**:\n",
447 | " - For **leaf nodes**, print \"Leaf Node: Class = ...\" with indentation proportional to depth.\n",
448 | " - For **internal nodes**, print \"Internal Node: Feature[...] <= ...\" with the feature and threshold.\n",
449 | " - Recurse into the left and right children, increasing the depth.\n",
450 | "\n",
451 | "---\n",
452 | "\n",
453 | "### **10. `print_tree(self)`**\n",
454 | "\n",
455 | "- **Purpose**:\n",
456 | " Prints the entire tree structure starting from the root.\n",
457 | "\n",
458 | "- **Steps**:\n",
459 | " - Calls `_print_tree` with the root node and an initial depth of `0`.\n",
460 | "\n",
461 | "---\n",
462 | "\n",
463 | "### **Class Summary**\n",
464 | "\n",
465 | "This `DecisionTree` class:\n",
466 | "1. **Builds a Tree**:\n",
467 | " - Using recursive splitting based on the best information gain.\n",
468 | "2. **Predicts Classes**:\n",
469 | " - Traverses the tree to make predictions for given inputs.\n",
470 | "3. **Analyzes the Tree**:\n",
471 | " - Counts the types of nodes.\n",
472 | " - Prints the tree structure."
473 | ]
474 | },
475 | {
476 | "cell_type": "markdown",
477 | "metadata": {},
478 | "source": [
479 | ""
480 | ]
481 | },
482 | {
483 | "cell_type": "code",
484 | "execution_count": 31,
485 | "metadata": {},
486 | "outputs": [],
487 | "source": [
488 | "class DecisionTree:\n",
489 | " def __init__(self, max_depth=5, min_samples_split=2):\n",
490 | " self.max_depth = max_depth\n",
491 | " self.min_samples_split = min_samples_split\n",
492 | " self.root = None\n",
493 | "\n",
494 | " def _build_tree(self, X, y, depth):\n",
495 | " n_samples, n_features = X.shape\n",
496 | " unique_classes = np.unique(y)\n",
497 | "\n",
498 | " # Stop criteria\n",
499 | " if depth >= self.max_depth or n_samples < self.min_samples_split or len(unique_classes) == 1:\n",
500 | " leaf_value = self._most_common_label(y)\n",
501 | " return Node(value=leaf_value)\n",
502 | "\n",
503 | " # Find the best split\n",
504 | " best_gain = -1\n",
505 | " best_feature, best_threshold = None, None\n",
506 | "\n",
507 | " for feature_index in range(n_features):\n",
508 | " thresholds = np.unique(X[:, feature_index])\n",
509 | " for threshold in thresholds:\n",
510 | " X_left, X_right, y_left, y_right = split_data(X, y, feature_index, threshold)\n",
511 | " if len(y_left) > 0 and len(y_right) > 0:\n",
512 | " gain = information_gain(y, y_left, y_right)\n",
513 | " if gain > best_gain:\n",
514 | " best_gain, best_feature, best_threshold = gain, feature_index, threshold\n",
515 | "\n",
516 | " # If no split improves the gain, create a leaf node\n",
517 | " if best_gain == -1:\n",
518 | " leaf_value = self._most_common_label(y)\n",
519 | " return Node(value=leaf_value)\n",
520 | "\n",
521 | " # Split the data\n",
522 | " X_left, X_right, y_left, y_right = split_data(X, y, best_feature, best_threshold)\n",
523 | "\n",
524 | " # Recursively build children\n",
525 | " left_child = self._build_tree(X_left, y_left, depth + 1)\n",
526 | " right_child = self._build_tree(X_right, y_right, depth + 1)\n",
527 | "\n",
528 | " return Node(feature_index=best_feature, threshold=best_threshold, left=left_child, right=right_child)\n",
529 | "\n",
530 | " def _most_common_label(self, y):\n",
531 | " classes, counts = np.unique(y, return_counts=True)\n",
532 | " return classes[np.argmax(counts)]\n",
533 | "\n",
534 | " def fit(self, X, y):\n",
535 | " # Convert X and y to NumPy arrays if they are DataFrames or Series\n",
536 | " X = np.array(X)\n",
537 | " y = np.array(y)\n",
538 | " self.root = self._build_tree(X, y, 0)\n",
539 | "\n",
540 | "\n",
541 | " def _predict(self, x, node):\n",
542 | " if node.value is not None:\n",
543 | " return node.value\n",
544 | "\n",
545 | " if x[node.feature_index] <= node.threshold:\n",
546 | " return self._predict(x, node.left)\n",
547 | " else:\n",
548 | " return self._predict(x, node.right)\n",
549 | "\n",
550 | " def predict(self, X):\n",
551 | " return np.array([self._predict(x, self.root) for x in X])\n",
552 | "\n",
553 | " def _count_nodes(self, node, counts):\n",
554 | " if node is None:\n",
555 | " return\n",
556 | "\n",
557 | " if node.value is not None: # Leaf node\n",
558 | " counts[\"leaves\"] += 1\n",
559 | " else: # Internal or root node\n",
560 | " counts[\"internal_nodes\"] += 1\n",
561 | "\n",
562 | " # Recursively count for left and right children\n",
563 | " self._count_nodes(node.left, counts)\n",
564 | " self._count_nodes(node.right, counts)\n",
565 | "\n",
566 |     "    def count_nodes(self):\n",
567 |     "        counts = {\"root\": 1, \"internal_nodes\": 0, \"leaves\": 0}\n",
568 |     "        self._count_nodes(self.root, counts)\n",
569 |     "        # _count_nodes also tallies the root itself, so remove it from the child buckets\n",
    "        if self.root is not None:\n",
    "            key = \"leaves\" if self.root.value is not None else \"internal_nodes\"\n",
    "            counts[key] -= 1\n",
    "        return counts\n",
570 | "\n",
571 | " def _print_tree(self, node, depth=0):\n",
572 | " if node is None:\n",
573 | " return\n",
574 | "\n",
575 | " if node.value is not None: # Leaf node\n",
576 | " print(f\"{'| ' * depth}Leaf Node: Class = {node.value}\")\n",
577 | " else:\n",
578 | " print(f\"{'| ' * depth}Internal Node: Feature[{node.feature_index}] <= {node.threshold}\")\n",
579 | " \n",
580 | " # Traverse left and right children\n",
581 | " self._print_tree(node.left, depth + 1)\n",
582 | " self._print_tree(node.right, depth + 1)\n",
583 | "\n",
584 | " def print_tree(self):\n",
585 | " print(\"Decision Tree Structure:\")\n",
586 | " self._print_tree(self.root)\n",
587 | "\n"
588 | ]
589 | },
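  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal usage sketch of the class above (illustrative; it assumes scikit-learn's iris data rather than the heart dataset used later): fit the scratch `DecisionTree`, score it, and inspect its structure. The exact accuracy depends on the split."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.datasets import load_iris\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# Demo data: iris (assumed for illustration only)\n",
    "X_demo, y_demo = load_iris(return_X_y=True)\n",
    "Xtr, Xte, ytr, yte = train_test_split(X_demo, y_demo, test_size=0.2, random_state=42)\n",
    "\n",
    "dt = DecisionTree(max_depth=3)\n",
    "dt.fit(Xtr, ytr)\n",
    "print('Accuracy:', np.mean(dt.predict(Xte) == yte))\n",
    "print(dt.count_nodes())"
   ]
  },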
590 | {
591 | "cell_type": "code",
592 | "execution_count": 24,
593 | "metadata": {},
594 | "outputs": [
595 | {
596 | "data": {
825 | "text/plain": [
826 | " age sex cp trestbps chol fbs restecg thalach exang oldpeak \\\n",
827 | "0 52 1 0 125 212 0 1 168 0 1.0 \n",
828 | "1 53 1 0 140 203 1 0 155 1 3.1 \n",
829 | "2 70 1 0 145 174 0 1 125 1 2.6 \n",
830 | "3 61 1 0 148 203 0 1 161 0 0.0 \n",
831 | "4 62 0 0 138 294 1 1 106 0 1.9 \n",
832 | "... ... ... .. ... ... ... ... ... ... ... \n",
833 | "1020 59 1 1 140 221 0 1 164 1 0.0 \n",
834 | "1021 60 1 0 125 258 0 0 141 1 2.8 \n",
835 | "1022 47 1 0 110 275 0 0 118 1 1.0 \n",
836 | "1023 50 0 0 110 254 0 0 159 0 0.0 \n",
837 | "1024 54 1 0 120 188 0 1 113 0 1.4 \n",
838 | "\n",
839 | " slope ca thal target \n",
840 | "0 2 2 3 0 \n",
841 | "1 0 0 3 0 \n",
842 | "2 0 0 3 0 \n",
843 | "3 2 1 3 0 \n",
844 | "4 1 3 2 0 \n",
845 | "... ... .. ... ... \n",
846 | "1020 2 0 2 1 \n",
847 | "1021 1 1 3 0 \n",
848 | "1022 1 1 2 0 \n",
849 | "1023 2 0 2 1 \n",
850 | "1024 1 1 3 0 \n",
851 | "\n",
852 | "[1025 rows x 14 columns]"
853 | ]
854 | },
855 | "execution_count": 24,
856 | "metadata": {},
857 | "output_type": "execute_result"
858 | }
859 | ],
860 | "source": [
861 |     "import pandas as pd\n",
862 |     "from sklearn.model_selection import train_test_split\n",
863 |     "from sklearn.metrics import accuracy_score\n",
864 |     "\n",
865 |     "# Load the heart-disease dataset\n",
866 |     "# (to use the iris dataset instead: from sklearn.datasets import load_iris;\n",
867 |     "#  data = load_iris(); X, y = data.data, data.target)\n",
868 |     "\n",
869 |     "df = pd.read_csv('heart.csv')\n",
870 |     "df"
871 | ]
872 | },
873 | {
874 | "cell_type": "code",
875 | "execution_count": 28,
876 | "metadata": {},
877 | "outputs": [],
878 | "source": [
879 |     "X = df.iloc[:, 0:-1]  # all columns except the last (features)\n",
880 |     "y = df.iloc[:, -1]    # last column (target)"
881 | ]
882 | },
883 | {
884 | "cell_type": "code",
885 | "execution_count": 29,
886 | "metadata": {},
887 | "outputs": [],
888 | "source": [
889 | "# Split into train and test sets\n",
890 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
891 | ]
892 | },
893 | {
894 | "cell_type": "code",
895 | "execution_count": 35,
896 | "metadata": {},
897 | "outputs": [],
898 | "source": [
899 | "from sklearn.preprocessing import LabelEncoder\n",
900 | "\n",
901 |     "le = LabelEncoder()  # re-fit per encoded column; heart.csv is fully numeric, so this loop is just a safeguard\n",
902 | "for column in X_train.columns:\n",
903 | " if X_train[column].dtype == 'object': # Encode only non-numerical columns\n",
904 | " X_train[column] = le.fit_transform(X_train[column])\n",
905 | " X_test[column] = le.transform(X_test[column])\n",
906 | "\n",
907 | "# Convert to NumPy arrays for compatibility\n",
908 | "X_train = np.array(X_train)\n",
909 | "X_test = np.array(X_test)"
910 | ]
911 | },
912 | {
913 | "cell_type": "markdown",
914 | "metadata": {},
915 | "source": [
916 | ""
917 | ]
918 | },
919 | {
920 | "cell_type": "markdown",
921 | "metadata": {},
922 | "source": [
923 |     "### The following code shows the performance of the decision tree built from scratch"
924 | ]
925 | },
926 | {
927 | "cell_type": "code",
928 | "execution_count": 47,
929 | "metadata": {},
930 | "outputs": [
931 | {
932 | "name": "stdout",
933 | "output_type": "stream",
934 | "text": [
935 |     "Accuracy of Scratch Tree: 0.9853658536585366\n"
936 | ]
937 | }
938 | ],
939 | "source": [
940 | "# Train decision tree\n",
941 | "tree = DecisionTree(max_depth=10)\n",
942 | "tree.fit(X_train, y_train)\n",
943 | "\n",
944 | "# Make predictions\n",
945 | "y_pred = tree.predict(X_test)\n",
946 | "\n",
947 | "# Evaluate accuracy\n",
948 | "accuracy = accuracy_score(y_test, y_pred)\n",
949 |     "print(f\"Accuracy of Scratch Tree: {accuracy}\")\n"
950 | ]
951 | },
952 | {
953 | "cell_type": "markdown",
954 | "metadata": {},
955 | "source": [
956 |     "### The following code shows the performance of the scikit-learn tree algorithm\n"
957 | ]
958 | },
959 | {
960 | "cell_type": "code",
961 | "execution_count": 48,
962 | "metadata": {},
963 | "outputs": [
964 | {
965 | "name": "stdout",
966 | "output_type": "stream",
967 | "text": [
968 | "Accuracy of sklearn Tree: 0.9853658536585366\n"
969 | ]
970 | }
971 | ],
972 | "source": [
973 | "from sklearn.tree import DecisionTreeClassifier\n",
974 | "\n",
975 | "tree1 = DecisionTreeClassifier(max_depth=10)\n",
976 | "\n",
977 | "tree1.fit(X_train, y_train)\n",
978 | "\n",
979 | "predictions1 = tree1.predict(X_test)\n",
980 | "\n",
981 | "accuracy1 = accuracy_score(y_test, predictions1)\n",
982 | "print(f\"Accuracy of sklearn Tree: {accuracy1}\")"
983 | ]
984 | },
985 | {
986 | "cell_type": "code",
987 | "execution_count": 22,
988 | "metadata": {},
989 | "outputs": [
990 | {
991 | "name": "stdout",
992 | "output_type": "stream",
993 | "text": [
994 | "Root Node: 1\n",
995 |     "Internal Nodes (incl. root): 2\n",
996 | "Leaf Nodes: 3\n"
997 | ]
998 | }
999 | ],
1000 | "source": [
1001 |     "# Count the nodes of the tree\n",
1002 |     "counts = tree.count_nodes()\n",
1003 |     "print(f\"Root Node: {counts['root']}\")\n",
1004 |     "print(f\"Internal Nodes (incl. root): {counts['internal_nodes']}\")\n",
1005 | "print(f\"Leaf Nodes: {counts['leaves']}\")\n"
1006 | ]
1007 | },
1008 | {
1009 | "cell_type": "code",
1010 | "execution_count": 23,
1011 | "metadata": {},
1012 | "outputs": [
1013 | {
1014 | "name": "stdout",
1015 | "output_type": "stream",
1016 | "text": [
1017 | "Decision Tree Structure:\n",
1018 | "Internal Node: Feature[2] <= 1.9\n",
1019 | "| Leaf Node: Class = 0\n",
1020 | "| Internal Node: Feature[2] <= 4.7\n",
1021 | "| | Leaf Node: Class = 1\n",
1022 | "| | Leaf Node: Class = 2\n"
1023 | ]
1024 | }
1025 | ],
1026 | "source": [
1027 | "# Print the decision tree\n",
1028 | "tree.print_tree()"
1029 | ]
1030 | },
1031 | {
1032 | "cell_type": "markdown",
1033 | "metadata": {},
1034 | "source": []
1035 | }
1036 | ],
1037 | "metadata": {
1038 | "kernelspec": {
1039 | "display_name": "Python 3 (ipykernel)",
1040 | "language": "python",
1041 | "name": "python3"
1042 | },
1043 | "language_info": {
1044 | "codemirror_mode": {
1045 | "name": "ipython",
1046 | "version": 3
1047 | },
1048 | "file_extension": ".py",
1049 | "mimetype": "text/x-python",
1050 | "name": "python",
1051 | "nbconvert_exporter": "python",
1052 | "pygments_lexer": "ipython3",
1053 | "version": "3.12.4"
1054 | }
1055 | },
1056 | "nbformat": 4,
1057 | "nbformat_minor": 4
1058 | }
1059 |
--------------------------------------------------------------------------------
/decision_trees/heart.csv:
--------------------------------------------------------------------------------
1 | age,sex,cp,trestbps,chol,fbs,restecg,thalach,exang,oldpeak,slope,ca,thal,target
2 | 52,1,0,125,212,0,1,168,0,1,2,2,3,0
3 | 53,1,0,140,203,1,0,155,1,3.1,0,0,3,0
4 | 70,1,0,145,174,0,1,125,1,2.6,0,0,3,0
5 | 61,1,0,148,203,0,1,161,0,0,2,1,3,0
6 | 62,0,0,138,294,1,1,106,0,1.9,1,3,2,0
7 | 58,0,0,100,248,0,0,122,0,1,1,0,2,1
8 | 58,1,0,114,318,0,2,140,0,4.4,0,3,1,0
9 | 55,1,0,160,289,0,0,145,1,0.8,1,1,3,0
10 | 46,1,0,120,249,0,0,144,0,0.8,2,0,3,0
11 | 54,1,0,122,286,0,0,116,1,3.2,1,2,2,0
12 | 71,0,0,112,149,0,1,125,0,1.6,1,0,2,1
13 | 43,0,0,132,341,1,0,136,1,3,1,0,3,0
14 | 34,0,1,118,210,0,1,192,0,0.7,2,0,2,1
15 | 51,1,0,140,298,0,1,122,1,4.2,1,3,3,0
16 | 52,1,0,128,204,1,1,156,1,1,1,0,0,0
17 | 34,0,1,118,210,0,1,192,0,0.7,2,0,2,1
18 | 51,0,2,140,308,0,0,142,0,1.5,2,1,2,1
19 | 54,1,0,124,266,0,0,109,1,2.2,1,1,3,0
20 | 50,0,1,120,244,0,1,162,0,1.1,2,0,2,1
21 | 58,1,2,140,211,1,0,165,0,0,2,0,2,1
22 | 60,1,2,140,185,0,0,155,0,3,1,0,2,0
23 | 67,0,0,106,223,0,1,142,0,0.3,2,2,2,1
24 | 45,1,0,104,208,0,0,148,1,3,1,0,2,1
25 | 63,0,2,135,252,0,0,172,0,0,2,0,2,1
26 | 42,0,2,120,209,0,1,173,0,0,1,0,2,1
27 | 61,0,0,145,307,0,0,146,1,1,1,0,3,0
28 | 44,1,2,130,233,0,1,179,1,0.4,2,0,2,1
29 | 58,0,1,136,319,1,0,152,0,0,2,2,2,0
30 | 56,1,2,130,256,1,0,142,1,0.6,1,1,1,0
31 | 55,0,0,180,327,0,2,117,1,3.4,1,0,2,0
32 | 44,1,0,120,169,0,1,144,1,2.8,0,0,1,0
33 | 50,0,1,120,244,0,1,162,0,1.1,2,0,2,1
34 | 57,1,0,130,131,0,1,115,1,1.2,1,1,3,0
35 | 70,1,2,160,269,0,1,112,1,2.9,1,1,3,0
36 | 50,1,2,129,196,0,1,163,0,0,2,0,2,1
37 | 46,1,2,150,231,0,1,147,0,3.6,1,0,2,0
38 | 51,1,3,125,213,0,0,125,1,1.4,2,1,2,1
39 | 59,1,0,138,271,0,0,182,0,0,2,0,2,1
40 | 64,1,0,128,263,0,1,105,1,0.2,1,1,3,1
41 | 57,1,2,128,229,0,0,150,0,0.4,1,1,3,0
42 | 65,0,2,160,360,0,0,151,0,0.8,2,0,2,1
43 | 54,1,2,120,258,0,0,147,0,0.4,1,0,3,1
44 | 61,0,0,130,330,0,0,169,0,0,2,0,2,0
45 | 46,1,0,120,249,0,0,144,0,0.8,2,0,3,0
46 | 55,0,1,132,342,0,1,166,0,1.2,2,0,2,1
47 | 42,1,0,140,226,0,1,178,0,0,2,0,2,1
48 | 41,1,1,135,203,0,1,132,0,0,1,0,1,1
49 | 66,0,0,178,228,1,1,165,1,1,1,2,3,0
50 | 66,0,2,146,278,0,0,152,0,0,1,1,2,1
51 | 60,1,0,117,230,1,1,160,1,1.4,2,2,3,0
52 | 58,0,3,150,283,1,0,162,0,1,2,0,2,1
53 | 57,0,0,140,241,0,1,123,1,0.2,1,0,3,0
54 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
55 | 49,1,2,120,188,0,1,139,0,2,1,3,3,0
56 | 55,1,0,140,217,0,1,111,1,5.6,0,0,3,0
57 | 55,1,0,140,217,0,1,111,1,5.6,0,0,3,0
58 | 56,1,3,120,193,0,0,162,0,1.9,1,0,3,1
59 | 48,1,1,130,245,0,0,180,0,0.2,1,0,2,1
60 | 67,1,2,152,212,0,0,150,0,0.8,1,0,3,0
61 | 57,1,1,154,232,0,0,164,0,0,2,1,2,0
62 | 29,1,1,130,204,0,0,202,0,0,2,0,2,1
63 | 66,0,2,146,278,0,0,152,0,0,1,1,2,1
64 | 67,1,0,100,299,0,0,125,1,0.9,1,2,2,0
65 | 59,1,2,150,212,1,1,157,0,1.6,2,0,2,1
66 | 29,1,1,130,204,0,0,202,0,0,2,0,2,1
67 | 59,1,3,170,288,0,0,159,0,0.2,1,0,3,0
68 | 53,1,2,130,197,1,0,152,0,1.2,0,0,2,1
69 | 42,1,0,136,315,0,1,125,1,1.8,1,0,1,0
70 | 37,0,2,120,215,0,1,170,0,0,2,0,2,1
71 | 62,0,0,160,164,0,0,145,0,6.2,0,3,3,0
72 | 59,1,0,170,326,0,0,140,1,3.4,0,0,3,0
73 | 61,1,0,140,207,0,0,138,1,1.9,2,1,3,0
74 | 56,1,0,125,249,1,0,144,1,1.2,1,1,2,0
75 | 59,1,0,140,177,0,1,162,1,0,2,1,3,0
76 | 48,1,0,130,256,1,0,150,1,0,2,2,3,0
77 | 47,1,2,138,257,0,0,156,0,0,2,0,2,1
78 | 48,1,2,124,255,1,1,175,0,0,2,2,2,1
79 | 63,1,0,140,187,0,0,144,1,4,2,2,3,0
80 | 52,1,1,134,201,0,1,158,0,0.8,2,1,2,1
81 | 52,1,1,134,201,0,1,158,0,0.8,2,1,2,1
82 | 50,1,2,140,233,0,1,163,0,0.6,1,1,3,0
83 | 49,1,2,118,149,0,0,126,0,0.8,2,3,2,0
84 | 46,1,2,150,231,0,1,147,0,3.6,1,0,2,0
85 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
86 | 37,0,2,120,215,0,1,170,0,0,2,0,2,1
87 | 44,1,1,120,220,0,1,170,0,0,2,0,2,1
88 | 58,1,2,140,211,1,0,165,0,0,2,0,2,1
89 | 59,0,0,174,249,0,1,143,1,0,1,0,2,0
90 | 62,0,0,140,268,0,0,160,0,3.6,0,2,2,0
91 | 68,1,0,144,193,1,1,141,0,3.4,1,2,3,0
92 | 54,0,2,108,267,0,0,167,0,0,2,0,2,1
93 | 62,0,0,124,209,0,1,163,0,0,2,0,2,1
94 | 63,1,0,140,187,0,0,144,1,4,2,2,3,0
95 | 44,1,0,120,169,0,1,144,1,2.8,0,0,1,0
96 | 62,1,1,128,208,1,0,140,0,0,2,0,2,1
97 | 45,0,0,138,236,0,0,152,1,0.2,1,0,2,1
98 | 57,0,0,128,303,0,0,159,0,0,2,1,2,1
99 | 53,1,0,123,282,0,1,95,1,2,1,2,3,0
100 | 65,1,0,110,248,0,0,158,0,0.6,2,2,1,0
101 | 76,0,2,140,197,0,2,116,0,1.1,1,0,2,1
102 | 43,0,2,122,213,0,1,165,0,0.2,1,0,2,1
103 | 57,1,2,150,126,1,1,173,0,0.2,2,1,3,1
104 | 54,1,1,108,309,0,1,156,0,0,2,0,3,1
105 | 47,1,2,138,257,0,0,156,0,0,2,0,2,1
106 | 52,1,3,118,186,0,0,190,0,0,1,0,1,1
107 | 47,1,0,110,275,0,0,118,1,1,1,1,2,0
108 | 51,1,0,140,299,0,1,173,1,1.6,2,0,3,0
109 | 62,1,1,120,281,0,0,103,0,1.4,1,1,3,0
110 | 40,1,0,152,223,0,1,181,0,0,2,0,3,0
111 | 54,1,0,110,206,0,0,108,1,0,1,1,2,0
112 | 44,1,0,110,197,0,0,177,0,0,2,1,2,0
113 | 53,1,0,142,226,0,0,111,1,0,2,0,3,1
114 | 48,1,0,130,256,1,0,150,1,0,2,2,3,0
115 | 57,1,0,110,335,0,1,143,1,3,1,1,3,0
116 | 59,1,2,126,218,1,1,134,0,2.2,1,1,1,0
117 | 61,0,0,145,307,0,0,146,1,1,1,0,3,0
118 | 63,1,0,130,254,0,0,147,0,1.4,1,1,3,0
119 | 43,1,0,120,177,0,0,120,1,2.5,1,0,3,0
120 | 29,1,1,130,204,0,0,202,0,0,2,0,2,1
121 | 42,1,1,120,295,0,1,162,0,0,2,0,2,1
122 | 54,1,1,108,309,0,1,156,0,0,2,0,3,1
123 | 44,1,0,120,169,0,1,144,1,2.8,0,0,1,0
124 | 60,1,0,145,282,0,0,142,1,2.8,1,2,3,0
125 | 65,0,2,140,417,1,0,157,0,0.8,2,1,2,1
126 | 61,1,0,120,260,0,1,140,1,3.6,1,1,3,0
127 | 60,0,3,150,240,0,1,171,0,0.9,2,0,2,1
128 | 66,1,0,120,302,0,0,151,0,0.4,1,0,2,1
129 | 53,1,2,130,197,1,0,152,0,1.2,0,0,2,1
130 | 52,1,2,138,223,0,1,169,0,0,2,4,2,1
131 | 57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
132 | 60,0,3,150,240,0,1,171,0,0.9,2,0,2,1
133 | 51,0,2,130,256,0,0,149,0,0.5,2,0,2,1
134 | 41,1,1,135,203,0,1,132,0,0,1,0,1,1
135 | 50,1,2,129,196,0,1,163,0,0,2,0,2,1
136 | 54,1,1,108,309,0,1,156,0,0,2,0,3,1
137 | 58,0,0,170,225,1,0,146,1,2.8,1,2,1,0
138 | 55,0,1,132,342,0,1,166,0,1.2,2,0,2,1
139 | 64,0,0,180,325,0,1,154,1,0,2,0,2,1
140 | 47,1,2,138,257,0,0,156,0,0,2,0,2,1
141 | 41,1,1,110,235,0,1,153,0,0,2,0,2,1
142 | 57,1,0,152,274,0,1,88,1,1.2,1,1,3,0
143 | 63,0,0,124,197,0,1,136,1,0,1,0,2,0
144 | 61,1,3,134,234,0,1,145,0,2.6,1,2,2,0
145 | 34,1,3,118,182,0,0,174,0,0,2,0,2,1
146 | 47,1,0,112,204,0,1,143,0,0.1,2,0,2,1
147 | 40,1,0,110,167,0,0,114,1,2,1,0,3,0
148 | 51,0,2,120,295,0,0,157,0,0.6,2,0,2,1
149 | 41,1,0,110,172,0,0,158,0,0,2,0,3,0
150 | 52,1,3,152,298,1,1,178,0,1.2,1,0,3,1
151 | 39,1,2,140,321,0,0,182,0,0,2,0,2,1
152 | 58,1,0,114,318,0,2,140,0,4.4,0,3,1,0
153 | 54,1,1,192,283,0,0,195,0,0,2,1,3,0
154 | 58,1,0,125,300,0,0,171,0,0,2,2,3,0
155 | 54,1,2,120,258,0,0,147,0,0.4,1,0,3,1
156 | 63,1,0,130,330,1,0,132,1,1.8,2,3,3,0
157 | 54,1,1,108,309,0,1,156,0,0,2,0,3,1
158 | 40,1,3,140,199,0,1,178,1,1.4,2,0,3,1
159 | 54,1,2,120,258,0,0,147,0,0.4,1,0,3,1
160 | 67,0,2,115,564,0,0,160,0,1.6,1,0,3,1
161 | 41,1,1,120,157,0,1,182,0,0,2,0,2,1
162 | 77,1,0,125,304,0,0,162,1,0,2,3,2,0
163 | 51,1,2,100,222,0,1,143,1,1.2,1,0,2,1
164 | 77,1,0,125,304,0,0,162,1,0,2,3,2,0
165 | 48,1,0,124,274,0,0,166,0,0.5,1,0,3,0
166 | 56,1,0,125,249,1,0,144,1,1.2,1,1,2,0
167 | 59,1,0,170,326,0,0,140,1,3.4,0,0,3,0
168 | 56,1,0,132,184,0,0,105,1,2.1,1,1,1,0
169 | 57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
170 | 43,1,2,130,315,0,1,162,0,1.9,2,1,2,1
171 | 45,0,1,112,160,0,1,138,0,0,1,0,2,1
172 | 43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
173 | 56,1,0,130,283,1,0,103,1,1.6,0,0,3,0
174 | 56,1,1,120,240,0,1,169,0,0,0,0,2,1
175 | 39,0,2,94,199,0,1,179,0,0,2,0,2,1
176 | 54,1,0,110,239,0,1,126,1,2.8,1,1,3,0
177 | 56,0,0,200,288,1,0,133,1,4,0,2,3,0
178 | 56,1,0,130,283,1,0,103,1,1.6,0,0,3,0
179 | 64,1,0,120,246,0,0,96,1,2.2,0,1,2,0
180 | 44,1,0,110,197,0,0,177,0,0,2,1,2,0
181 | 56,0,0,134,409,0,0,150,1,1.9,1,2,3,0
182 | 63,1,0,140,187,0,0,144,1,4,2,2,3,0
183 | 64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
184 | 60,1,0,140,293,0,0,170,0,1.2,1,2,3,0
185 | 42,1,2,130,180,0,1,150,0,0,2,0,2,1
186 | 45,1,1,128,308,0,0,170,0,0,2,0,2,1
187 | 57,1,0,165,289,1,0,124,0,1,1,3,3,0
188 | 40,1,0,110,167,0,0,114,1,2,1,0,3,0
189 | 56,1,0,125,249,1,0,144,1,1.2,1,1,2,0
190 | 63,1,0,130,254,0,0,147,0,1.4,1,1,3,0
191 | 64,1,2,125,309,0,1,131,1,1.8,1,0,3,0
192 | 41,1,2,112,250,0,1,179,0,0,2,0,2,1
193 | 56,1,1,130,221,0,0,163,0,0,2,0,3,1
194 | 67,0,2,115,564,0,0,160,0,1.6,1,0,3,1
195 | 69,1,3,160,234,1,0,131,0,0.1,1,1,2,1
196 | 67,1,0,160,286,0,0,108,1,1.5,1,3,2,0
197 | 59,1,2,150,212,1,1,157,0,1.6,2,0,2,1
198 | 58,1,0,100,234,0,1,156,0,0.1,2,1,3,0
199 | 45,1,0,115,260,0,0,185,0,0,2,0,2,1
200 | 60,0,2,102,318,0,1,160,0,0,2,1,2,1
201 | 50,1,0,144,200,0,0,126,1,0.9,1,0,3,0
202 | 62,0,0,124,209,0,1,163,0,0,2,0,2,1
203 | 34,1,3,118,182,0,0,174,0,0,2,0,2,1
204 | 52,1,3,152,298,1,1,178,0,1.2,1,0,3,1
205 | 64,1,3,170,227,0,0,155,0,0.6,1,0,3,1
206 | 66,0,2,146,278,0,0,152,0,0,1,1,2,1
207 | 42,1,3,148,244,0,0,178,0,0.8,2,2,2,1
208 | 59,1,2,126,218,1,1,134,0,2.2,1,1,1,0
209 | 41,1,2,112,250,0,1,179,0,0,2,0,2,1
210 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
211 | 62,1,1,120,281,0,0,103,0,1.4,1,1,3,0
212 | 42,1,2,120,240,1,1,194,0,0.8,0,0,3,1
213 | 67,1,0,100,299,0,0,125,1,0.9,1,2,2,0
214 | 50,1,0,150,243,0,0,128,0,2.6,1,0,3,0
215 | 43,1,2,130,315,0,1,162,0,1.9,2,1,2,1
216 | 45,1,1,128,308,0,0,170,0,0,2,0,2,1
217 | 49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
218 | 65,1,0,135,254,0,0,127,0,2.8,1,1,3,0
219 | 41,1,1,120,157,0,1,182,0,0,2,0,2,1
220 | 46,1,0,140,311,0,1,120,1,1.8,1,2,3,0
221 | 54,1,0,122,286,0,0,116,1,3.2,1,2,2,0
222 | 57,0,1,130,236,0,0,174,0,0,1,1,2,0
223 | 63,1,0,130,254,0,0,147,0,1.4,1,1,3,0
224 | 64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
225 | 39,0,2,94,199,0,1,179,0,0,2,0,2,1
226 | 51,1,0,140,261,0,0,186,1,0,2,0,2,1
227 | 54,1,2,150,232,0,0,165,0,1.6,2,0,3,1
228 | 49,1,2,118,149,0,0,126,0,0.8,2,3,2,0
229 | 44,0,2,118,242,0,1,149,0,0.3,1,1,2,1
230 | 52,1,1,128,205,1,1,184,0,0,2,0,2,1
231 | 66,0,0,178,228,1,1,165,1,1,1,2,3,0
232 | 58,1,0,125,300,0,0,171,0,0,2,2,3,0
233 | 56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
234 | 60,1,0,125,258,0,0,141,1,2.8,1,1,3,0
235 | 41,0,1,126,306,0,1,163,0,0,2,0,2,1
236 | 49,0,0,130,269,0,1,163,0,0,2,0,2,1
237 | 64,1,3,170,227,0,0,155,0,0.6,1,0,3,1
238 | 49,1,2,118,149,0,0,126,0,0.8,2,3,2,0
239 | 57,1,1,124,261,0,1,141,0,0.3,2,0,3,0
240 | 60,1,0,117,230,1,1,160,1,1.4,2,2,3,0
241 | 62,0,0,150,244,0,1,154,1,1.4,1,0,2,0
242 | 54,0,1,132,288,1,0,159,1,0,2,1,2,1
243 | 67,1,2,152,212,0,0,150,0,0.8,1,0,3,0
244 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
245 | 60,1,2,140,185,0,0,155,0,3,1,0,2,0
246 | 51,1,2,125,245,1,0,166,0,2.4,1,0,2,1
247 | 44,1,1,130,219,0,0,188,0,0,2,0,2,1
248 | 54,1,1,192,283,0,0,195,0,0,2,1,3,0
249 | 46,1,0,140,311,0,1,120,1,1.8,1,2,3,0
250 | 39,0,2,138,220,0,1,152,0,0,1,0,2,1
251 | 42,1,2,130,180,0,1,150,0,0,2,0,2,1
252 | 47,1,0,110,275,0,0,118,1,1,1,1,2,0
253 | 45,0,1,112,160,0,1,138,0,0,1,0,2,1
254 | 55,1,0,132,353,0,1,132,1,1.2,1,1,3,0
255 | 57,1,0,165,289,1,0,124,0,1,1,3,3,0
256 | 35,1,0,120,198,0,1,130,1,1.6,1,0,3,0
257 | 62,0,0,140,394,0,0,157,0,1.2,1,0,2,1
258 | 35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
259 | 64,0,0,180,325,0,1,154,1,0,2,0,2,1
260 | 38,1,3,120,231,0,1,182,1,3.8,1,0,3,0
261 | 66,1,0,120,302,0,0,151,0,0.4,1,0,2,1
262 | 44,1,2,120,226,0,1,169,0,0,2,0,2,1
263 | 54,1,2,150,232,0,0,165,0,1.6,2,0,3,1
264 | 48,1,0,122,222,0,0,186,0,0,2,0,2,1
265 | 55,0,1,132,342,0,1,166,0,1.2,2,0,2,1
266 | 58,0,0,170,225,1,0,146,1,2.8,1,2,1,0
267 | 45,1,0,104,208,0,0,148,1,3,1,0,2,1
268 | 53,1,0,123,282,0,1,95,1,2,1,2,3,0
269 | 67,1,0,120,237,0,1,71,0,1,1,0,2,0
270 | 58,1,2,132,224,0,0,173,0,3.2,2,2,3,0
271 | 71,0,2,110,265,1,0,130,0,0,2,1,2,1
272 | 43,1,0,110,211,0,1,161,0,0,2,0,3,1
273 | 44,1,1,120,263,0,1,173,0,0,2,0,3,1
274 | 39,0,2,138,220,0,1,152,0,0,1,0,2,1
275 | 54,1,0,110,206,0,0,108,1,0,1,1,2,0
276 | 66,1,0,160,228,0,0,138,0,2.3,2,0,1,1
277 | 56,1,0,130,283,1,0,103,1,1.6,0,0,3,0
278 | 57,1,0,132,207,0,1,168,1,0,2,0,3,1
279 | 44,1,1,130,219,0,0,188,0,0,2,0,2,1
280 | 55,1,0,160,289,0,0,145,1,0.8,1,1,3,0
281 | 41,0,1,105,198,0,1,168,0,0,2,1,2,1
282 | 45,0,1,130,234,0,0,175,0,0.6,1,0,2,1
283 | 35,1,1,122,192,0,1,174,0,0,2,0,2,1
284 | 41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
285 | 64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
286 | 58,1,2,132,224,0,0,173,0,3.2,2,2,3,0
287 | 71,0,2,110,265,1,0,130,0,0,2,1,2,1
288 | 64,0,2,140,313,0,1,133,0,0.2,2,0,3,1
289 | 71,0,1,160,302,0,1,162,0,0.4,2,2,2,1
290 | 58,0,2,120,340,0,1,172,0,0,2,0,2,1
291 | 40,1,0,152,223,0,1,181,0,0,2,0,3,0
292 | 52,1,2,138,223,0,1,169,0,0,2,4,2,1
293 | 58,1,0,128,259,0,0,130,1,3,1,2,3,0
294 | 61,1,2,150,243,1,1,137,1,1,1,0,2,1
295 | 59,1,2,150,212,1,1,157,0,1.6,2,0,2,1
296 | 56,0,0,200,288,1,0,133,1,4,0,2,3,0
297 | 67,1,0,100,299,0,0,125,1,0.9,1,2,2,0
298 | 67,1,0,120,237,0,1,71,0,1,1,0,2,0
299 | 58,1,0,150,270,0,0,111,1,0.8,2,0,3,0
300 | 35,1,1,122,192,0,1,174,0,0,2,0,2,1
301 | 52,1,1,120,325,0,1,172,0,0.2,2,0,2,1
302 | 46,0,1,105,204,0,1,172,0,0,2,0,2,1
303 | 51,1,2,94,227,0,1,154,1,0,2,1,3,1
304 | 55,0,1,132,342,0,1,166,0,1.2,2,0,2,1
305 | 60,1,0,145,282,0,0,142,1,2.8,1,2,3,0
306 | 52,0,2,136,196,0,0,169,0,0.1,1,0,2,1
307 | 62,1,0,120,267,0,1,99,1,1.8,1,2,3,0
308 | 44,0,2,118,242,0,1,149,0,0.3,1,1,2,1
309 | 44,1,1,120,220,0,1,170,0,0,2,0,2,1
310 | 59,1,2,126,218,1,1,134,0,2.2,1,1,1,0
311 | 56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
312 | 61,1,0,120,260,0,1,140,1,3.6,1,1,3,0
313 | 48,1,0,130,256,1,0,150,1,0,2,2,3,0
314 | 70,1,2,160,269,0,1,112,1,2.9,1,1,3,0
315 | 74,0,1,120,269,0,0,121,1,0.2,2,1,2,1
316 | 40,1,3,140,199,0,1,178,1,1.4,2,0,3,1
317 | 42,1,3,148,244,0,0,178,0,0.8,2,2,2,1
318 | 64,0,2,140,313,0,1,133,0,0.2,2,0,3,1
319 | 63,0,2,135,252,0,0,172,0,0,2,0,2,1
320 | 59,1,0,140,177,0,1,162,1,0,2,1,3,0
321 | 53,0,2,128,216,0,0,115,0,0,2,0,0,1
322 | 53,0,0,130,264,0,0,143,0,0.4,1,0,2,1
323 | 48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
324 | 45,1,0,142,309,0,0,147,1,0,1,3,3,0
325 | 66,1,1,160,246,0,1,120,1,0,1,3,1,0
326 | 48,1,1,130,245,0,0,180,0,0.2,1,0,2,1
327 | 56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
328 | 54,1,1,192,283,0,0,195,0,0,2,1,3,0
329 | 57,1,0,150,276,0,0,112,1,0.6,1,1,1,0
330 | 70,1,0,130,322,0,0,109,0,2.4,1,3,2,0
331 | 53,0,2,128,216,0,0,115,0,0,2,0,0,1
332 | 37,0,2,120,215,0,1,170,0,0,2,0,2,1
333 | 63,0,0,108,269,0,1,169,1,1.8,1,2,2,0
334 | 37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
335 | 54,0,2,110,214,0,1,158,0,1.6,1,0,2,1
336 | 60,1,0,130,206,0,0,132,1,2.4,1,2,3,0
337 | 58,1,0,150,270,0,0,111,1,0.8,2,0,3,0
338 | 57,1,2,150,126,1,1,173,0,0.2,2,1,3,1
339 | 54,1,2,125,273,0,0,152,0,0.5,0,1,2,1
340 | 56,1,2,130,256,1,0,142,1,0.6,1,1,1,0
341 | 60,1,0,130,253,0,1,144,1,1.4,2,1,3,0
342 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
343 | 44,1,2,120,226,0,1,169,0,0,2,0,2,1
344 | 65,0,2,155,269,0,1,148,0,0.8,2,0,2,1
345 | 52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
346 | 41,1,1,120,157,0,1,182,0,0,2,0,2,1
347 | 66,1,1,160,246,0,1,120,1,0,1,3,1,0
348 | 50,1,0,150,243,0,0,128,0,2.6,1,0,3,0
349 | 54,0,2,108,267,0,0,167,0,0,2,0,2,1
350 | 43,1,0,132,247,1,0,143,1,0.1,1,4,3,0
351 | 62,0,2,130,263,0,1,97,0,1.2,1,1,3,0
352 | 66,1,0,120,302,0,0,151,0,0.4,1,0,2,1
353 | 50,1,0,144,200,0,0,126,1,0.9,1,0,3,0
354 | 57,1,0,110,335,0,1,143,1,3,1,1,3,0
355 | 57,1,0,110,201,0,1,126,1,1.5,1,0,1,1
356 | 57,1,1,124,261,0,1,141,0,0.3,2,0,3,0
357 | 46,0,0,138,243,0,0,152,1,0,1,0,2,1
358 | 59,1,0,164,176,1,0,90,0,1,1,2,1,0
359 | 67,1,0,160,286,0,0,108,1,1.5,1,3,2,0
360 | 59,1,3,134,204,0,1,162,0,0.8,2,2,2,0
361 | 53,0,2,128,216,0,0,115,0,0,2,0,0,1
362 | 48,1,0,122,222,0,0,186,0,0,2,0,2,1
363 | 62,1,2,130,231,0,1,146,0,1.8,1,3,3,1
364 | 43,0,2,122,213,0,1,165,0,0.2,1,0,2,1
365 | 53,1,2,130,246,1,0,173,0,0,2,3,2,1
366 | 57,0,1,130,236,0,0,174,0,0,1,1,2,0
367 | 53,1,2,130,246,1,0,173,0,0,2,3,2,1
368 | 58,1,2,112,230,0,0,165,0,2.5,1,1,3,0
369 | 48,1,1,110,229,0,1,168,0,1,0,0,3,0
370 | 58,1,2,105,240,0,0,154,1,0.6,1,0,3,1
371 | 51,1,2,110,175,0,1,123,0,0.6,2,0,2,1
372 | 43,0,0,132,341,1,0,136,1,3,1,0,3,0
373 | 55,1,0,132,353,0,1,132,1,1.2,1,1,3,0
374 | 54,0,2,110,214,0,1,158,0,1.6,1,0,2,1
375 | 58,1,1,120,284,0,0,160,0,1.8,1,0,2,0
376 | 46,0,2,142,177,0,0,160,1,1.4,0,0,2,1
377 | 66,1,0,160,228,0,0,138,0,2.3,2,0,1,1
378 | 59,1,1,140,221,0,1,164,1,0,2,0,2,1
379 | 64,0,0,130,303,0,1,122,0,2,1,2,2,1
380 | 67,1,0,120,237,0,1,71,0,1,1,0,2,0
381 | 52,1,3,118,186,0,0,190,0,0,1,0,1,1
382 | 58,1,0,146,218,0,1,105,0,2,1,1,3,0
383 | 58,1,2,132,224,0,0,173,0,3.2,2,2,3,0
384 | 59,1,0,110,239,0,0,142,1,1.2,1,1,3,0
385 | 58,1,0,150,270,0,0,111,1,0.8,2,0,3,0
386 | 35,1,0,126,282,0,0,156,1,0,2,0,3,0
387 | 51,1,2,110,175,0,1,123,0,0.6,2,0,2,1
388 | 42,0,2,120,209,0,1,173,0,0,1,0,2,1
389 | 77,1,0,125,304,0,0,162,1,0,2,3,2,0
390 | 64,1,0,120,246,0,0,96,1,2.2,0,1,2,0
391 | 63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
392 | 58,0,1,136,319,1,0,152,0,0,2,2,2,0
393 | 45,1,3,110,264,0,1,132,0,1.2,1,0,3,0
394 | 51,1,2,110,175,0,1,123,0,0.6,2,0,2,1
395 | 62,0,0,160,164,0,0,145,0,6.2,0,3,3,0
396 | 63,1,0,130,330,1,0,132,1,1.8,2,3,3,0
397 | 66,0,2,146,278,0,0,152,0,0,1,1,2,1
398 | 68,1,2,180,274,1,0,150,1,1.6,1,0,3,0
399 | 40,1,0,110,167,0,0,114,1,2,1,0,3,0
400 | 66,1,0,160,228,0,0,138,0,2.3,2,0,1,1
401 | 63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
402 | 49,1,2,120,188,0,1,139,0,2,1,3,3,0
403 | 71,0,0,112,149,0,1,125,0,1.6,1,0,2,1
404 | 70,1,1,156,245,0,0,143,0,0,2,0,2,1
405 | 46,0,1,105,204,0,1,172,0,0,2,0,2,1
406 | 61,1,0,140,207,0,0,138,1,1.9,2,1,3,0
407 | 56,1,2,130,256,1,0,142,1,0.6,1,1,1,0
408 | 58,1,2,140,211,1,0,165,0,0,2,0,2,1
409 | 58,1,0,100,234,0,1,156,0,0.1,2,1,3,0
410 | 46,0,0,138,243,0,0,152,1,0,1,0,2,1
411 | 46,1,2,150,231,0,1,147,0,3.6,1,0,2,0
412 | 41,0,1,105,198,0,1,168,0,0,2,1,2,1
413 | 56,1,0,125,249,1,0,144,1,1.2,1,1,2,0
414 | 57,1,0,150,276,0,0,112,1,0.6,1,1,1,0
415 | 70,1,0,130,322,0,0,109,0,2.4,1,3,2,0
416 | 59,1,3,170,288,0,0,159,0,0.2,1,0,3,0
417 | 41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
418 | 54,1,2,125,273,0,0,152,0,0.5,0,1,2,1
419 | 52,1,2,138,223,0,1,169,0,0,2,4,2,1
420 | 62,0,0,124,209,0,1,163,0,0,2,0,2,1
421 | 65,0,2,160,360,0,0,151,0,0.8,2,0,2,1
422 | 57,0,0,128,303,0,0,159,0,0,2,1,2,1
423 | 42,0,0,102,265,0,0,122,0,0.6,1,0,2,1
424 | 57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
425 | 58,0,1,136,319,1,0,152,0,0,2,2,2,0
426 | 45,1,0,142,309,0,0,147,1,0,1,3,3,0
427 | 51,0,0,130,305,0,1,142,1,1.2,1,0,3,0
428 | 54,0,2,160,201,0,1,163,0,0,2,1,2,1
429 | 57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
430 | 43,1,0,132,247,1,0,143,1,0.1,1,4,3,0
431 | 47,1,2,108,243,0,1,152,0,0,2,0,2,0
432 | 67,1,2,152,212,0,0,150,0,0.8,1,0,3,0
433 | 65,0,0,150,225,0,0,114,0,1,1,3,3,0
434 | 60,0,2,102,318,0,1,160,0,0,2,1,2,1
435 | 37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
436 | 41,0,2,112,268,0,0,172,1,0,2,0,2,1
437 | 57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
438 | 59,0,0,174,249,0,1,143,1,0,1,0,2,0
439 | 67,1,0,120,229,0,0,129,1,2.6,1,2,3,0
440 | 47,1,2,130,253,0,1,179,0,0,2,0,2,1
441 | 58,1,1,120,284,0,0,160,0,1.8,1,0,2,0
442 | 62,0,0,150,244,0,1,154,1,1.4,1,0,2,0
443 | 60,1,0,140,293,0,0,170,0,1.2,1,2,3,0
444 | 57,1,0,152,274,0,1,88,1,1.2,1,1,3,0
445 | 57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
446 | 47,1,2,130,253,0,1,179,0,0,2,0,2,1
447 | 52,1,1,128,205,1,1,184,0,0,2,0,2,1
448 | 53,1,2,130,246,1,0,173,0,0,2,3,2,1
449 | 55,1,0,160,289,0,0,145,1,0.8,1,1,3,0
450 | 51,0,2,120,295,0,0,157,0,0.6,2,0,2,1
451 | 52,1,0,112,230,0,1,160,0,0,2,1,2,0
452 | 63,0,0,150,407,0,0,154,0,4,1,3,3,0
453 | 49,0,1,134,271,0,1,162,0,0,1,0,2,1
454 | 66,0,0,178,228,1,1,165,1,1,1,2,3,0
455 | 49,0,1,134,271,0,1,162,0,0,1,0,2,1
456 | 65,0,0,150,225,0,0,114,0,1,1,3,3,0
457 | 69,1,3,160,234,1,0,131,0,0.1,1,1,2,1
458 | 47,1,2,108,243,0,1,152,0,0,2,0,2,0
459 | 39,0,2,138,220,0,1,152,0,0,1,0,2,1
460 | 43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
461 | 51,1,0,140,261,0,0,186,1,0,2,0,2,1
462 | 69,1,2,140,254,0,0,146,0,2,1,3,3,0
463 | 48,1,2,124,255,1,1,175,0,0,2,2,2,1
464 | 52,1,3,118,186,0,0,190,0,0,1,0,1,1
465 | 43,1,0,110,211,0,1,161,0,0,2,0,3,1
466 | 67,0,2,115,564,0,0,160,0,1.6,1,0,3,1
467 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
468 | 44,1,1,130,219,0,0,188,0,0,2,0,2,1
469 | 47,1,0,110,275,0,0,118,1,1,1,1,2,0
470 | 61,1,2,150,243,1,1,137,1,1,1,0,2,1
471 | 67,1,0,160,286,0,0,108,1,1.5,1,3,2,0
472 | 60,0,3,150,240,0,1,171,0,0.9,2,0,2,1
473 | 64,0,2,140,313,0,1,133,0,0.2,2,0,3,1
474 | 58,0,0,130,197,0,1,131,0,0.6,1,0,2,1
475 | 41,1,2,130,214,0,0,168,0,2,1,0,2,1
476 | 48,1,1,110,229,0,1,168,0,1,0,0,3,0
477 | 57,1,2,150,126,1,1,173,0,0.2,2,1,3,1
478 | 57,1,0,165,289,1,0,124,0,1,1,3,3,0
479 | 57,1,2,128,229,0,0,150,0,0.4,1,1,3,0
480 | 39,1,2,140,321,0,0,182,0,0,2,0,2,1
481 | 58,1,0,128,216,0,0,131,1,2.2,1,3,3,0
482 | 51,0,0,130,305,0,1,142,1,1.2,1,0,3,0
483 | 63,0,0,150,407,0,0,154,0,4,1,3,3,0
484 | 51,1,0,140,298,0,1,122,1,4.2,1,3,3,0
485 | 35,1,1,122,192,0,1,174,0,0,2,0,2,1
486 | 65,1,0,110,248,0,0,158,0,0.6,2,2,1,0
487 | 62,1,1,120,281,0,0,103,0,1.4,1,1,3,0
488 | 41,1,0,110,172,0,0,158,0,0,2,0,3,0
489 | 65,1,0,135,254,0,0,127,0,2.8,1,1,3,0
490 | 54,0,1,132,288,1,0,159,1,0,2,1,2,1
491 | 61,1,2,150,243,1,1,137,1,1,1,0,2,1
492 | 57,0,0,128,303,0,0,159,0,0,2,1,2,1
493 | 57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
494 | 64,1,2,125,309,0,1,131,1,1.8,1,0,3,0
495 | 55,1,0,132,353,0,1,132,1,1.2,1,1,3,0
496 | 51,1,2,125,245,1,0,166,0,2.4,1,0,2,1
497 | 59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
498 | 68,1,2,180,274,1,0,150,1,1.6,1,0,3,0
499 | 57,1,1,154,232,0,0,164,0,0,2,1,2,0
500 | 54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
501 | 46,0,2,142,177,0,0,160,1,1.4,0,0,2,1
502 | 71,0,0,112,149,0,1,125,0,1.6,1,0,2,1
503 | 35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
504 | 46,0,2,142,177,0,0,160,1,1.4,0,0,2,1
505 | 45,0,1,130,234,0,0,175,0,0.6,1,0,2,1
506 | 47,1,2,108,243,0,1,152,0,0,2,0,2,0
507 | 44,0,2,118,242,0,1,149,0,0.3,1,1,2,1
508 | 61,1,0,120,260,0,1,140,1,3.6,1,1,3,0
509 | 41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
510 | 56,0,0,200,288,1,0,133,1,4,0,2,3,0
511 | 55,0,0,180,327,0,2,117,1,3.4,1,0,2,0
512 | 54,0,1,132,288,1,0,159,1,0,2,1,2,1
513 | 43,1,0,120,177,0,0,120,1,2.5,1,0,3,0
514 | 44,1,0,112,290,0,0,153,0,0,2,1,2,0
515 | 54,1,0,110,206,0,0,108,1,0,1,1,2,0
516 | 44,1,1,120,220,0,1,170,0,0,2,0,2,1
517 | 49,1,2,120,188,0,1,139,0,2,1,3,3,0
518 | 60,1,0,130,206,0,0,132,1,2.4,1,2,3,0
519 | 41,0,1,105,198,0,1,168,0,0,2,1,2,1
520 | 49,1,2,120,188,0,1,139,0,2,1,3,3,0
521 | 61,1,0,148,203,0,1,161,0,0,2,1,3,0
522 | 59,1,0,140,177,0,1,162,1,0,2,1,3,0
523 | 58,1,1,125,220,0,1,144,0,0.4,1,4,3,1
524 | 67,0,2,152,277,0,1,172,0,0,2,1,2,1
525 | 61,1,0,148,203,0,1,161,0,0,2,1,3,0
526 | 58,1,2,112,230,0,0,165,0,2.5,1,1,3,0
527 | 51,0,2,130,256,0,0,149,0,0.5,2,0,2,1
528 | 62,0,0,160,164,0,0,145,0,6.2,0,3,3,0
529 | 62,0,0,124,209,0,1,163,0,0,2,0,2,1
530 | 59,1,3,178,270,0,0,145,0,4.2,0,0,3,1
531 | 69,1,3,160,234,1,0,131,0,0.1,1,1,2,1
532 | 60,0,0,150,258,0,0,157,0,2.6,1,2,3,0
533 | 65,0,2,155,269,0,1,148,0,0.8,2,0,2,1
534 | 63,0,0,124,197,0,1,136,1,0,1,0,2,0
535 | 53,0,0,138,234,0,0,160,0,0,2,0,2,1
536 | 54,0,2,108,267,0,0,167,0,0,2,0,2,1
537 | 76,0,2,140,197,0,2,116,0,1.1,1,0,2,1
538 | 50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
539 | 52,1,1,120,325,0,1,172,0,0.2,2,0,2,1
540 | 46,1,0,120,249,0,0,144,0,0.8,2,0,3,0
541 | 64,1,3,170,227,0,0,155,0,0.6,1,0,3,1
542 | 58,1,0,128,259,0,0,130,1,3,1,2,3,0
543 | 44,1,2,140,235,0,0,180,0,0,2,0,2,1
544 | 62,0,0,140,394,0,0,157,0,1.2,1,0,2,1
545 | 59,1,3,134,204,0,1,162,0,0.8,2,2,2,0
546 | 54,1,2,125,273,0,0,152,0,0.5,0,1,2,1
547 | 48,1,1,110,229,0,1,168,0,1,0,0,3,0
548 | 70,1,0,130,322,0,0,109,0,2.4,1,3,2,0
549 | 67,0,0,106,223,0,1,142,0,0.3,2,2,2,1
550 | 51,0,2,120,295,0,0,157,0,0.6,2,0,2,1
551 | 68,1,2,118,277,0,1,151,0,1,2,1,3,1
552 | 69,1,2,140,254,0,0,146,0,2,1,3,3,0
553 | 54,1,0,122,286,0,0,116,1,3.2,1,2,2,0
554 | 43,0,0,132,341,1,0,136,1,3,1,0,3,0
555 | 53,1,2,130,197,1,0,152,0,1.2,0,0,2,1
556 | 58,1,0,100,234,0,1,156,0,0.1,2,1,3,0
557 | 67,1,0,125,254,1,1,163,0,0.2,1,2,3,0
558 | 59,1,0,140,177,0,1,162,1,0,2,1,3,0
559 | 48,1,0,122,222,0,0,186,0,0,2,0,2,1
560 | 39,0,2,94,199,0,1,179,0,0,2,0,2,1
561 | 67,1,0,120,237,0,1,71,0,1,1,0,2,0
562 | 58,0,0,130,197,0,1,131,0,0.6,1,0,2,1
563 | 65,0,2,155,269,0,1,148,0,0.8,2,0,2,1
564 | 42,0,2,120,209,0,1,173,0,0,1,0,2,1
565 | 44,1,0,112,290,0,0,153,0,0,2,1,2,0
566 | 56,1,0,132,184,0,0,105,1,2.1,1,1,1,0
567 | 53,0,0,138,234,0,0,160,0,0,2,0,2,1
568 | 50,0,0,110,254,0,0,159,0,0,2,0,2,1
569 | 41,1,2,130,214,0,0,168,0,2,1,0,2,1
570 | 54,0,2,160,201,0,1,163,0,0,2,1,2,1
571 | 42,1,2,120,240,1,1,194,0,0.8,0,0,3,1
572 | 54,0,2,135,304,1,1,170,0,0,2,0,2,1
573 | 60,1,0,145,282,0,0,142,1,2.8,1,2,3,0
574 | 34,1,3,118,182,0,0,174,0,0,2,0,2,1
575 | 44,1,0,112,290,0,0,153,0,0,2,1,2,0
576 | 60,1,0,125,258,0,0,141,1,2.8,1,1,3,0
577 | 43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
578 | 52,1,3,152,298,1,1,178,0,1.2,1,0,3,1
579 | 70,1,0,130,322,0,0,109,0,2.4,1,3,2,0
580 | 62,0,0,140,394,0,0,157,0,1.2,1,0,2,1
581 | 58,1,0,146,218,0,1,105,0,2,1,1,3,0
582 | 46,1,1,101,197,1,1,156,0,0,2,0,3,1
583 | 44,1,2,140,235,0,0,180,0,0,2,0,2,1
584 | 55,1,1,130,262,0,1,155,0,0,2,0,2,1
585 | 43,1,0,120,177,0,0,120,1,2.5,1,0,3,0
586 | 55,1,0,132,353,0,1,132,1,1.2,1,1,3,0
587 | 40,1,3,140,199,0,1,178,1,1.4,2,0,3,1
588 | 64,1,2,125,309,0,1,131,1,1.8,1,0,3,0
589 | 59,1,0,164,176,1,0,90,0,1,1,2,1,0
590 | 61,0,0,145,307,0,0,146,1,1,1,0,3,0
591 | 54,1,0,122,286,0,0,116,1,3.2,1,2,2,0
592 | 74,0,1,120,269,0,0,121,1,0.2,2,1,2,1
593 | 63,0,0,108,269,0,1,169,1,1.8,1,2,2,0
594 | 70,1,2,160,269,0,1,112,1,2.9,1,1,3,0
595 | 63,0,0,108,269,0,1,169,1,1.8,1,2,2,0
596 | 64,1,0,145,212,0,0,132,0,2,1,2,1,0
597 | 61,1,0,148,203,0,1,161,0,0,2,1,3,0
598 | 59,1,1,140,221,0,1,164,1,0,2,0,2,1
599 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
600 | 58,1,1,120,284,0,0,160,0,1.8,1,0,2,0
601 | 63,0,1,140,195,0,1,179,0,0,2,2,2,1
602 | 62,0,2,130,263,0,1,97,0,1.2,1,1,3,0
603 | 46,1,0,140,311,0,1,120,1,1.8,1,2,3,0
604 | 58,0,2,120,340,0,1,172,0,0,2,0,2,1
605 | 63,0,1,140,195,0,1,179,0,0,2,2,2,1
606 | 47,1,2,130,253,0,1,179,0,0,2,0,2,1
607 | 71,0,2,110,265,1,0,130,0,0,2,1,2,1
608 | 66,1,0,112,212,0,0,132,1,0.1,2,1,2,0
609 | 42,1,0,136,315,0,1,125,1,1.8,1,0,1,0
610 | 64,1,0,145,212,0,0,132,0,2,1,2,1,0
611 | 55,0,0,180,327,0,2,117,1,3.4,1,0,2,0
612 | 43,0,0,132,341,1,0,136,1,3,1,0,3,0
613 | 55,0,0,128,205,0,2,130,1,2,1,1,3,0
614 | 58,0,0,170,225,1,0,146,1,2.8,1,2,1,0
615 | 55,1,0,140,217,0,1,111,1,5.6,0,0,3,0
616 | 51,0,0,130,305,0,1,142,1,1.2,1,0,3,0
617 | 50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
618 | 43,1,0,115,303,0,1,181,0,1.2,1,0,2,1
619 | 41,0,1,126,306,0,1,163,0,0,2,0,2,1
620 | 49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
621 | 65,1,0,110,248,0,0,158,0,0.6,2,2,1,0
622 | 57,1,0,152,274,0,1,88,1,1.2,1,1,3,0
623 | 48,1,0,130,256,1,0,150,1,0,2,2,3,0
624 | 62,0,0,138,294,1,1,106,0,1.9,1,3,2,0
625 | 61,1,3,134,234,0,1,145,0,2.6,1,2,2,0
626 | 59,1,3,178,270,0,0,145,0,4.2,0,0,3,1
627 | 69,1,2,140,254,0,0,146,0,2,1,3,3,0
628 | 58,1,2,132,224,0,0,173,0,3.2,2,2,3,0
629 | 38,1,3,120,231,0,1,182,1,3.8,1,0,3,0
630 | 69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
631 | 65,1,3,138,282,1,0,174,0,1.4,1,1,2,0
632 | 45,1,3,110,264,0,1,132,0,1.2,1,0,3,0
633 | 49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
634 | 45,0,1,130,234,0,0,175,0,0.6,1,0,2,1
635 | 61,1,0,138,166,0,0,125,1,3.6,1,1,2,0
636 | 52,1,0,125,212,0,1,168,0,1,2,2,3,0
637 | 53,0,0,130,264,0,0,143,0,0.4,1,0,2,1
638 | 59,0,0,174,249,0,1,143,1,0,1,0,2,0
639 | 58,0,2,120,340,0,1,172,0,0,2,0,2,1
640 | 65,1,3,138,282,1,0,174,0,1.4,1,1,2,0
641 | 58,0,0,130,197,0,1,131,0,0.6,1,0,2,1
642 | 46,0,0,138,243,0,0,152,1,0,1,0,2,1
643 | 56,0,0,134,409,0,0,150,1,1.9,1,2,3,0
644 | 64,1,0,128,263,0,1,105,1,0.2,1,1,3,1
645 | 65,1,0,120,177,0,1,140,0,0.4,2,0,3,1
646 | 44,1,2,120,226,0,1,169,0,0,2,0,2,1
647 | 50,1,0,150,243,0,0,128,0,2.6,1,0,3,0
648 | 47,1,2,108,243,0,1,152,0,0,2,0,2,0
649 | 64,0,0,130,303,0,1,122,0,2,1,2,2,1
650 | 71,0,0,112,149,0,1,125,0,1.6,1,0,2,1
651 | 45,0,1,130,234,0,0,175,0,0.6,1,0,2,1
652 | 62,1,0,120,267,0,1,99,1,1.8,1,2,3,0
653 | 41,1,1,120,157,0,1,182,0,0,2,0,2,1
654 | 66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
655 | 56,1,0,130,283,1,0,103,1,1.6,0,0,3,0
656 | 41,0,1,126,306,0,1,163,0,0,2,0,2,1
657 | 41,1,1,110,235,0,1,153,0,0,2,0,2,1
658 | 57,0,1,130,236,0,0,174,0,0,1,1,2,0
659 | 39,0,2,138,220,0,1,152,0,0,1,0,2,1
660 | 64,1,2,125,309,0,1,131,1,1.8,1,0,3,0
661 | 59,1,0,138,271,0,0,182,0,0,2,0,2,1
662 | 61,1,0,138,166,0,0,125,1,3.6,1,1,2,0
663 | 58,1,0,114,318,0,2,140,0,4.4,0,3,1,0
664 | 47,1,0,112,204,0,1,143,0,0.1,2,0,2,1
665 | 58,0,0,100,248,0,0,122,0,1,1,0,2,1
666 | 66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
667 | 65,0,2,140,417,1,0,157,0,0.8,2,1,2,1
668 | 35,1,1,122,192,0,1,174,0,0,2,0,2,1
669 | 57,1,1,124,261,0,1,141,0,0.3,2,0,3,0
670 | 29,1,1,130,204,0,0,202,0,0,2,0,2,1
671 | 66,1,1,160,246,0,1,120,1,0,1,3,1,0
672 | 61,0,0,130,330,0,0,169,0,0,2,0,2,0
673 | 52,1,0,125,212,0,1,168,0,1,2,2,3,0
674 | 68,1,2,118,277,0,1,151,0,1,2,1,3,1
675 | 54,1,2,120,258,0,0,147,0,0.4,1,0,3,1
676 | 63,1,0,130,330,1,0,132,1,1.8,2,3,3,0
677 | 58,1,0,100,234,0,1,156,0,0.1,2,1,3,0
678 | 60,1,0,130,253,0,1,144,1,1.4,2,1,3,0
679 | 63,1,0,130,254,0,0,147,0,1.4,1,1,3,0
680 | 41,0,2,112,268,0,0,172,1,0,2,0,2,1
681 | 68,1,2,180,274,1,0,150,1,1.6,1,0,3,0
682 | 42,1,1,120,295,0,1,162,0,0,2,0,2,1
683 | 59,1,0,170,326,0,0,140,1,3.4,0,0,3,0
684 | 59,1,0,164,176,1,0,90,0,1,1,2,1,0
685 | 43,1,0,120,177,0,0,120,1,2.5,1,0,3,0
686 | 60,1,2,140,185,0,0,155,0,3,1,0,2,0
687 | 63,0,0,150,407,0,0,154,0,4,1,3,3,0
688 | 52,1,0,128,204,1,1,156,1,1,1,0,0,0
689 | 58,1,0,125,300,0,0,171,0,0,2,2,3,0
690 | 56,0,0,200,288,1,0,133,1,4,0,2,3,0
691 | 54,0,2,135,304,1,1,170,0,0,2,0,2,1
692 | 58,1,2,105,240,0,0,154,1,0.6,1,0,3,1
693 | 55,0,1,135,250,0,0,161,0,1.4,1,0,2,1
694 | 53,1,0,140,203,1,0,155,1,3.1,0,0,3,0
695 | 63,0,1,140,195,0,1,179,0,0,2,2,2,1
696 | 39,1,0,118,219,0,1,140,0,1.2,1,0,3,0
697 | 35,1,0,126,282,0,0,156,1,0,2,0,3,0
698 | 50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
699 | 67,1,2,152,212,0,0,150,0,0.8,1,0,3,0
700 | 66,1,0,112,212,0,0,132,1,0.1,2,1,2,0
701 | 35,1,0,126,282,0,0,156,1,0,2,0,3,0
702 | 41,1,2,130,214,0,0,168,0,2,1,0,2,1
703 | 35,1,0,120,198,0,1,130,1,1.6,1,0,3,0
704 | 71,0,1,160,302,0,1,162,0,0.4,2,2,2,1
705 | 57,1,0,110,201,0,1,126,1,1.5,1,0,1,1
706 | 51,1,2,94,227,0,1,154,1,0,2,1,3,1
707 | 58,1,0,128,216,0,0,131,1,2.2,1,3,3,0
708 | 57,1,2,128,229,0,0,150,0,0.4,1,1,3,0
709 | 56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
710 | 60,0,2,120,178,1,1,96,0,0,2,0,2,1
711 | 45,1,3,110,264,0,1,132,0,1.2,1,0,3,0
712 | 56,1,1,130,221,0,0,163,0,0,2,0,3,1
713 | 35,1,0,120,198,0,1,130,1,1.6,1,0,3,0
714 | 45,0,1,112,160,0,1,138,0,0,1,0,2,1
715 | 66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
716 | 51,1,3,125,213,0,0,125,1,1.4,2,1,2,1
717 | 70,1,1,156,245,0,0,143,0,0,2,0,2,1
718 | 55,0,0,128,205,0,2,130,1,2,1,1,3,0
719 | 56,1,2,130,256,1,0,142,1,0.6,1,1,1,0
720 | 55,0,1,135,250,0,0,161,0,1.4,1,0,2,1
721 | 52,1,0,108,233,1,1,147,0,0.1,2,3,3,1
722 | 64,1,2,140,335,0,1,158,0,0,2,0,2,0
723 | 45,1,0,115,260,0,0,185,0,0,2,0,2,1
724 | 67,0,2,152,277,0,1,172,0,0,2,1,2,1
725 | 68,0,2,120,211,0,0,115,0,1.5,1,0,2,1
726 | 74,0,1,120,269,0,0,121,1,0.2,2,1,2,1
727 | 60,0,0,150,258,0,0,157,0,2.6,1,2,3,0
728 | 48,1,0,124,274,0,0,166,0,0.5,1,0,3,0
729 | 56,1,1,130,221,0,0,163,0,0,2,0,3,1
730 | 46,1,0,140,311,0,1,120,1,1.8,1,2,3,0
731 | 55,0,1,135,250,0,0,161,0,1.4,1,0,2,1
732 | 44,1,1,120,220,0,1,170,0,0,2,0,2,1
733 | 52,1,0,112,230,0,1,160,0,0,2,1,2,0
734 | 51,1,2,94,227,0,1,154,1,0,2,1,3,1
735 | 44,0,2,108,141,0,1,175,0,0.6,1,0,2,1
736 | 52,1,0,128,204,1,1,156,1,1,1,0,0,0
737 | 50,1,2,129,196,0,1,163,0,0,2,0,2,1
738 | 59,1,0,110,239,0,0,142,1,1.2,1,1,3,0
739 | 67,1,0,120,229,0,0,129,1,2.6,1,2,3,0
740 | 58,1,0,125,300,0,0,171,0,0,2,2,3,0
741 | 52,1,0,128,255,0,1,161,1,0,2,1,3,0
742 | 44,1,2,140,235,0,0,180,0,0,2,0,2,1
743 | 41,0,2,112,268,0,0,172,1,0,2,0,2,1
744 | 63,1,0,130,330,1,0,132,1,1.8,2,3,3,0
745 | 58,1,1,125,220,0,1,144,0,0.4,1,4,3,1
746 | 60,0,2,102,318,0,1,160,0,0,2,1,2,1
747 | 51,1,2,100,222,0,1,143,1,1.2,1,0,2,1
748 | 64,1,2,140,335,0,1,158,0,0,2,0,2,0
749 | 60,1,0,117,230,1,1,160,1,1.4,2,2,3,0
750 | 44,1,2,120,226,0,1,169,0,0,2,0,2,1
751 | 58,1,1,125,220,0,1,144,0,0.4,1,4,3,1
752 | 55,1,1,130,262,0,1,155,0,0,2,0,2,1
753 | 65,0,2,160,360,0,0,151,0,0.8,2,0,2,1
754 | 48,1,1,130,245,0,0,180,0,0.2,1,0,2,1
755 | 65,1,0,120,177,0,1,140,0,0.4,2,0,3,1
756 | 51,0,2,130,256,0,0,149,0,0.5,2,0,2,1
757 | 48,1,2,124,255,1,1,175,0,0,2,2,2,1
758 | 64,1,0,120,246,0,0,96,1,2.2,0,1,2,0
759 | 66,1,0,160,228,0,0,138,0,2.3,2,0,1,1
760 | 46,0,1,105,204,0,1,172,0,0,2,0,2,1
761 | 61,0,0,130,330,0,0,169,0,0,2,0,2,0
762 | 57,1,0,150,276,0,0,112,1,0.6,1,1,1,0
763 | 49,0,0,130,269,0,1,163,0,0,2,0,2,1
764 | 56,1,1,130,221,0,0,163,0,0,2,0,3,1
765 | 58,0,3,150,283,1,0,162,0,1,2,0,2,1
766 | 63,1,0,140,187,0,0,144,1,4,2,2,3,0
767 | 57,1,0,110,335,0,1,143,1,3,1,1,3,0
768 | 57,1,0,110,335,0,1,143,1,3,1,1,3,0
769 | 68,1,0,144,193,1,1,141,0,3.4,1,2,3,0
770 | 46,1,1,101,197,1,1,156,0,0,2,0,3,1
771 | 71,0,2,110,265,1,0,130,0,0,2,1,2,1
772 | 41,1,1,135,203,0,1,132,0,0,1,0,1,1
773 | 45,0,0,138,236,0,0,152,1,0.2,1,0,2,1
774 | 62,0,0,150,244,0,1,154,1,1.4,1,0,2,0
775 | 65,0,0,150,225,0,0,114,0,1,1,3,3,0
776 | 48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
777 | 51,1,2,100,222,0,1,143,1,1.2,1,0,2,1
778 | 61,0,0,145,307,0,0,146,1,1,1,0,3,0
779 | 53,1,0,123,282,0,1,95,1,2,1,2,3,0
780 | 59,1,3,134,204,0,1,162,0,0.8,2,2,2,0
781 | 34,0,1,118,210,0,1,192,0,0.7,2,0,2,1
782 | 44,1,0,120,169,0,1,144,1,2.8,0,0,1,0
783 | 58,1,0,146,218,0,1,105,0,2,1,1,3,0
784 | 64,0,0,130,303,0,1,122,0,2,1,2,2,1
785 | 56,1,1,120,240,0,1,169,0,0,0,0,2,1
786 | 54,1,2,150,232,0,0,165,0,1.6,2,0,3,1
787 | 55,1,0,160,289,0,0,145,1,0.8,1,1,3,0
788 | 67,1,0,125,254,1,1,163,0,0.2,1,2,3,0
789 | 51,1,0,140,298,0,1,122,1,4.2,1,3,3,0
790 | 62,0,0,138,294,1,1,106,0,1.9,1,3,2,0
791 | 62,1,1,120,281,0,0,103,0,1.4,1,1,3,0
792 | 54,1,0,110,239,0,1,126,1,2.8,1,1,3,0
793 | 54,1,0,110,239,0,1,126,1,2.8,1,1,3,0
794 | 68,1,0,144,193,1,1,141,0,3.4,1,2,3,0
795 | 60,0,2,120,178,1,1,96,0,0,2,0,2,1
796 | 61,1,3,134,234,0,1,145,0,2.6,1,2,2,0
797 | 62,1,1,128,208,1,0,140,0,0,2,0,2,1
798 | 41,1,1,135,203,0,1,132,0,0,1,0,1,1
799 | 65,0,0,150,225,0,0,114,0,1,1,3,3,0
800 | 59,1,3,170,288,0,0,159,0,0.2,1,0,3,0
801 | 43,1,0,115,303,0,1,181,0,1.2,1,0,2,1
802 | 67,1,0,120,229,0,0,129,1,2.6,1,2,3,0
803 | 63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
804 | 63,0,0,124,197,0,1,136,1,0,1,0,2,0
805 | 52,1,0,112,230,0,1,160,0,0,2,1,2,0
806 | 58,0,0,130,197,0,1,131,0,0.6,1,0,2,1
807 | 53,1,0,142,226,0,0,111,1,0,2,0,3,1
808 | 57,1,0,150,276,0,0,112,1,0.6,1,1,1,0
809 | 44,1,2,130,233,0,1,179,1,0.4,2,0,2,1
810 | 51,1,2,94,227,0,1,154,1,0,2,1,3,1
811 | 54,0,2,110,214,0,1,158,0,1.6,1,0,2,1
812 | 40,1,0,110,167,0,0,114,1,2,1,0,3,0
813 | 57,1,1,124,261,0,1,141,0,0.3,2,0,3,0
814 | 62,0,0,140,268,0,0,160,0,3.6,0,2,2,0
815 | 53,1,0,140,203,1,0,155,1,3.1,0,0,3,0
816 | 62,1,1,128,208,1,0,140,0,0,2,0,2,1
817 | 58,1,2,105,240,0,0,154,1,0.6,1,0,3,1
818 | 70,1,1,156,245,0,0,143,0,0,2,0,2,1
819 | 45,1,0,115,260,0,0,185,0,0,2,0,2,1
820 | 42,1,3,148,244,0,0,178,0,0.8,2,2,2,1
821 | 58,0,0,170,225,1,0,146,1,2.8,1,2,1,0
822 | 61,1,0,140,207,0,0,138,1,1.9,2,1,3,0
823 | 62,0,0,140,268,0,0,160,0,3.6,0,2,2,0
824 | 60,1,0,130,253,0,1,144,1,1.4,2,1,3,0
825 | 54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
826 | 61,1,0,138,166,0,0,125,1,3.6,1,1,2,0
827 | 63,0,2,135,252,0,0,172,0,0,2,0,2,1
828 | 42,1,2,130,180,0,1,150,0,0,2,0,2,1
829 | 57,1,2,128,229,0,0,150,0,0.4,1,1,3,0
830 | 44,1,2,130,233,0,1,179,1,0.4,2,0,2,1
831 | 54,1,0,124,266,0,0,109,1,2.2,1,1,3,0
832 | 51,1,2,100,222,0,1,143,1,1.2,1,0,2,1
833 | 58,1,1,125,220,0,1,144,0,0.4,1,4,3,1
834 | 68,1,2,118,277,0,1,151,0,1,2,1,3,1
835 | 55,1,0,140,217,0,1,111,1,5.6,0,0,3,0
836 | 42,1,0,136,315,0,1,125,1,1.8,1,0,1,0
837 | 49,1,2,118,149,0,0,126,0,0.8,2,3,2,0
838 | 53,0,0,138,234,0,0,160,0,0,2,0,2,1
839 | 52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
840 | 51,1,3,125,213,0,0,125,1,1.4,2,1,2,1
841 | 51,1,0,140,261,0,0,186,1,0,2,0,2,1
842 | 70,1,0,145,174,0,1,125,1,2.6,0,0,3,0
843 | 35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
844 | 58,1,2,112,230,0,0,165,0,2.5,1,1,3,0
845 | 59,1,3,160,273,0,0,125,0,0,2,0,2,0
846 | 60,1,0,140,293,0,0,170,0,1.2,1,2,3,0
847 | 56,1,0,132,184,0,0,105,1,2.1,1,1,1,0
848 | 35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
849 | 61,1,0,138,166,0,0,125,1,3.6,1,1,2,0
850 | 58,0,3,150,283,1,0,162,0,1,2,0,2,1
851 | 52,1,0,128,255,0,1,161,1,0,2,1,3,0
852 | 58,1,1,120,284,0,0,160,0,1.8,1,0,2,0
853 | 37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
854 | 52,1,0,128,255,0,1,161,1,0,2,1,3,0
855 | 67,1,0,120,229,0,0,129,1,2.6,1,2,3,0
856 | 65,1,3,138,282,1,0,174,0,1.4,1,1,2,0
857 | 46,1,1,101,197,1,1,156,0,0,2,0,3,1
858 | 68,0,2,120,211,0,0,115,0,1.5,1,0,2,1
859 | 43,1,0,115,303,0,1,181,0,1.2,1,0,2,1
860 | 68,0,2,120,211,0,0,115,0,1.5,1,0,2,1
861 | 51,1,0,140,299,0,1,173,1,1.6,2,0,3,0
862 | 52,1,0,112,230,0,1,160,0,0,2,1,2,0
863 | 64,1,2,140,335,0,1,158,0,0,2,0,2,0
864 | 59,1,3,170,288,0,0,159,0,0.2,1,0,3,0
865 | 52,1,0,125,212,0,1,168,0,1,2,2,3,0
866 | 59,1,3,160,273,0,0,125,0,0,2,0,2,0
867 | 60,0,3,150,240,0,1,171,0,0.9,2,0,2,1
868 | 41,1,2,112,250,0,1,179,0,0,2,0,2,1
869 | 41,1,1,110,235,0,1,153,0,0,2,0,2,1
870 | 56,1,1,120,240,0,1,169,0,0,0,0,2,1
871 | 56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
872 | 48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
873 | 39,1,2,140,321,0,0,182,0,0,2,0,2,1
874 | 64,1,3,170,227,0,0,155,0,0.6,1,0,3,1
875 | 57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
876 | 59,1,3,160,273,0,0,125,0,0,2,0,2,0
877 | 60,1,0,130,206,0,0,132,1,2.4,1,2,3,0
878 | 61,1,0,140,207,0,0,138,1,1.9,2,1,3,0
879 | 43,0,2,122,213,0,1,165,0,0.2,1,0,2,1
880 | 54,1,0,120,188,0,1,113,0,1.4,1,1,3,0
881 | 59,1,0,138,271,0,0,182,0,0,2,0,2,1
882 | 57,1,0,132,207,0,1,168,1,0,2,0,3,1
883 | 57,1,1,154,232,0,0,164,0,0,2,1,2,0
884 | 57,1,0,130,131,0,1,115,1,1.2,1,1,3,0
885 | 48,1,0,124,274,0,0,166,0,0.5,1,0,3,0
886 | 70,1,0,145,174,0,1,125,1,2.6,0,0,3,0
887 | 57,1,0,165,289,1,0,124,0,1,1,3,3,0
888 | 61,1,0,120,260,0,1,140,1,3.6,1,1,3,0
889 | 57,1,0,110,201,0,1,126,1,1.5,1,0,1,1
890 | 60,0,0,150,258,0,0,157,0,2.6,1,2,3,0
891 | 63,0,0,150,407,0,0,154,0,4,1,3,3,0
892 | 55,0,0,128,205,0,2,130,1,2,1,1,3,0
893 | 64,0,0,180,325,0,1,154,1,0,2,0,2,1
894 | 54,1,0,110,239,0,1,126,1,2.8,1,1,3,0
895 | 52,1,0,128,204,1,1,156,1,1,1,0,0,0
896 | 51,1,0,140,299,0,1,173,1,1.6,2,0,3,0
897 | 62,0,2,130,263,0,1,97,0,1.2,1,1,3,0
898 | 59,1,3,178,270,0,0,145,0,4.2,0,0,3,1
899 | 52,1,1,134,201,0,1,158,0,0.8,2,1,2,1
900 | 42,0,0,102,265,0,0,122,0,0.6,1,0,2,1
901 | 59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
902 | 61,1,3,134,234,0,1,145,0,2.6,1,2,2,0
903 | 42,0,0,102,265,0,0,122,0,0.6,1,0,2,1
904 | 62,0,0,140,268,0,0,160,0,3.6,0,2,2,0
905 | 59,1,2,126,218,1,1,134,0,2.2,1,1,1,0
906 | 55,1,1,130,262,0,1,155,0,0,2,0,2,1
907 | 64,1,0,120,246,0,0,96,1,2.2,0,1,2,0
908 | 42,1,0,140,226,0,1,178,0,0,2,0,2,1
909 | 50,0,1,120,244,0,1,162,0,1.1,2,0,2,1
910 | 62,1,0,120,267,0,1,99,1,1.8,1,2,3,0
911 | 50,1,0,144,200,0,0,126,1,0.9,1,0,3,0
912 | 50,1,2,140,233,0,1,163,0,0.6,1,1,3,0
913 | 58,0,1,136,319,1,0,152,0,0,2,2,2,0
914 | 35,1,0,120,198,0,1,130,1,1.6,1,0,3,0
915 | 45,1,0,104,208,0,0,148,1,3,1,0,2,1
916 | 66,1,0,112,212,0,0,132,1,0.1,2,1,2,0
917 | 46,1,0,120,249,0,0,144,0,0.8,2,0,3,0
918 | 65,1,0,135,254,0,0,127,0,2.8,1,1,3,0
919 | 47,1,2,130,253,0,1,179,0,0,2,0,2,1
920 | 59,1,3,134,204,0,1,162,0,0.8,2,2,2,0
921 | 38,1,3,120,231,0,1,182,1,3.8,1,0,3,0
922 | 39,1,0,118,219,0,1,140,0,1.2,1,0,3,0
923 | 58,1,0,146,218,0,1,105,0,2,1,1,3,0
924 | 44,1,1,120,263,0,1,173,0,0,2,0,3,1
925 | 54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
926 | 61,0,0,130,330,0,0,169,0,0,2,0,2,0
927 | 57,1,0,130,131,0,1,115,1,1.2,1,1,3,0
928 | 54,1,0,110,206,0,0,108,1,0,1,1,2,0
929 | 42,1,2,120,240,1,1,194,0,0.8,0,0,3,1
930 | 54,1,0,124,266,0,0,109,1,2.2,1,1,3,0
931 | 60,1,0,130,206,0,0,132,1,2.4,1,2,3,0
932 | 65,1,0,135,254,0,0,127,0,2.8,1,1,3,0
933 | 40,1,0,152,223,0,1,181,0,0,2,0,3,0
934 | 51,0,2,140,308,0,0,142,0,1.5,2,1,2,1
935 | 38,1,3,120,231,0,1,182,1,3.8,1,0,3,0
936 | 42,1,2,130,180,0,1,150,0,0,2,0,2,1
937 | 56,1,1,120,240,0,1,169,0,0,0,0,2,1
938 | 43,1,2,130,315,0,1,162,0,1.9,2,1,2,1
939 | 64,1,2,140,335,0,1,158,0,0,2,0,2,0
940 | 53,1,0,142,226,0,0,111,1,0,2,0,3,1
941 | 49,0,1,134,271,0,1,162,0,0,1,0,2,1
942 | 57,0,0,140,241,0,1,123,1,0.2,1,0,3,0
943 | 52,0,2,136,196,0,0,169,0,0.1,1,0,2,1
944 | 69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
945 | 65,1,0,120,177,0,1,140,0,0.4,2,0,3,1
946 | 66,0,0,178,228,1,1,165,1,1,1,2,3,0
947 | 56,1,3,120,193,0,0,162,0,1.9,1,0,3,1
948 | 67,0,2,152,277,0,1,172,0,0,2,1,2,1
949 | 54,0,2,160,201,0,1,163,0,0,2,1,2,1
950 | 70,1,0,145,174,0,1,125,1,2.6,0,0,3,0
951 | 57,1,0,132,207,0,1,168,1,0,2,0,3,1
952 | 67,1,0,160,286,0,0,108,1,1.5,1,3,2,0
953 | 62,0,2,130,263,0,1,97,0,1.2,1,1,3,0
954 | 54,0,2,135,304,1,1,170,0,0,2,0,2,1
955 | 45,0,0,138,236,0,0,152,1,0.2,1,0,2,1
956 | 53,0,0,130,264,0,0,143,0,0.4,1,0,2,1
957 | 62,1,2,130,231,0,1,146,0,1.8,1,3,3,1
958 | 49,0,0,130,269,0,1,163,0,0,2,0,2,1
959 | 50,1,2,140,233,0,1,163,0,0.6,1,1,3,0
960 | 65,0,2,140,417,1,0,157,0,0.8,2,1,2,1
961 | 69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
962 | 52,0,2,136,196,0,0,169,0,0.1,1,0,2,1
963 | 58,0,0,100,248,0,0,122,0,1,1,0,2,1
964 | 52,1,0,108,233,1,1,147,0,0.1,2,3,3,1
965 | 57,0,0,140,241,0,1,123,1,0.2,1,0,3,0
966 | 44,0,2,108,141,0,1,175,0,0.6,1,0,2,1
967 | 76,0,2,140,197,0,2,116,0,1.1,1,0,2,1
968 | 58,1,0,128,259,0,0,130,1,3,1,2,3,0
969 | 60,0,2,120,178,1,1,96,0,0,2,0,2,1
970 | 53,1,0,140,203,1,0,155,1,3.1,0,0,3,0
971 | 52,1,1,120,325,0,1,172,0,0.2,2,0,2,1
972 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1
973 | 52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
974 | 52,1,3,118,186,0,0,190,0,0,1,0,1,1
975 | 51,1,2,125,245,1,0,166,0,2.4,1,0,2,1
976 | 43,1,0,110,211,0,1,161,0,0,2,0,3,1
977 | 39,1,0,118,219,0,1,140,0,1.2,1,0,3,0
978 | 63,0,0,108,269,0,1,169,1,1.8,1,2,2,0
979 | 52,1,1,128,205,1,1,184,0,0,2,0,2,1
980 | 44,1,0,110,197,0,0,177,0,0,2,1,2,0
981 | 45,1,0,142,309,0,0,147,1,0,1,3,3,0
982 | 57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
983 | 39,1,0,118,219,0,1,140,0,1.2,1,0,3,0
984 | 67,0,0,106,223,0,1,142,0,0.3,2,2,2,1
985 | 64,1,0,128,263,0,1,105,1,0.2,1,1,3,1
986 | 59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
987 | 62,1,2,130,231,0,1,146,0,1.8,1,3,3,1
988 | 55,0,0,180,327,0,2,117,1,3.4,1,0,2,0
989 | 57,1,1,154,232,0,0,164,0,0,2,1,2,0
990 | 60,1,0,140,293,0,0,170,0,1.2,1,2,3,0
991 | 71,0,1,160,302,0,1,162,0,0.4,2,2,2,1
992 | 56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
993 | 60,1,0,117,230,1,1,160,1,1.4,2,2,3,0
994 | 50,0,0,110,254,0,0,159,0,0,2,0,2,1
995 | 43,1,0,132,247,1,0,143,1,0.1,1,4,3,0
996 | 59,1,0,110,239,0,0,142,1,1.2,1,1,3,0
997 | 44,1,1,120,263,0,1,173,0,0,2,0,3,1
998 | 56,0,0,134,409,0,0,150,1,1.9,1,2,3,0
999 | 54,1,0,120,188,0,1,113,0,1.4,1,1,3,0
1000 | 42,1,0,136,315,0,1,125,1,1.8,1,0,1,0
1001 | 67,1,0,125,254,1,1,163,0,0.2,1,2,3,0
1002 | 64,1,0,145,212,0,0,132,0,2,1,2,1,0
1003 | 42,1,0,140,226,0,1,178,0,0,2,0,2,1
1004 | 66,1,0,112,212,0,0,132,1,0.1,2,1,2,0
1005 | 52,1,0,108,233,1,1,147,0,0.1,2,3,3,1
1006 | 51,0,2,140,308,0,0,142,0,1.5,2,1,2,1
1007 | 55,0,0,128,205,0,2,130,1,2,1,1,3,0
1008 | 58,1,2,140,211,1,0,165,0,0,2,0,2,1
1009 | 56,1,3,120,193,0,0,162,0,1.9,1,0,3,1
1010 | 42,1,1,120,295,0,1,162,0,0,2,0,2,1
1011 | 40,1,0,152,223,0,1,181,0,0,2,0,3,0
1012 | 51,1,0,140,299,0,1,173,1,1.6,2,0,3,0
1013 | 45,1,1,128,308,0,0,170,0,0,2,0,2,1
1014 | 48,1,1,110,229,0,1,168,0,1,0,0,3,0
1015 | 58,1,0,114,318,0,2,140,0,4.4,0,3,1,0
1016 | 44,0,2,108,141,0,1,175,0,0.6,1,0,2,1
1017 | 58,1,0,128,216,0,0,131,1,2.2,1,3,3,0
1018 | 65,1,3,138,282,1,0,174,0,1.4,1,1,2,0
1019 | 53,1,0,123,282,0,1,95,1,2,1,2,3,0
1020 | 41,1,0,110,172,0,0,158,0,0,2,0,3,0
1021 | 47,1,0,112,204,0,1,143,0,0.1,2,0,2,1
1022 | 59,1,1,140,221,0,1,164,1,0,2,0,2,1
1023 | 60,1,0,125,258,0,0,141,1,2.8,1,1,3,0
1024 | 47,1,0,110,275,0,0,118,1,1,1,1,2,0
1025 | 50,0,0,110,254,0,0,159,0,0,2,0,2,1
1026 | 54,1,0,120,188,0,1,113,0,1.4,1,1,3,0
1027 |
--------------------------------------------------------------------------------
/dimensionality_reduction/dimensionality_reduction_examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------
/dimensionality_reduction/dimensionality_reduction_implementation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------
/linear_regression/README.md:
--------------------------------------------------------------------------------
1 | # Linear Regression Implementations
2 |
3 | This folder's `simple_regression` subfolder contains two core implementations of Linear Regression:
4 | 
5 | 1. `linear_regression_scratch.ipynb` - Implementation of Linear Regression from scratch using NumPy
6 | 2. `linear_regression_sklearn.ipynb` - Implementation using scikit-learn's built-in `LinearRegression`
7 |
8 | ## Theory
9 | Linear Regression is a supervised learning algorithm that models the relationship between a dependent variable (target) and one or more independent variables (features) by fitting a linear equation to the observed data.
10 |
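For a single feature, the fitted equation has the form `y = w*x + b`, with the weight `w` and bias `b` chosen to minimize the mean squared error on the training data. As a minimal sketch of that idea (using NumPy's least-squares solver directly, not the code from either notebook):

```python
import numpy as np

# Toy data: y is roughly 2x + 1 plus a little noise
rng = np.random.default_rng(0)
x = rng.random(50)
y = 2 * x + 1 + 0.1 * rng.standard_normal(50)

# Stack a column of ones so the solver fits [w, b] together
A = np.column_stack([x, np.ones_like(x)])
w, b = np.linalg.lstsq(A, y, rcond=None)[0]
print(f"w = {w:.2f}, b = {b:.2f}")  # should land close to 2 and 1
```
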
11 | ## Requirements
12 | - numpy
13 | - pandas
14 | - matplotlib
15 | - scikit-learn
16 |
--------------------------------------------------------------------------------
/linear_regression/multiple_regression/images/loss_history.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/multiple_regression/images/loss_history.png
--------------------------------------------------------------------------------
/linear_regression/multiple_regression/images/ml_regression_training_comparison.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/multiple_regression/images/ml_regression_training_comparison.gif
--------------------------------------------------------------------------------
/linear_regression/multiple_regression/images/output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/multiple_regression/images/output.png
--------------------------------------------------------------------------------
/linear_regression/multiple_regression/multiple_regression_sklearn.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Multiple Linear Regression using Scikit-learn\n",
8 | "\n",
9 |     "This notebook demonstrates how to implement Multiple Linear Regression using scikit-learn. Unlike simple linear regression, which uses one feature, multiple linear regression uses multiple features to predict the target variable.\n",
10 | "\n",
11 | "## What we'll cover:\n",
12 | "1. Data preparation with multiple features\n",
13 | "2. Train-test splitting\n",
14 | "3. Model training and prediction\n",
15 | "4. Model evaluation using multiple metrics\n",
16 | "5. Results visualization"
17 | ]
18 | },
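  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "With three features, the model takes the form\n",
    "\n",
    "$$\\hat{y} = \\beta_0 + \\beta_1 x_1 + \\beta_2 x_2 + \\beta_3 x_3$$\n",
    "\n",
    "and `LinearRegression` estimates the intercept $\\beta_0$ and the coefficients $\\beta_1, \\beta_2, \\beta_3$ by ordinary least squares."
   ]
  },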
19 | {
20 | "cell_type": "code",
21 | "execution_count": null,
22 | "metadata": {},
23 | "source": [
24 | "import numpy as np\n",
25 | "import matplotlib.pyplot as plt\n",
26 | "from sklearn.linear_model import LinearRegression\n",
27 | "from sklearn.model_selection import train_test_split\n",
28 | "from sklearn.metrics import mean_squared_error, r2_score\n",
29 | "%matplotlib inline\n",
30 | "\n",
31 | "# Set random seed for reproducibility\n",
32 | "np.random.seed(0)"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "## Generate Sample Data with Multiple Features\n",
40 | "\n",
41 | "We create synthetic data with a known relationship:\n",
42 | "- True relationship: y = 2 + 3x₁ + 1.5x₂ - 2x₃ + noise\n",
43 | "- Three features (X₁, X₂, X₃) are randomly generated\n",
44 | "- Gaussian noise is added for realism"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "metadata": {},
51 | "source": [
52 | "# Generate random data points with 3 features\n",
53 | "n_samples = 100\n",
54 | "X = np.random.rand(n_samples, 3) # 100 samples, 3 features\n",
55 | "\n",
56 | "# True relationship: y = 2 + 3x₁ + 1.5x₂ - 2x₃ + noise\n",
57 | "y = 2 + 3 * X[:, 0] + 1.5 * X[:, 1] - 2 * X[:, 2] + np.random.randn(n_samples)\n",
58 | "y = y.reshape(-1, 1)\n",
59 | "\n",
60 | "# Create feature names for better visualization\n",
61 | "feature_names = ['Feature 1', 'Feature 2', 'Feature 3']\n",
62 | "\n",
63 | "# Plot relationships between each feature and target\n",
64 | "fig, axes = plt.subplots(1, 3, figsize=(15, 5))\n",
65 | "fig.suptitle('Relationship between Features and Target Variable')\n",
66 | "\n",
67 | "for i in range(3):\n",
68 | " axes[i].scatter(X[:, i], y, alpha=0.5)\n",
69 | " axes[i].set_xlabel(feature_names[i])\n",
70 | " axes[i].set_ylabel('Target Variable')\n",
71 | " axes[i].grid(True)\n",
72 | "\n",
73 | "plt.tight_layout()\n",
74 | "plt.show()"
75 | ]
76 | },
77 | {
78 | "cell_type": "markdown",
79 | "metadata": {},
80 | "source": [
81 | "## Split Data into Training and Testing Sets"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "metadata": {},
88 | "source": [
89 | "X_train, X_test, y_train, y_test = train_test_split(\n",
90 | " X, y, test_size=0.2, random_state=42\n",
91 | ")\n",
92 | "\n",
93 | "print(\"Dataset Split:\")\n",
94 | "print(f\"Training set size: {X_train.shape[0]} samples, {X_train.shape[1]} features\")\n",
95 | "print(f\"Testing set size: {X_test.shape[0]} samples, {X_test.shape[1]} features\")"
96 | ]
97 | },
98 | {
99 | "cell_type": "markdown",
100 | "metadata": {},
101 | "source": [
102 | "## Train the Multiple Linear Regression Model"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": null,
108 | "metadata": {},
109 | "source": [
110 | "# Create and train the model\n",
111 | "model = LinearRegression()\n",
112 | "model.fit(X_train, y_train)\n",
113 | "\n",
114 | "# Make predictions\n",
115 | "y_train_pred = model.predict(X_train)\n",
116 | "y_test_pred = model.predict(X_test)\n",
117 | "\n",
118 | "# Print the learned parameters\n",
119 | "print(\"\\nLearned Parameters:\")\n",
120 | "print(\"Intercept (bias):\", model.intercept_[0].round(4), \"(True value: 2)\")\n",
121 | "for i, (coef, name) in enumerate(zip(model.coef_[0], feature_names)):\n",
122 | " true_coef = [3, 1.5, -2][i]\n",
123 | " print(f\"{name} coefficient:\", coef.round(4), f\"(True value: {true_coef})\")"
124 | ]
125 | },
126 | {
127 | "cell_type": "markdown",
128 | "metadata": {},
129 | "source": [
130 | "## Model Evaluation"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {},
137 | "source": [
138 | "# Calculate performance metrics\n",
139 | "train_r2 = r2_score(y_train, y_train_pred)\n",
140 | "test_r2 = r2_score(y_test, y_test_pred)\n",
141 | "train_mse = mean_squared_error(y_train, y_train_pred)\n",
142 | "test_mse = mean_squared_error(y_test, y_test_pred)\n",
143 | "\n",
144 | "print(\"Model Performance:\")\n",
145 | "print(f\"Training R² Score: {train_r2:.4f}\")\n",
146 | "print(f\"Testing R² Score: {test_r2:.4f}\")\n",
147 | "print(f\"Training MSE: {train_mse:.4f}\")\n",
148 | "print(f\"Testing MSE: {test_mse:.4f}\")"
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "## Visualize Predictions vs Actual Values"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": null,
161 | "metadata": {},
162 | "source": [
163 | "plt.figure(figsize=(10, 6))\n",
164 | "\n",
165 | "# Plot training data\n",
166 | "plt.scatter(y_train, y_train_pred, color='blue', alpha=0.5, label='Training Data')\n",
167 | "plt.scatter(y_test, y_test_pred, color='red', alpha=0.5, label='Testing Data')\n",
168 | "\n",
169 | "# Plot perfect prediction line\n",
170 | "min_val = min(y_train.min(), y_test.min())\n",
171 | "max_val = max(y_train.max(), y_test.max())\n",
172 | "plt.plot([min_val, max_val], [min_val, max_val], 'k--', label='Perfect Prediction')\n",
173 | "\n",
174 | "plt.xlabel('Actual Values')\n",
175 | "plt.ylabel('Predicted Values')\n",
176 | "plt.title('Multiple Linear Regression: Predicted vs Actual Values')\n",
177 | "plt.legend()\n",
178 | "plt.grid(True)\n",
179 | "plt.show()"
180 | ]
181 | },
182 | {
183 | "cell_type": "markdown",
184 | "metadata": {},
185 | "source": [
186 | "## Key Differences from Simple Linear Regression\n",
187 | "\n",
188 | "1. **Number of Features**:\n",
189 | " - Simple Linear Regression: One feature (X)\n",
190 | " - Multiple Linear Regression: Multiple features (X₁, X₂, X₃)\n",
191 | "\n",
192 | "2. **Model Equation**:\n",
193 | " - Simple: y = b₀ + b₁x + ε\n",
194 | " - Multiple: y = b₀ + b₁x₁ + b₂x₂ + b₃x₃ + ε\n",
195 | "\n",
196 | "3. **Visualization**:\n",
197 | " - Simple: Can plot in 2D (one feature vs target)\n",
198 | " - Multiple: Requires multiple plots or dimensionality reduction\n",
199 | "\n",
200 | "4. **Interpretation**:\n",
201 | " - Simple: One coefficient represents the effect of the single feature\n",
202 | " - Multiple: Each coefficient represents the effect of its feature while holding others constant"
203 | ]
204 | }
205 | ],
206 | "metadata": {
207 | "kernelspec": {
208 | "display_name": "Python 3",
209 | "language": "python",
210 | "name": "python3"
211 | },
212 | "language_info": {
213 | "codemirror_mode": {
214 | "name": "ipython",
215 | "version": 3
216 | },
217 | "file_extension": ".py",
218 | "mimetype": "text/x-python",
219 | "name": "python",
220 | "nbconvert_exporter": "python",
221 | "pygments_lexer": "ipython3",
222 | "version": "3.8.0"
223 | }
224 | },
225 | "nbformat": 4,
226 | "nbformat_minor": 4
227 | }
228 |
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/R2_Scores_Comparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/R2_Scores_Comparison.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/R2_Scores_Comparison_for_Gradient_Descent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/R2_Scores_Comparison_for_Gradient_Descent.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/cost_function.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/cost_function.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/header.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/header.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/header_animated_complex.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/header_animated_complex.gif
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/plot_data.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/plot_data.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/predictions_Comparison_for_Gradient_Descent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/predictions_Comparison_for_Gradient_Descent.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/predictions_comparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/predictions_comparison.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/time_Comparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/time_Comparison.png
--------------------------------------------------------------------------------
/linear_regression/polynomial_regression/images/time_Comparison_for_Gradient_Descent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/polynomial_regression/images/time_Comparison_for_Gradient_Descent.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/data_generation.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/data_generation.gif
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/final_prediction.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/final_prediction.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/generated_data.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/generated_data.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/learning_process.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/learning_process.gif
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/loss_history.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/loss_history.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/parameter_evolution.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/parameter_evolution.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/predictions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/predictions.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/images/train_test_comparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/linear_regression/simple_regression/images/train_test_comparison.png
--------------------------------------------------------------------------------
/linear_regression/simple_regression/linear_regression_sklearn.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Linear Regression using Scikit-learn\n",
8 | "\n",
9 | "This notebook demonstrates how to implement Linear Regression using scikit-learn, a powerful machine learning library in Python.\n",
10 | "\n",
11 | "## What we'll cover:\n",
12 | "1. Data preparation and visualization\n",
13 | "2. Train-test splitting\n",
14 | "3. Model training and prediction\n",
15 | "4. Model evaluation using multiple metrics\n",
16 | "5. Results visualization"
17 | ]
18 | },
19 | {
20 | "cell_type": "markdown",
21 | "metadata": {},
22 | "source": [
23 | "## Import Required Libraries\n",
24 | "\n",
25 | "We'll use:\n",
26 | "- NumPy for numerical operations\n",
27 | "- Matplotlib for visualization\n",
28 | "- Scikit-learn for:\n",
29 | " - Linear Regression model\n",
30 | " - Train-test splitting\n",
31 | " - Model evaluation metrics"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "metadata": {},
38 | "source": [
39 | "import numpy as np\n",
40 | "import matplotlib.pyplot as plt\n",
41 | "from sklearn.linear_model import LinearRegression\n",
42 | "from sklearn.model_selection import train_test_split\n",
43 | "from sklearn.metrics import mean_squared_error, r2_score\n",
44 | "%matplotlib inline\n",
45 | "\n",
46 | "# Set random seed for reproducibility\n",
47 | "np.random.seed(0)"
48 | ]
49 | },
50 | {
51 | "cell_type": "markdown",
52 | "metadata": {},
53 | "source": [
54 | "## Generate Sample Data\n",
55 | "\n",
56 | "We create synthetic data with a known relationship:\n",
57 | "- True relationship: y = 4 + 3x + noise\n",
58 | "- Features (X) are randomly generated\n",
59 | "- Gaussian noise is added for realism\n",
60 | "\n",
61 | "This allows us to compare our model's learned parameters with the true values."
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": null,
67 | "metadata": {},
68 | "source": [
69 | "# Generate random data points\n",
70 | "X = 2 * np.random.rand(100, 1) # 100 random x values between 0 and 2\n",
71 | "y = 4 + 3 * X + np.random.randn(100, 1) # True relationship with added noise\n",
72 | "\n",
73 | "# Plot the data\n",
74 | "plt.figure(figsize=(10, 6))\n",
75 | "plt.scatter(X, y, color='blue', label='Data points')\n",
76 | "plt.xlabel('X (Input feature)')\n",
77 | "plt.ylabel('y (Target variable)')\n",
78 | "plt.title('Generated Data: y = 4 + 3x + noise')\n",
79 | "plt.legend()\n",
80 | "plt.grid(True)\n",
81 | "plt.show()"
82 | ]
83 | },
84 | {
85 | "cell_type": "markdown",
86 | "metadata": {},
87 | "source": [
88 | "## Split Data into Training and Testing Sets\n",
89 | "\n",
90 | "We split our data to:\n",
91 | "1. Train the model on one portion (training set)\n",
92 | "2. Evaluate its performance on unseen data (testing set)\n",
93 | "\n",
94 | "This helps us assess how well our model generalizes to new data.\n",
95 | "\n",
96 | "Parameters:\n",
97 | "- test_size=0.2: 20% for testing, 80% for training\n",
98 | "- random_state=42: For reproducibility"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": null,
104 | "metadata": {},
105 | "source": [
106 | "# Split the data\n",
107 | "X_train, X_test, y_train, y_test = train_test_split(\n",
108 | " X, y, test_size=0.2, random_state=42\n",
109 | ")\n",
110 | "\n",
111 | "print(\"Dataset Split:\")\n",
112 | "print(f\"Training set size: {X_train.shape[0]} samples\")\n",
113 | "print(f\"Testing set size: {X_test.shape[0]} samples\")"
114 | ]
115 | },
116 | {
117 | "cell_type": "markdown",
118 | "metadata": {},
119 | "source": [
120 | "## Train the Model\n",
121 | "\n",
122 | "Scikit-learn makes it easy to:\n",
123 | "1. Initialize the model\n",
124 | "2. Fit it to our training data\n",
125 | "3. Make predictions\n",
126 | "\n",
127 | "The LinearRegression class automatically:\n",
128 | "- Computes the optimal parameters\n",
129 | "- Handles the mathematical operations\n",
130 | "- Provides convenient methods for prediction"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {},
137 | "source": [
138 | "# Create and train the model\n",
139 | "model = LinearRegression()\n",
140 | "model.fit(X_train, y_train)\n",
141 | "\n",
142 | "# Make predictions on both training and test sets\n",
143 | "y_train_pred = model.predict(X_train)\n",
144 | "y_test_pred = model.predict(X_test)\n",
145 | "\n",
146 | "print(\"Model Training Complete!\")"
147 | ]
148 | },
149 | {
150 | "cell_type": "markdown",
151 | "metadata": {},
152 | "source": [
153 | "## Model Evaluation\n",
154 | "\n",
155 | "We evaluate our model using multiple metrics:\n",
156 | "1. R² Score (Coefficient of determination)\n",
157 | " - Measures the proportion of variance explained by the model\n",
158 | " - Range: 0 to 1 (1 being perfect prediction)\n",
159 | "\n",
160 | "2. Mean Squared Error (MSE)\n",
161 | " - Average squared difference between predictions and actual values\n",
162 | " - Lower values indicate better fit"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": null,
168 | "metadata": {},
169 | "source": [
170 | "# Calculate performance metrics\n",
171 | "train_r2 = r2_score(y_train, y_train_pred)\n",
172 | "test_r2 = r2_score(y_test, y_test_pred)\n",
173 | "train_mse = mean_squared_error(y_train, y_train_pred)\n",
174 | "test_mse = mean_squared_error(y_test, y_test_pred)\n",
175 | "\n",
176 | "print(\"Model Performance:\")\n",
177 | "print(f\"Training R² Score: {train_r2:.4f}\")\n",
178 | "print(f\"Testing R² Score: {test_r2:.4f}\")\n",
179 | "print(f\"Training MSE: {train_mse:.4f}\")\n",
180 | "print(f\"Testing MSE: {test_mse:.4f}\")\n",
181 | "\n",
182 | "print(f\"\\nLearned Parameters:\")\n",
183 | "print(f\"Coefficient (weight): {model.coef_[0][0]:.4f} (True value: 3)\")\n",
184 | "print(f\"Intercept (bias): {model.intercept_[0]:.4f} (True value: 4)\")"
185 | ]
186 | },
187 | {
188 | "cell_type": "markdown",
189 | "metadata": {},
190 | "source": [
191 | "## Visualize Results\n",
192 | "\n",
193 | "Let's create a comprehensive visualization showing:\n",
194 | "1. Training data points\n",
195 | "2. Testing data points\n",
196 | "3. Model's predictions\n",
197 | "\n",
198 | "This helps us visually assess how well our model fits the data and if there are any patterns in the predictions."
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": null,
204 | "metadata": {},
205 | "source": [
206 | "plt.figure(figsize=(12, 6))\n",
207 | "\n",
208 | "# Plot training and testing data\n",
209 | "plt.scatter(X_train, y_train, color='blue', label='Training Data')\n",
210 | "plt.scatter(X_test, y_test, color='green', label='Testing Data')\n",
211 | "\n",
212 | "# Plot the regression line\n",
213 | "X_plot = np.sort(X, axis=0)\n",
214 | "y_plot = model.predict(X_plot)\n",
215 | "plt.plot(X_plot, y_plot, color='red', label='Model Predictions')\n",
216 | "\n",
217 | "plt.xlabel('X (Input feature)')\n",
218 | "plt.ylabel('y (Target variable)')\n",
219 | "plt.title('Linear Regression: Training, Testing Data, and Predictions')\n",
220 | "plt.legend()\n",
221 | "plt.grid(True)\n",
222 | "plt.show()"
223 | ]
224 | },
225 | {
226 | "cell_type": "markdown",
227 | "metadata": {},
228 | "source": [
229 | "## Conclusion\n",
230 | "\n",
231 | "Our scikit-learn implementation successfully:\n",
232 | "1. Split the data into training and testing sets\n",
233 | "2. Trained a linear regression model\n",
234 | "3. Made accurate predictions on both sets\n",
235 | "4. Found parameters close to the true values\n",
236 | "\n",
237 | "Advantages of using scikit-learn:\n",
238 | "- Simple and clean API\n",
239 | "- Efficient implementation\n",
240 | "- Built-in model evaluation tools\n",
241 | "- Seamless integration with other ML tools"
242 | ]
243 | }
244 | ],
245 | "metadata": {
246 | "kernelspec": {
247 | "display_name": "Python 3",
248 | "language": "python",
249 | "name": "python3"
250 | },
251 | "language_info": {
252 | "codemirror_mode": {
253 | "name": "ipython",
254 | "version": 3
255 | },
256 | "file_extension": ".py",
257 | "mimetype": "text/x-python",
258 | "name": "python",
259 | "nbconvert_exporter": "python",
260 | "pygments_lexer": "ipython3",
261 | "version": "3.8.0"
262 | }
263 | },
264 | "nbformat": 4,
265 | "nbformat_minor": 4
266 | }
267 |
--------------------------------------------------------------------------------
/logistic_regression/logistic_regression_reg.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Logistic Regression \n",
8 | "### For the over-fitting data and applied the Regularization and compare with non Regularization"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": 1,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "import numpy as np\n",
18 | "import math\n",
19 | "import matplotlib.pyplot as plt"
20 | ]
21 | },
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {},
25 | "source": [
26 | "## Building the Logistic Regression Model"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 2,
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "import numpy as np\n",
36 | "\n",
37 | "class LogisticRegression:\n",
38 | " def __init__(self, learning_rate=0.001, epochs=10, lambda_=0.0):\n",
39 | " self.learning_rate = learning_rate\n",
40 | " self.epochs = epochs\n",
41 | " self.lambda_ = lambda_ # Regularization strength\n",
42 | " self.weights = None\n",
43 | " self.bias = None\n",
44 | " self.costs = []\n",
45 | " self.accuracies = []\n",
46 | "\n",
47 | " def sigmoid(self, z):\n",
48 | " return 1 / (1 + np.exp(-z))\n",
49 | "\n",
50 | " def cost(self, y, y_pred):\n",
51 | " # Avoid log(0)\n",
52 | " epsilon = 1e-15\n",
53 | " y_pred = np.clip(y_pred, epsilon, 1 - epsilon)\n",
54 | " base_cost = -np.mean(y * np.log(y_pred) + (1 - y) * np.log(1 - y_pred))\n",
55 | " reg_cost = (self.lambda_ / (2 * self.m)) * np.sum(np.square(self.weights))\n",
56 | " return base_cost + reg_cost\n",
57 | "\n",
58 | " def linear_model(self, x):\n",
59 | " return np.dot(x, self.weights) + self.bias\n",
60 | "\n",
61 | " def gradient(self, y_pred, x, y):\n",
62 | " dw = (1 / self.m) * np.dot(x.T, (y_pred - y)) + (self.lambda_ / self.m) * self.weights\n",
63 | " db = (1 / self.m) * np.sum(y_pred - y)\n",
64 | " return dw, db\n",
65 | "\n",
66 | " def update_rule(self, dw, db):\n",
67 | " self.weights -= self.learning_rate * dw\n",
68 | " self.bias -= self.learning_rate * db\n",
69 | "\n",
70 | " def accuracy(self, y_true, y_pred_prob):\n",
71 | " y_pred_label = y_pred_prob >= 0.5\n",
72 | " return np.mean(y_true == y_pred_label)\n",
73 | "\n",
74 | " def fit(self, x, y):\n",
75 | " self.m, self.n = x.shape\n",
76 | " self.weights = np.random.randn(self.n)\n",
77 | " self.bias = 0\n",
78 | "\n",
79 | " for epoch in range(self.epochs):\n",
80 | " linear_output = self.linear_model(x)\n",
81 | " y_pred = self.sigmoid(linear_output)\n",
82 | "\n",
83 | " dw, db = self.gradient(y_pred, x, y)\n",
84 | " self.update_rule(dw, db)\n",
85 | "\n",
86 | " cost = self.cost(y, y_pred)\n",
87 | " accuracy = self.accuracy(y, y_pred)\n",
88 | "\n",
89 | " self.costs.append(cost)\n",
90 | " self.accuracies.append(accuracy)\n",
91 | "\n",
92 | " if epoch % 10 == 0 or epoch == self.epochs - 1:\n",
93 | " print(f\"Epoch {epoch}: Cost = {cost:.4f}, Accuracy = {accuracy:.4f}\")\n",
94 | "\n",
95 | " def predict(self, x):\n",
96 | " probs = self.sigmoid(self.linear_model(x))\n",
97 | " return (probs >= 0.5).astype(int)\n"
98 | ]
99 | },
100 | {
101 | "cell_type": "markdown",
102 | "metadata": {},
103 | "source": [
104 | "## Creating the Data with overfitting\n",
105 | "\n"
106 | ]
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": 3,
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "# creating the data with overfitting\n",
115 | "import sklearn.datasets\n",
116 | "X , y = sklearn.datasets.make_classification(\n",
117 | " n_samples=100, n_features=150, n_informative=2, \n",
118 | " n_redundant=10, n_clusters_per_class=1, random_state=4)\n"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 4,
124 | "metadata": {},
125 | "outputs": [
126 | {
127 | "data": {
128 | "text/plain": [
129 | "((100,), (100, 150))"
130 | ]
131 | },
132 | "execution_count": 4,
133 | "metadata": {},
134 | "output_type": "execute_result"
135 | }
136 | ],
137 | "source": [
138 | "y.shape, X.shape"
139 | ]
140 | },
141 | {
142 | "cell_type": "code",
143 | "execution_count": 5,
144 | "metadata": {},
145 | "outputs": [],
146 | "source": [
147 | "# split the data into training and testing sets\n",
148 | "from sklearn.model_selection import train_test_split\n",
149 | "\n",
150 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 6,
156 | "metadata": {},
157 | "outputs": [
158 | {
159 | "name": "stdout",
160 | "output_type": "stream",
161 | "text": [
162 | "Epoch 0: Cost = 7.2516, Accuracy = 0.3500\n",
163 | "Epoch 10: Cost = 6.8378, Accuracy = 0.3625\n",
164 | "Epoch 20: Cost = 6.4488, Accuracy = 0.3625\n",
165 | "Epoch 30: Cost = 6.0806, Accuracy = 0.3875\n",
166 | "Epoch 40: Cost = 5.7335, Accuracy = 0.3875\n",
167 | "Epoch 50: Cost = 5.4113, Accuracy = 0.4250\n",
168 | "Epoch 60: Cost = 5.1152, Accuracy = 0.4375\n",
169 | "Epoch 70: Cost = 4.8421, Accuracy = 0.4375\n",
170 | "Epoch 80: Cost = 4.5889, Accuracy = 0.4500\n",
171 | "Epoch 90: Cost = 4.3532, Accuracy = 0.4625\n",
172 | "Epoch 100: Cost = 4.1339, Accuracy = 0.5125\n",
173 | "Epoch 110: Cost = 3.9290, Accuracy = 0.5500\n",
174 | "Epoch 120: Cost = 3.7349, Accuracy = 0.5750\n",
175 | "Epoch 130: Cost = 3.5570, Accuracy = 0.5750\n",
176 | "Epoch 140: Cost = 3.3945, Accuracy = 0.5875\n",
177 | "Epoch 150: Cost = 3.2443, Accuracy = 0.6125\n",
178 | "Epoch 160: Cost = 3.1060, Accuracy = 0.6375\n",
179 | "Epoch 170: Cost = 2.9772, Accuracy = 0.6625\n",
180 | "Epoch 180: Cost = 2.8559, Accuracy = 0.6750\n",
181 | "Epoch 190: Cost = 2.7408, Accuracy = 0.6875\n",
182 | "Epoch 199: Cost = 2.6418, Accuracy = 0.6875\n"
183 | ]
184 | }
185 | ],
186 | "source": [
187 | "# now train the model on the training data\n",
188 | "model_nR = LogisticRegression(learning_rate=0.01, epochs=200)\n",
189 | "model_nR.fit(X_train, y_train)"
190 | ]
191 | },
192 | {
193 | "cell_type": "code",
194 | "execution_count": 10,
195 | "metadata": {},
196 | "outputs": [
197 | {
198 | "data": {
199 | "text/plain": [
200 | "[0.35,\n",
201 | " 0.35,\n",
202 | " 0.35,\n",
203 | " 0.3375,\n",
204 | " 0.3375,\n",
205 | " 0.3375,\n",
206 | " 0.3375,\n",
207 | " 0.3625,\n",
208 | " 0.3625,\n",
209 | " 0.3625,\n",
210 | " 0.3625,\n",
211 | " 0.3625,\n",
212 | " 0.3625,\n",
213 | " 0.3625,\n",
214 | " 0.3625,\n",
215 | " 0.3625,\n",
216 | " 0.3625,\n",
217 | " 0.3625,\n",
218 | " 0.3625,\n",
219 | " 0.3625,\n",
220 | " 0.3625,\n",
221 | " 0.3625,\n",
222 | " 0.375,\n",
223 | " 0.375,\n",
224 | " 0.375,\n",
225 | " 0.375,\n",
226 | " 0.375,\n",
227 | " 0.375,\n",
228 | " 0.375,\n",
229 | " 0.375,\n",
230 | " 0.3875,\n",
231 | " 0.3875,\n",
232 | " 0.3875,\n",
233 | " 0.3875,\n",
234 | " 0.3875,\n",
235 | " 0.3875,\n",
236 | " 0.3875,\n",
237 | " 0.3875,\n",
238 | " 0.3875,\n",
239 | " 0.3875,\n",
240 | " 0.3875,\n",
241 | " 0.4125,\n",
242 | " 0.4125,\n",
243 | " 0.4125,\n",
244 | " 0.4125,\n",
245 | " 0.4125,\n",
246 | " 0.4125,\n",
247 | " 0.4125,\n",
248 | " 0.4125,\n",
249 | " 0.4125,\n",
250 | " 0.425,\n",
251 | " 0.425,\n",
252 | " 0.425,\n",
253 | " 0.425,\n",
254 | " 0.425,\n",
255 | " 0.425,\n",
256 | " 0.425,\n",
257 | " 0.425,\n",
258 | " 0.425,\n",
259 | " 0.4375,\n",
260 | " 0.4375,\n",
261 | " 0.4375,\n",
262 | " 0.4375,\n",
263 | " 0.4375,\n",
264 | " 0.4375,\n",
265 | " 0.4375,\n",
266 | " 0.4375,\n",
267 | " 0.4375,\n",
268 | " 0.4375,\n",
269 | " 0.4375,\n",
270 | " 0.4375,\n",
271 | " 0.4375,\n",
272 | " 0.4375,\n",
273 | " 0.425,\n",
274 | " 0.4375,\n",
275 | " 0.4375,\n",
276 | " 0.4375,\n",
277 | " 0.45,\n",
278 | " 0.45,\n",
279 | " 0.45,\n",
280 | " 0.45,\n",
281 | " 0.45,\n",
282 | " 0.45,\n",
283 | " 0.45,\n",
284 | " 0.45,\n",
285 | " 0.45,\n",
286 | " 0.45,\n",
287 | " 0.45,\n",
288 | " 0.4625,\n",
289 | " 0.4625,\n",
290 | " 0.4625,\n",
291 | " 0.475,\n",
292 | " 0.4875,\n",
293 | " 0.4875,\n",
294 | " 0.5,\n",
295 | " 0.5,\n",
296 | " 0.5125,\n",
297 | " 0.5125,\n",
298 | " 0.5125,\n",
299 | " 0.5125,\n",
300 | " 0.5125,\n",
301 | " 0.525,\n",
302 | " 0.5375,\n",
303 | " 0.5375,\n",
304 | " 0.5375,\n",
305 | " 0.5375,\n",
306 | " 0.5375,\n",
307 | " 0.5375,\n",
308 | " 0.5375,\n",
309 | " 0.5375,\n",
310 | " 0.55,\n",
311 | " 0.55,\n",
312 | " 0.55,\n",
313 | " 0.55,\n",
314 | " 0.5625,\n",
315 | " 0.5625,\n",
316 | " 0.5625,\n",
317 | " 0.5625,\n",
318 | " 0.575,\n",
319 | " 0.575,\n",
320 | " 0.575,\n",
321 | " 0.575,\n",
322 | " 0.575,\n",
323 | " 0.575,\n",
324 | " 0.575,\n",
325 | " 0.575,\n",
326 | " 0.575,\n",
327 | " 0.575,\n",
328 | " 0.575,\n",
329 | " 0.575,\n",
330 | " 0.575,\n",
331 | " 0.575,\n",
332 | " 0.5875,\n",
333 | " 0.5875,\n",
334 | " 0.5875,\n",
335 | " 0.5875,\n",
336 | " 0.5875,\n",
337 | " 0.5875,\n",
338 | " 0.5875,\n",
339 | " 0.5875,\n",
340 | " 0.5875,\n",
341 | " 0.5875,\n",
342 | " 0.5875,\n",
343 | " 0.5875,\n",
344 | " 0.5875,\n",
345 | " 0.6,\n",
346 | " 0.6,\n",
347 | " 0.6,\n",
348 | " 0.6,\n",
349 | " 0.6125,\n",
350 | " 0.6125,\n",
351 | " 0.6125,\n",
352 | " 0.6125,\n",
353 | " 0.6125,\n",
354 | " 0.6125,\n",
355 | " 0.625,\n",
356 | " 0.6375,\n",
357 | " 0.6375,\n",
358 | " 0.6375,\n",
359 | " 0.6375,\n",
360 | " 0.6375,\n",
361 | " 0.6375,\n",
362 | " 0.6375,\n",
363 | " 0.65,\n",
364 | " 0.65,\n",
365 | " 0.6625,\n",
366 | " 0.6625,\n",
367 | " 0.6625,\n",
368 | " 0.6625,\n",
369 | " 0.6625,\n",
370 | " 0.6625,\n",
371 | " 0.6625,\n",
372 | " 0.6625,\n",
373 | " 0.6625,\n",
374 | " 0.6625,\n",
375 | " 0.6625,\n",
376 | " 0.6625,\n",
377 | " 0.6625,\n",
378 | " 0.675,\n",
379 | " 0.675,\n",
380 | " 0.675,\n",
381 | " 0.675,\n",
382 | " 0.675,\n",
383 | " 0.675,\n",
384 | " 0.675,\n",
385 | " 0.6875,\n",
386 | " 0.6875,\n",
387 | " 0.6875,\n",
388 | " 0.6875,\n",
389 | " 0.6875,\n",
390 | " 0.6875,\n",
391 | " 0.6875,\n",
392 | " 0.6875,\n",
393 | " 0.6875,\n",
394 | " 0.6875,\n",
395 | " 0.6875,\n",
396 | " 0.6875,\n",
397 | " 0.6875,\n",
398 | " 0.6875,\n",
399 | " 0.6875]"
400 | ]
401 | },
402 | "execution_count": 10,
403 | "metadata": {},
404 | "output_type": "execute_result"
405 | }
406 | ],
407 | "source": [
408 | "model_nR.accuracies"
409 | ]
410 | },
411 | {
412 | "cell_type": "code",
413 | "execution_count": 7,
414 | "metadata": {},
415 | "outputs": [
416 | {
417 | "name": "stdout",
418 | "output_type": "stream",
419 | "text": [
420 | "Epoch 0: Cost = 5.3546, Accuracy = 0.4875\n",
421 | "Epoch 10: Cost = 5.0685, Accuracy = 0.4875\n",
422 | "Epoch 20: Cost = 4.7958, Accuracy = 0.5000\n",
423 | "Epoch 30: Cost = 4.5377, Accuracy = 0.5000\n",
424 | "Epoch 40: Cost = 4.2963, Accuracy = 0.5250\n"
425 | ]
426 | },
427 | {
428 | "name": "stdout",
429 | "output_type": "stream",
430 | "text": [
431 | "Epoch 50: Cost = 4.0737, Accuracy = 0.5250\n",
432 | "Epoch 60: Cost = 3.8714, Accuracy = 0.5250\n",
433 | "Epoch 70: Cost = 3.6896, Accuracy = 0.5750\n",
434 | "Epoch 80: Cost = 3.5259, Accuracy = 0.5875\n",
435 | "Epoch 90: Cost = 3.3766, Accuracy = 0.6125\n",
436 | "Epoch 100: Cost = 3.2367, Accuracy = 0.6250\n",
437 | "Epoch 110: Cost = 3.1037, Accuracy = 0.6250\n",
438 | "Epoch 120: Cost = 2.9771, Accuracy = 0.6375\n",
439 | "Epoch 130: Cost = 2.8543, Accuracy = 0.6375\n",
440 | "Epoch 140: Cost = 2.7365, Accuracy = 0.6375\n",
441 | "Epoch 150: Cost = 2.6223, Accuracy = 0.6375\n",
442 | "Epoch 160: Cost = 2.5121, Accuracy = 0.6500\n",
443 | "Epoch 170: Cost = 2.4063, Accuracy = 0.6500\n",
444 | "Epoch 180: Cost = 2.3047, Accuracy = 0.6750\n",
445 | "Epoch 190: Cost = 2.2080, Accuracy = 0.6750\n",
446 | "Epoch 199: Cost = 2.1247, Accuracy = 0.6875\n"
447 | ]
448 | }
449 | ],
450 | "source": [
451 | "model = LogisticRegression(learning_rate=0.01, epochs=200, lambda_=0.0001)\n",
452 | "model.fit(X_train, y_train)"
453 | ]
454 | },
455 | {
456 | "cell_type": "code",
457 | "execution_count": 11,
458 | "metadata": {},
459 | "outputs": [
460 | {
461 | "data": {
462 | "text/plain": [
463 | "[0.4875,\n",
464 | " 0.4875,\n",
465 | " 0.4875,\n",
466 | " 0.4875,\n",
467 | " 0.4875,\n",
468 | " 0.4875,\n",
469 | " 0.4875,\n",
470 | " 0.4875,\n",
471 | " 0.4875,\n",
472 | " 0.4875,\n",
473 | " 0.4875,\n",
474 | " 0.4875,\n",
475 | " 0.5,\n",
476 | " 0.5,\n",
477 | " 0.5,\n",
478 | " 0.5,\n",
479 | " 0.5,\n",
480 | " 0.5,\n",
481 | " 0.5,\n",
482 | " 0.5,\n",
483 | " 0.5,\n",
484 | " 0.5,\n",
485 | " 0.5,\n",
486 | " 0.5,\n",
487 | " 0.5,\n",
488 | " 0.5,\n",
489 | " 0.5,\n",
490 | " 0.5,\n",
491 | " 0.5,\n",
492 | " 0.5,\n",
493 | " 0.5,\n",
494 | " 0.5,\n",
495 | " 0.5,\n",
496 | " 0.5,\n",
497 | " 0.5,\n",
498 | " 0.5125,\n",
499 | " 0.5125,\n",
500 | " 0.525,\n",
501 | " 0.525,\n",
502 | " 0.525,\n",
503 | " 0.525,\n",
504 | " 0.525,\n",
505 | " 0.525,\n",
506 | " 0.525,\n",
507 | " 0.525,\n",
508 | " 0.525,\n",
509 | " 0.525,\n",
510 | " 0.525,\n",
511 | " 0.525,\n",
512 | " 0.525,\n",
513 | " 0.525,\n",
514 | " 0.525,\n",
515 | " 0.525,\n",
516 | " 0.525,\n",
517 | " 0.525,\n",
518 | " 0.525,\n",
519 | " 0.525,\n",
520 | " 0.525,\n",
521 | " 0.525,\n",
522 | " 0.525,\n",
523 | " 0.525,\n",
524 | " 0.5625,\n",
525 | " 0.5625,\n",
526 | " 0.5625,\n",
527 | " 0.5625,\n",
528 | " 0.5625,\n",
529 | " 0.5625,\n",
530 | " 0.5625,\n",
531 | " 0.5625,\n",
532 | " 0.575,\n",
533 | " 0.575,\n",
534 | " 0.575,\n",
535 | " 0.5875,\n",
536 | " 0.5875,\n",
537 | " 0.575,\n",
538 | " 0.575,\n",
539 | " 0.575,\n",
540 | " 0.575,\n",
541 | " 0.575,\n",
542 | " 0.575,\n",
543 | " 0.5875,\n",
544 | " 0.5875,\n",
545 | " 0.5875,\n",
546 | " 0.5875,\n",
547 | " 0.5875,\n",
548 | " 0.6,\n",
549 | " 0.6,\n",
550 | " 0.6125,\n",
551 | " 0.6125,\n",
552 | " 0.6125,\n",
553 | " 0.6125,\n",
554 | " 0.6125,\n",
555 | " 0.6125,\n",
556 | " 0.6125,\n",
557 | " 0.6125,\n",
558 | " 0.6125,\n",
559 | " 0.625,\n",
560 | " 0.625,\n",
561 | " 0.625,\n",
562 | " 0.625,\n",
563 | " 0.625,\n",
564 | " 0.625,\n",
565 | " 0.625,\n",
566 | " 0.625,\n",
567 | " 0.625,\n",
568 | " 0.625,\n",
569 | " 0.625,\n",
570 | " 0.625,\n",
571 | " 0.625,\n",
572 | " 0.625,\n",
573 | " 0.625,\n",
574 | " 0.625,\n",
575 | " 0.625,\n",
576 | " 0.625,\n",
577 | " 0.625,\n",
578 | " 0.6375,\n",
579 | " 0.6375,\n",
580 | " 0.6375,\n",
581 | " 0.6375,\n",
582 | " 0.6375,\n",
583 | " 0.6375,\n",
584 | " 0.6375,\n",
585 | " 0.6375,\n",
586 | " 0.6375,\n",
587 | " 0.6375,\n",
588 | " 0.6375,\n",
589 | " 0.6375,\n",
590 | " 0.6375,\n",
591 | " 0.6375,\n",
592 | " 0.6375,\n",
593 | " 0.6375,\n",
594 | " 0.6375,\n",
595 | " 0.6375,\n",
596 | " 0.6375,\n",
597 | " 0.6375,\n",
598 | " 0.6375,\n",
599 | " 0.6375,\n",
600 | " 0.6375,\n",
601 | " 0.6375,\n",
602 | " 0.6375,\n",
603 | " 0.6375,\n",
604 | " 0.6375,\n",
605 | " 0.6375,\n",
606 | " 0.6375,\n",
607 | " 0.6375,\n",
608 | " 0.6375,\n",
609 | " 0.6375,\n",
610 | " 0.6375,\n",
611 | " 0.6375,\n",
612 | " 0.6375,\n",
613 | " 0.6375,\n",
614 | " 0.6375,\n",
615 | " 0.6375,\n",
616 | " 0.6375,\n",
617 | " 0.6375,\n",
618 | " 0.6375,\n",
619 | " 0.65,\n",
620 | " 0.65,\n",
621 | " 0.65,\n",
622 | " 0.65,\n",
623 | " 0.65,\n",
624 | " 0.65,\n",
625 | " 0.65,\n",
626 | " 0.65,\n",
627 | " 0.65,\n",
628 | " 0.65,\n",
629 | " 0.65,\n",
630 | " 0.65,\n",
631 | " 0.65,\n",
632 | " 0.65,\n",
633 | " 0.65,\n",
634 | " 0.65,\n",
635 | " 0.6625,\n",
636 | " 0.6625,\n",
637 | " 0.6625,\n",
638 | " 0.6625,\n",
639 | " 0.675,\n",
640 | " 0.675,\n",
641 | " 0.675,\n",
642 | " 0.675,\n",
643 | " 0.675,\n",
644 | " 0.675,\n",
645 | " 0.675,\n",
646 | " 0.675,\n",
647 | " 0.675,\n",
648 | " 0.675,\n",
649 | " 0.675,\n",
650 | " 0.675,\n",
651 | " 0.675,\n",
652 | " 0.675,\n",
653 | " 0.675,\n",
654 | " 0.675,\n",
655 | " 0.6875,\n",
656 | " 0.6875,\n",
657 | " 0.6875,\n",
658 | " 0.6875,\n",
659 | " 0.6875,\n",
660 | " 0.6875,\n",
661 | " 0.6875,\n",
662 | " 0.6875]"
663 | ]
664 | },
665 | "execution_count": 11,
666 | "metadata": {},
667 | "output_type": "execute_result"
668 | }
669 | ],
670 | "source": [
671 | "model.accuracies"
672 | ]
673 | },
674 | {
675 | "cell_type": "markdown",
676 | "metadata": {},
677 | "source": [
678 | "## 🔍 **Manual Comparison Analysis: Logistic Regression (With vs. Without L2 Regularization)**\n",
679 | "\n",
680 | "| Epoch | Cost (L2) | Accuracy (L2) | Cost (No L2) | Accuracy (No L2) |\n",
681 | "| ----- | --------- | ------------- | ------------ | ---------------- |\n",
682 | "| 0 | 5.3954 | 0.3750 | 6.4050 | 0.4500 |\n",
683 | "| 100 | 2.3566 | 0.6625 | 3.6985 | 0.5750 |\n",
684 | "| 199 | 1.3061 | 0.7750 | 2.4827 | 0.6500 |\n",
685 | "\n",
686 | "---\n",
687 | "\n",
688 | "### ✅ **1. Accuracy Trends**\n",
689 | "\n",
690 | "* **With L2 Regularization**: Accuracy steadily increases from **0.375 → 0.775** over 200 epochs.\n",
691 | "* **Without Regularization**: Accuracy improves more slowly, peaking around **0.650** by epoch 199.\n",
692 | "\n",
693 | "📈 **Conclusion**: L2 regularization leads to better generalization on the training data, resulting in **faster and higher accuracy gain**.\n",
694 | "\n",
695 | "---\n",
696 | "\n",
697 | "### ✅ **2. Cost Function Trends**\n",
698 | "\n",
699 | "* **With L2**: Cost starts lower and reduces **more aggressively**, ending at **1.3061**.\n",
700 | "* **Without L2**: Cost remains higher throughout, reducing slowly to **2.4827**.\n",
701 | "\n",
702 | "💡 **Insight**: The addition of the regularization term helps the model avoid overfitting by penalizing large weights, which often leads to **smoother convergence**.\n",
703 | "\n",
704 | "---\n",
705 | "\n",
706 | "### ✅ **3. Regularization Impact**\n",
707 | "\n",
708 | "* L2 regularization seems to **guide the model toward a better minimum** in terms of both lower loss and higher accuracy.\n",
709 | "* It introduces a **bias toward simpler models**, which improves generalization, especially if features are noisy or correlated.\n",
710 | "\n",
711 | "---\n",
712 | "\n",
713 | "### 🧪 **Practical Takeaways**\n",
714 | "\n",
715 | "| Aspect | With L2 Regularization | Without Regularization |\n",
716 | "| ---------------- | ------------------------------- | ---------------------- |\n",
717 | "| Final Accuracy | **Higher (0.775)** | Lower (0.650) |\n",
718 | "| Cost Convergence | **Faster and lower** | Slower and plateauing |\n",
719 | "| Generalization | **Better** | Weaker |\n",
720 | "| Overfitting Risk | **Reduced** | Higher potential |\n",
721 | "| Weight Control | **Yes (weights are penalized)** | No constraint |\n",
722 | "\n",
723 | "---\n",
724 | "\n",
725 | "### 📌 **Final Summary**\n",
726 | "\n",
727 | "L2 regularization consistently outperforms the non-regularized model in your experiment. It improves accuracy by over **12%**, and reduces the cost function significantly. This reflects its ability to enhance **model robustness**, **prevent overfitting**, and **accelerate convergence** — making it a recommended default when training logistic regression models, especially in high-dimensional or noisy datasets.\n",
728 | "\n"
729 | ]
730 | },
731 | {
732 | "cell_type": "code",
733 | "execution_count": null,
734 | "metadata": {},
735 | "outputs": [],
736 | "source": []
737 | }
738 | ],
739 | "metadata": {
740 | "kernelspec": {
741 | "display_name": "Python 3",
742 | "language": "python",
743 | "name": "python3"
744 | },
745 | "language_info": {
746 | "codemirror_mode": {
747 | "name": "ipython",
748 | "version": 3
749 | },
750 | "file_extension": ".py",
751 | "mimetype": "text/x-python",
752 | "name": "python",
753 | "nbconvert_exporter": "python",
754 | "pygments_lexer": "ipython3",
755 | "version": "3.12.4"
756 | }
757 | },
758 | "nbformat": 4,
759 | "nbformat_minor": 4
760 | }
761 |
--------------------------------------------------------------------------------
/logistic_regression/logistic_regression_scratch.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd\n",
10 | "from sklearn.datasets import load_iris, load_breast_cancer\n",
11 | "from sklearn.model_selection import train_test_split\n",
12 | "\n",
13 | "# Load Iris dataset\n",
14 | "iris = load_iris()\n",
15 | "iris_df = pd.DataFrame(data=iris.data, columns=iris.feature_names)\n",
16 | "iris_df['target'] = iris.target\n",
17 | "\n"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": 2,
23 | "metadata": {},
24 | "outputs": [],
25 | "source": [
26 | "# Load Breast Cancer dataset\n",
27 | "breast_cancer = load_breast_cancer()\n",
28 | "breast_cancer_df = pd.DataFrame(data=breast_cancer.data, columns=breast_cancer.feature_names)\n",
29 | "breast_cancer_df['target'] = breast_cancer.target"
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": 3,
35 | "metadata": {},
36 | "outputs": [],
37 | "source": [
38 | "# Split Iris dataset into training and testing sets\n",
39 | "X_train_iris, X_test_iris, y_train_iris, y_test_iris = train_test_split(\n",
40 | " iris_df.drop(columns=['target']), iris_df['target'], test_size=0.2, random_state=42)\n",
41 | "\n",
42 | "# Split Breast Cancer dataset into training and testing sets\n",
43 | "X_train_bc, X_test_bc, y_train_bc, y_test_bc = train_test_split(\n",
44 | " breast_cancer_df.drop(columns=['target']), breast_cancer_df['target'], test_size=0.2, random_state=42)"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": 4,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "import numpy as np\n",
54 | "import matplotlib.pyplot as plt\n",
55 | "\n",
56 | "class LogisticRegression:\n",
57 | " def __init__(self, learning_rate=0.01, num_iterations=1000):\n",
58 | " self.learning_rate = learning_rate\n",
59 | " self.num_iterations = num_iterations\n",
60 | " self.losses = []\n",
61 | "\n",
62 | " def sigmoid(self, z):\n",
63 | " # Clip values to avoid overflow\n",
64 | " z = np.clip(z, -500, 500)\n",
65 | " return 1 / (1 + np.exp(-z))\n",
66 | "\n",
67 | " def compute_loss(self, y, y_predicted):\n",
68 | " # Clip values to avoid log(0)\n",
69 | " y_predicted = np.clip(y_predicted, 1e-15, 1 - 1e-15)\n",
70 | " return -np.mean(y * np.log(y_predicted) + (1 - y) * np.log(1 - y_predicted))\n",
71 | "\n",
72 | " def fit(self, X, y):\n",
73 | " self.m, self.n = X.shape\n",
74 | " self.weights = np.zeros(self.n)\n",
75 | " self.bias = 0\n",
76 | "\n",
77 | " for i in range(self.num_iterations):\n",
78 | " linear_model = np.dot(X, self.weights) + self.bias\n",
79 | " y_predicted = self.sigmoid(linear_model)\n",
80 | "\n",
81 | " dw = (1 / self.m) * np.dot(X.T, (y_predicted - y))\n",
82 | " db = (1 / self.m) * np.sum(y_predicted - y)\n",
83 | "\n",
84 | " self.weights -= self.learning_rate * dw\n",
85 | " self.bias -= self.learning_rate * db\n",
86 | "\n",
87 | " # Compute and store the loss\n",
88 | " loss = self.compute_loss(y, y_predicted)\n",
89 | " self.losses.append(loss)\n",
90 | "\n",
91 | " # Debugging: Print loss every 100 iterations\n",
92 | " if i % 100 == 0:\n",
93 | " print(f'Iteration {i}: Loss = {loss}')\n",
94 | "\n",
95 | " def predict(self, X):\n",
96 | " linear_model = np.dot(X, self.weights) + self.bias\n",
97 | " y_predicted = self.sigmoid(linear_model)\n",
98 | " y_predicted_cls = [1 if i > 0.5 else 0 for i in y_predicted]\n",
99 | " return np.array(y_predicted_cls)"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": 5,
105 | "metadata": {},
106 | "outputs": [],
107 | "source": [
108 | "# Prepare the data\n",
109 | "X_train_iris = X_train_iris.values\n",
110 | "X_test_iris = X_test_iris.values\n",
111 | "y_train_iris = y_train_iris.values\n",
112 | "y_test_iris = y_test_iris.values\n",
113 | "\n",
114 | "X_train_bc = X_train_bc.values\n",
115 | "X_test_bc = X_test_bc.values\n",
116 | "y_train_bc = y_train_bc.values\n",
117 | "y_test_bc = y_test_bc.values\n"
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": 13,
123 | "metadata": {},
124 | "outputs": [
125 | {
126 | "name": "stdout",
127 | "output_type": "stream",
128 | "text": [
129 | "Iteration 0: Loss = 0.6931471805599453\n",
130 | "Iris Dataset Accuracy: 0.30\n"
131 | ]
132 | }
133 | ],
134 | "source": [
135 | "# Logistic Regression for Iris dataset\n",
136 | "log_reg_iris = LogisticRegression(learning_rate=0.01, num_iterations=10)\n",
137 | "log_reg_iris.fit(X_train_iris, y_train_iris)\n",
138 | "y_pred_iris = log_reg_iris.predict(X_test_iris)\n",
139 | "accuracy_iris = np.mean(y_pred_iris == y_test_iris)\n",
140 | "print(f'Iris Dataset Accuracy: {accuracy_iris:.2f}')"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": 14,
146 | "metadata": {},
147 | "outputs": [
148 | {
149 | "data": {
150 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkIAAAHHCAYAAABTMjf2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABSsklEQVR4nO3dd3gUdeLH8fem9wRIgUAgVCH0GjooKCrVAoiAgOhZQOU47w7OoymC2H4WqljAwomgSBFRQEBp0pHeA4GQUNMhZXd+fyCrkRZgk8lmP6/n2efR2dmZz+4i+3Hm+52xGIZhICIiIuKC3MwOICIiImIWFSERERFxWSpCIiIi4rJUhERERMRlqQiJiIiIy1IREhEREZelIiQiIiIuS0VIREREXJaKkIiIiLgsFSERKVYOHDjAPffcQ3BwMBaLhW+//dbsSHYzZszAYrEQFxdndhQR+Z2KkIgDXf6h27Rpk9lR8mXbtm306dOHqKgovL29KVmyJO3bt+eTTz7BarWaHe+W9OvXjx07dvDqq6/y2Wef0ahRowLbV1xcHBaLhTfffLPA9nEtbdu2xWKxYLFYcHNzIygoiDvuuIO+ffuydOnS29r25MmTmTFjhmOC3qaEhARGjx7Ntm3bzI4ixZSH2QFExBwffvghTz/9NBEREfTt25eqVauSlpbG8uXLGThwICdPnuQ///mP2TFvyoULF1i3bh0vvfQSgwcPNjvOFfr27csjjzyCt7e3Q7ZXrlw5xo8fD0BGRgYHDx7km2++4fPPP6dHjx58/vnneHp63vR2J0+eTGhoKP3793dIztuRkJDAmDFjiI6Opl69embHkWJIRUjEBa1fv56nn36aZs2asXjxYgIDA+3PDRkyhE2bNrFz506H7CsjIwN/f3+HbOtGTp8+DUBISIjDtumI/Je34e7ujru7u4OSQXBwMH369Mmz7LXXXuP5559n8uTJREdHM2HCBIftT6Q40qkxERNs3bqV++67j6CgIAICAmjXrh3r16/Ps05OTg5jxoyhatWq+Pj4UKpUKVq2bJnntEdiYiIDBgygXLlyeHt7U6ZMGbp27XrDMShjxozBYrHwxRdf5ClBlzVq1Mh+NGDlypVYLBZWrlyZZ53Lp4X+fAqlf//+BAQEcOjQIe6//34CAwPp3bs3gwcPJiAggMzMzCv21atXL0qXLp3nVNz3339Pq1at8Pf3JzAwkI4dO7Jr167rvqfRo0dToUIFAP75z39isViIjo62P5+fz/zyqc1Vq1bx7LPPEh4eTrly5a6737+63jauNkZo06ZNdOjQgdDQUHx9falYsSKPP/74Te3zz9zd3XnvvfeIiYlh4sSJpKSk2J/75JNPuOuuuwgPD8fb25uYmBimTJmS5/XR0dHs2rWLVatW2U+9tW3bFoBz587x4osvUrt2bQICAggKCuK+++5j+/btV+R4//33qVmzJn5+fpQoUYJGjRoxa9asPOucOHGCxx9/nIiICLy9valZsyYff/yx/fmVK1fSuHFjAAYMGGDPU1RO20nxoCNCIoVs165dtGrViqCgIP71r3/h6enJtGnTaNu2LatWrSI2Nha49MM+fvx4nnjiCZo0aUJqaiqbNm1iy5Yt3H333QA89NBD7Nq1i+eee47o6GhOnTrF0qVLOXbsWJ4S8GeZmZksX76c1q1bU758eYe/v9zcXDp06EDLli1588038fPzIzo6mkmTJvHdd9/RvXv3PFkWLlxI//797UdKPvvsM/r160eHDh2YMGECmZmZTJkyhZYtW7J169Zrvq8HH3yQkJAQ/v73v9OrVy/uv/9+AgICgPx/5pc9++yzhIWFMXLkSDIyMm7pc8jPNk6dOsU999xDWFgYw4YNIyQkhLi4OL755ptb2udl7u7u9OrVixEjRrB69Wo6duwIwJQpU6hZsyZdunTBw8ODhQsX8uyzz2Kz2Rg0aBAA77zzDs899xwBAQG89NJLAERERABw+PBhvv32W7p3707FihVJSkpi2rRptGnTht27dxMZGQnA9OnTef7553n44Yd54YUXuHjxIr/99hu//vorjz76KABJSUk0bdoUi8XC4MGDCQsL4/vvv2fgwIGkpqYyZMgQatSowcsvv8zIkSP529/+RqtWrQBo3rz5bX0+InkYIuIwn3zyiQEYGzduvOY63bp1M7y8vIxDhw7ZlyUkJBiBgYFG69at7cvq1q1rdOzY8ZrbOX/+vAEYb7zxxk1l3L59uwEYL7zwQr7WX7FihQEYK1asyLP8yJEjBmB88skn9mX9+vUzAGPYsGF51rXZbEbZsmWNhx56KM/yr776ygCMn3/+2TAMw0hLSzNCQkKMJ598Ms96iYmJRnBw8BXL/+pypr9+Jvn9zC9/fy1btjRyc3Ovu69r7e9627j83JEjRwzDMIx58+bd8M/LtbRp08aoWbPmNZ+/vO13333XviwzM/OK9Tp06GBUqlQpz7KaNWsabdq0uWLdixcvGlarNc+yI0eOGN7e3sbLL79sX9a1a9frZjMMwxg4cKBRpkwZ48yZM3mWP/LII0ZwcLA968aNG6/4cybiSDo1JlKIrFYrP/74I926daNSpUr25WXKlOHRRx9l9erVpKamApfGuezatYsDBw5cdVu+vr54eXmxcuVKzp8/n+8Ml7d/tVNijvLMM8/k+XeLxUL37t1ZvHgx6enp9uWzZ8+mbNmytGzZEoClS5eSnJxMr169OHPmjP3h7u5ObGwsK1asuOksN/OZX/bkk0/e9lie/Gzj8limRYsWkZOTc1v7+6vLR8PS0tLsy3x9fe3/nJKSwpkzZ2jTpg2HDx/OcwrtWry9vXFzu/SzYbVaOXv2LAEBAdxxxx1s2bLFvl5ISAjHjx9n48aNV92OYRh8/fXXdO7cGcMw8nzXHTp0ICUlJc/2RAqSipBIITp9+jSZmZnccccdVzxXo0YNbDYb8fHxALz88sskJydTrVo1ateuzT//+U9+++03+/re3t5MmDCB77//noiICFq3bs3rr79OYmLidTMEBQUBeX8gHcnDw+Oq42p69uzJhQsXWLBgAQDp6eksXryY7t27Y7FYAOyl76677iIsLCzP48cff+TUqVM3nedmPvPLKlaseNP7+av8bKNNmzY89NBDjBkzhtDQULp27conn3xCVlbWbe//cuH8c+Fds2YN7du3x9/fn5CQEMLCwuwzA/NThGw2G//3f/9H1apV8fb2JjQ0lLCwMH777bc8r//3v/9NQEAATZo0oWrVqgwaNIg1a9bYnz99+jTJycl88MEHV3zPAwYMALil71rkVqgIiRRRrVu35tChQ3z88cfUqlWLDz/8kAYNGvDhhx/a1xkyZAj79+9n/Pjx+Pj4MGLECGrUqMHWrVuvud0qVarg4eHBjh078pXjckn5q2tdZ+jPRw3+rGnTpkRHR/PVV18BsHDhQi5cuEDPnj3t69hsNuDSOKGlS5de8Zg/f36+Mt+uPx85KchtWCwW5s6dy7p16xg8eLB98HDDhg3zHDm7FZdn/VWpUgWAQ4c
O0a5dO86cOcPbb7/Nd999x9KlS/n73/8O/PHZX8+4ceMYOnQorVu35vPPP+eHH35g6dKl1KxZM8/ra9Sowb59+/jyyy9p2bIlX3/9NS1btmTUqFF59tWnT5+rfs9Lly6lRYsWt/X+RfJLg6VFClFYWBh+fn7s27fviuf27t2Lm5sbUVFR9mUlS5ZkwIABDBgwgPT0dFq3bs3o0aN54okn7OtUrlyZf/zjH/zjH//gwIED1KtXj7feeovPP//8qhn8/Py46667+Omnn4iPj8+zv6spUaIEAMnJyXmWHz16NL9v265Hjx68++67pKamMnv2bKKjo2natGme9wIQHh5O+/btb3r7V3Ozn7kZmjZtStOmTXn11VeZNWsWvXv35ssvv8zzPd8Mq9XKrFmz8PPzs592XLhwIVlZWSxYsCDPIPmrnW68VvmdO3cud955Jx999FGe5cnJyYSGhuZZ5u/vT8+ePenZsyfZ2dk8+OCDvPrqqwwfPpywsDACAwOxWq03/J6vlUXEUXRESKQQubu7c8899zB//vw8U6iTkpKYNWsWLVu2tJ+6Onv2bJ7XBgQEUKVKFftpk8zMTC5evJhnncqVKxMYGHjDUyujRo3CMAz69u171SMPmzdvZubMmQBUqFABd3d3fv755zzrTJ48OX9v+k969uxJVlYWM2fOZMmSJfTo0SPP8x06dCAoKIhx48ZddczM5esE3Yyb+cwL2/nz5zEMI8+yyxcNvNXTY1arleeff549e/bw/PPP29/b5fFKf95fSkoKn3zyyRXb8Pf3v6L4Xt7GX/POmTOHEydO5Fn21z+7Xl5exMTEYBgGOTk5uLu789BDD/H1119f9XpVf/6eL1/D6Wp5RBxBR4RECsDHH3/MkiVLrlj+wgsvMHbsWJYuXUrLli159tln8fDwYNq0aWRlZfH666/b142JiaFt27Y0bNiQkiVLsmnTJubOnWu/YvL+/ftp164dPXr0ICYmBg8PD+bNm0dSUhKPPPLIdfM1b96cSZMm8eyzz1K9evU8V5ZeuXIlCxYsYOzYscCli/Z1796d999/H4vFQuXKlVm0aNEtjeFo0KABVapU4aWXXiIrKyvPaTG4NH5pypQp9O3blwYNGvDII48QFhbGsWPH+O6772jRogUTJ0686f3m9zMvbDNnzmTy5Mk88MADVK5cmbS0NKZPn05QUBD333//DV+fkpJiP/KXmZlpv7L0oUOHeOSRR3jllVfs695zzz14eXnRuXNnnnrqKdLT05k+fTrh4eGcPHkyz3YbNmzIlClTGDt2LFWqVCE8PJy77rqLTp068fLLLzNgwACaN2/Ojh07+OKLL/IMQr+8r9KlS9OiRQsiIiLYs2cPEydOpGPHjvYxS6+99horVqwgNjaWJ598kpiYGM6dO8eWLVtYtmwZ586dAy6V+5CQEKZOnUpgYCD+/v7ExsY6ZByXCKDp8yKOdHl69LUe8fHxhmEYxpYtW4wOHToYAQEBhp+fn3HnnXcaa9euzbOtsWPHGk2aNDFCQkIMX19fo3r16sarr75qZGdnG4ZhGGfOnDEGDRpkVK9e3fD39zeCg4ON2NhY46uvvsp33s2bNxuPPvqoERkZaXh6eholSpQw2rVrZ8ycOTPPNOnTp08bDz30kOHn52eUKFHCeOqpp4ydO3dedfq8v7//dff50ksvGYBRpUqVa66zYsUKo0OHDkZwcLDh4+NjVK5c2ejfv7+xadOm6277WtPnDSN/n3l+Ln9wo/1dbxt/nT6/ZcsWo1evXkb58uUNb29vIzw83OjUqdMN36dhXJo+/+c/WwEBAUbVqlWNPn36GD/++ONVX7NgwQKjTp06ho+PjxEdHW1MmDDB+Pjjj/NkMoxLlyvo2LGjERgYaAD2qfQXL140/vGPfxhlypQxfH19jRYtWhjr1q0z2rRpk2e6/bRp04zWrVsbpUqVMry9vY3KlSsb//znP42UlJQ8eZKSkoxBgwYZUVFRhqenp1G6dGmjXbt2xgcffJBnvfnz5xsxMTGGh4eHptKLw1kM4y/HOUVERERchMYIiYiIiMtSERIRERGXpSIkIiIiLktFSERERFyWipCIiIi4LBUhERERcVm6oOIN2Gw2EhISCAwM1KXeRUREnIRhGKSlpREZGXnV+x9epiJ0AwkJCabfh0hERERuTXx8POXKlbvm8ypCN3D5cvDx8fGm3Y9IREREbk5qaipRUVH23/FrURG6gcunw4KCglSEREREnMyNhrVosLSIiIi4LBUhERERcVkqQiIiIuKyVIRERETEZakIiYiIiMtSERIRERGXpSIkIiIiLktFSERERFyWipCIiIi4LBUhERERcVkqQiIiIuKyVIRERETEZakImcRqM1ix95TZMURERFyaipAJcq02uk9dy4AZG1WGRERETKQiZAIPdzcaVigBwKgFu7iYYzU5kYiIiGtSETLJC+2rUTrIh2PnMpm84qDZcURERFySipBJArw9GNU5BoCpqw5z+HS6yYlERERcj4qQie6tVZo21cLIttoYOX8XhmGYHUlERMSlqAiZyGKx8HLXmnh5uLH64BkW/nbS7EgiIiIuRUXIZBVK+TOobRUAXlm0m9SLOSYnEhERcR0qQkXAU20qUTHUn9NpWbz9436z44iIiLgMFaEiwMfTnZe71gTg03Vx7DyRYnIiERER16AiVES0qhpGpzplsBnw3293YrNp4LSIiEhBUxEqQkZ0iiHA24Nt8cl8uTHe7DgiIiLFnopQERIR5MPQu6sBMGHJXs6kZ5mcSEREpHhTESpiHmtWgZgyQaRcyGH84r1mxxERESnWVISKGA93N8Y+UAuLBb7ecpxfD581O5KIiEixpSJUBDUoX4JHGpcHYMT8neRYbSYnEhERKZ5UhIqof997ByX9vdiflM7Hq4+YHUdERKRYUhEqokL8vBh+X3UA3ll2gBPJF0xOJCIiUvyoCBVhDzUoR+PoElzIsfLywl1mxxERESl2VISKMDc3C2O71cbdzcIPu5L4aW+S2ZFERESKFRWhIu6O0oEMbFkRgJHzd3Eh22pyIhERkeJDRcgJvNCuKmWCfTh+/gKTVhw0O46IiEixoSLkBPy9PRjVOQaAaT8f4tDpdJMTiYiIFA8qQk6iQ83S3HlHGDlWgxHf7sQwdFNWERGR26Ui5CQsFgtjutTC28ONtYfOsmB7gtmRREREnJ6KkBMpX8qPwXdWAWDsd3tIvZhjciIRERHnpiLkZP7WphKVQv05nZbF2z/uNzuOiIiIU1MRcjLeHu683LUWAJ+ui2PniRSTE4mIiDgvFSEn1LJqKJ3rRmIz4KV5O7DaNHBaRETkVqgIOakRHWsQ6O3B9uMp/G/DMbPjiIiIOCUVIScVHuTDP+6pBsDrS/ZyOi3L5EQiIiLOx+mK0KRJk4iOjsbHx4fY2Fg2bNhw3fWTk5MZNGgQZcqUwdvbm2rVqrF48eJCSluw+jStQM3IIFIv5jL++z1mxxEREXE6TlWEZs+ezdChQxk1ahRbtmyhbt26dOjQgVOnTl11/ezsbO6++27i4u
[... remainder of base64-encoded PNG omitted: loss-curve plot for the Iris dataset (title: 'Loss Curve for Iris Dataset', x-axis: Iteration, y-axis: Loss)]",
151 | "text/plain": [
152 | ""
153 | ]
154 | },
155 | "metadata": {},
156 | "output_type": "display_data"
157 | }
158 | ],
159 | "source": [
160 | "# Plot the loss for Iris dataset\n",
161 | "plt.plot(log_reg_iris.losses)\n",
162 | "plt.title('Loss Curve for Iris Dataset')\n",
163 | "plt.xlabel('Iteration')\n",
164 | "plt.ylabel('Loss')\n",
165 | "plt.show()"
166 | ]
167 | },
168 | {
169 | "cell_type": "code",
170 | "execution_count": 15,
171 | "metadata": {},
172 | "outputs": [
173 | {
174 | "name": "stdout",
175 | "output_type": "stream",
176 | "text": [
177 | "Iteration 0: Loss = 0.6931471805599453\n",
178 | "Breast Cancer Dataset Accuracy: 0.38\n"
179 | ]
180 | }
181 | ],
182 | "source": [
183 | "# Logistic Regression for Breast Cancer dataset\n",
184 | "log_reg_bc = LogisticRegression(learning_rate=0.01, num_iterations=10)\n",
185 | "log_reg_bc.fit(X_train_bc, y_train_bc)\n",
186 | "y_pred_bc = log_reg_bc.predict(X_test_bc)\n",
187 | "accuracy_bc = np.mean(y_pred_bc == y_test_bc)\n",
188 | "print(f'Breast Cancer Dataset Accuracy: {accuracy_bc:.2f}')\n"
189 | ]
190 | }
191 | ],
192 | "metadata": {
193 | "kernelspec": {
194 | "display_name": "Python 3",
195 | "language": "python",
196 | "name": "python3"
197 | },
198 | "language_info": {
199 | "codemirror_mode": {
200 | "name": "ipython",
201 | "version": 3
202 | },
203 | "file_extension": ".py",
204 | "mimetype": "text/x-python",
205 | "name": "python",
206 | "nbconvert_exporter": "python",
207 | "pygments_lexer": "ipython3",
208 | "version": "3.12.4"
209 | }
210 | },
211 | "nbformat": 4,
212 | "nbformat_minor": 4
213 | }
214 |
--------------------------------------------------------------------------------
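The notebook above relies on a from-scratch `LogisticRegression` class (constructor parameters `learning_rate` and `num_iterations`, methods `fit` and `predict`, and a recorded `losses` history) defined in an earlier cell. For reference, a minimal sketch consistent with that interface and with the logged output (the iteration-0 loss of 0.6931... is ln 2, the cross-entropy of zero-initialized weights) could look like the following; this is an illustrative reconstruction under those assumptions, not necessarily the exact implementation in the notebook:

import numpy as np

class LogisticRegression:
    """Binary logistic regression trained with batch gradient descent (sketch)."""

    def __init__(self, learning_rate=0.01, num_iterations=1000):
        self.learning_rate = learning_rate
        self.num_iterations = num_iterations
        self.losses = []  # per-iteration binary cross-entropy, as plotted above

    @staticmethod
    def _sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    def fit(self, X, y):
        X, y = np.asarray(X, dtype=float), np.asarray(y, dtype=float)
        n_samples, n_features = X.shape
        self.weights = np.zeros(n_features)  # zero init: first loss is ln(2) ~ 0.6931
        self.bias = 0.0
        for i in range(self.num_iterations):
            p = self._sigmoid(X @ self.weights + self.bias)
            p_safe = np.clip(p, 1e-12, 1 - 1e-12)  # guard against log(0)
            loss = -np.mean(y * np.log(p_safe) + (1 - y) * np.log(1 - p_safe))
            self.losses.append(loss)
            if i % 100 == 0:
                print(f"Iteration {i}: Loss = {loss}")
            # gradient of the mean cross-entropy w.r.t. weights and bias
            dw = X.T @ (p - y) / n_samples
            db = np.mean(p - y)
            self.weights -= self.learning_rate * dw
            self.bias -= self.learning_rate * db

    def predict(self, X):
        X = np.asarray(X, dtype=float)
        return (self._sigmoid(X @ self.weights + self.bias) >= 0.5).astype(int)

With `num_iterations=10`, as in the breast-cancer cell, gradient descent barely moves the weights away from zero, which is consistent with the low 0.38 accuracy reported there; raising `num_iterations` (e.g. to 1000) and standardizing the features beforehand would typically give a much better fit.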
/naive_bayes/naive_bayes_examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------
/naive_bayes/naive_bayes_implementation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------
/neural_networks/animation.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/neural_networks/animation.gif
--------------------------------------------------------------------------------
/neural_networks/neural_networks_implementation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------
/neural_networks/output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/neural_networks/output.png
--------------------------------------------------------------------------------
/neural_networks/output1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/neural_networks/output1.png
--------------------------------------------------------------------------------
/neural_networks/output2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codewithdark-git/ML-Algorithms-From-Scratch/037299f5da9f53138f0a05e25e5da4f40f36124d/neural_networks/output2.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy>=1.21.0
2 | pandas>=1.3.0
3 | matplotlib>=3.4.0
4 | scikit-learn>=1.0.0
5 |
6 |
--------------------------------------------------------------------------------
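All notebooks in the repository share the minimum dependency versions pinned above; they can be installed in one step with `pip install -r requirements.txt`, ideally inside a fresh virtual environment.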
/svm/svm_implementation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 4
6 | }
7 |
--------------------------------------------------------------------------------