├── .circleci
│   └── config.yml
├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── custom.md
│   │   └── feature_request.md
│   └── workflows
│       ├── package-lock.json
│       ├── package.json
│       ├── release-package.yml
│       └── simple-workflow.yml
├── .gitignore
├── .npmrc
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Deep-learning
│   ├── 0-Deep-learning.ipynb
│   ├── ReadMe.md
│   ├── deep-ml.png
│   └── py-files
│       └── Deep-learning.py
├── LICENSE
├── ML-Fundamental
│   ├── 0-ML-fundamentals.ipynb
│   ├── 0.1-ML-algorithms.ipynb
│   ├── 0.2-Regression.ipynb
│   ├── 0.3-Linear-reg.ipynb
│   ├── 0.4.Linear-Parameter-estimation.ipynb
│   ├── 1.0.Statistics.ipynb
│   ├── 1.1-Descriptive-statistics.ipynb
│   ├── 1.2-Inferenial-stat.ipynb
│   ├── 4-ML-Scikit learn.ipynb
│   ├── ML-fundamental python file
│   │   ├── ML-algorithms.py
│   │   ├── ML-descriptive-stat.py
│   │   ├── ML-fundamental.py
│   │   ├── ML-inferential-stat.py
│   │   ├── ML-line-parameter-estimation.py
│   │   ├── ML-line-reg.py
│   │   ├── ML-regression.py
│   │   └── ML-sklearn.py
│   ├── ML-image
│   │   ├── AI-vs-ML-vs-Deep-Learning.png
│   │   ├── Anaconda.png
│   │   ├── Berno-pmf.png
│   │   ├── Binomial.png
│   │   ├── CS-ml-deep.png
│   │   ├── CS-ml.png
│   │   ├── Linear-reg0.png
│   │   ├── Linear-reg1.png
│   │   ├── Linearity.png
│   │   ├── ML-claasification.png
│   │   ├── ML-classifications.png
│   │   ├── ML1.png
│   │   ├── Model.regresion.png
│   │   ├── Multi-lin-reg.png
│   │   ├── Pop-sam1.png
│   │   ├── Pos-skew.jpeg
│   │   ├── Possion.png
│   │   ├── Regression1.png
│   │   ├── Supervised-process.png
│   │   ├── Uses-ML.png
│   │   ├── eexpon.png
│   │   ├── hypo1.png
│   │   ├── independence.png
│   │   ├── lst.png
│   │   ├── neg-lin-reg.png
│   │   ├── neg-skew.jpeg
│   │   ├── normal-df.png
│   │   ├── normal-df2.png
│   │   ├── optimal-reg2.png
│   │   ├── pos-lin-reg.png
│   │   ├── python-libraries.png
│   │   ├── rsquare1.png
│   │   ├── snd-nd.png
│   │   ├── snd.png
│   │   ├── spread.png
│   │   ├── super-alg.png
│   │   ├── uni-dist1.png
│   │   ├── unsuper-alg2.png
│   │   ├── unsuper-process-png.png
│   │   └── zero-skew.png
│   ├── Overfitting-underfitting.ipynb
│   ├── ReadMe.md
│   ├── image-1.png
│   ├── image-2.png
│   └── image.png
├── Projects-ML
│   ├── Example-project.md
│   ├── ML-cycle.png
│   ├── Py-file
│   │   ├── Creid-risk.py
│   │   ├── Linear-reg-py-file.py
│   │   ├── Linear-reg.py
│   │   ├── Monet-Carlo.py
│   │   ├── Multiple-reg.py
│   │   └── Proj-1-simple-line.py
│   ├── README.md
│   ├── image.png
│   └── time-series-analysis-and-forecasting.ipynb
├── README.md
├── SECURITY.md
├── Statistics-fundamental
│   ├── Boxplot_vs_PDF.svg
│   ├── Correlaltion.png
│   ├── Covariance.png
│   ├── ML-Statistics.ipynb
│   ├── ML-statistics python file
│   │   └── ML-statistics.py.py
│   ├── normal-distri.png
│   └── variance.png
├── Supervised-learning
│   ├── Advertising.csv
│   ├── Algerian-fire-EDA.ipynb
│   ├── Algerian_forest_fires_dataset_UPDATE.csv
│   ├── Algerian_forest_fires_dataset_UPDATE.xlsx
│   ├── Algerian_forst_fires_cleaned_dataset.csv
│   ├── CarPrice.csv
│   ├── IPL-IMB381IPL2013.csv
│   ├── Project-1.0-simple-linear.ipynb
│   ├── Project-1.1-Linear-reg.ipynb
│   ├── Project-1.1.0-PCA_analysis.ipynb
│   ├── Project-1.2-Model-training.ipynb
│   ├── Project-1.3-Multiple-reg.ipynb
│   ├── Project-1.3.1-Advertisment-multiple regression.ipynb
│   ├── Project-1.4-Maximum-Likelihood-Estimation.ipynb
│   ├── Project-1.5-Polinomial-regression.ipynb
│   ├── Project-1.6-Advertising.ipynb
│   ├── Project-1.7-Weight-height.ipynb
│   ├── Project-2.0-Classification-regression.ipynb
│   ├── Project-2.1-Logistic-project.ipynb
│   ├── Project-2.2-Binary-logistic.ipynb
│   ├── Project-2.3-KNN-classification.ipynb
│   ├── Project-2.4-Decision-tree.ipynb
│   ├── Project-2.5-Support vector.ipynb
│   ├── Project-2.5.1-Support-vector-machine.ipynb
│   ├── Study-dataset.csv
│   ├── User_Data.csv
│   ├── Weight_height.csv
│   ├── boston_dataset.csv
│   ├── churn.csv
│   ├── cost-fun.png
│   ├── curve.csv
│   ├── dataset.csv
│   ├── example.ipynb
│   ├── fit-line.png
│   ├── housing.csv
│   ├── income_evaluation.csv
│   ├── logistic_function_plot.png
│   ├── ols-method.png
│   ├── pair-plt.png
│   └── titanic.csv
├── index.js
├── ml-data
│   ├── BEML.csv
│   ├── Codes
│   │   ├── Chapter 1 - Python_Basics.pdf
│   │   ├── Chapter 10 - Text Analytics.pdf
│   │   ├── Chapter 2 - Descriptive Analytics.pdf
│   │   ├── Chapter 3 - Basic Statistical Analysis.pdf
│   │   ├── Chapter 4 - Linear Regression.pdf
│   │   ├── Chapter 5 - Classification Problems.pdf
│   │   ├── Chapter 6 - Advanced Machine Learning.pdf
│   │   ├── Chapter 7 - Clustering.pdf
│   │   ├── Chapter 8 - Forecasting.pdf
│   │   └── Chapter 9 - Recommendation Systems.pdf
│   ├── Country-Data.csv
│   ├── Earnings Manipulation 220.csv
│   ├── GLAXO.csv
│   ├── German Credit Data.csv
│   ├── IPL-IMB381IPL2013.csv
│   ├── Income Data.csv
│   ├── MBA-Salary.csv
│   ├── auto-mpg.data
│   ├── bank.csv
│   ├── beer.csv
│   ├── bollywood.csv
│   ├── bollywoodmovies.csv
│   ├── breakups.csv
│   ├── con-new.csv
│   ├── country.csv
│   ├── curve.csv
│   ├── customerspends.csv
│   ├── forecast.xls
│   ├── healthdrink.xlsx
│   ├── hr_data.csv
│   ├── onestop.csv
│   ├── passport.csv
│   ├── sentiment_train
│   ├── snd.png
│   ├── store.xls
│   ├── toy_dataset.csv
│   ├── trainingscores.csv
│   ├── vimana.csv
│   └── wsb.csv
├── package-lock.json
└── package.json
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | # Use the latest 2.1 version of CircleCI pipeline process engine.
2 | # See: https://circleci.com/docs/configuration-reference
3 | version: 2.1
4 |
5 | # Define a job to be invoked later in a workflow.
6 | # See: https://circleci.com/docs/configuration-reference/#jobs
7 | jobs:
8 | say-hello:
9 | # Specify the execution environment. You can specify an image from Docker Hub or use one of our convenience images from CircleCI's Developer Hub.
10 | # See: https://circleci.com/docs/configuration-reference/#executor-job
11 | docker:
12 | - image: cimg/base:stable
13 | # Add steps to the job
14 | # See: https://circleci.com/docs/configuration-reference/#steps
15 | steps:
16 | - checkout
17 | - run:
18 | name: "Say hello"
19 | command: "echo Hello, World!"
20 |
21 | # Orchestrate jobs using workflows
22 | # See: https://circleci.com/docs/configuration-reference/#workflows
23 | workflows:
24 | say-hello-workflow:
25 | jobs:
26 | - say-hello
27 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [arunsinp] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: [87801682] # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: ['https://www.buymeacoffee.com/arunp77'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/custom.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Custom issue template
3 | about: Describe this issue template's purpose here.
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 |
11 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "workflows",
3 | "version": "1.0.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "workflows",
9 | "version": "1.0.0",
10 | "license": "ISC"
11 | }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/.github/workflows/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "machine-learning",
3 | "version": "1.0.0",
4 | "description": "This repository contains machine-learning-related files, with the fundamental theory included in the package.",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "exit 0"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "git+https://github.com/arunp77/Machine-Learning.git"
12 | },
13 | "keywords": [
14 | "Machine-learning"
15 | ],
16 | "author": "Arun Kumar Pandey",
17 | "license": "MIT",
18 | "bugs": {
19 | "url": "https://github.com/arunp77/Machine-Learning/issues"
20 | },
21 | "homepage": "https://github.com/arunp77/Machine-Learning#readme",
22 | "publishConfig": {
23 | "@arunp77:registry": "https://npm.pkg.github.com/"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/.github/workflows/release-package.yml:
--------------------------------------------------------------------------------
1 | name: Node.js Package
2 |
3 | on:
4 | release:
5 | types: [created]
6 |
7 | jobs:
8 | build:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v4
12 | - uses: actions/setup-node@v3
13 | with:
14 | node-version: 16
15 | - run: npm ci
16 | - run: npm test
17 |
18 | publish-gpr:
19 | needs: build
20 | runs-on: ubuntu-latest
21 | permissions:
22 | packages: write
23 | contents: read
24 | steps:
25 | - uses: actions/checkout@v4
26 | - uses: actions/setup-node@v3
27 | with:
28 | node-version: 16
29 | registry-url: https://npm.pkg.github.com/
30 | - run: npm ci
31 | - run: npm publish
32 | env:
33 | NODE_AUTH_TOKEN: ${{secrets.GITHUB_TOKEN}}
34 |
--------------------------------------------------------------------------------
/.github/workflows/simple-workflow.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: CI
4 |
5 | # Controls when the workflow will run
6 | on:
7 | # Triggers the workflow on push or pull request events but only for the "main" branch
8 | push:
9 | branches: [ "main" ]
10 | pull_request:
11 | branches: [ "main" ]
12 |
13 | # Allows you to run this workflow manually from the Actions tab
14 | workflow_dispatch:
15 |
16 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
17 | jobs:
18 | # This workflow contains a single job called "build"
19 | build:
20 | # The type of runner that the job will run on
21 | runs-on: ubuntu-latest
22 |
23 | # Steps represent a sequence of tasks that will be executed as part of the job
24 | steps:
25 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
26 | - uses: actions/checkout@v3
27 |
28 | # Runs a single command using the runners shell
29 | - name: Run a one-line script
30 | run: echo Hello, world!
31 |
32 | # Runs a set of commands using the runners shell
33 | - name: Run a multi-line script
34 | run: |
35 | echo Add other actions to build,
36 | echo test, and deploy your project.
37 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Node.js
2 | node_modules/
3 | npm-debug.log
4 | yarn-error.log
5 |
6 | # macOS
7 | .DS_Store
8 |
9 | # Windows
10 | Thumbs.db
11 | Desktop.ini
12 |
13 | # Editor directories and files
14 | .vscode/
15 | .idea/
16 | *.suo
17 | *.ntvs*
18 | *.njsproj
19 | *.sln
20 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | @arunp77:registry=https://npm.pkg.github.com/
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | .
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ## Repository Contributor Guidelines
2 |
3 | ### Introduction
4 |
5 | Welcome to the repository for Jupyter notebooks covering machine learning, deep learning, statistics, and related topics. We appreciate your interest in contributing to this project. Before you get started, please review these guidelines to ensure a smooth and collaborative experience.
6 |
7 | ### Contribution Process
8 |
9 | 1. **Fork the Repository**: To begin contributing, fork this repository to your GitHub account.
10 |
11 | 2. **Branch Naming**: When working on a specific topic or notebook, create a new branch with a descriptive name related to the content you are working on. For example, if you are working on a notebook about regression, you could name your branch "regression-notebook."
12 |
13 | 3. **Notebook Organization**: Each notebook should be organized as follows:
14 | - Start with a clear title and brief description of the notebook's content.
15 | - Use Markdown cells for explanations and headings.
16 | - Include code cells with comments and explanations.
17 | - Ensure that code is well-documented and follows best practices.
18 | - Use Markdown to include any necessary equations or mathematical explanations.
19 |
20 | 4. **Commit Changes**: Make meaningful and atomic commits with clear messages. For example:
21 | - "Added a new section on logistic regression."
22 | - "Updated code comments for better clarity."
23 |
24 | 5. **Pull Requests**: When you're ready to contribute, open a pull request from your forked repository to the main repository. Clearly describe the purpose of your changes in the pull request description.
25 |
26 | 6. **Code Review**: Expect feedback and code review from maintainers and fellow contributors. Be responsive to comments and make necessary revisions.
27 |
28 | 7. **Testing**: Ensure that your code works correctly and does not introduce errors. If applicable, include test cases within your notebook.
29 |
30 | ### Code and Documentation Standards
31 |
32 | 1. **Coding Style**: Follow the coding style guidelines for Python and Jupyter notebooks. Consistency in code style is important for readability. If you're unsure, refer to PEP 8 for Python.
33 |
34 | 2. **Documentation**: Clearly explain the purpose and context of your notebook in Markdown cells. Include explanations of code, algorithms, and concepts. Make sure your documentation is clear and accessible to a wide audience.
35 |
36 | 3. **Use of Libraries**: When using external libraries (e.g., scikit-learn, TensorFlow), specify the version used and ensure your code is compatible with the most recent versions.
37 |
38 | ### Collaboration and Communication
39 |
40 | 1. **Collaboration**: Feel free to collaborate with others on improving existing notebooks or creating new ones. Coordination through issues and discussions is encouraged.
41 |
42 | 2. **Respectful Communication**: Be respectful and constructive in your communication with other contributors. Healthy discussions and debates are welcome.
43 |
44 | 3. **Issues**: If you encounter issues or have ideas for new notebooks, create a new issue to discuss it with the community.
45 |
46 | ### Licensing
47 |
48 | By contributing to this repository, you agree to license your work under the same license as the repository itself. Please ensure you have the necessary rights to make contributions.
49 |
50 | ### Acknowledgment
51 |
52 | We greatly appreciate your contributions to this repository and will acknowledge your work in the contributors' section.
53 |
54 | Thank you for your interest in advancing the knowledge and resources available in the field of machine learning and statistics. Happy contributing!
55 |
--------------------------------------------------------------------------------
/Deep-learning/ReadMe.md:
--------------------------------------------------------------------------------
1 | # Deep Learning
2 |
3 | **Reference:** [https://arunp77.github.io/machine-learning.html](https://arunp77.github.io/machine-learning.html)
4 |
5 | - Deep Learning:
6 | - Neural Networks:
7 | - Perceptron
8 | - Multi-Layer Perceptron (MLP)
9 | - Convolutional Neural Networks (CNNs):
10 | - Image Classification
11 | - Object Detection
12 | - Image Segmentation
13 | - Recurrent Neural Networks
14 | - Sequence-to-Sequence Models
15 | - Text classification
16 | - Sentiment Analysis
17 | - Long Short-term Memory (LSTM) and Gated Recurrent Units (GRU)
18 | - Time series Forecasting
19 | - Language Modeling
20 | - Generative Adversarial Networks (GANs)
21 | - Image Synthesis
22 | - Data Augmentation
23 |
--------------------------------------------------------------------------------
/Deep-learning/deep-ml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Deep-learning/deep-ml.png
--------------------------------------------------------------------------------
/Deep-learning/py-files/Deep-learning.py:
--------------------------------------------------------------------------------
1 | # %% [markdown]
2 | # # Deep Learning
3 | #
4 | # 
5 | #
6 | # [Image reference](https://www.ait.de/en/deep-learning/)
7 |
8 | # %% [markdown]
9 | # ## Definition
10 | # Deep learning is a subfield of machine learning that involves the use of artificial neural networks with multiple layers to model and solve complex problems.
11 | #
12 | # Artificial Neural Networks (ANNs) are a type of deep learning model that is designed to simulate the way the human brain works. ANNs consist of interconnected nodes (neurons) that transmit signals and perform computations on input data to produce output values. These networks can be used for a wide range of applications, including image recognition, speech recognition, natural language processing, and more.
13 | #
14 | # ## Classification
15 | #
16 | # ANNs can be classified into several categories based on their structure and function, including:
17 | #
18 | # 1. **Feedforward neural networks:** These are neural networks that have a series of interconnected layers, where the output of each layer serves as the input for the next layer.
19 | #
20 | # 2. **Convolutional neural networks (CNNs):** CNNs are primarily used for image and video processing tasks. They consist of layers of convolutional filters that can identify patterns in the input images.
21 | #
22 | # 3. **Recurrent neural networks (RNNs):** These are neural networks that are well-suited to sequence analysis tasks, such as natural language processing or speech recognition. They use a type of neural network layer called a recurrent layer that can maintain an internal state and process inputs one at a time.
23 | #
24 | # 4. **Generative adversarial networks (GANs):** GANs are a type of network that can generate new data based on a set of input data. They consist of two networks: a generator network that creates new data, and a discriminator network that evaluates the quality of the generated data.
25 | #
26 | # 5. **Autoencoders:** Autoencoders are designed to learn a compressed representation of input data. They consist of an encoder network that compresses the input data into a low-dimensional representation, and a decoder network that can reconstruct the original input data from the compressed representation.
27 |
28 | # %% [markdown]
29 | # 
30 | #
31 | # Image source: https://www.sciencedirect.com/science/article/pii/S2352914822000612
32 |
33 | # %% [markdown]
34 | # 
35 | #
36 | # (For a simpler overview, see https://developer.ibm.com/articles/cc-machine-learning-deep-learning-architectures/.)
37 |
38 | # %% [markdown]
39 | # ## Deep Learning framework
40 | #
41 | # In the context of deep learning, a framework is a software library or tool that provides a set of APIs (application programming interfaces) and abstractions to simplify the development of deep neural networks. Frameworks typically include pre-implemented building blocks for common neural network layers, such as convolutional, recurrent, and fully connected layers, as well as optimization algorithms and training routines.
42 | #
43 | # There are many popular frameworks for deep learning, some of which include:
44 | #
45 | # - **TensorFlow:** Developed by Google, it is an open-source software library for dataflow and differentiable programming across a range of tasks.
46 | #
47 | # - **PyTorch:** Developed by Facebook, it is an open-source machine learning framework used for applications such as computer vision and natural language processing.
48 | #
49 | # - **Keras:** An open-source neural network library written in Python, it runs on top of other deep learning frameworks such as TensorFlow and Theano.
50 | #
51 | # - **Caffe:** Developed by Berkeley Vision and Learning Center, it is a deep learning framework that specializes in image recognition.
52 | #
53 | # - **Theano:** Another popular open-source numerical computation library, it is used for deep learning and other mathematical computations.
54 | #
55 | # - **MXNet:** An open-source deep learning framework that is highly scalable and supports a range of programming languages including Python, R, and Julia.
56 | #
57 | # - **Chainer:** A Python-based, open-source deep learning framework that is highly flexible and allows for dynamic computation graphs.
58 | #
59 | # These frameworks provide a range of features and tools for developing and training deep neural networks, making it easier for developers and researchers to experiment with different architectures and optimize their models for specific tasks.
60 |
61 | # %% [markdown]
62 | # ## Difference between Deep learning and Machine learning
63 | #
64 | # | | Machine Learning | Deep Learning |
65 | # |-------------------------|------------------|---------------|
66 | # | Approach | Requires structured data | Does not require structured data |
67 | # | Human intervention | Requires human intervention to correct mistakes | Learns and corrects itself with little human intervention |
68 | # | Hardware | Can run on a CPU | Requires a GPU / significant computing power |
69 | # | Time | Training takes seconds to hours | Training can take weeks |
70 | # | Uses | Forecasting, prediction, and other comparatively simple applications | More complex applications such as autonomous vehicles |
71 |
72 | # %% [markdown]
73 | # ## Practical uses and applications of deep learning
74 | #
75 | # Here are a few examples of the practical uses and applications of deep learning across various domains:
76 | #
77 | # 1. **Image and Object Recognition:** Deep learning has significantly improved image classification, object detection, and recognition tasks. Examples include:
78 | #
79 | # - Autonomous vehicles use deep learning algorithms to recognize and interpret objects in real-time, enabling them to navigate and make informed driving decisions.
80 | #
81 | # - Facial recognition systems, such as those used for biometric identification or security purposes, employ deep learning techniques to accurately recognize and verify individuals' faces.
82 | #
83 | # 2. **Natural Language Processing (NLP):** Deep learning has greatly advanced natural language processing tasks, allowing computers to understand and generate human language. Examples include:
84 | #
85 | # - Chatbots and virtual assistants utilize deep learning models to understand user queries, provide relevant responses, and engage in conversational interactions.
86 | #
87 | # - Machine translation systems, like Google Translate, employ deep learning to improve the accuracy and fluency of translations between different languages.
88 | #
89 | # - Sentiment analysis algorithms analyze text data from social media, customer reviews, or surveys, using deep learning models to determine the sentiment expressed in the text (e.g., positive, negative, neutral).
90 | #
91 | # 3. **Medical Diagnostics:** Deep learning has shown promising results in medical imaging analysis and disease diagnostics. Examples include:
92 | #
93 | # - Deep learning models can detect anomalies and classify medical images, such as X-rays, MRIs, or CT scans, assisting radiologists in diagnosing diseases like cancer or identifying abnormalities.
94 | #
95 | # - Deep learning algorithms have been used to predict the risk of certain diseases, such as diabetic retinopathy or cardiovascular diseases, based on patient data, enabling early detection and intervention.
96 | #
97 | # 4. **Recommendation Systems:** Deep learning models are used in recommendation systems to personalize and improve user experiences. Examples include:
98 | #
99 | # - Streaming platforms like Netflix and Spotify employ deep learning algorithms to recommend personalized movies, TV shows, or music based on a user's viewing or listening history.
100 | #
101 | # - E-commerce platforms, such as Amazon, utilize deep learning-based recommendation systems to suggest products based on a user's browsing history, purchase behavior, and similar user profiles.
102 | #
103 | # 5. **Speech Recognition:** Deep learning has significantly enhanced speech recognition accuracy and enabled voice-controlled applications. Examples include:
104 | #
105 | # - Voice assistants like Apple's Siri, Amazon's Alexa, or Google Assistant utilize deep learning models to accurately recognize and respond to spoken commands and queries.
106 | #
107 | # - Transcription services employ deep learning algorithms to convert spoken language into written text, facilitating tasks such as transcription services, voice search, or closed captioning.
108 | #
109 | # These are just a few examples showcasing the broad range of applications where deep learning has made significant advancements. The versatility and effectiveness of deep learning models have enabled breakthroughs in many fields, revolutionizing industries and improving various aspects of our lives.
110 |
111 | # %%
112 |
113 |
114 |
115 |
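# %% [markdown]
# ## A minimal feedforward example
#
# To make the feedforward-network and framework discussion above concrete, here is a minimal sketch using the Keras API. It assumes the `tensorflow` package is installed, and the tiny dataset below is synthetic and purely illustrative.

# %%
# Minimal sketch of a feedforward (MLP) binary classifier with Keras.
# Assumes `tensorflow` is installed; the data is synthetic and illustrative only.
import numpy as np
import tensorflow as tf

rng = np.random.default_rng(0)
X = rng.normal(size=(200, 4)).astype("float32")   # 200 samples, 4 features
y = (X[:, 0] + X[:, 1] > 0).astype("float32")     # simple, learnable labels

model = tf.keras.Sequential([
    tf.keras.layers.Dense(16, activation="relu", input_shape=(4,)),
    tf.keras.layers.Dense(8, activation="relu"),
    tf.keras.layers.Dense(1, activation="sigmoid"),
])
model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
model.fit(X, y, epochs=5, batch_size=32, verbose=0)
loss, acc = model.evaluate(X, y, verbose=0)
print(f"training loss: {loss:.3f}, accuracy: {acc:.3f}")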
--------------------------------------------------------------------------------
/ML-Fundamental/4-ML-Scikit learn.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "attachments": {},
5 | "cell_type": "markdown",
6 | "metadata": {},
7 | "source": [
8 | "# Machine learning & Scikit learn"
9 | ]
10 | },
11 | {
12 | "attachments": {},
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "Machine learning is a subfield of artificial intelligence that involves the development of algorithms and statistical models that enable machines to learn from data and make predictions or decisions without being explicitly programmed. It is used in a wide range of applications, including image recognition, natural language processing, fraud detection, and recommendation systems.\n",
17 | "\n",
18 | "To get started with machine learning, there are several tools and libraries that can be used. Here are some of the most popular ones:\n",
19 | "\n",
20 | "- **Python:** Python is a popular programming language for machine learning due to its ease of use, rich libraries, and wide community support.\n",
21 | "- **NumPy:** NumPy is a library for numerical computing in Python that provides support for arrays, matrices, and linear algebra operations.\n",
22 | "- **Pandas:** Pandas is a library for data manipulation and analysis in Python that provides tools for reading and writing data, cleaning and preprocessing data, and exploring data.\n",
23 | "- **Scikit-learn:** Scikit-learn is a library for machine learning in Python that provides tools for classification, regression, clustering, dimensionality reduction, model selection, and data preprocessing.\n",
24 | "- **TensorFlow:** TensorFlow is a library for machine learning developed by Google that provides tools for building and training deep learning models.\n",
25 | "- **Keras:** Keras is a high-level API for building and training deep learning models that runs on top of TensorFlow.\n",
26 | "- **PyTorch:** PyTorch is a library for machine learning developed by Facebook that provides tools for building and training deep learning models.\n",
27 | "\n",
28 | "These are just a few of the many tools and libraries available for machine learning. The choice of tools and libraries depends on the specific application and the expertise of the user. In the present tutorial, I will mainly discuss Scikit-learn in detail."
29 | ]
30 | },
31 | {
32 | "attachments": {},
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "# Scikit-learn\n",
37 | "\n",
38 | "- **What is Scikit-learn?:** Scikit-learn (also known as sklearn) is an open-source machine learning library that provides a range of tools for implementing supervised and unsupervised learning algorithms. It is built on top of NumPy, SciPy, and Matplotlib, and is designed to integrate well with other libraries in the Python scientific computing ecosystem.\n",
39 | "\n",
40 | " Overall, scikit-learn is a powerful and user-friendly library that is widely used by data scientists and machine learning practitioners for a variety of tasks, from exploratory data analysis to building complex machine learning pipelines.\n",
41 | "\n",
42 | "- **Which applications can be implemented with the library?**\n",
43 | "\n",
44 | " With Scikit-learn, a wide variety of AI models can be implemented, from both supervised and unsupervised learning. In general, the models can be divided into the following groups:\n",
45 | "\n",
46 | " - Classification (Support Vector Machine, Random Forest, Decision Tree, etc.)\n",
47 | " - Regression (Logistic Regression, Linear Regression, etc.)\n",
48 | " - Dimensionality reduction (principal component analysis, factor analysis, etc.)\n",
49 | " - Data preprocessing and visualization\n",
50 | "- **What are the advantages of scikit-learn?**\n",
51 | "\n",
52 | " Library benefits include:\n",
53 | "\n",
54 | " - simplified application of machine learning tools, data analytics, and data visualization\n",
55 | " - commercial use without license fees\n",
56 | " - a high degree of flexibility when fine-tuning models\n",
57 | " - a foundation on common and powerful data structures from NumPy\n",
58 | " - usability in different contexts."
59 | ]
60 | },
61 | {
62 | "attachments": {},
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "## Some classes available in the Sklearn library\n",
67 | "\n",
68 | "Scikit-learn is a popular Python library for machine learning. It provides a wide range of machine learning algorithms and tools for data preprocessing, model selection, and evaluation. Here are some of the main classes in scikit-learn:\n",
69 | "\n",
70 | "- **Estimators:** Estimators are the main objects in scikit-learn that perform the machine learning algorithms. Each estimator is a Python class that implements a specific algorithm, such as linear regression, logistic regression, decision trees, or support vector machines. Estimators have a fit() method that takes in the training data and trains the model, and a predict() method that takes in new data and makes predictions.\n",
71 | "\n",
72 | "- **Transformers:** Transformers are objects that preprocess data before it is fed into the machine learning algorithm. Examples of transformers include data scaling, feature selection, and text preprocessing. Transformers have a fit_transform() method that takes in the training data and fits the transformer, and a transform() method that applies the transformer to new data.\n",
73 | "\n",
74 | "- **Pipelines:** Pipelines are a sequence of transformers and estimators that are combined together to form a complete machine learning workflow. Pipelines can be used to automate the process of preprocessing data and training a machine learning model. Pipelines have a fit() method that takes in the training data and trains the entire workflow, and a predict() method that takes in new data and makes predictions.\n",
75 | "\n",
76 | "- **Model Selection:** The model selection classes in scikit-learn provide tools for selecting the best model and hyperparameters for a given dataset. These classes include GridSearchCV and RandomizedSearchCV, which perform an exhaustive search over a grid of hyperparameters or a random search of hyperparameters, respectively.\n",
77 | "\n",
78 | "- **Metrics:** Metrics are used to evaluate the performance of a machine learning model. Scikit-learn provides a wide range of evaluation metrics, including accuracy, precision, recall, F1 score, and ROC curves.\n",
79 | "\n",
80 | "These are just some of the main classes in scikit-learn. Scikit-learn also provides many other useful classes and functions for machine learning, such as clustering algorithms, ensemble methods, and data loading utilities."
81 | ]
82 | },
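{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As a minimal sketch of how these classes fit together (assuming scikit-learn is installed; the tiny synthetic dataset below is purely illustrative), the next cell chains a transformer and an estimator in a Pipeline, tunes one hyperparameter with GridSearchCV, and scores the result with a metric:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Minimal sketch: transformer + estimator in a Pipeline, tuned with GridSearchCV.\n",
"# Assumes scikit-learn is installed; the data is synthetic and illustrative only.\n",
"import numpy as np\n",
"from sklearn.pipeline import Pipeline\n",
"from sklearn.preprocessing import StandardScaler\n",
"from sklearn.linear_model import LogisticRegression\n",
"from sklearn.model_selection import GridSearchCV, train_test_split\n",
"from sklearn.metrics import accuracy_score\n",
"\n",
"rng = np.random.default_rng(0)\n",
"X = rng.normal(size=(200, 3))\n",
"y = (X[:, 0] + X[:, 1] > 0).astype(int)\n",
"X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)\n",
"\n",
"pipe = Pipeline([('scaler', StandardScaler()), ('clf', LogisticRegression())])\n",
"search = GridSearchCV(pipe, {'clf__C': [0.1, 1.0, 10.0]}, cv=5)\n",
"search.fit(X_train, y_train)\n",
"print('best parameters:', search.best_params_)\n",
"print('test accuracy:', accuracy_score(y_test, search.predict(X_test)))"
]
},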
83 | {
84 | "cell_type": "markdown",
85 | "metadata": {},
86 | "source": []
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": []
92 | },
93 | {
94 | "cell_type": "markdown",
95 | "metadata": {},
96 | "source": []
97 | },
98 | {
99 | "cell_type": "markdown",
100 | "metadata": {},
101 | "source": []
102 | },
103 | {
104 | "attachments": {},
105 | "cell_type": "markdown",
106 | "metadata": {},
107 | "source": [
108 | "# References\n",
109 | "\n",
110 | "Some useful resources where you can find more information about scikit-learn and examples of how to use each class:\n",
111 | "\n",
112 | "1. Official scikit-learn documentation: https://scikit-learn.org/stable/documentation.html\n",
113 | "2. Scikit-learn tutorials: https://scikit-learn.org/stable/tutorial/index.html\n",
114 | "3. Scikit-learn examples: https://scikit-learn.org/stable/auto_examples/index.html\n",
115 | "4. Scikit-learn user guide: https://scikit-learn.org/stable/user_guide.html\n",
116 | "5. Python Data Science Handbook by Jake VanderPlas: https://jakevdp.github.io/PythonDataScienceHandbook/index.html"
117 | ]
118 | }
119 | ],
120 | "metadata": {
121 | "language_info": {
122 | "name": "python"
123 | },
124 | "orig_nbformat": 4
125 | },
126 | "nbformat": 4,
127 | "nbformat_minor": 2
128 | }
129 |
--------------------------------------------------------------------------------
/ML-Fundamental/ML-fundamental python file/ML-regression.py:
--------------------------------------------------------------------------------
1 | # %% [markdown]
2 | # # Regression algorithms
3 | #
4 | # Regression algorithms are a type of machine learning algorithm used to predict numerical values based on input data. Regression algorithms attempt to find a relationship between the input variables and the output variable by fitting a mathematical model to the data. The goal of regression is to find a mathematical relationship between the input features and the target variable that can be used to make accurate predictions on new, unseen data.
5 | #
6 | #
7 | #
8 | # There are many different types of regression algorithms, including:
9 | #
10 | # 1. **Linear regression:** Linear regression is a simple and widely used algorithm. It assumes a linear relationship between the independent variables and the target variable. The algorithm estimates the coefficients of the linear equation that best fits the data. The equation can be of the form
11 | #
12 | # $y = mx + b$,
13 | #
14 | # where $y$ is the target variable, $x$ is the input feature, $m$ is the slope, and $b$ is the intercept.
15 | #
16 | # **Example:** applications include predicting housing prices based on features like square footage and number of bedrooms, or estimating sales based on advertising expenditure.
17 | #
18 | # 2. **Logistic regression:** Logistic regression is a popular algorithm used for binary classification problems, where the target variable has two possible outcomes (e.g., yes/no, true/false, 0/1). Despite its name, logistic regression is a classification algorithm, not a regression algorithm. It models the relationship between the independent variables (input features) and the binary target variable using the logistic function, also known as the sigmoid function.
19 | #
20 | # 
21 | #
22 | # **Example:** predicting whether a customer will churn (i.e., stop doing business with a company) based on their demographic information and purchase history.
23 | #
24 | # 3. **Polynomial regression:** Polynomial regression is an extension of linear regression where the relationship between the variables is modeled using a polynomial equation. This allows for more flexibility in capturing nonlinear relationships between the input features and the target variable. It involves adding polynomial terms, such as $x^2$ or $x^3$, to the linear equation. Polynomial regression is useful when the data exhibits curvilinear patterns.
25 | #
26 | # **Example:** predicting the yield of a crop based on factors such as temperature, humidity, and rainfall.
27 | #
28 | # 4. **Ridge regression:** Ridge regression is a regularization technique that addresses the issue of overfitting in linear regression. It adds a penalty term to the linear regression equation to control the complexity of the model. This penalty term helps prevent the coefficients from becoming too large, reducing the model's sensitivity to the training data. Ridge regression is particularly useful when dealing with high-dimensional data or when multicollinearity (high correlation) exists among the input features.
29 | #
30 | # **Example:** predicting the price of a stock based on financial indicators such as earnings per share and price-to-earnings ratio.
31 | #
32 | # 5. **Lasso regression:** Lasso regression, similar to ridge regression, is a regularization technique used to combat overfitting. It adds a penalty term to the linear regression equation, but in this case, it uses the L1 norm of the coefficients as the penalty. Lasso regression has a feature selection property that can drive some coefficients to zero, effectively performing automatic feature selection. This makes it useful when dealing with datasets with many features or when looking to identify the most influential variables.
33 | #
34 | # **Example:** predicting the likelihood of a customer purchasing a product based on their browsing and purchase history on a website.
35 | #
36 | # 6. **Elastic Net regression:** ElasticNet regression combines both ridge and lasso regularization techniques. It adds a penalty term that is a linear combination of the L1 (lasso) and L2 (ridge) norms of the coefficients. This hybrid approach allows for feature selection while also providing stability and reducing the impact of multicollinearity. ElasticNet regression is useful when there are many correlated features and the goal is to both select relevant features and mitigate multicollinearity.
37 | #
38 | # **Example:** predicting the demand for a product based on factors such as price, advertising spend, and competitor activity.
39 | #
40 | # There are many other regression algorithms as well, and the choice of algorithm depends on the specific problem and the characteristics of the data.
41 | #
42 | # **Examples of application domains:**
43 | #
44 | # - finance,
45 | # - healthcare,
46 | # - manufacturing,
47 | # - defence and space.
48 |
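# %% [markdown]
# As a minimal sketch of the algorithms described above (assuming scikit-learn is installed; the small synthetic dataset is purely illustrative), the cell below fits ordinary linear regression, ridge, and lasso to the same data and compares their coefficients. The regularized models shrink the coefficients, and lasso can drive some of them exactly to zero.

# %%
# Minimal sketch: linear vs. ridge vs. lasso regression on synthetic data.
# Assumes scikit-learn is installed; the data is illustrative only.
import numpy as np
from sklearn.linear_model import LinearRegression, Ridge, Lasso

rng = np.random.default_rng(42)
X = rng.normal(size=(100, 5))
# Only the first two features carry signal; the remaining three are noise.
y = 3.0 * X[:, 0] - 2.0 * X[:, 1] + rng.normal(scale=0.5, size=100)

for name, model in [("linear", LinearRegression()),
                    ("ridge", Ridge(alpha=1.0)),
                    ("lasso", Lasso(alpha=0.1))]:
    model.fit(X, y)
    print(f"{name:6s} coefficients: {np.round(model.coef_, 3)}")
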
49 | # %% [markdown]
50 | # ## Applications of Regression algorithms
51 | #
52 | # ### 1. In Finance sector:
53 | #
54 | # - **Risk management:** Regression algorithms can be used to analyze historical market data to identify patterns and trends in asset prices, which can help financial institutions to better understand and manage risks associated with their portfolios.
55 | #
56 | # - **Portfolio optimization:** Regression algorithms can be used to optimize the allocation of assets in a portfolio to maximize returns while minimizing risk. This involves using historical data to identify correlations between asset prices and building a model to predict future returns.
57 | #
58 | # - **Credit scoring:** Regression algorithms can be used to analyze borrower data such as credit scores, income, and employment history, to predict the likelihood of default on a loan. This information can be used by lenders to make more informed lending decisions.
59 | #
60 | # - **Trading strategies:** Regression algorithms can be used to analyze market data and identify profitable trading strategies. For example, a regression model could be used to predict the price of a stock based on its historical performance, and this information could be used to make buy or sell decisions.
61 | #
62 | # - **Financial forecasting:** Regression algorithms can be used to forecast financial performance metrics such as revenue, profits, and cash flow, based on historical data and other factors such as market trends and economic indicators. This information can be used by financial analysts to make informed investment recommendations.
63 | #
64 | # ### 2. In healthcare sector:
65 | #
66 | # - **Predicting patient outcomes:** Regression algorithms can be used to predict outcomes such as mortality, readmission, and length of stay for patients based on factors such as age, gender, diagnosis, and comorbidities. This information can help healthcare providers make more informed decisions about patient care and resource allocation.
67 | #
68 | # - **Predicting disease progression:** Regression algorithms can be used to predict the progression of diseases such as cancer, Alzheimer's, and Parkinson's based on biomarkers, genetic information, and other factors. This information can help with early detection and personalized treatment plans.
69 | #
70 | # - **Forecasting healthcare costs:** Regression algorithms can be used to forecast healthcare costs for individuals and populations based on factors such as age, gender, and medical history. This information can be used by insurance companies and policymakers to make more informed decisions about coverage and reimbursement.
71 | #
72 | # - **Analyzing clinical trials:** Regression algorithms can be used to analyze data from clinical trials to determine the efficacy and safety of new treatments. This information can help drug developers make decisions about which drugs to advance to the next phase of development.
73 | #
74 | # - **Predicting disease outbreaks:** Regression algorithms can be used to predict disease outbreaks based on factors such as weather patterns, population density, and vaccination rates. This information can help public health officials make decisions about resource allocation and disease prevention strategies.
75 | #
76 | # ### 3. In manufacturing sector:
77 | #
78 | # - **Quality Control:** Regression algorithms can be used to monitor the quality of manufactured products by analyzing the relationship between the input variables (such as the raw materials used, the manufacturing process parameters) and the output variables (such as the product quality metrics). This helps in identifying factors that affect the quality of the product and optimizing the manufacturing process accordingly.
79 | #
80 | # - **Predictive Maintenance:** Regression algorithms can be used to predict the remaining useful life of manufacturing equipment based on factors such as operating conditions, maintenance history, and sensor data. This helps in scheduling maintenance activities in advance, reducing downtime, and improving equipment reliability.
81 | #
82 | # - **Process Optimization:** Regression algorithms can be used to optimize the manufacturing process by analyzing the relationship between the input variables (such as the process parameters, raw materials) and the output variables (such as product yield, production rate). This helps in identifying the optimal process settings that result in the highest quality products with minimal waste.
83 | #
84 | # - **Supply Chain Management:** Regression algorithms can be used to forecast demand for raw materials and finished products based on historical sales data, economic trends, and market conditions. This helps in improving supply chain planning, reducing inventory costs, and avoiding stockouts.
85 | #
86 | # - **Root Cause Analysis:** Regression algorithms can be used to identify the root cause of defects or quality issues in the manufacturing process by analyzing the relationship between input variables and output variables. This helps in identifying the factors that contribute to defects and implementing corrective actions to prevent them from occurring in the future.
87 | #
88 | # ### 4. In Space & Defence sector:
89 | #
90 | # - **Trajectory prediction:** In the space sector, regression algorithms can be used to predict the trajectory of spacecraft, satellites, and other objects in orbit. This can help with mission planning, collision avoidance, and re-entry planning.
91 | #
92 | # - **Missile guidance:** In the defense sector, regression algorithms can be used to guide missiles to their targets. By analyzing data such as the target's speed, direction, and distance, a regression algorithm can predict the missile's trajectory and make adjustments to ensure it hits the target.
93 | #
94 | # - **Signal processing:** Regression algorithms can also be used in the analysis of signals received from space. For example, they can be used to estimate the direction of arrival of signals from space, which can help with tasks such as tracking satellites and detecting and identifying space debris.
95 | #
96 | # - **Target tracking:** In the defense sector, regression algorithms can be used to track the movement of targets such as vehicles and aircraft. By analyzing data such as the target's speed, direction, and radar signature, a regression algorithm can predict its future position and velocity, which can help with intercepting the target.
97 | #
98 | # - **Image analysis:** Regression algorithms can also be used in the analysis of images and video data from space and defense applications. For example, they can be used to estimate the size and shape of objects in images, detect anomalies, and identify targets.
99 | #
100 | #
101 | # These are just a few examples of the applications of regression algorithms in various sectors. As technology advances and more data becomes available, we can expect to see even more applications of these algorithms in these fields.
102 |
103 | # %% [markdown]
104 | # ## Terminologies Related to the Regression Analysis
105 | #
106 | # 1. **Dependent variable:** The variable being predicted or explained by the regression analysis. It is also called the response variable or outcome variable.
107 | #
108 | # 2. **Independent variable:** The variable that is used to predict or explain the dependent variable. It is also called the predictor variable or explanatory variable.
109 | #
110 | # 3. **Simple linear regression:** A regression analysis that involves only one independent variable.
111 | #
112 | # 4. **Multiple linear regression:** A regression analysis that involves two or more independent variables.
113 | #
114 | # 5. **Coefficient:** The value that represents the slope of the regression line. It indicates the amount by which the dependent variable changes when the independent variable changes by one unit.
115 | #
116 | # 6. **Intercept:** The value of the dependent variable when all independent variables are set to zero. It represents the starting point of the regression line.
117 | #
118 | # 7. **Residual:** The difference between the actual value of the dependent variable and the predicted value from the regression line.
119 | #
120 | # 8. **R-squared:** A measure of how well the regression line fits the data. It represents the proportion of the variance in the dependent variable that is explained by the independent variable(s).
121 | #
122 | # 9. **Overfitting:** When a regression model is too complex and fits the training data too closely, it may not generalize well to new data.
123 | #
124 | # 10. **Underfitting:** When a regression model is too simple and does not fit the training data well enough, it may not capture the underlying relationships between the variables.
125 | #
126 | # These are some common terminologies related to regression analysis, and there may be others depending on the specific context and type of regression being used.
127 |
128 | # %% [markdown]
129 | # ## Why do we use Regression Analysis?
130 | #
131 | # Regression analysis is a statistical method used to examine the relationship between a dependent variable and one or more independent variables. It is used for a variety of purposes, including:
132 | #
133 | # 1. **Prediction:** Regression analysis can be used to predict the values of the dependent variable based on the values of the independent variables. For example, if we want to predict the sales of a product based on advertising expenditure and the size of the market, we can use regression analysis to determine the relationship between these variables and predict the sales based on the values of the independent variables.
134 | #
135 | # 2. **Hypothesis testing:** Regression analysis can be used to test hypotheses about the relationship between the dependent and independent variables. For example, we can test whether there is a significant relationship between smoking and lung cancer by using regression analysis.
136 | #
137 | # 3. **Control variables:** Regression analysis can be used to control for other variables that may affect the relationship between the dependent and independent variables. For example, if we want to examine the relationship between income and health, we may want to control for variables such as age, gender, and education.
138 | #
139 | # 4. **Forecasting:** Regression analysis can be used to forecast future trends based on historical data. For example, we can use regression analysis to forecast the demand for a product based on past sales data and other relevant variables.
140 | #
141 | # Overall, regression analysis is a useful tool for analyzing and understanding the relationship between variables and for making predictions and informed decisions based on that relationship.
142 |
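# %% [markdown]
# To make the first two uses concrete, here is a small sketch (the advertising/sales numbers are synthetic and purely illustrative, not from any real dataset) that fits an OLS model with statsmodels, makes a prediction, and reads off the p-value used for hypothesis testing:

# %%
import numpy as np
import pandas as pd
import statsmodels.api as sm

# Synthetic advertising spend vs. sales data
rng = np.random.default_rng(1)
adv = rng.uniform(10, 100, 40)
sales = 2.5 * adv + 30 + rng.normal(0, 15, 40)

X = sm.add_constant(pd.DataFrame({'advertising': adv}))
ols_model = sm.OLS(sales, X).fit()

# 1. Prediction for a new advertising budget (column order must match X: const, advertising)
new_X = pd.DataFrame({'const': [1.0], 'advertising': [55.0]})
print("Predicted sales:", ols_model.predict(new_X)[0])

# 2. Hypothesis test: is the advertising coefficient significantly different from zero?
print("p-value for advertising:", ols_model.pvalues['advertising'])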
143 | # %% [markdown]
144 | # # Reference
145 | #
146 | # 1. https://www.javatpoint.com/regression-analysis-in-machine-learning
147 | # 2. *Machine Learning using Python*, Manaranjan Pradhan and U Dinesh Kumar
148 |
149 |
150 |
--------------------------------------------------------------------------------
/ML-Fundamental/ML-fundamental python file/ML-sklearn.py:
--------------------------------------------------------------------------------
1 | # %% [markdown]
2 | # # Machine learning & Scikit learn
3 |
4 | # %% [markdown]
5 | # Machine learning is a subfield of artificial intelligence that involves the development of algorithms and statistical models that enable machines to learn from data and make predictions or decisions without being explicitly programmed. It is used in a wide range of applications, including image recognition, natural language processing, fraud detection, and recommendation systems.
6 | #
7 | # To get started with machine learning, there are several tools and libraries that can be used. Here are some of the most popular ones:
8 | #
9 | # - **Python:** Python is a popular programming language for machine learning due to its ease of use, rich libraries, and wide community support.
10 | # - **NumPy:** NumPy is a library for numerical computing in Python that provides support for arrays, matrices, and linear algebra operations.
11 | # - **Pandas:** Pandas is a library for data manipulation and analysis in Python that provides tools for reading and writing data, cleaning and preprocessing data, and exploring data.
12 | # - **Scikit-learn:** Scikit-learn is a library for machine learning in Python that provides tools for classification, regression, clustering, dimensionality reduction, model selection, and data preprocessing.
13 | # - **TensorFlow:** TensorFlow is a library for machine learning developed by Google that provides tools for building and training deep learning models.
14 | # - **Keras:** Keras is a high-level API for building and training deep learning models that runs on top of TensorFlow.
15 | # - **PyTorch:** PyTorch is a library for machine learning developed by Facebook that provides tools for building and training deep learning models.
16 | #
17 | # These are just a few of the many tools and libraries available for machine learning. The choice of tools and libraries depends on the specific application and the expertise of the user. In the present tutorial, I will mainly discuss Scikit-learn in great detail.
18 |
19 | # %% [markdown]
20 | # # Scikit-learn
21 | #
22 | # - **What is Scikit-learn?:** Scikit-learn (also known as sklearn) is an open-source machine learning library that provides a range of tools for implementing supervised and unsupervised learning algorithms. It is built on top of NumPy, SciPy, and Matplotlib, and is designed to integrate well with other libraries in the Python scientific computing ecosystem.
23 | #
24 | # Overall, scikit-learn is a powerful and user-friendly library that is widely used by data scientists and machine learning practitioners for a variety of tasks, from exploratory data analysis to building complex machine learning pipelines.
25 | #
26 | # - **Which applications can be implemented with the library?**
27 | #
28 | # With Scikit-Learn, a wide variety of AI models can be implemented, from both supervised and unsupervised learning. In general, the models can be divided into the following groups:
29 | #
30 | # - Classification (Support Vector Machine, Random Forest, Decision Tree, etc.)
31 | # - Regressions (Logistic Regression, Linear Regression, etc.)
32 | # - Dimension reduction (principal component analysis, factor analysis, etc.)
33 | # - Data preprocessing and visualization
34 | # - **What are the advantages of scikit-learn?**
35 | #
36 | # Library benefits include:
37 | #
38 | # - simplified application of machine learning tools, data analytics and data visualization
39 | # - commercial use without license fees
40 | # - High degree of flexibility when fine-tuning the models
41 | # - based on common and powerful data structures from Numpy
42 | # - Usable in different contexts.
43 |
44 | # %% [markdown]
45 | # ## Some classes available in the Sklearn library
46 | #
47 | # Scikit-learn is a popular Python library for machine learning. It provides a wide range of machine learning algorithms and tools for data preprocessing, model selection, and evaluation. Here are some of the main classes in scikit-learn:
48 | #
49 | # - **Estimators:** Estimators are the main objects in scikit-learn that perform the machine learning algorithms. Each estimator is a Python class that implements a specific algorithm, such as linear regression, logistic regression, decision trees, or support vector machines. Estimators have a fit() method that takes in the training data and trains the model, and a predict() method that takes in new data and makes predictions.
50 | #
51 | # - **Transformers:** Transformers are objects that preprocess data before it is fed into the machine learning algorithm. Examples of transformers include data scaling, feature selection, and text preprocessing. Transformers have a fit_transform() method that takes in the training data and fits the transformer, and a transform() method that applies the transformer to new data.
52 | #
53 | # - **Pipelines:** Pipelines are a sequence of transformers and estimators that are combined together to form a complete machine learning workflow. Pipelines can be used to automate the process of preprocessing data and training a machine learning model. Pipelines have a fit() method that takes in the training data and trains the entire workflow, and a predict() method that takes in new data and makes predictions.
54 | #
55 | # - **Model Selection:** The model selection classes in scikit-learn provide tools for selecting the best model and hyperparameters for a given dataset. These classes include GridSearchCV and RandomizedSearchCV, which perform an exhaustive search over a grid of hyperparameters or a random search of hyperparameters, respectively.
56 | #
57 | # - **Metrics:** Metrics are used to evaluate the performance of a machine learning model. Scikit-learn provides a wide range of evaluation metrics, including accuracy, precision, recall, F1 score, and ROC curves.
58 | #
59 | # These are just some of the main classes in scikit-learn. Scikit-learn also provides many other useful classes and functions for machine learning, such as clustering algorithms, ensemble methods, and data loading utilities.
60 |
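# %% [markdown]
# The following is a minimal sketch (on a synthetic dataset, purely for illustration; it is not part of the original material) that ties together the classes described above: a transformer (`StandardScaler`), an estimator (`LogisticRegression`), a `Pipeline`, model selection with `GridSearchCV`, and an evaluation metric:

# %%
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# Synthetic binary classification data
X, y = make_classification(n_samples=200, n_features=10, random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Transformer + estimator combined into a pipeline
pipe = Pipeline([('scaler', StandardScaler()), ('clf', LogisticRegression())])

# Model selection: grid search over the regularization strength C
grid = GridSearchCV(pipe, param_grid={'clf__C': [0.1, 1.0, 10.0]}, cv=5)
grid.fit(X_train, y_train)

# Metric: accuracy on the held-out test set
print("Best parameters:", grid.best_params_)
print("Test accuracy:", accuracy_score(y_test, grid.predict(X_test)))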
61 | # %% [markdown]
62 | #
63 |
64 | # %% [markdown]
65 | #
66 |
67 | # %% [markdown]
68 | #
69 |
70 | # %% [markdown]
71 | #
72 |
73 | # %% [markdown]
74 | # # References
75 | #
76 | # Some useful resources where you can find more information about scikit-learn and examples of how to use each class:
77 | #
78 | # 1. Official scikit-learn documentation: https://scikit-learn.org/stable/documentation.html
79 | # 2. Scikit-learn tutorials: https://scikit-learn.org/stable/tutorial/index.html
80 | # 3. Scikit-learn examples: https://scikit-learn.org/stable/auto_examples/index.html
81 | # 4. Scikit-learn user guide: https://scikit-learn.org/stable/user_guide.html
82 | # 5. Python Data Science Handbook by Jake VanderPlas: https://jakevdp.github.io/PythonDataScienceHandbook/index.html
83 |
84 |
85 |
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/AI-vs-ML-vs-Deep-Learning.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/AI-vs-ML-vs-Deep-Learning.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Anaconda.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Anaconda.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Berno-pmf.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Berno-pmf.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Binomial.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Binomial.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/CS-ml-deep.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/CS-ml-deep.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/CS-ml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/CS-ml.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Linear-reg0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Linear-reg0.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Linear-reg1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Linear-reg1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Linearity.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Linearity.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/ML-claasification.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/ML-claasification.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/ML-classifications.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/ML-classifications.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/ML1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/ML1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Model.regresion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Model.regresion.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Multi-lin-reg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Multi-lin-reg.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Pop-sam1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Pop-sam1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Pos-skew.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Pos-skew.jpeg
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Possion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Possion.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Regression1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Regression1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Supervised-process.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Supervised-process.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/Uses-ML.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/Uses-ML.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/eexpon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/eexpon.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/hypo1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/hypo1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/independence.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/independence.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/lst.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/lst.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/neg-lin-reg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/neg-lin-reg.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/neg-skew.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/neg-skew.jpeg
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/normal-df.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/normal-df.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/normal-df2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/normal-df2.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/optimal-reg2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/optimal-reg2.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/pos-lin-reg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/pos-lin-reg.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/python-libraries.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/python-libraries.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/rsquare1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/rsquare1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/snd-nd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/snd-nd.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/snd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/snd.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/spread.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/spread.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/super-alg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/super-alg.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/uni-dist1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/uni-dist1.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/unsuper-alg2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/unsuper-alg2.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/unsuper-process-png.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/unsuper-process-png.png
--------------------------------------------------------------------------------
/ML-Fundamental/ML-image/zero-skew.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/ML-image/zero-skew.png
--------------------------------------------------------------------------------
/ML-Fundamental/ReadMe.md:
--------------------------------------------------------------------------------
1 | - Machine Learning:
2 | - Supervised Learning:
3 | - Regression
4 | - Linear Regression
5 | - Polynomial Regression
6 | - Regularisation Techniques
7 | - Classification:
8 | 1. Logistic Regression
9 | 2. k-Nearest Neighbour (K-NN)
10 | 3. Support Vector Machine (SVM)
11 | 4. Decision Trees
12 | 5. Random Forest
13 | 6. Gradient Boosting
14 | - Unsupervised Learning:
15 | - Clustering:
16 | 1. K-means
17 | 2. DBSCAN
18 | 3. Hierarchical Clustering
19 | - Dimensionality Reduction
20 | 1. Principal Component Analysis (PCA)
21 | 2. t-Distributed Stochastic Neighbor Embedding (t-SNE)
22 | 3. Linear Discriminant Analysis (LDA)
23 | 4. Association Rule Learning
24 |     - Reinforcement Learning
25 | - Model Evaluation and validation:
26 | 1. Cross-Validation
27 | 2. Hyperparameter Tuning
28 | 3. Model Selection Techniques
29 | 4. Evaluation Metrics
--------------------------------------------------------------------------------
/ML-Fundamental/image-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/image-1.png
--------------------------------------------------------------------------------
/ML-Fundamental/image-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/image-2.png
--------------------------------------------------------------------------------
/ML-Fundamental/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ML-Fundamental/image.png
--------------------------------------------------------------------------------
/Projects-ML/Example-project.md:
--------------------------------------------------------------------------------
1 | # General Framework of the Project
2 | Let's consider a simple example where we have an ML project with two stages: development and documentation (written in Markdown).
3 |
4 | 1. Initialize your ML project: Create a new directory for your project, and initialize it using a tool like git.
5 | ```bash
6 | mkdir my-ml-project
7 | cd my-ml-project
8 | git init
9 | ```
10 | 2. Set up a virtual environment: To keep your project's dependencies separate from your system's, you can use a virtual environment. Create a new virtual environment and activate it.
11 | ```bash
12 | python3 -m venv venv
13 | source venv/bin/activate
14 | ```
15 | 3. Initialize a git repository: If you skipped `git init` in step 1, initialize the repository now so that your project's changes are tracked.
16 | ```bash
17 | git init
18 | ```
19 | 4. Write your ML code: Create a new file called model.py that defines your model as an object exposing fit() and predict() (a placeholder scikit-learn estimator is used here for illustration; replace it with your own model).
20 | ```python
21 | # model.py -- placeholder estimator; replace it with your own model
22 | from sklearn.linear_model import LinearRegression
23 | my_ml_model = LinearRegression()
24 | ```
25 | 5. Commit your changes: Once you've written your ML code, commit your changes to the git repository.
26 | ```bash
27 | git add model.py
28 | git commit -m "Add my ML model"
29 | ```
30 | 6. Prepare your code for production: Now, it's time to prepare your code for production. Write a script called train_model.py that trains your ML model using your development data.
31 |
32 | ```python
33 | import pandas as pd
34 | import joblib
35 | from model import my_ml_model
36 | 
37 | # Load your development data and train the model
38 | data = pd.read_csv('data.csv')
39 | my_ml_model.fit(data.drop('target', axis=1), data['target'])
40 | joblib.dump(my_ml_model, 'model.joblib')  # persist the trained model
41 | ```
42 | 7. Write a script to make predictions: Create a new file called predict.py that allows you to make predictions using your trained ML model.
43 |
44 | ```python
45 | import pandas as pd
46 | import joblib
47 | 
48 | # Load your data and the trained model saved by train_model.py
49 | data = pd.read_csv('data.csv')
50 | model = joblib.load('model.joblib')
51 | 
52 | # Make predictions
53 | predictions = model.predict(data.drop('target', axis=1))
54 | 
55 | print(predictions)
56 | ```
57 | 8. Save your trained model: Train your ML model and save it to a file using the train_model.py script.
58 | ```bash
59 | python train_model.py
60 | ```
61 | 9. Make predictions using your trained model: Now, you can use the predict.py script to make predictions using your trained ML model.
62 | ```bash
63 | python predict.py
64 | ```
65 | 10. Prepare your model for deployment: Package your trained model, the predict.py script, and any other necessary files into a directory called model.
66 |
67 | ```bash
68 | mkdir model
69 | cp model.joblib model/
70 | cp predict.py model/
71 | cp data.csv model/
72 | ```
73 | 11. Commit your changes: Finally, commit your changes to the git repository.
74 |
75 | ```bash
76 | git add model
77 | git commit -m "Prepare model for deployment"
78 | ```
79 |
80 | 12. Write your markdown: Create a new file called README.md and write your markdown documentation. This should include information about your project, how to use it, and any necessary references.
81 |
82 | ```markdown
83 | # My ML Project
84 |
85 | This is a simple ML project that uses a hypothetical ML model.
86 |
87 | ## How to use it
88 |
89 | 1. Clone the repository.
90 | 2. Train the model using the `train_model.py` script.
91 | 3. Make predictions using the `predict.py` script.
92 |
93 | ## References
94 |
95 | - [TensorFlow](https://www.tensorflow.org/)
96 | - [scikit-learn](https://scikit-learn.org/stable/)
97 | ```
98 |
99 | 13. Commit your changes: Finally, commit your changes to the git repository.
100 |
101 | ```bash
102 | git add README.md
103 | git commit -m "Add markdown documentation"
104 | ```
105 | 14. Push your changes to a remote repository: Once you've finished setting up your ML project and markdown documentation, you can push your changes to a remote repository like GitHub or GitLab.
106 |
107 | ```bash
108 | git remote add origin https://github.com/username/my-ml-project.git
109 | git push -u origin master
110 | ```
111 |
112 |
--------------------------------------------------------------------------------
/Projects-ML/ML-cycle.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Projects-ML/ML-cycle.png
--------------------------------------------------------------------------------
/Projects-ML/Py-file/Multiple-reg.py:
--------------------------------------------------------------------------------
1 | # %% [markdown]
2 | # # Project-2: Auction pricing of players in the Indian premier league (IPL)
3 | # **(Multiple Linear Regression)**
4 | #
5 | # - Multiple linear regression (MLR) is a supervised learning algorithm for finding the existence of an association relationship between a dependent variable (aka response variable or outcome variable) and several independent variables (aka explanatory variables or predictor variable or features).
6 | #
7 | # - The functional form of MLR is given by:
8 | #
9 | # $Y_i = \beta_0 + \beta_1 X_{1i} + \beta_2 X_{2i} + \cdots + \beta_k X_{ki} + \epsilon_i$
10 | #
11 | # where
12 | #
13 | # $\beta_1$, $\beta_2$, $\beta_3$, .... , $\beta_k$ are partial regression coefficients.
14 | #
15 | # - X = independent variables (aka explanatory variables or predictor variable or features)
16 | # - Y = dependent variable (aka response variable or outcome variable)
17 | #
18 | # - The relationship between each explanatory variable and the response (outcome) variable is estimated after removing (or controlling for) the effect of all the other explanatory variables (features) in the model; this is why the coefficients are called partial regression coefficients.
19 | #
20 | # - The assumptions that are made in multiple linear regression model are as follows:
21 | # 1. **Linearity:** The regression model is linear in regression parameters (b-values).
22 | # 2. **Normal distribution:** The residuals follow a normal distribution and the expected value (mean) of the residuals is zero.
23 | #     3. **Uncorrelated residuals:** In time series data, the residuals are assumed to be uncorrelated.
24 | #     4. **Variance of the residuals:** The variance of the residuals is constant for all values of $X_i$. When the variance of the residuals is constant for different values of $X_i$, it is called homoscedasticity; a non-constant variance of residuals is called heteroscedasticity.
25 | # 5. **Correlation between independent variables:** There is no high correlation between independent variables in the model (called multi-collinearity). Multi-collinearity can destabilize the model and can result in an incorrect estimation of the regression parameters.
26 | #
27 | # The partial regression coefficients are estimated by minimizing the sum of squared errors (SSE).
28 |
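# %% [markdown]
# For reference, the estimation criterion can be written out explicitly. With $\hat{Y}_i = \beta_0 + \beta_1 X_{1i} + \cdots + \beta_k X_{ki}$, ordinary least squares chooses the coefficients that minimize
#
# $SSE = \sum_{i=1}^{n} \left(Y_i - \hat{Y}_i\right)^2,$
#
# which, in matrix form, leads to the closed-form solution $\hat{\beta} = (X^{T}X)^{-1}X^{T}Y$ (assuming $X^{T}X$ is invertible).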
29 | # %% [markdown]
30 | # ## Objective: Predicting the SOLD PRICE (Auction Price) of Players
31 | #
32 | # The Indian Premier League (IPL) is a professional league for Twenty20 (T20) cricket championships that was started in 2008 in India. IPL was initiated by the BCCI with eight franchises comprising players from across the world. The first IPL auction was held in 2008 for ownership of the teams for 10 years, with a base price of USD 50 million. The franchises acquire players through an English auction that is conducted every year. However, there are several rules imposed by the IPL. For example, only international players and popular Indian players are auctioned.
33 | #
34 | # The performance of the players could be measured through several metrics. Although the IPL follows the Twenty20 format of the game, it is possible that the performance of the players in the other formats of the game, such as Test and One-Day matches, could influence player pricing. A few players had excellent records in Test matches, but their records in Twenty20 matches were not very impressive. The performances of 130 players who played in at least one season of the IPL (2008−2011), measured through various performance metrics, form the dataset used in this project.
35 | #
36 | # ### Data Code Description
37 | #
38 | # | Data Code | Description |
39 | # |-----------|-------------|
40 | # | AGE | Age of the player at the time of auction classified into three categories. Category 1 (L25) means the player is less than 25 years old, category 2 means that the age is between 25 and 35 years (B25− 35) and category 3 means that the age is more than 35 (A35). |
41 | # | RUNS-S | Number of runs scored by a player. |
42 | # | RUNS-C | Number of runs conceded by a player. |
43 | # | HS | Highest score by a batsman in IPL. |
44 | # | AVE-B | Average runs scored by a batsman in IPL. |
45 | # | AVE-BL | Bowling average (number of runs conceded/number of wickets taken) in IPL. |
46 | # | SR-B | Batting strike rate (ratio of the number of runs scored to the number of balls faced) in IPL. |
47 | # | SR-BL | Bowling strike rate (ratio of the number of balls bowled to the number of wickets taken) in IPL. |
48 | # | SIXERS | Number of six runs scored by a player in IPL.|
49 | # | WKTS | Number of wickets taken by a player in IPL. |
50 | # | ECON | Economy rate of a bowler (number of runs conceded by the bowler per over) in IPL. |
51 | # | CAPTAINCY EXP | Captained either a T20 team or a national team.|
52 | # | ODI-SR-B | Batting strike rate in One-Day Internationals. |
53 | # | ODI-SR-BL | Bowling strike rate in One-Day Internationals. |
54 | # | ODI-RUNS-S | Runs scored in One-Day Internationals. |
55 | # | ODI-WKTS | Wickets taken in One-Day Internationals.|
56 | # | T-RUNS-S | Runs scored in Test matches. |
57 | # | T-WKTS | Wickets taken in Test matches. |
58 | # | PLAYER-SKILL | Player’s primary skill (batsman, bowler, or allrounder). |
59 | # | COUNTRY | Country of origin of the player (AUS: Australia; IND: India; PAK: Pakistan; SA: South Africa; SL: Sri Lanka; NZ: New Zealand; WI: West Indies; OTH: Other countries). |
60 | # | YEAR-A | Year of Auction in IPL. |
61 | # | IPL TEAM | CSK: Chennai Super Kings; DC: Deccan Chargers; DD: Delhi Daredevils; KXI: Kings XI Punjab; KKR: Kolkata Knight Riders; MI: Mumbai Indians; PWI: Pune Warriors India; RR: Rajasthan Royals; RCB: Royal Challengers Bangalore |
62 | #
63 | # *A + sign is used to indicate that the player has played for more than one team. For example, CSK+ would mean that the player has played for CSK as well as for one or more other teams.
64 |
65 | # %% [markdown]
66 | # ## Data
67 | # - Data is available in the repository.
68 |
69 | # %% [markdown]
70 | # ## Developing Multiple Linear Regression Model Using Python
71 |
72 | # %% [markdown]
73 | # ### Importing important libraries
74 |
75 | # %%
76 | import warnings
77 | warnings.filterwarnings('ignore')
78 |
79 | import numpy as np
80 | import pandas as pd
81 |
82 | import matplotlib.pylab as plt
83 | import seaborn as sns
84 |
85 | plt.style.use('ggplot')
86 |
87 | np.set_printoptions(precision=4, linewidth=100)
88 |
89 | # %% [markdown]
90 | # ### Data file path
91 |
92 | # %%
93 | # Provide the relative path to the data file
94 | file_path = "../ml-data/IPL-IMB381IPL2013.csv"
95 |
96 | # %% [markdown]
97 | # ### Importing the data file
98 |
99 | # %%
100 | # importing the data file
101 |
102 | ipl_auction_df = pd.read_csv(file_path)
103 |
104 | # %%
105 | ipl_auction_df.info()
106 |
107 | # %%
108 | # shape of the dataframe
109 | ipl_auction_df.shape
110 |
111 | # %% [markdown]
112 | # There are 130 observations (records) and 26 columns (features) in the data, and there are no missing values.
113 |
114 | # %%
115 | # displaying the first 5 rows
116 | ipl_auction_df.head(5)
117 |
118 | # %%
119 | ipl_auction_df.plot.scatter(x='ODI-RUNS-S', y='BASE PRICE')
120 |
121 | # %%
122 | # displaying the initial 10 columns for the first 5 rows
123 | ipl_auction_df.iloc[0:5, 0:10]
124 |
125 | # %% [markdown]
126 | # ## Building multiple linear regression model
127 | #
128 | # - We can build a model to understand what features (`X`) of players are influencing their SOLD PRICE or predict the player’s auction prices in future. However, all columns are not features.
129 | # - For example, Sl. NO. is just a serial number and cannot be considered a feature of the player.
130 | # - We will build a model using only player’s statistics. So, BASE PRICE can also be removed.
131 | # - We will create a variable `X_feature` which will contain the list of features that we will finally use for building the model and ignore rest of the columns of the DataFrame.
132 |
133 | # %% [markdown]
134 | # ### 1. Creating the feature columns
135 | #
136 | # The following cells list the available columns and select the features to include in the model building.
137 |
138 | # %%
139 | # Assuming 'ipl_auction_df' is your DataFrame
140 | column_names = ipl_auction_df.columns.tolist()
141 | column_names
142 |
143 | # %% [markdown]
144 | # Not all columns are important. We select a few of them to build our model for the auction price. To do this, we create the `X_features` list.
145 |
146 | # %%
147 | X_features = ipl_auction_df.columns
148 |
149 | # %%
150 | X_features = ['AGE', 'COUNTRY', 'PLAYING ROLE', 'T-RUNS', 'T-WKTS', 'ODI-RUNS-S', 'ODI-SR-B', 'ODI-WKTS', 'ODI-SR-BL', 'CAPTAINCY EXP',
151 | 'RUNS-S', 'HS', 'AVE', 'SR-B', 'SIXERS', 'RUNS-C', 'WKTS', 'AVE-BL', 'ECON', 'SR-BL']
152 |
153 | # %% [markdown]
154 | # ### 2. Encoding Categorical Features
155 | #
156 | # - Qualitative variables or categorical variables need to be encoded using dummy variables before incorporating them in the regression model.
157 | # - If a categorical variable has `n` categories (e.g., the player role in the data has four categories, namely, batsman, bowler, wicket-keeper and allrounder), then we will need `n − 1` dummy variables. So, in the case of PLAYING ROLE, we will need **three dummy variables** since there are four categories.
158 |
159 | # %%
160 | # Finding unique values of column PLAYING ROLE
161 | ipl_auction_df['PLAYING ROLE'].unique()
162 |
163 | # %%
164 | ipl_auction_df['COUNTRY'].unique()
165 |
166 | # %% [markdown]
167 | # The variable can be converted into four dummy variables.
168 | # - Set the variable value to `1` to indicate the role of the player.
169 | # - This can be done using `pd.get_dummies()` method.
170 | # - We will create dummy variables for only PLAYING ROLE to understand and then create dummy variables for the rest of the categorical variables.
171 | #
172 |
173 | # %%
174 | pd.get_dummies(ipl_auction_df['PLAYING ROLE'])[0:5]
175 | #[0:5] is a slicing operation that selects the first five rows of the resulting DataFrame
176 |
177 | # %% [markdown]
178 | # As shown in the table above, the `pd.get_dummies()` method has created four dummy variables and, in each row, set the dummy corresponding to the player's role to `1`.
179 |
180 | # %% [markdown]
181 | # - We must create dummy variables for all categorical (qualitative) variables present in the dataset.
182 |
183 | # %%
184 | categorical_features = ['AGE', 'COUNTRY', 'PLAYING ROLE', 'CAPTAINCY EXP']
185 |
186 | # %%
187 | ipl_auction_encoded_df = pd.get_dummies(ipl_auction_df[X_features],
188 | columns = categorical_features,
189 | drop_first = True)
190 |
191 | # %%
192 | ipl_auction_encoded_df.columns
193 |
194 | # %% [markdown]
195 | # - The dataset contains the new dummy variables that have been created.
196 | # - We can reassign the new features to the variable `X_features`, which we created earlier to keep track of all features that will be used to build the model finally.
197 |
198 | # %%
199 | X_features = ipl_auction_encoded_df.columns
200 | X_features
201 |
202 | # %% [markdown]
203 | # ### 3. Splitting the Dataset into Train and Validation Sets
204 | #
205 | # - Before building the model, we will split the dataset into an 80:20 train:validation ratio.
206 | # - The split function accepts a parameter `random_state`, which seeds the random number generator so that the split is reproducible. This parameter is optional.
207 | # - Setting this variable to a fixed number makes sure that the records that go into the **training** and **test set** remain unchanged and hence the results can be reproduced. We will use the value 42 (an arbitrary choice).
208 |
209 | # %%
210 | # Creating feature Set(X) and Outcome Variable (Y)
211 | import statsmodels.api as sm
212 | X = sm.add_constant( ipl_auction_encoded_df )
213 | Y = ipl_auction_df['SOLD PRICE']
214 |
215 | # %%
216 | from sklearn.model_selection import train_test_split
217 |
218 | train_X, test_X, train_y, test_y = train_test_split(X , Y,
219 | train_size = 0.8, random_state = 42 )
220 |
221 | # %% [markdown]
222 | # ### 4. Fitting the Model
223 | #
224 | # We will fit the model using OLS method and pass `train_y` and `train_X` as parameters.
225 |
226 | # %%
227 | ipl_model_1 = sm.OLS(train_y, train_X).fit()
228 |
229 | # %% [markdown]
230 | # Printing Estimated Parameters and Interpreting Them
231 |
232 | # %%
233 | print(ipl_model_1.params)
234 |
235 | # %% [markdown]
236 | # #### Scatter Plot
237 |
238 | # %%
239 | # Scatter plot of actual vs predicted SOLD PRICE on the training data
240 | pred_train_y = ipl_model_1.predict(train_X)
241 | 
242 | plt.scatter(train_y, pred_train_y, label='Training data')
243 | 
244 | # Reference line y = x corresponds to a perfect prediction
245 | lims = [train_y.min(), train_y.max()]
246 | plt.plot(lims, lims, color='red', label='Perfect fit')
247 | 
248 | # Add labels and title
249 | plt.xlabel('Actual SOLD PRICE')
250 | plt.ylabel('Predicted SOLD PRICE')
251 | plt.title('Actual vs Predicted Auction Price')
252 | 
253 | # Add legend
254 | plt.legend()
255 | 
256 | # Show the plot
257 | plt.show()
258 |
259 | # %% [markdown]
260 | # ### 5. Regression Model Summary Using Python
261 | #
262 | # The function `summary2()` prints the model summary which contains the information required for diagnosing a regression model
263 |
264 | # %%
265 | ipl_model_1.summary2()
266 |
267 | # %%
268 |
269 |
270 |
271 |
--------------------------------------------------------------------------------
/Projects-ML/Py-file/Proj-1-simple-line.py:
--------------------------------------------------------------------------------
1 | # %%
2 | import warnings
3 | warnings.filterwarnings('ignore')
4 |
5 | import numpy as np
6 | import pandas as pd
7 |
8 | import matplotlib.pylab as plt
9 | import seaborn as sns
10 |
11 | plt.style.use('ggplot')
12 |
13 | np.set_printoptions(precision=4, linewidth=100)
14 |
15 | # %% [markdown]
16 | # ## Objective
17 | #
18 | # - We want to predict the score in an exam on the basis of study time.
19 | #
20 | # $y = m \cdot x + c$
21 | #
22 | # - Our Data points are: ($x_i, y_i$)
23 | # - The predicted data point for a given $x$ will be $\hat{y}_i$.
24 | #
25 | # - Mean squared error function:
26 | #
27 | # $E = \frac{1}{n} \sum_{i=1}^n (y_i- \hat{y}_i)^2 = \frac{1}{n} \sum_{i=1}^n (y_i- (m\cdot x_i + b))^2$
28 | #
29 | # - We need to find $m$ and $b$ by minimizing the function $E$. We can calculate the derivatives with respect to $m$ and $b$ and set them to zero.
30 | #
31 | # $\frac{\partial E}{\partial m} = - \frac{2}{n} \sum_{i=1}^n x_i (y_i- (m\cdot x_i + b))$
32 | #
33 | # $\frac{\partial E}{\partial b} = - \frac{2}{n} \sum_{i=1}^n (y_i- (m\cdot x_i + b))$
34 | #
35 | # If we want to improve $m$ and $b$, then at each iteration we take their current values and update them using the following equations.
36 | #
37 | # The new $m$ and $b$ are written in terms of the current $m$ and $b$ and the gradient components $\partial E/\partial m$ and $\partial E/\partial b$ as:
38 | #
39 | # $m = m -\alpha \cdot \frac{\partial E}{\partial m}$
40 | #
41 | # $b = b -\alpha \cdot \frac{\partial E}{\partial b}$
42 | #
43 | # where $\alpha$ is the learning rate, which controls how large a step we take. A larger $\alpha$ reaches the neighbourhood of the optimum faster but can overshoot it, while a smaller $\alpha$ converges more slowly but usually gives a more precise result, because we pay closer attention to the details of the loss surface.
44 | #
45 |
46 | # %% [markdown]
47 | # Generating a random dataset with the columns 'studytime' and 'score'.
48 |
49 | # %%
50 | # Generating the dataset for study time and score in an exam
51 | np.random.seed(42)
52 |
53 | study_time = np.random.uniform(low=0.5, high=4.0, size=100)
54 | score = 10 * study_time + np.random.normal(loc=0, scale=5, size=100)
55 |
56 | # Create a DataFrame from the generated data
57 | df = pd.DataFrame({'studytime': study_time, 'score': score})
58 |
59 | # Save the DataFrame to a CSV file
60 | df.to_csv('Study-dataset.csv', index=False)
61 |
62 | # Load the CSV file into a DataFrame
63 | data_df = pd.read_csv('Study-dataset.csv')
64 | data_df.head()
65 |
66 |
67 | # %%
68 | plt.scatter(data_df.studytime, data_df.score )
69 | plt.show()
70 |
71 | # %% [markdown]
72 | # Defining the mean-squared-error loss function, then computing its gradients, and finally finding m and b by minimizing the loss:
73 | #
74 | # ```python
75 | # def loss_function(m, b, points):
76 | #     total_error = 0
77 | #     for i in range(len(points)):
78 | #         x = points.iloc[i].studytime
79 | #         y = points.iloc[i].score
80 | #         total_error += (y - (m * x + b)) ** 2
81 | #     return total_error / float(len(points))
82 | # ```
83 |
84 | # %%
85 | # loss function and then minimization
86 |
87 |
88 | # Gradient Descent Function:
89 | def gradient_descent(m_now, b_now, points, L):
90 | m_gradient = 0
91 | b_gradient = 0
92 |
93 | n = len(points)
94 |
95 | for i in range(n):
96 | x = points.iloc[i].studytime
97 | y = points.iloc[i].score
98 |
99 | m_gradient += - (2/n)*x*(y-(m_now*x+b_now))
100 | b_gradient += - (2/n)*(y-(m_now*x+b_now))
101 |
102 | m = m_now - m_gradient*L
103 | b = b_now - b_gradient*L
104 |
105 | return m, b
106 |
107 | '''
108 | gradient_descent(m_now, b_now, points, L): This function takes the current values of slope (m_now),
109 | y-intercept (b_now), a DataFrame containing the data points (points), and the learning rate (L) as input.
110 | It performs gradient descent to update the values of slope and y-intercept. This function calculates the
111 | gradients of the loss function with respect to m and b for each data point and updates the values of m and b
112 | using the gradient descent update rule. Here m_gradient and b_gradient are initialized to 0.
113 | '''
114 |
115 | # %%
116 | # Initialization:
117 | m = 0
118 | b = 0
119 | L = 0.001
120 | epochs = 300
121 |
122 | '''
123 | The slope (m) and y-intercept (b) are initialized to 0. The learning rate (L) is set to 0.001,
124 | and the number of epochs (iterations) is set to 300.
125 | '''
126 |
127 | # %%
128 | # Gradient Descent Loop:
129 | for i in range(epochs):
130 | if i % 10 == 0:
131 | print(f"Epoch: {i}")
132 | m, b = gradient_descent(m,b,data_df,L)
133 |
134 | '''
135 | This loop runs for the specified number of epochs. In each iteration,
136 | it calls the gradient_descent function to update the values of m and b.
137 | The if statement inside the loop is used to print the current epoch every
138 | 10 iterations for monitoring progress.
139 | '''
140 |
141 | # %%
142 | print(m)
143 | print(b)
144 |
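# %% [markdown]
# As a quick sanity check (not part of the original workflow), we can compare the gradient-descent estimates with the closed-form least-squares fit from NumPy; the two should be close if the learning rate and number of epochs are adequate.

# %%
# Closed-form least-squares fit for comparison (degree-1 polynomial)
m_ls, b_ls = np.polyfit(data_df.studytime, data_df.score, 1)
print(f"Gradient descent: m = {m:.3f}, b = {b:.3f}")
print(f"np.polyfit:       m = {m_ls:.3f}, b = {b_ls:.3f}")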
145 | # %%
146 | # Plotting the data and fitted line
147 | plt.scatter(data_df.studytime, data_df.score, color="black")
148 | plt.plot(data_df.studytime, m * data_df.studytime + b, color="red")
149 | plt.xlabel('Study Time (hours)')
150 | plt.ylabel('Score')
151 | plt.title('Linear Regression Fit')
152 | plt.show()
153 |
154 | # %% [markdown]
155 | # ## Reference
156 | #
157 | # - https://www.youtube.com/watch?v=VmbA0pi2cRQ&ab_channel=NeuralNine
158 |
159 | # %%
160 |
161 |
162 |
163 |
--------------------------------------------------------------------------------
/Projects-ML/README.md:
--------------------------------------------------------------------------------
1 | # Ensuring Data Integrity and Continuity for Machine Learning Projects
2 |
3 | ## Introduction
4 | In a typical Machine Learning project, the final implemented solution should provide automated training and deployment of the selected models. This is where CI/CD comes into play: this continuous integration / continuous deployment solution provides an end-to-end pipeline that completes the cycle of a full project and ensures the model's performance. Originally, Continuous Integration and Deployment is a DevOps technique to implement an automated production pipeline by:
5 | - streamlining (rationalization)
6 | - testing
7 | - deploying to production
8 |
9 | The DevOps field corresponds to a collection of processes that aim to shorten a system's development life cycle while enabling the continuous delivery of high-quality software.
10 |
11 | MLOps, on the other hand, is the process of automating and industrializing machine learning applications and workflows. CI/CD represents here an automation workflow of the ML pipeline through the following operations:
12 | - building the model
13 | - testing
14 | - deploying
15 |
16 | This also frees the data scientist from having to manage and worry about this process: it removes the risk of human negligence and ensures constant improvement of the model's efficiency through permanent monitoring of the ML model. Any change in the model's construction is thus eased, and its development is automated with reliable delivery.
17 |
18 | As the CI/CD workflow will automate the different steps of an ML project, let's do a quick reminder about the typical lifecycle of an ML project.
19 |
20 |
21 |
22 | - **Data preparation:** In most cases, the data is initially presented in raw form. For this reason, it is necessary to perform a few steps of preprocessing these data sets to make them usable for the modeling step. This step is generally performed by the Data Scientist or sometimes by the Data Analyst and may require the use of tools such as Apache Spark, MySQL or Python, and libraries such as Pandas or Numpy.
23 | - **Model Training:** This step led by the Data Scientist is the main focus of the project life cycle: the purpose of the model implementation is to respond to a specific problem by designing and setting the appropriate algorithm. This iteration usually requires the import of tools such as TensorFlow, PyTorch frameworks, or the library Scikit-Learn.
24 | - **Model Deploying:** Once the model is ready, the Machine Learning Engineer or the Data Engineer is intended to make it available to the customer for easy and convenient use.
25 | - **New raw data:** Although the project may seem to be coming to an end, the Data Engineer very often receives new raw data after these steps. It must therefore be integrated into the cycle described above to refine and improve the previously developed model's performance.
26 |
27 | ## Understanding CI/CD
28 | Each phase of the project lifecycle is associated with at least one process: CI, CD or both. Let's dive into their meaning.
29 |
30 | - **Continuous Integration (CI):** CI stands for Continuous Integration. This practice gathers software engineering concepts around automating the building and testing of the model, version control, and release. By release, we here explicitly mean the action of pushing the code to the corresponding GitHub repository.
31 | - **Continuous Delivery (CD):** CD stands for Continuous Delivery or Continuous deployment. This concept represents the continuous model deployment, the formatting, and the setup of a production-like environment to allow for automated integration. Regarding the application, the CD stage includes the testing, deployment, and automated configuration of the app.
32 |
33 | ## Key Components of CI/CD:
34 |
35 | - **Version Control System (VCS):** Centralized systems like Git enable collaborative development by managing code changes.
36 | - **Automated Builds:** Tools like Jenkins or Travis CI automate the build process, creating executable code from source files.
37 | - **Automated Testing:** Automated testing frameworks (e.g., JUnit for Java) ensure code quality by identifying bugs and issues early in the development cycle.
38 | - **Deployment Automation:** Tools such as Docker and Kubernetes automate the deployment process, ensuring consistency across different environments.
39 |
40 | ## Benefits of CI/CD:
41 |
42 | - **Faster Development Cycles:** Automated processes reduce manual errors and accelerate the development lifecycle.
43 | - **Improved Code Quality:** Automated testing and continuous monitoring enhance code reliability.
44 | - **Reduced Deployment Risks:** Incremental updates and automated rollbacks minimize the impact of failed deployments.
45 | - **Enhanced Collaboration:** CI/CD fosters collaboration among development and operations teams, leading to more efficient workflows.
46 |
47 | ## CI/CD in Machine Learning:
48 | Continuous Integration and Deployment (CI/CD) has long been a common practice for the majority of software systems. Machine learning systems can benefit in the same way by continuously and automatically training and deploying Machine Learning models.
49 |
50 | Machine learning applications that use CI-CD offer a complete pipeline that closes the feedback loop at every level and maintains the performance of ML models. It can also bridge the gap between scientific and engineering processes by removing obstacles between data, modeling, processing, and return.
51 |
52 | Every step of the typical lifecycle of a machine learning project, and its link with CI/CD, is detailed below:
53 |
54 |
55 |
56 | ## Example
57 | Let's consider a simple example where we have an ML project with two stages: development and deployment.
58 | ### **Customer Behavior Prediction Model**
59 |
60 | Imagine a scenario where an e-commerce company wants to develop a machine learning model to predict customer behavior, specifically the likelihood of a customer making a purchase. The dataset includes information about customer demographics, browsing history, past purchases, and time spent on the website.
61 |
62 | **Project structure:**
63 | Assume the following project structure:
64 | - customer_behavior_prediction/
65 | - src/
66 | - model.py
67 | - tests/
68 | - test_model.py
69 | - build.py
70 | - requirements.txt
71 |
72 |
73 | **Steps in Continuous Integration and Continuous Deployment (CI/CD) for Customer Behavior Prediction Model:**
74 |
75 | **1. Continuous Integration (CI):**
76 |
77 | * **Version Control:**
78 | - Developers work on separate branches for different features or improvements in the model.
79 | - They use a version control system (e.g., Git) to manage changes collaboratively.
80 | - **Initialize a Git repository:** ```git init```
81 | - **Create a `.gitignore` file to exclude unnecessary files from version control:** ```touch .gitignore```
82 | - **Add and commit the changes:**
83 | ```bash
84 | git add .
85 | git commit -m "Initial commit"
86 | ```
87 | - **Create a GitHub repository and push your code:**
88 | ```bash
89 |     git remote add origin <your-repository-url>
90 | git branch -M main
91 | git push -u origin main
92 | ```
93 | - **Set up a virtual environment:** To keep your project's dependencies separate from your system's, you can use a virtual environment. Create a new virtual environment and activate it.
94 | ```bash
95 | python3 -m venv venv
96 | source venv/bin/activate
97 | ```
98 |   and then initialize the repo using `git init` if you have not already done so in the first step.
99 |
100 | * **Automated Testing:**
101 | - Unit tests and integration tests are written to verify the correctness of individual model components and their interactions.
102 | - Developers commit changes regularly, triggering automated tests to identify any integration issues early in the development process.
103 | - **Install `pytest`:** `pip install pytest`
104 | - **Write a simple test in a file named `test_model.py`:**
105 | ```python
106 | # tests/test_model.py
107 |
108 | from src.model import predict_customer_behavior
109 |
110 | def test_prediction():
111 |     sample_input = {'spending': 150}  # example input in the format the model expects
112 |     prediction = predict_customer_behavior(sample_input)
113 |     assert prediction in [0, 1]  # adjust based on your model's output
114 | ```
115 | - **Run the test:** `pytest`
116 |
117 | * **Build Automation:** An automated build system (e.g., Jenkins) compiles the model code, creating an executable version of the predictive model.
118 | - **Create a file named `model.py`:**
119 | ```python
120 | # src/model.py
121 |
122 | def predict_customer_behavior(input_data):
123 | # Replace this with your actual model prediction logic
124 | # For simplicity, let's assume the model predicts 1 if spending > $100, otherwise 0
125 | return 1 if input_data['spending'] > 100 else 0
126 | ```
127 | - **Build Script (build.py):**
128 | This script will be used by continuous integration tools like Jenkins or CircleCI to build the project.
129 | ```python
130 | # build.py
131 |
132 | def build():
133 | # Replace this with your actual build logic
134 | # In this example, we might want to install necessary dependencies
135 | # and potentially perform other build steps if needed
136 | print("Building the Customer Behavior Prediction Model...")
137 | if __name__ == "__main__": build()  # run the build (plus any other steps you add) when invoked as a script
138 | ```
139 |
140 | * **Requirements File (requirements.txt):**
141 |
142 | ```plaintext
143 | # requirements.txt
144 |
145 | numpy==1.21.0
146 | pytest==6.2.4
147 | # Add any other dependencies required for your project
148 | ```
149 |
150 | * **Run the build script:** `python build.py`
151 |
152 | > Replace the placeholder logic in the test script (test_model.py), model script (model.py), and build script (build.py) with your actual machine learning model, test logic, and any necessary build steps.
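For instance, the placeholder in `model.py` could later be swapped for a real classifier. The sketch below is purely illustrative: it trains a toy logistic-regression model on synthetic data at import time, whereas a real project would train offline on the actual customer dataset and load a persisted model. scikit-learn is an assumed extra dependency (it is not in the `requirements.txt` above).

```python
# src/model.py (illustrative sketch only, not the project's actual model)
import numpy as np
from sklearn.linear_model import LogisticRegression

# Synthetic stand-in for real customer features (a single 'spending' feature here).
_rng = np.random.default_rng(0)
_spending = _rng.uniform(0, 300, size=500)
_purchased = ((_spending + _rng.normal(0, 30, size=500)) > 100).astype(int)

_model = LogisticRegression().fit(_spending.reshape(-1, 1), _purchased)

def predict_customer_behavior(input_data):
    """Return 1 if a purchase is predicted for this customer, else 0."""
    return int(_model.predict([[input_data["spending"]]])[0])
```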
153 |
154 |
155 | **2. Continuous Deployment (CD):**
156 |
157 | * **Staging Environment:**
158 | - Upon successful integration and testing, the model is deployed to a staging environment.
159 | - In this controlled setting, the model's performance is assessed using a subset of real data to simulate real-world conditions.
160 |
161 | * **Automated Acceptance Tests:**
162 | - Automated acceptance tests are conducted to evaluate the model's accuracy, precision, and recall in the staging environment.
163 | - These tests confirm that the model behaves as expected and meets predefined performance criteria.
164 | - Update your GitHub Actions workflow (`ci.yml`) to include acceptance tests:
165 |
166 | ```yaml
167 | # Add the following step after the 'Build model' step
168 | - name: Run acceptance tests
169 | run: python acceptance_tests.py
170 | ```
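The step above assumes an `acceptance_tests.py` script at the repository root (it is not part of the project structure listed earlier). A minimal sketch, checking only accuracy against a threshold for brevity, might look like this:

```python
# acceptance_tests.py (hypothetical acceptance gate referenced by the workflow step above)
from src.model import predict_customer_behavior

# Tiny illustrative evaluation sample; in practice, load a labelled subset
# of real data from the staging environment.
EVAL_SET = [
    ({"spending": 150}, 1),
    ({"spending": 30}, 0),
    ({"spending": 220}, 1),
    ({"spending": 80}, 0),
]

ACCURACY_THRESHOLD = 0.75  # predefined performance criterion

def main():
    correct = sum(
        1 for features, label in EVAL_SET
        if predict_customer_behavior(features) == label
    )
    accuracy = correct / len(EVAL_SET)
    print(f"Acceptance accuracy: {accuracy:.2f}")
    if accuracy < ACCURACY_THRESHOLD:
        raise SystemExit(f"Acceptance tests failed: {accuracy:.2f} < {ACCURACY_THRESHOLD}")

if __name__ == "__main__":
    main()
```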
171 |
172 | * **Deployment to Production:**
173 | - Once the model passes all tests in the staging environment, it is automatically deployed to the production environment.
174 | - The deployment process includes updating the live model with the new version, ensuring a seamless transition.
175 | - **Create a deployment script `deploy.py`:**
176 | ```python
177 | # deploy.py
178 |
179 | def deploy():
180 |     print("Deploying the model to production...")  # replace with your actual deployment logic
181 | if __name__ == "__main__": deploy()  # executed when the workflow runs `python deploy.py`
182 | ```
183 | - Update your GitHub Actions workflow to deploy to production after passing tests:
184 | ```yaml
185 | # Add the following step after the 'Run acceptance tests' step
186 |
187 | - name: Deploy to production
188 | run: python deploy.py
189 | ```
190 |
191 | **3. Continuous Monitoring and Rollback:**
192 |
193 | * **Performance Monitoring:**
194 | - Continuous monitoring tools (e.g., Prometheus) track the model's performance in real-time.
195 |     - Metrics such as accuracy, response time, and error rates are continuously monitored to detect any anomalies (a minimal instrumentation sketch follows this list).
196 |
197 | * **Alerts and Notifications:**
198 | - Automated alert systems notify the development team of any significant deviations from expected performance.
199 | - Alerts may trigger interventions or investigations to address issues promptly.
200 |
201 | * **Automated Rollback:**
202 | - If the model's performance degrades beyond acceptable thresholds, an automated rollback mechanism is initiated.
203 | - The system reverts to the previous version of the model to maintain service reliability while the issue is investigated.
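To make the monitoring step a little more concrete, here is a minimal sketch of how the serving process could expose such metrics for Prometheus to scrape. It uses the `prometheus_client` package and a hypothetical `monitoring.py` module, both assumptions not present in the project structure above:

```python
# monitoring.py (illustrative sketch; prometheus_client is an assumed extra dependency)
import random
import time

from prometheus_client import Counter, Gauge, start_http_server

from src.model import predict_customer_behavior

# Metrics a Prometheus server can scrape from the model service.
PREDICTION_COUNT = Counter("predictions_total", "Number of predictions served")
PREDICTION_LATENCY = Gauge("prediction_latency_seconds", "Latency of the most recent prediction")

def instrumented_predict(input_data):
    start = time.time()
    result = predict_customer_behavior(input_data)
    PREDICTION_COUNT.inc()
    PREDICTION_LATENCY.set(time.time() - start)
    return result

if __name__ == "__main__":
    start_http_server(8000)  # metrics exposed at http://localhost:8000/metrics
    while True:  # simulate serving traffic so there is something to monitor
        instrumented_predict({"spending": random.randint(0, 200)})
        time.sleep(1)
```

Quality metrics such as accuracy additionally require ground-truth labels collected after the fact, so they are usually computed in a separate batch job rather than inside the serving process.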
204 |
205 | **4. Automating CI/CD with GitHub Actions:** Now, let's set up GitHub Actions for automated CI/CD. Create a `.github/workflows/ci.yml` file:
206 |
207 | ```yaml
208 | name: CI/CD
209 | on:
210 | push:
211 | branches:
212 | - main
213 |
214 | jobs:
215 | test:
216 | runs-on: ubuntu-latest
217 |
218 | steps:
219 | - name: Checkout repository
220 | uses: actions/checkout@v2
221 |
222 | - name: Set up Python
223 | uses: actions/setup-python@v2
224 | with:
225 | python-version: 3.8
226 |
227 | - name: Install dependencies
228 | run: |
229 | pip install -r requirements.txt
230 | pip install pytest
231 |
232 | - name: Run tests
233 | run: pytest
234 |
235 | - name: Build model
236 | run: python build.py
237 |
238 | - name: Run acceptance tests
239 | run: python acceptance_tests.py
240 |
241 | - name: Deploy to production
242 | run: python deploy.py
243 | ```
244 | This GitHub Actions workflow is triggered on every push to the main branch, running your unit tests, build script, acceptance tests, and deployment in sequence.
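A possible refinement (a sketch, not part of the workflow above): if you prefer deployment gated in its own job rather than appended to the `test` job, the `needs:` keyword in GitHub Actions makes one job wait for another to succeed:

```yaml
# Added under the existing 'jobs:' key, alongside 'test'
# (with the 'Deploy to production' step removed from the 'test' job):
  deploy:
    needs: test          # runs only after the 'test' job has succeeded
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      - name: Deploy to production
        run: python deploy.py
```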
245 |
246 | **Conclusion:**
247 |
248 | Continuous Integration and Continuous Deployment ensure that the customer behavior prediction model undergoes a seamless and controlled development, testing, and deployment process. By automating key steps and implementing continuous monitoring, the e-commerce company can maintain a reliable and high-performing predictive model, adapting to changing customer behaviors effectively. The CI/CD pipeline provides a structured framework to enhance the agility and robustness of the machine learning model in response to evolving customer dynamics.
249 |
250 |
251 |
--------------------------------------------------------------------------------
/Projects-ML/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Projects-ML/image.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Table of contents
4 |
5 | - Introduction to Machine learning:
6 | - Analytics
7 | - Categories of ML algorithms
8 | - A typical ML algorithm uses the following steps
9 | - Framework for developing ML models
10 | - **Machine learning Fundamentals and application:** This repository provides a comprehensive introduction to machine learning concepts, algorithms, and applications. It covers the essential theoretical foundations of machine learning, as well as practical examples and hands-on projects to solidify your understanding.
11 | - **Deep Learning:** Delve into the world of deep learning, exploring its theoretical underpinnings and applying it to real-world problems. This folder contains notebooks and related files that guide you through the fundamentals of deep learning, including neural networks, convolutional neural networks, and recurrent neural networks.
12 | - **Projects-ML:** Hands-on experience is invaluable for mastering machine learning. This folder offers a collection of sample projects that apply the concepts and algorithms discussed in the other folders. These projects provide practical exposure to machine learning techniques and allow you to apply your knowledge to real-world scenarios.
13 | - **Statistical fundamentals:** Statistics plays a pivotal role in machine learning. This folder delves into the intricacies of statistics, particularly inferential and descriptive statistics. It provides detailed explanations of statistical concepts and introduces applications of statistics in machine learning.
14 | - **ML-Data:** Datasets are indispensable for machine learning projects. This folder contains the data files used in the sample projects, ensuring that you have access to the relevant data to replicate and experiment with the provided examples.
15 | - References
16 |
17 |
18 |
19 | ## More interesting concepts and projects will be added here soon.
20 |
21 |
22 | # References
23 |
24 | Individual references are given in the individual notebooks. Here are a few general references one can look at:
25 |
26 | 1. Machine Learning using Python, Manaranjan Pradhan, U Dinesh Kumar
27 | 2. Please follow the lecture series provided at [Jovian.ml](https://jovian.ai/), i.e. [Data Analysis with Python: Zero to Pandas](https://jovian.ai/learn/data-analysis-with-python-zero-to-pandas)
28 | 3. [Python roadmap](https://roadmap.sh/python/)
29 | 4. [Python tutorials at w3school](https://www.w3schools.com/python/default.asp)
30 | 5. [30 Days of Python](https://github.com/asabeneh/30-days-of-python)
31 | 6. [Python official documents](https://docs.python.org/3/tutorial/)
32 | 7. [Data science roadmaps](https://github.com/codebasics/py/blob/master/TechTopics/DataScienceRoadMap2020/data_science_roadmap_2020.md)
33 | 8. Statistics, Murray R. Spiegel, Larry J. Stephens
34 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | Currently, the security updates are applied to the following version:
6 |
7 | - Version v2.0.0.0
8 |
9 | ## Reporting a Vulnerability
10 |
11 | If you discover a security vulnerability within the supported version, please send an email to [arunp77@gmail.com](mailto:arunp77@gmail.com) with a detailed description of the vulnerability. Our team will respond to the report promptly.
12 |
13 | ## Updates and Patching
14 |
15 | Security updates for the supported version will be released as needed and will be available through the [releases](https://github.com/arunp77/Machine-Learning/releases) section of this repository.
16 |
17 | ## Responsible Disclosure
18 |
19 | We believe in responsible disclosure. We appreciate your efforts to keep our project secure and are open to working with you to address any issues.
20 |
21 | Thank you for your contributions to the security of our project.
22 |
23 |
--------------------------------------------------------------------------------
/Statistics-fundamental/Correlaltion.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Statistics-fundamental/Correlaltion.png
--------------------------------------------------------------------------------
/Statistics-fundamental/Covariance.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Statistics-fundamental/Covariance.png
--------------------------------------------------------------------------------
/Statistics-fundamental/normal-distri.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Statistics-fundamental/normal-distri.png
--------------------------------------------------------------------------------
/Statistics-fundamental/variance.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Statistics-fundamental/variance.png
--------------------------------------------------------------------------------
/Supervised-learning/Advertising.csv:
--------------------------------------------------------------------------------
1 | "","TV","Radio","Newspaper","Sales"
2 | "1",230.1,37.8,69.2,22.1
3 | "2",44.5,39.3,45.1,10.4
4 | "3",17.2,45.9,69.3,9.3
5 | "4",151.5,41.3,58.5,18.5
6 | "5",180.8,10.8,58.4,12.9
7 | "6",8.7,48.9,75,7.2
8 | "7",57.5,32.8,23.5,11.8
9 | "8",120.2,19.6,11.6,13.2
10 | "9",8.6,2.1,1,4.8
11 | "10",199.8,2.6,21.2,10.6
12 | "11",66.1,5.8,24.2,8.6
13 | "12",214.7,24,4,17.4
14 | "13",23.8,35.1,65.9,9.2
15 | "14",97.5,7.6,7.2,9.7
16 | "15",204.1,32.9,46,19
17 | "16",195.4,47.7,52.9,22.4
18 | "17",67.8,36.6,114,12.5
19 | "18",281.4,39.6,55.8,24.4
20 | "19",69.2,20.5,18.3,11.3
21 | "20",147.3,23.9,19.1,14.6
22 | "21",218.4,27.7,53.4,18
23 | "22",237.4,5.1,23.5,12.5
24 | "23",13.2,15.9,49.6,5.6
25 | "24",228.3,16.9,26.2,15.5
26 | "25",62.3,12.6,18.3,9.7
27 | "26",262.9,3.5,19.5,12
28 | "27",142.9,29.3,12.6,15
29 | "28",240.1,16.7,22.9,15.9
30 | "29",248.8,27.1,22.9,18.9
31 | "30",70.6,16,40.8,10.5
32 | "31",292.9,28.3,43.2,21.4
33 | "32",112.9,17.4,38.6,11.9
34 | "33",97.2,1.5,30,9.6
35 | "34",265.6,20,0.3,17.4
36 | "35",95.7,1.4,7.4,9.5
37 | "36",290.7,4.1,8.5,12.8
38 | "37",266.9,43.8,5,25.4
39 | "38",74.7,49.4,45.7,14.7
40 | "39",43.1,26.7,35.1,10.1
41 | "40",228,37.7,32,21.5
42 | "41",202.5,22.3,31.6,16.6
43 | "42",177,33.4,38.7,17.1
44 | "43",293.6,27.7,1.8,20.7
45 | "44",206.9,8.4,26.4,12.9
46 | "45",25.1,25.7,43.3,8.5
47 | "46",175.1,22.5,31.5,14.9
48 | "47",89.7,9.9,35.7,10.6
49 | "48",239.9,41.5,18.5,23.2
50 | "49",227.2,15.8,49.9,14.8
51 | "50",66.9,11.7,36.8,9.7
52 | "51",199.8,3.1,34.6,11.4
53 | "52",100.4,9.6,3.6,10.7
54 | "53",216.4,41.7,39.6,22.6
55 | "54",182.6,46.2,58.7,21.2
56 | "55",262.7,28.8,15.9,20.2
57 | "56",198.9,49.4,60,23.7
58 | "57",7.3,28.1,41.4,5.5
59 | "58",136.2,19.2,16.6,13.2
60 | "59",210.8,49.6,37.7,23.8
61 | "60",210.7,29.5,9.3,18.4
62 | "61",53.5,2,21.4,8.1
63 | "62",261.3,42.7,54.7,24.2
64 | "63",239.3,15.5,27.3,15.7
65 | "64",102.7,29.6,8.4,14
66 | "65",131.1,42.8,28.9,18
67 | "66",69,9.3,0.9,9.3
68 | "67",31.5,24.6,2.2,9.5
69 | "68",139.3,14.5,10.2,13.4
70 | "69",237.4,27.5,11,18.9
71 | "70",216.8,43.9,27.2,22.3
72 | "71",199.1,30.6,38.7,18.3
73 | "72",109.8,14.3,31.7,12.4
74 | "73",26.8,33,19.3,8.8
75 | "74",129.4,5.7,31.3,11
76 | "75",213.4,24.6,13.1,17
77 | "76",16.9,43.7,89.4,8.7
78 | "77",27.5,1.6,20.7,6.9
79 | "78",120.5,28.5,14.2,14.2
80 | "79",5.4,29.9,9.4,5.3
81 | "80",116,7.7,23.1,11
82 | "81",76.4,26.7,22.3,11.8
83 | "82",239.8,4.1,36.9,12.3
84 | "83",75.3,20.3,32.5,11.3
85 | "84",68.4,44.5,35.6,13.6
86 | "85",213.5,43,33.8,21.7
87 | "86",193.2,18.4,65.7,15.2
88 | "87",76.3,27.5,16,12
89 | "88",110.7,40.6,63.2,16
90 | "89",88.3,25.5,73.4,12.9
91 | "90",109.8,47.8,51.4,16.7
92 | "91",134.3,4.9,9.3,11.2
93 | "92",28.6,1.5,33,7.3
94 | "93",217.7,33.5,59,19.4
95 | "94",250.9,36.5,72.3,22.2
96 | "95",107.4,14,10.9,11.5
97 | "96",163.3,31.6,52.9,16.9
98 | "97",197.6,3.5,5.9,11.7
99 | "98",184.9,21,22,15.5
100 | "99",289.7,42.3,51.2,25.4
101 | "100",135.2,41.7,45.9,17.2
102 | "101",222.4,4.3,49.8,11.7
103 | "102",296.4,36.3,100.9,23.8
104 | "103",280.2,10.1,21.4,14.8
105 | "104",187.9,17.2,17.9,14.7
106 | "105",238.2,34.3,5.3,20.7
107 | "106",137.9,46.4,59,19.2
108 | "107",25,11,29.7,7.2
109 | "108",90.4,0.3,23.2,8.7
110 | "109",13.1,0.4,25.6,5.3
111 | "110",255.4,26.9,5.5,19.8
112 | "111",225.8,8.2,56.5,13.4
113 | "112",241.7,38,23.2,21.8
114 | "113",175.7,15.4,2.4,14.1
115 | "114",209.6,20.6,10.7,15.9
116 | "115",78.2,46.8,34.5,14.6
117 | "116",75.1,35,52.7,12.6
118 | "117",139.2,14.3,25.6,12.2
119 | "118",76.4,0.8,14.8,9.4
120 | "119",125.7,36.9,79.2,15.9
121 | "120",19.4,16,22.3,6.6
122 | "121",141.3,26.8,46.2,15.5
123 | "122",18.8,21.7,50.4,7
124 | "123",224,2.4,15.6,11.6
125 | "124",123.1,34.6,12.4,15.2
126 | "125",229.5,32.3,74.2,19.7
127 | "126",87.2,11.8,25.9,10.6
128 | "127",7.8,38.9,50.6,6.6
129 | "128",80.2,0,9.2,8.8
130 | "129",220.3,49,3.2,24.7
131 | "130",59.6,12,43.1,9.7
132 | "131",0.7,39.6,8.7,1.6
133 | "132",265.2,2.9,43,12.7
134 | "133",8.4,27.2,2.1,5.7
135 | "134",219.8,33.5,45.1,19.6
136 | "135",36.9,38.6,65.6,10.8
137 | "136",48.3,47,8.5,11.6
138 | "137",25.6,39,9.3,9.5
139 | "138",273.7,28.9,59.7,20.8
140 | "139",43,25.9,20.5,9.6
141 | "140",184.9,43.9,1.7,20.7
142 | "141",73.4,17,12.9,10.9
143 | "142",193.7,35.4,75.6,19.2
144 | "143",220.5,33.2,37.9,20.1
145 | "144",104.6,5.7,34.4,10.4
146 | "145",96.2,14.8,38.9,11.4
147 | "146",140.3,1.9,9,10.3
148 | "147",240.1,7.3,8.7,13.2
149 | "148",243.2,49,44.3,25.4
150 | "149",38,40.3,11.9,10.9
151 | "150",44.7,25.8,20.6,10.1
152 | "151",280.7,13.9,37,16.1
153 | "152",121,8.4,48.7,11.6
154 | "153",197.6,23.3,14.2,16.6
155 | "154",171.3,39.7,37.7,19
156 | "155",187.8,21.1,9.5,15.6
157 | "156",4.1,11.6,5.7,3.2
158 | "157",93.9,43.5,50.5,15.3
159 | "158",149.8,1.3,24.3,10.1
160 | "159",11.7,36.9,45.2,7.3
161 | "160",131.7,18.4,34.6,12.9
162 | "161",172.5,18.1,30.7,14.4
163 | "162",85.7,35.8,49.3,13.3
164 | "163",188.4,18.1,25.6,14.9
165 | "164",163.5,36.8,7.4,18
166 | "165",117.2,14.7,5.4,11.9
167 | "166",234.5,3.4,84.8,11.9
168 | "167",17.9,37.6,21.6,8
169 | "168",206.8,5.2,19.4,12.2
170 | "169",215.4,23.6,57.6,17.1
171 | "170",284.3,10.6,6.4,15
172 | "171",50,11.6,18.4,8.4
173 | "172",164.5,20.9,47.4,14.5
174 | "173",19.6,20.1,17,7.6
175 | "174",168.4,7.1,12.8,11.7
176 | "175",222.4,3.4,13.1,11.5
177 | "176",276.9,48.9,41.8,27
178 | "177",248.4,30.2,20.3,20.2
179 | "178",170.2,7.8,35.2,11.7
180 | "179",276.7,2.3,23.7,11.8
181 | "180",165.6,10,17.6,12.6
182 | "181",156.6,2.6,8.3,10.5
183 | "182",218.5,5.4,27.4,12.2
184 | "183",56.2,5.7,29.7,8.7
185 | "184",287.6,43,71.8,26.2
186 | "185",253.8,21.3,30,17.6
187 | "186",205,45.1,19.6,22.6
188 | "187",139.5,2.1,26.6,10.3
189 | "188",191.1,28.7,18.2,17.3
190 | "189",286,13.9,3.7,15.9
191 | "190",18.7,12.1,23.4,6.7
192 | "191",39.5,41.1,5.8,10.8
193 | "192",75.5,10.8,6,9.9
194 | "193",17.2,4.1,31.6,5.9
195 | "194",166.8,42,3.6,19.6
196 | "195",149.7,35.6,6,17.3
197 | "196",38.2,3.7,13.8,7.6
198 | "197",94.2,4.9,8.1,9.7
199 | "198",177,9.3,6.4,12.8
200 | "199",283.6,42,66.2,25.5
201 | "200",232.1,8.6,8.7,13.4
202 |
--------------------------------------------------------------------------------
/Supervised-learning/Algerian_forest_fires_dataset_UPDATE.csv:
--------------------------------------------------------------------------------
1 | Bejaia Region Dataset ,,,,,,,,,,,,,
2 | day,month,year,Temperature, RH, Ws,Rain ,FFMC,DMC,DC,ISI,BUI,FWI,Classes
3 | 1,6,2012,29,57,18,0,65.7,3.4,7.6,1.3,3.4,0.5,not fire
4 | 2,6,2012,29,61,13,1.3,64.4,4.1,7.6,1,3.9,0.4,not fire
5 | 3,6,2012,26,82,22,13.1,47.1,2.5,7.1,0.3,2.7,0.1,not fire
6 | 4,6,2012,25,89,13,2.5,28.6,1.3,6.9,0,1.7,0,not fire
7 | 5,6,2012,27,77,16,0,64.8,3,14.2,1.2,3.9,0.5,not fire
8 | 6,6,2012,31,67,14,0,82.6,5.8,22.2,3.1,7,2.5,fire
9 | 7,6,2012,33,54,13,0,88.2,9.9,30.5,6.4,10.9,7.2,fire
10 | 8,6,2012,30,73,15,0,86.6,12.1,38.3,5.6,13.5,7.1,fire
11 | 9,6,2012,25,88,13,0.2,52.9,7.9,38.8,0.4,10.5,0.3,not fire
12 | 10,6,2012,28,79,12,0,73.2,9.5,46.3,1.3,12.6,0.9,not fire
13 | 11,6,2012,31,65,14,0,84.5,12.5,54.3,4,15.8,5.6,fire
14 | 12,6,2012,26,81,19,0,84,13.8,61.4,4.8,17.7,7.1,fire
15 | 13,6,2012,27,84,21,1.2,50,6.7,17,0.5,6.7,0.2,not fire
16 | 14,6,2012,30,78,20,0.5,59,4.6,7.8,1,4.4,0.4,not fire
17 | 15,6,2012,28,80,17,3.1,49.4,3,7.4,0.4,3,0.1,not fire
18 | 16,6,2012,29,89,13,0.7,36.1,1.7,7.6,0,2.2,0,not fire
19 | 17,6,2012,30,89,16,0.6,37.3,1.1,7.8,0,1.6,0,not fire
20 | 18,6,2012,31,78,14,0.3,56.9,1.9,8,0.7,2.4,0.2,not fire
21 | 19,6,2012,31,55,16,0.1,79.9,4.5,16,2.5,5.3,1.4,not fire
22 | 20,6,2012,30,80,16,0.4,59.8,3.4,27.1,0.9,5.1,0.4,not fire
23 | 21,6,2012,30,78,14,0,81,6.3,31.6,2.6,8.4,2.2,fire
24 | 22,6,2012,31,67,17,0.1,79.1,7,39.5,2.4,9.7,2.3,not fire
25 | 23,6,2012,32,62,18,0.1,81.4,8.2,47.7,3.3,11.5,3.8,fire
26 | 24,6,2012,32,66,17,0,85.9,11.2,55.8,5.6,14.9,7.5,fire
27 | 25,6,2012,31,64,15,0,86.7,14.2,63.8,5.7,18.3,8.4,fire
28 | 26,6,2012,31,64,18,0,86.8,17.8,71.8,6.7,21.6,10.6,fire
29 | 27,6,2012,34,53,18,0,89,21.6,80.3,9.2,25.8,15,fire
30 | 28,6,2012,32,55,14,0,89.1,25.5,88.5,7.6,29.7,13.9,fire
31 | 29,6,2012,32,47,13,0.3,79.9,18.4,84.4,2.2,23.8,3.9,not fire
32 | 30,6,2012,33,50,14,0,88.7,22.9,92.8,7.2,28.3,12.9,fire
33 | 1,7,2012,29,68,19,1,59.9,2.5,8.6,1.1,2.9,0.4,not fire
34 | 2,7,2012,27,75,19,1.2,55.7,2.4,8.3,0.8,2.8,0.3,not fire
35 | 3,7,2012,32,76,20,0.7,63.1,2.6,9.2,1.3,3,0.5,not fire
36 | 4,7,2012,33,78,17,0,80.1,4.6,18.5,2.7,5.7,1.7,not fire
37 | 5,7,2012,33,66,14,0,85.9,7.6,27.9,4.8,9.1,4.9,fire
38 | 6,7,2012,32,63,14,0,87,10.9,37,5.6,12.5,6.8,fire
39 | 7,7,2012,35,64,18,0.2,80,9.7,40.4,2.8,12.1,3.2,not fire
40 | 8,7,2012,33,68,19,0,85.6,12.5,49.8,6,15.4,8,fire
41 | 9,7,2012,32,68,14,1.4,66.6,7.7,9.2,1.1,7.4,0.6,not fire
42 | 10,7,2012,33,69,13,0.7,66.6,6,9.3,1.1,5.8,0.5,not fire
43 | 11,7,2012,33,76,14,0,81.1,8.1,18.7,2.6,8.1,2.2,not fire
44 | 12,7,2012,31,75,13,0.1,75.1,7.9,27.7,1.5,9.2,0.9,not fire
45 | 13,7,2012,34,81,15,0,81.8,9.7,37.2,3,11.7,3.4,not fire
46 | 14,7,2012,34,61,13,0.6,73.9,7.8,22.9,1.4,8.4,0.8,not fire
47 | 15,7,2012,30,80,19,0.4,60.7,5.2,17,1.1,5.9,0.5,not fire
48 | 16,7,2012,28,76,21,0,72.6,7,25.5,0.7,8.3,0.4,not fire
49 | 17,7,2012,29,70,14,0,82.8,9.4,34.1,3.2,11.1,3.6,fire
50 | 18,7,2012,31,68,14,0,85.4,12.1,43.1,4.6,14.2,6,fire
51 | 19,7,2012,35,59,17,0,88.1,12,52.8,7.7,18.2,10.9,fire
52 | 20,7,2012,33,65,15,0.1,81.4,12.3,62.1,2.8,16.5,4,fire
53 | 21,7,2012,33,70,17,0,85.4,18.5,71.5,5.2,22.4,8.8,fire
54 | 22,7,2012,28,79,18,0.1,73.4,16.4,79.9,1.8,21.7,2.8,not fire
55 | 23,7,2012,27,66,22,0.4,68.2,10.5,71.3,1.8,15.4,2.1,not fire
56 | 24,7,2012,28,78,16,0.1,70,9.6,79.7,1.4,14.7,1.3,not fire
57 | 25,7,2012,31,65,18,0,84.3,12.5,88.7,4.8,18.5,7.3,fire
58 | 26,7,2012,36,53,19,0,89.2,17.1,98.6,10,23.9,15.3,fire
59 | 27,7,2012,36,48,13,0,90.3,22.2,108.5,8.7,29.4,15.3,fire
60 | 28,7,2012,33,76,15,0,86.5,24.4,117.8,5.6,32.1,11.3,fire
61 | 29,7,2012,32,73,15,0,86.6,26.7,127,5.6,35,11.9,fire
62 | 30,7,2012,31,79,15,0,85.4,28.5,136,4.7,37.4,10.7,fire
63 | 31,7,2012,35,64,17,0,87.2,31.9,145.7,6.8,41.2,15.7,fire
64 | 1,8,2012,36,45,14,0,78.8,4.8,10.2,2,4.7,0.9,not fire
65 | 2,8,2012,35,55,12,0.4,78,5.8,10,1.7,5.5,0.8,not fire
66 | 3,8,2012,35,63,14,0.3,76.6,5.7,10,1.7,5.5,0.8,not fire
67 | 4,8,2012,34,69,13,0,85,8.2,19.8,4,8.2,3.9,fire
68 | 5,8,2012,34,65,13,0,86.8,11.1,29.7,5.2,11.5,6.1,fire
69 | 6,8,2012,32,75,14,0,86.4,13,39.1,5.2,14.2,6.8,fire
70 | 7,8,2012,32,69,16,0,86.5,15.5,48.6,5.5,17.2,8,fire
71 | 8,8,2012,32,60,18,0.3,77.1,11.3,47,2.2,14.1,2.6,not fire
72 | 9,8,2012,35,59,17,0,87.4,14.8,57,6.9,17.9,9.9,fire
73 | 10,8,2012,35,55,14,0,88.9,18.6,67,7.4,21.9,11.6,fire
74 | 11,8,2012,35,63,13,0,88.9,21.7,77,7.1,25.5,12.1,fire
75 | 12,8,2012,35,51,13,0.3,81.3,15.6,75.1,2.5,20.7,4.2,not fire
76 | 13,8,2012,35,63,15,0,87,19,85.1,5.9,24.4,10.2,fire
77 | 14,8,2012,33,66,14,0,87,21.7,94.7,5.7,27.2,10.6,fire
78 | 15,8,2012,36,55,13,0.3,82.4,15.6,92.5,3.7,22,6.3,fire
79 | 16,8,2012,36,61,18,0.3,80.2,11.7,90.4,2.8,17.6,4.2,fire
80 | 17,8,2012,37,52,18,0,89.3,16,100.7,9.7,22.9,14.6,fire
81 | 18,8,2012,36,54,18,0,89.4,20,110.9,9.7,27.5,16.1,fire
82 | 19,8,2012,35,62,19,0,89.4,23.2,120.9,9.7,31.3,17.2,fire
83 | 20,8,2012,35,68,19,0,88.3,25.9,130.6,8.8,34.7,16.8,fire
84 | 21,8,2012,36,58,19,0,88.6,29.6,141.1,9.2,38.8,18.4,fire
85 | 22,8,2012,36,55,18,0,89.1,33.5,151.3,9.9,43.1,20.4,fire
86 | 23,8,2012,36,53,16,0,89.5,37.6,161.5,10.4,47.5,22.3,fire
87 | 24,8,2012,34,64,14,0,88.9,40.5,171.3,9,50.9,20.9,fire
88 | 25,8,2012,35,60,15,0,88.9,43.9,181.3,8.2,54.7,20.3,fire
89 | 26,8,2012,31,78,18,0,85.8,45.6,190.6,4.7,57.1,13.7,fire
90 | 27,8,2012,33,82,21,0,84.9,47,200.2,4.4,59.3,13.2,fire
91 | 28,8,2012,34,64,16,0,89.4,50.2,210.4,7.3,62.9,19.9,fire
92 | 29,8,2012,35,48,18,0,90.1,54.2,220.4,12.5,67.4,30.2,fire
93 | 30,8,2012,35,70,17,0.8,72.7,25.2,180.4,1.7,37.4,4.2,not fire
94 | 31,8,2012,28,80,21,16.8,52.5,8.7,8.7,0.6,8.3,0.3,not fire
95 | 1,9,2012,25,76,17,7.2,46,1.3,7.5,0.2,1.8,0.1,not fire
96 | 2,9,2012,22,86,15,10.1,30.5,0.7,7,0,1.1,0,not fire
97 | 3,9,2012,25,78,15,3.8,42.6,1.2,7.5,0.1,1.7,0,not fire
98 | 4,9,2012,29,73,17,0.1,68.4,1.9,15.7,1.4,2.9,0.5,not fire
99 | 5,9,2012,29,75,16,0,80.8,3.4,24,2.8,5.1,1.7,fire
100 | 6,9,2012,29,74,19,0.1,75.8,3.6,32.2,2.1,5.6,0.9,not fire
101 | 7,9,2012,31,71,17,0.3,69.6,3.2,30.1,1.5,5.1,0.6,not fire
102 | 8,9,2012,30,73,17,0.9,62,2.6,8.4,1.1,3,0.4,not fire
103 | 9,9,2012,30,77,15,1,56.1,2.1,8.4,0.7,2.6,0.2,not fire
104 | 10,9,2012,33,73,12,1.8,59.9,2.2,8.9,0.7,2.7,0.3,not fire
105 | 11,9,2012,30,77,21,1.8,58.5,1.9,8.4,1.1,2.4,0.3,not fire
106 | 12,9,2012,29,88,13,0,71,2.6,16.6,1.2,3.7,0.5,not fire
107 | 13,9,2012,25,86,21,4.6,40.9,1.3,7.5,0.1,1.8,0,not fire
108 | 14,9,2012,22,76,26,8.3,47.4,1.1,7,0.4,1.6,0.1,not fire
109 | 15,9,2012,24,82,15,0.4,44.9,0.9,7.3,0.2,1.4,0,not fire
110 | 16,9,2012,30,65,14,0,78.1,3.2,15.7,1.9,4.2,0.8,not fire
111 | 17,9,2012,31,52,14,0,87.7,6.4,24.3,6.2,7.7,5.9,fire
112 | 18,9,2012,32,49,11,0,89.4,9.8,33.1,6.8,11.3,7.7,fire
113 | 19,9,2012,29,57,14,0,89.3,12.5,41.3,7.8,14.2,9.7,fire
114 | 20,9,2012,28,84,18,0,83.8,13.5,49.3,4.5,16,6.3,fire
115 | 21,9,2012,31,55,11,0,87.8,16.5,57.9,5.4,19.2,8.3,fire
116 | 22,9,2012,31,50,19,0.6,77.8,10.6,41.4,2.4,12.9,2.8,not fire
117 | 23,9,2012,32,54,11,0.5,73.7,7.9,30.4,1.2,9.6,0.7,not fire
118 | 24,9,2012,29,65,19,0.6,68.3,5.5,15.2,1.5,5.8,0.7,not fire
119 | 25,9,2012,26,81,21,5.8,48.6,3,7.7,0.4,3,0.1,not fire
120 | 26,9,2012,31,54,11,0,82,6,16.3,2.5,6.2,1.7,not fire
121 | 27,9,2012,31,66,11,0,85.7,8.3,24.9,4,9,4.1,fire
122 | 28,9,2012,32,47,14,0.7,77.5,7.1,8.8,1.8,6.8,0.9,not fire
123 | 29,9,2012,26,80,16,1.8,47.4,2.9,7.7,0.3,3,0.1,not fire
124 | 30,9,2012,25,78,14,1.4,45,1.9,7.5,0.2,2.4,0.1,not fire
125 | ,,,,,,,,,,,,,
126 | Sidi-Bel Abbes Region Dataset,,,,,,,,,,,,,
127 | day,month,year,Temperature, RH, Ws,Rain ,FFMC,DMC,DC,ISI,BUI,FWI,Classes
128 | 1,6,2012,32,71,12,0.7,57.1,2.5,8.2,0.6,2.8,0.2,not fire
129 | 2,6,2012,30,73,13,4,55.7,2.7,7.8,0.6,2.9,0.2,not fire
130 | 3,6,2012,29,80,14,2,48.7,2.2,7.6,0.3,2.6,0.1,not fire
131 | 4,6,2012,30,64,14,0,79.4,5.2,15.4,2.2,5.6,1,not fire
132 | 5,6,2012,32,60,14,0.2,77.1,6,17.6,1.8,6.5,0.9,not fire
133 | 6,6,2012,35,54,11,0.1,83.7,8.4,26.3,3.1,9.3,3.1,fire
134 | 7,6,2012,35,44,17,0.2,85.6,9.9,28.9,5.4,10.7,6,fire
135 | 8,6,2012,28,51,17,1.3,71.4,7.7,7.4,1.5,7.3,0.8,not fire
136 | 9,6,2012,27,59,18,0.1,78.1,8.5,14.7,2.4,8.3,1.9,not fire
137 | 10,6,2012,30,41,15,0,89.4,13.3,22.5,8.4,13.1,10,fire
138 | 11,6,2012,31,42,21,0,90.6,18.2,30.5,13.4,18,16.7,fire
139 | 12,6,2012,27,58,17,0,88.9,21.3,37.8,8.7,21.2,12.9,fire
140 | 13,6,2012,30,52,15,2,72.3,11.4,7.8,1.4,10.9,0.9,not fire
141 | 14,6,2012,27,79,16,0.7,53.4,6.4,7.3,0.5,6.1,0.3,not fire
142 | 15,6,2012,28,90,15,0,66.8,7.2,14.7,1.2,7.1,0.6,not fire
143 | 16,6,2012,29,87,15,0.4,47.4,4.2,8,0.2,4.1,0.1,not fire
144 | 17,6,2012,31,69,17,4.7,62.2,3.9,8,1.1,3.8,0.4,not fire
145 | 18,6,2012,33,62,10,8.7,65.5,4.6,8.3,0.9,4.4,0.4,not fire
146 | 19,6,2012,32,67,14,4.5,64.6,4.4,8.2,1,4.2,0.4,not fire
147 | 20,6,2012,31,72,14,0.2,60.2,3.8,8,0.8,3.7,0.3,not fire
148 | 21,6,2012,32,55,14,0,86.2,8.3,18.4,5,8.2,4.9,fire
149 | 22,6,2012,33,46,14,1.1,78.3,8.1,8.3,1.9,7.7,1.2,not fire
150 | 23,6,2012,33,59,16,0.8,74.2,7,8.3,1.6,6.7,0.8,not fire
151 | 24,6,2012,35,68,16,0,85.3,10,17,4.9,9.9,5.3,fire
152 | 25,6,2012,34,70,16,0,86,12.8,25.6,5.4,12.7,6.7,fire
153 | 26,6,2012,36,62,16,0,87.8,16.5,34.5,7,16.4,9.5,fire
154 | 27,6,2012,36,55,15,0,89.1,20.9,43.3,8,20.8,12,fire
155 | 28,6,2012,37,37,13,0,92.5,27.2,52.4,11.7,27.1,18.4,fire
156 | 29,6,2012,37,36,13,0.6,86.2,17.9,36.7,4.8,17.8,7.2,fire
157 | 30,6,2012,34,42,15,1.7,79.7,12,8.5,2.2,11.5,2.2,not fire
158 | 1,7,2012,28,58,18,2.2,63.7,3.2,8.5,1.2,3.3,0.5,not fire
159 | 2,7,2012,33,48,16,0,87.6,7.9,17.8,6.8,7.8,6.4,fire
160 | 3,7,2012,34,56,17,0.1,84.7,9.7,27.3,4.7,10.3,5.2,fire
161 | 4,7,2012,34,58,18,0,88,13.6,36.8,8,14.1,9.9,fire
162 | 5,7,2012,34,45,18,0,90.5,18.7,46.4,11.3,18.7,15,fire
163 | 6,7,2012,35,42,15,0.3,84.7,15.5,45.1,4.3,16.7,6.3,fire
164 | 7,7,2012,38,43,13,0.5,85,13,35.4,4.1,13.7,5.2,fire
165 | 8,7,2012,35,47,18,6,80.8,9.8,9.7,3.1,9.4,3,fire
166 | 9,7,2012,36,43,15,1.9,82.3,9.4,9.9,3.2,9,3.1,fire
167 | 10,7,2012,34,51,16,3.8,77.5,8,9.5,2,7.7,1.3,not fire
168 | 11,7,2012,34,56,15,2.9,74.8,7.1,9.5,1.6,6.8,0.8,not fire
169 | 12,7,2012,36,44,13,0,90.1,12.6,19.4,8.3,12.5,9.6,fire
170 | 13,7,2012,39,45,13,0.6,85.2,11.3,10.4,4.2,10.9,4.7,fire
171 | 14,7,2012,37,37,18,0.2,88.9,12.9,14.6 9,12.5,10.4,fire ,
172 | 15,7,2012,34,45,17,0,90.5,18,24.1,10.9,17.7,14.1,fire
173 | 16,7,2012,31,83,17,0,84.5,19.4,33.1,4.7,19.2,7.3,fire
174 | 17,7,2012,32,81,17,0,84.6,21.1,42.3,4.7,20.9,7.7,fire
175 | 18,7,2012,33,68,15,0,86.1,23.9,51.6,5.2,23.9,9.1,fire
176 | 19,7,2012,34,58,16,0,88.1,27.8,61.1,7.3,27.7,13,fire
177 | 20,7,2012,36,50,16,0,89.9,32.7,71,9.5,32.6,17.3,fire
178 | 21,7,2012,36,29,18,0,93.9,39.6,80.6,18.5,39.5,30,fire
179 | 22,7,2012,32,48,18,0,91.5,44.2,90.1,13.2,44,25.4,fire
180 | 23,7,2012,31,71,17,0,87.3,46.6,99,6.9,46.5,16.3,fire
181 | 24,7,2012,33,63,17,1.1,72.8,20.9,56.6,1.6,21.7,2.5,not fire
182 | 25,7,2012,39,64,9,1.2,73.8,11.7,15.9,1.1,11.4,0.7,not fire
183 | 26,7,2012,35,58,10,0.2,78.3,10.8,19.7,1.6,10.7,1,not fire
184 | 27,7,2012,29,87,18,0,80,11.8,28.3,2.8,11.8,3.2,not fire
185 | 28,7,2012,33,57,16,0,87.5,15.7,37.6,6.7,15.7,9,fire
186 | 29,7,2012,34,59,16,0,88.1,19.5,47.2,7.4,19.5,10.9,fire
187 | 30,7,2012,36,56,16,0,88.9,23.8,57.1,8.2,23.8,13.2,fire
188 | 31,7,2012,37,55,15,0,89.3,28.3,67.2,8.3,28.3,14.5,fire
189 | 1,8,2012,38,52,14,0,78.3,4.4,10.5,2,4.4,0.8,not fire
190 | 2,8,2012,40,34,14,0,93.3,10.8,21.4,13.8,10.6,13.5,fire
191 | 3,8,2012,39,33,17,0,93.7,17.1,32.1,17.2,16.9,19.5,fire
192 | 4,8,2012,38,35,15,0,93.8,23,42.7,15.7,22.9,20.9,fire
193 | 5,8,2012,34,42,17,0.1,88.3,23.6,52.5,19,23.5,12.6,fire
194 | 6,8,2012,30,54,14,3.1,70.5,11,9.1,1.3,10.5,0.8,not fire
195 | 7,8,2012,34,63,13,2.9,69.7,7.2,9.8,1.2,6.9,0.6,not fire
196 | 8,8,2012,37,56,11,0,87.4,11.2,20.2,5.2,11,5.9,fire
197 | 9,8,2012,39,43,12,0,91.7,16.5,30.9,9.6,16.4,12.7,fire
198 | 10,8,2012,39,39,15,0.2,89.3,15.8,35.4,8.2,15.8,10.7,fire
199 | 11,8,2012,40,31,15,0,94.2,22.5,46.3,16.6,22.4,21.6,fire
200 | 12,8,2012,39,21,17,0.4,93,18.4,41.5,15.5,18.4,18.8,fire
201 | 13,8,2012,35,34,16,0.2,88.3,16.9,45.1,7.5,17.5,10.5,fire
202 | 14,8,2012,37,40,13,0,91.9,22.3,55.5,10.8,22.3,15.7,fire
203 | 15,8,2012,35,46,13,0.3,83.9,16.9,54.2,3.5,19,5.5,fire
204 | 16,8,2012,40,41,10,0.1,92,22.6,65.1,9.5,24.2,14.8,fire
205 | 17,8,2012,42,24,9,0,96,30.3,76.4,15.7,30.4,24,fire
206 | 18,8,2012,37,37,14,0,94.3,35.9,86.8,16,35.9,26.3,fire
207 | 19,8,2012,35,66,15,0.1,82.7,32.7,96.8,3.3,35.5,7.7,fire
208 | 20,8,2012,36,81,15,0,83.7,34.4,107,3.8,38.1,9,fire
209 | 21,8,2012,36,71,15,0,86,36.9,117.1,5.1,41.3,12.2,fire
210 | 22,8,2012,37,53,14,0,89.5,41.1,127.5,8,45.5,18.1,fire
211 | 23,8,2012,36,43,16,0,91.2,46.1,137.7,11.5,50.2,24.5,fire
212 | 24,8,2012,35,38,15,0,92.1,51.3,147.7,12.2,54.9,26.9,fire
213 | 25,8,2012,34,40,18,0,92.1,56.3,157.5,14.3,59.5,31.1,fire
214 | 26,8,2012,33,37,16,0,92.2,61.3,167.2,13.1,64,30.3,fire
215 | 27,8,2012,36,54,14,0,91,65.9,177.3,10,68,26.1,fire
216 | 28,8,2012,35,56,14,0.4,79.2,37,166,2.1,30.6,6.1,not fire
217 | 29,8,2012,35,53,17,0.5,80.2,20.7,149.2,2.7,30.6,5.9,fire
218 | 30,8,2012,34,49,15,0,89.2,24.8,159.1,8.1,35.7,16,fire
219 | 31,8,2012,30,59,19,0,89.1,27.8,168.2,9.8,39.3,19.4,fire
220 | 1,9,2012,29,86,16,0,37.9,0.9,8.2,0.1,1.4,0,not fire
221 | 2,9,2012,28,67,19,0,75.4,2.9,16.3,2,4,0.8,not fire
222 | 3,9,2012,28,75,16,0,82.2,4.4,24.3,3.3,6,2.5,fire
223 | 4,9,2012,30,66,15,0.2,73.5,4.1,26.6,1.5,6,0.7,not fire
224 | 5,9,2012,30,58,12,4.1,66.1,4,8.4,1,3.9,0.4,not fire
225 | 6,9,2012,34,71,14,6.5,64.5,3.3,9.1,1,3.5,0.4,not fire
226 | 7,9,2012,31,62,15,0,83.3,5.8,17.7,3.8,6.4,3.2,fire
227 | 8,9,2012,30,88,14,0,82.5,6.6,26.1,3,8.1,2.7,fire
228 | 9,9,2012,30,80,15,0,83.1,7.9,34.5,3.5,10,3.7,fire
229 | 10,9,2012,29,74,15,1.1,59.5,4.7,8.2,0.8,4.6,0.3,not fire
230 | 11,9,2012,30,73,14,0,79.2,6.5,16.6,2.1,6.6,1.2,not fire
231 | 12,9,2012,31,72,14,0,84.2,8.3,25.2,3.8,9.1,3.9,fire
232 | 13,9,2012,29,49,19,0,88.6,11.5,33.4,9.1,12.4,10.3,fire
233 | 14,9,2012,28,81,15,0,84.6,12.6,41.5,4.3,14.3,5.7,fire
234 | 15,9,2012,32,51,13,0,88.7,16,50.2,6.9,17.8,9.8,fire
235 | 16,9,2012,33,26,13,0,93.9,21.2,59.2,14.2,22.4,19.3,fire
236 | 17,9,2012,34,44,12,0,92.5,25.2,63.3,11.2,26.2,17.5,fire
237 | 18,9,2012,36,33,13,0.1,90.6,25.8,77.8,9,28.2,15.4,fire
238 | 19,9,2012,29,41,8,0.1,83.9,24.9,86,2.7,28.9,5.6,fire
239 | 20,9,2012,34,58,13,0.2,79.5,18.7,88,2.1,24.4,3.8,not fire
240 | 21,9,2012,35,34,17,0,92.2,23.6,97.3,13.8,29.4,21.6,fire
241 | 22,9,2012,33,64,13,0,88.9,26.1,106.3,7.1,32.4,13.7,fire
242 | 23,9,2012,35,56,14,0,89,29.4,115.6,7.5,36,15.2,fire
243 | 24,9,2012,26,49,6,2,61.3,11.9,28.1,0.6,11.9,0.4,not fire
244 | 25,9,2012,28,70,15,0,79.9,13.8,36.1,2.4,14.1,3,not fire
245 | 26,9,2012,30,65,14,0,85.4,16,44.5,4.5,16.9,6.5,fire
246 | 27,9,2012,28,87,15,4.4,41.1,6.5,8,0.1,6.2,0,not fire
247 | 28,9,2012,27,87,29,0.5,45.9,3.5,7.9,0.4,3.4,0.2,not fire
248 | 29,9,2012,24,54,18,0.1,79.7,4.3,15.2,1.7,5.1,0.7,not fire
249 | 30,9,2012,24,64,15,0.2,67.3,3.8,16.5,1.2,4.8,0.5,not fire
250 |
--------------------------------------------------------------------------------
/Supervised-learning/Algerian_forest_fires_dataset_UPDATE.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Supervised-learning/Algerian_forest_fires_dataset_UPDATE.xlsx
--------------------------------------------------------------------------------
/Supervised-learning/IPL-IMB381IPL2013.csv:
--------------------------------------------------------------------------------
1 | Sl.NO.,PLAYER NAME,AGE,COUNTRY,TEAM,PLAYING ROLE,T-RUNS,T-WKTS,ODI-RUNS-S,ODI-SR-B,ODI-WKTS,ODI-SR-BL,CAPTAINCY EXP,RUNS-S,HS,AVE,SR-B,SIXERS,RUNS-C,WKTS,AVE-BL,ECON,SR-BL,AUCTION YEAR,BASE PRICE,SOLD PRICE
2 | 1,"Abdulla, YA",2,SA,KXIP,Allrounder,0,0,0,0,0,0,0,0,0,0,0,0,307,15,20.47,8.9,13.93,2009,50000,50000
3 | 2,Abdur Razzak,2,BAN,RCB,Bowler,214,18,657,71.41,185,37.6,0,0,0,0,0,0,29,0,0,14.5,0,2008,50000,50000
4 | 3,"Agarkar, AB",2,IND,KKR,Bowler,571,58,1269,80.62,288,32.9,0,167,39,18.56,121.01,5,1059,29,36.52,8.81,24.9,2008,200000,350000
5 | 4,"Ashwin, R",1,IND,CSK,Bowler,284,31,241,84.56,51,36.8,0,58,11,5.8,76.32,0,1125,49,22.96,6.23,22.14,2011,100000,850000
6 | 5,"Badrinath, S",2,IND,CSK,Batsman,63,0,79,45.93,0,0,0,1317,71,32.93,120.71,28,0,0,0,0,0,2011,100000,800000
7 | 6,"Bailey, GJ",2,AUS,CSK,Batsman,0,0,172,72.26,0,0,1,63,48,21,95.45,0,0,0,0,0,0,2009,50000,50000
8 | 7,"Balaji, L",2,IND,CSK+,Bowler,51,27,120,78.94,34,42.5,0,26,15,4.33,72.22,1,1342,52,25.81,7.98,19.4,2011,100000,500000
9 | 8,"Bollinger, DE",2,AUS,CSK,Bowler,54,50,50,92.59,62,31.3,0,21,16,21,165.88,1,693,37,18.73,7.22,15.57,2011,200000,700000
10 | 9,"Botha, J",2,SA,RR,Allrounder,83,17,609,85.77,72,53,1,335,67,30.45,114.73,3,610,19,32.11,6.85,28.11,2011,200000,950000
11 | 10,"Boucher, MV",2,SA,RCB+,W. Keeper,5515,1,4686,84.76,0,0,1,394,50,28.14,127.51,13,0,0,0,0,0,2008,200000,450000
12 | 11,"Bravo, DJ",2,WI,MI+,Allrounder,2200,86,2004,81.39,142,34.1,0,839,70,27.97,127.12,38,1338,47,28.47,8.12,21.11,2011,200000,200000
13 | 12,"Chanderpaul, S",3,WI,RCB,Batsman,9918,9,8778,70.74,14,52.8,1,25,16,8.33,80.64,0,0,0,0,0,0,2008,200000,200000
14 | 13,"Chawla, PP",1,IND,KXIP,Allrounder,5,3,38,65.51,32,41,0,337,24,13.48,113.09,9,1819,73,126.3,38.11,100.2,2008,125000,400000
15 | 14,"de Villiers, AB",2,SA,DD+,W. Keeper,5457,2,4998,93.19,0,0,1,1302,105,34.26,128.53,42,0,0,0,0,0,2008,200000,300000
16 | 15,"Dhawan, S",2,IND,MI+,Batsman,0,0,69,56.09,0,0,0,1540,95,31.43,122.32,36,66,4,16.5,8.25,12,2011,100000,300000
17 | 16,"Dhoni, MS",2,IND,CSK,W. Keeper,3509,0,6773,88.19,1,12,1,1782,70,37.13,136.45,64,0,0,0,0,0,2008,400000,1500000
18 | 17,"Dilshan, TM",2,SL,DD+,Allrounder,4722,32,6455,86.8,67,58.3,1,1077,76,28.34,117.83,24,356,5,71.2,8.07,53,2008,150000,250000
19 | 18,"Dinda, AB",2,IND,KKR+,Bowler,0,0,18,60,5,61.4,0,6,2,1,33.33,0,926,36,25.72,7.29,21.19,2011,100000,375000
20 | 19,"Dravid, RS",3,IND,RCB+,Batsman,13288,1,10889,71.24,4,46.5,1,1703,75,27.92,116.88,23,0,0,0,0,0,2011,400000,500000
21 | 20,"Duminy, J-P",2,SA,MI+,Batsman,654,11,2536,84,25,47.6,0,978,74,36.22,119.27,35,377,10,37.7,7.11,31.8,2009,300000,300000
22 | 21,"Edwards, FH",2,WI,DC,Bowler,380,157,73,45.62,60,35.6,0,4,3,4,80,0,154,5,30.8,6.6,28,2009,150000,150000
23 | 22,"Fernando, CRD",2,SL,MI,Bowler,249,97,239,60.96,187,34.7,0,4,2,0,133.33,0,298,17,17.53,7.64,13.76,2008,150000,150000
24 | 23,"Fleming, SP",3,NZ,CSK,Batsman,7172,0,8037,71.49,1,29,1,196,45,21.77,118.78,3,0,0,0,0,0,2008,350000,350000
25 | 24,"Flintoff, A",2,ENG,CSK,Allrounder,3845,226,3394,88.82,169,33.2,1,62,24,31,116.98,2,105,2,52.5,9.55,33,2009,950000,1550000
26 | 25,"Gambhir, G",2,IND,DD+,Batsman,3712,0,4819,86.17,0,0,1,2065,93,33.31,128.9,32,0,0,0,0,0,2008,220000,725000
27 | 26,"Ganguly, SC",3,IND,KKR+,Batsman,7212,32,11363,73.7,100,45.6,1,1349,91,25.45,106.81,42,363,10,36.3,7.89,27.6,2011,200000,400000
28 | 27,"Gayle, CH",2,WI,KKR+,Allrounder,6373,72,8087,83.95,156,44.4,1,1804,128,50.11,161.79,129,606,13,46.62,8.05,34.85,2008,250000,800000
29 | 28,"Gibbs, HH",3,SA,DC,Batsman,6167,0,8094,83.26,0,0,1,886,69,27.69,109.79,31,0,0,0,0,0,2008,250000,575000
30 | 29,"Gilchrist, AC",3,AUS,DC+,W. Keeper,5570,0,9619,96.94,0,0,1,1775,109,27.73,140.21,86,0,0,0,0,0,2008,300000,700000
31 | 30,"Gony, MS",2,IND,CSK+,Bowler,0,0,0,0,2,39,0,54,15,9,117.39,5,999,30,33.3,8.47,23.6,2011,50000,290000
32 | 31,Harbhajan Singh,2,IND,MI,Bowler,2164,406,1190,80.51,259,46.5,1,430,49,16.54,151.41,22,1469,54,27.2,6.85,23.83,2008,250000,850000
33 | 32,"Harris, RJ",2,AUS,DC+,Bowler,199,46,48,100,44,23.4,0,115,17,10.45,107.48,3,975,44,22.16,7.71,17.27,2011,200000,325000
34 | 33,"Hayden, ML",3,AUS,CSK,Batsman,8625,0,6133,78.96,0,0,0,1107,93,36.9,137.52,44,0,0,0,0,0,2008,225000,375000
35 | 34,"Henderson, T",3,SA,RR,Allrounder,0,0,0,0,0,0,0,11,11,5.5,68.75,1,40,1,40,6.66,36,2009,100000,650000
36 | 35,"Henriques, MC",1,AUS,KKR+,Allrounder,0,0,18,60,1,90,0,49,30,16.33,108.89,1,142,3,47.33,8.82,32.33,2011,50000,50000
37 | 36,"Hodge, BJ",3,AUS,KKR+,Batsman,503,0,575,87.51,1,66,0,1006,73,31.44,121.5,28,300,17,17.65,7.89,13.41,2011,200000,425000
38 | 37,"Hopes, JR",2,AUS,KXIP,Allrounder,0,0,1326,93.71,67,47.1,0,417,71,26.06,136.27,11,548,14,39.14,9.13,25.71,2011,200000,350000
39 | 38,"Hussey, DJ",2,AUS,KKR+,Allrounder,0,0,1488,91.4,18,36.2,0,971,71,26.24,125.78,48,345,6,57.5,8.85,39,2008,100000,625000
40 | 39,"Hussey, MEK",3,AUS,CSK,Batsman,5708,7,5262,86.97,2,117,1,958,116,39.92,120.65,25,0,0,0,0,0,2008,250000,250000
41 | 40,"Jadeja, RA",1,IND,RR+,Allrounder,0,0,860,78.61,57,46.2,0,904,48,23.18,120.86,35,750,26,28.85,7.33,23.65,2011,100000,950000
42 | 41,"Jaffer, W",2,IND,RCB,Allrounder,1944,2,10,43.47,0,0,0,130,50,16.25,107.44,3,0,0,0,0,0,2008,150000,150000
43 | 42,"Jayasuriya, ST",3,SL,MI,Allrounder,6973,98,13430,91.21,323,46,1,768,114,27.43,144.36,39,390,13,30,7.96,22.62,2008,250000,975000
44 | 43,"Jayawardena, DPMD",2,SL,KXIP+,Batsman,10440,6,10596,78.08,7,83.1,1,1471,110,30.65,128.02,33,0,0,0,0,0,2008,250000,475000
45 | 44,"Kaif, M",2,IND,RR+,Batsman,624,0,2753,72.03,0,0,0,259,34,14.39,103.6,6,0,0,0,0,0,2008,125000,675000
46 | 45,"Kallis, JH",3,SA,RCB,Allrounder,12379,276,11498,72.97,270,39.3,1,1965,89,30.7,110.95,37,1713,45,38.07,7.96,28.71,2008,225000,900000
47 | 46,Kamran Akmal,2,PAK,RR,W. Keeper,2648,0,2924,84.31,0,0,0,128,53,25.6,164.1,8,0,0,0,0,0,2008,150000,150000
48 | 47,Kamran Khan,1,IND,RR+,Bowler,0,0,0,0,0,0,0,3,3,3,60,0,224,9,24.89,8.48,17.78,2009,20000,24000
49 | 48,"Karthik, KD",2,IND,DD+,W. Keeper,1000,0,1008,74.5,0,0,0,1231,69,24.14,123.84,28,0,0,0,0,0,2008,200000,525000
50 | 49,"Kartik, M",2,IND,KKR+,Bowler,88,24,126,70.78,37,51.5,0,111,21,18.5,105.71,1,1013,21,48.24,7.02,41.33,2008,200000,425000
51 | 50,"Katich, SM",3,AUS,KXIP,Batsman,4188,21,1324,68.74,0,0,0,241,75,24.1,129.57,8,0,0,0,0,0,2008,200000,200000
52 | 51,"Kohli, V",1,IND,RCB,Batsman,491,0,3590,86.31,2,137,1,1639,73,28.26,119.29,49,345,4,86.25,8.84,58.5,2011,150000,1800000
53 | 52,"Kumar, P",1,IND,RCB+,Bowler,149,27,292,88.21,77,42.1,0,243,34,10.57,114.08,14,1919,53,36.21,7.73,28.11,2011,200000,800000
54 | 53,"Kumble, A",3,IND,RCB,Bowler,2506,619,938,61.06,337,43,1,35,8,11.67,74.47,0,105,2,52.5,9.55,33,2008,250000,500000
55 | 54,"Langeveldt, CK",3,SA,KKR+,Bowler,16,16,73,58.87,100,34.8,0,8,8,4,88.89,1,187,13,14.38,7.19,12,2011,100000,140000
56 | 55,"Laxman, VVS",3,IND,DC+,Batsman,8781,2,2338,71.23,0,0,1,282,52,15.67,105.62,5,0,0,0,0,0,2008,150000,375000
57 | 56,"Lee, B",2,AUS,KXI+,Bowler,1451,310,1100,82.45,377,29.2,0,103,25,11.44,121.18,6,1009,21,48.05,7.56,38.24,2008,300000,900000
58 | 57,"Maharoof, MF",2,SL,DD,Allrounder,556,25,1042,84.44,133,33.3,0,177,39,17.7,143.9,9,520,27,19.26,7.43,15.56,2008,150000,225000
59 | 58,"Malinga, SL",2,SL,MI,Bowler,275,101,327,73.81,185,31.1,0,64,17,5.82,100,4,1381,83,16.64,6.36,15.69,2008,200000,350000
60 | 59,"Mascarenhas, AD",2,ENG,RR+,Allrounder,0,0,245,95.33,13,63.2,0,74,27,8.22,101.37,1,331,19,17.42,7.01,14.95,2011,100000,100000
61 | 60,"Mathews, AD",1,SL,KKR+,Allrounder,1219,7,1447,82.59,42,43,0,376,65,25.07,123.28,12,537,15,35.8,8.2,26.33,2011,300000,950000
62 | 61,"McCullum, BB",2,NZ,KKR+,W. Keeper,3763,0,4511,89.62,0,0,1,1233,158,28.02,123.42,48,0,0,0,0,0,2008,175000,700000
63 | 62,"McDonald, AB",2,AUS,DD+,Allrounder,107,9,0,0,0,0,0,123,33,30.75,125.51,4,244,10,24.4,8.41,17.4,2011,50000,80000
64 | 63,"McGrath, GD",3,AUS,DD,Bowler,641,563,115,48.72,381,34,0,4,4,4,80,0,357,12,29.75,6.61,27,2008,350000,350000
65 | 64,Misbah-ul-Haq,3,PAK,RCB,Batsman,2173,0,2763,75.1,0,0,1,117,47,16.71,144.44,6,0,0,0,0,0,2010,100000,100000
66 | 65,"Mishra, A",2,IND,DD+,Bowler,392,43,5,27.77,19,40.1,0,186,31,10.94,102.2,3,1530,74,20.68,7.11,17.46,2011,100000,300000
67 | 66,"Mithun, A",1,IND,RCB,Bowler,120,9,51,92.72,3,60,0,32,11,8,133.33,1,435,6,72.5,9.89,44,2011,100000,260000
68 | 67,Mohammad Asif,2,PAK,DD,Bowler,141,106,34,34,46,42.1,0,3,3,1.5,50,0,296,8,37,9.25,24,2008,225000,650000
69 | 68,"Morkel, JA",2,SA,CSK,Allrounder,58,1,782,100.25,50,41.4,0,781,71,24.41,146.25,45,1899,69,27.52,8.25,20.01,2008,225000,675000
70 | 69,"Morkel, M",2,SA,RR+,Bowler,555,139,117,75.97,94,28.5,0,60,16,10,111.11,2,884,38,23.26,7.37,18.95,2011,100000,475000
71 | 70,"Muralitharan, M",3,SL,CSK+,Bowler,1261,800,674,77.56,534,35.2,0,20,6,3.33,66.67,0,1395,57,24.47,6.49,22.63,2008,250000,600000
72 | 71,"Nannes, DP",3,AUS,DD+,Bowler,0,0,1,50,1,42,0,4,3,4,30.77,0,627,24,26.13,7.17,21.92,2011,200000,650000
73 | 72,"Nayar, AM",2,IND,MI+,Allrounder,0,0,0,0,0,0,0,563,35,19.41,123.19,19,263,7,37.57,8.74,25.86,2011,50000,800000
74 | 73,"Nehra, A",2,IND,DD+,Bowler,77,44,141,57.31,157,36.6,0,38,22,19,82.61,1,1192,48,24.83,7.57,19.73,2011,200000,850000
75 | 74,"Noffke, AA",2,AUS,RCB,Allrounder,0,0,0,0,1,54,0,9,9,9,90,0,40,1,40,10,24,2010,20000,20000
76 | 75,"Ntini, M",2,SA,CSK,Bowler,699,390,199,66.77,266,32.6,0,11,11,11,61.11,0,242,7,34.57,6.91,30,2008,200000,200000
77 | 76,"Ojha, NV",2,IND,RR+,W. Keeper,0,0,1,14.28,0,0,0,960,94,22.33,117.94,50,0,0,0,0,0,2011,100000,270000
78 | 77,"Ojha, PP",1,IND,DC+,Bowler,70,62,41,43.61,20,41.7,0,10,3,1,30.3,0,1548,69,22.43,7.16,18.8,2011,200000,500000
79 | 78,"Oram, JDP",2,NZ,CSK+,Allrounder,1780,60,2377,87.16,168,39.7,0,106,41,13.25,98.15,5,327,9,36.33,9.26,23.67,2008,200000,675000
80 | 79,Pankaj Singh,2,IND,RCB+,Bowler,0,0,3,100,0,0,0,7,4,3.5,58.33,0,468,11,42.55,9.36,27.27,2011,50000,95000
81 | 80,"Patel, MM",2,IND,RR+,Bowler,60,35,74,66.07,86,36.6,0,39,23,7.8,235.49,0,1504,70,21.49,7.39,17.47,2008,100000,275000
82 | 81,"Patel, PA",2,IND,CSK+,W. Keeper,683,0,736,76.5,0,0,0,912,57,20.27,107.29,13,0,0,0,0,0,2008,150000,325000
83 | 82,"Pathan, IK",2,IND,KXIP+,Allrounder,1105,100,1468,78.96,165,34,0,929,60,23.82,128.31,34,1975,66,29.92,7.74,23.23,2008,200000,925000
84 | 83,"Pathan, YK",2,IND,RR+,Allrounder,0,0,810,113.6,33,45.1,0,1488,100,25.66,149.25,81,1139,36,31.64,7.2,26.36,2008,100000,475000
85 | 84,"Pietersen, KP",2,ENG,RCB+,Batsman,6654,5,4184,86.76,7,57.1,1,634,103,42.27,141.2,30,215,7,30.71,7.41,24.86,2009,1350000,1550000
86 | 85,"Pollock, SM",3,SA,MI,Allrounder,3781,421,3519,86.69,393,39.9,1,147,33,18.37,132.43,8,301,11,27.36,6.54,25,2008,200000,550000
87 | 86,"Pomersbach, LA",2,AUS,KXIP+,Batsman,0,0,0,0,0,0,0,244,79,27.11,130.48,12,0,0,0,0,0,2011,20000,50000
88 | 87,"Ponting, RT",3,AUS,KKR,Batsman,13218,5,13704,80.39,3,50,1,39,20,9.75,73.58,1,0,0,0,0,0,2008,335000,400000
89 | 88,"Powar, RR",2,IND,KXIP+,Bowler,13,6,163,62.69,34,45.1,0,67,28,22.33,104.69,1,527,13,40.54,7.42,32.77,2008,150000,170000
90 | 89,"Raina, SK",1,IND,CSK,Batsman,710,13,3525,92.71,16,61.9,0,2254,98,33.64,139.39,97,678,20,33.9,7.05,28.9,2008,125000,650000
91 | 90,"Ryder, JD",2,NZ,RCB+,Allrounder,1269,5,1100,89.72,11,34.8,0,604,86,21.57,131.88,19,303,8,37.88,7.73,29.5,2009,100000,160000
92 | 91,"Saha, WP",2,IND,KKR+,W. Keeper,74,0,4,80,0,0,0,372,59,28.62,128.28,16,0,0,0,0,0,2011,100000,100000
93 | 92,"Sangakkara, KC",2,SL,KXIP+,W. Keeper,9382,0,10472,75.75,0,0,1,1567,94,27.98,124.76,27,0,0,0,0,0,2008,250000,700000
94 | 93,"Sarwan, RR",2,WI,KXIP,Batsman,5842,23,5644,75.76,16,36.3,0,73,31,18.25,97.33,1,0,0,0,0,0,2008,225000,225000
95 | 94,"Sehwag, V",2,IND,DD,Batsman,8178,40,8090,104.68,95,45.4,1,1879,119,30.31,167.32,79,226,6,37.67,10.56,21.67,2011,400000,1800000
96 | 95,Shahid Afridi,2,PAK,DC,Allrounder,1716,48,7040,113.87,344,43.4,1,81,33,10.12,176.08,6,225,9,25,7.5,20,2008,225000,675000
97 | 96,"Sharma, I",1,IND,KKR+,Bowler,432,133,47,34.05,64,33.6,0,37,9,9.25,80.43,1,1176,36,32.67,7.63,23.61,2008,150000,950000
98 | 97,"Sharma, J",2,IND,CSK,Allrounder,0,0,35,116.66,1,150,0,36,16,9,120,2,419,12,34.92,9.88,21.33,2008,100000,225000
99 | 98,"Sharma, RG",1,IND,DC+,Batsman,0,0,1961,78.85,8,59.1,0,1975,109,31.35,129.17,82,408,14,29.14,8,21.86,2008,150000,750000
100 | 99,Shoaib Akhtar,3,PAK,KKR,Bowler,544,178,394,73.23,247,31.4,0,2,2,2,28.57,0,54,5,10.8,7.71,8.4,2008,250000,425000
101 | 100,Shoaib Malik,2,PAK,DD,Allrounder,1606,21,5253,78.37,139,47.6,1,52,24,13,110.63,0,85,2,42.5,10,25.5,2008,300000,500000
102 | 101,"Silva, LPC",2,SL,DC,Batsman,537,1,1587,70.4,1,42,0,40,23,20,153.84,1,21,0,0,21,0,2008,100000,100000
103 | 102,"Singh, RP",2,IND,DC+,Bowler,116,40,104,42.97,69,37.1,0,52,10,3.47,68.42,1,1892,74,25.57,7.75,19.78,2008,200000,875000
104 | 103,"Smith, DR",2,WI,DC+,Allrounder,320,7,925,97.26,56,44.8,0,439,87,25.82,148.81,24,338,9,37.56,8.14,27.89,2009,100000,100000
105 | 104,"Smith, GC",2,SA,RR+,Batsman,8042,8,6598,81.58,18,57,1,739,91,28.42,110.63,9,0,0,0,0,0,2008,250000,250000
106 | 105,Sohail Tanvir,2,PAK,RR,Bowler,17,5,268,94.03,55,37.4,0,36,13,12,124.13,1,266,22,12.09,6.46,11.2,2008,100000,100000
107 | 106,"Sreesanth, S",2,IND,KXIP+,Bowler,281,87,44,36.36,75,33,0,33,15,11,64.71,0,1031,35,29.46,8.25,21.43,2008,200000,625000
108 | 107,"Steyn, DW",2,SA,RCB+,Bowler,770,272,142,73.57,91,33.7,0,70,13,4.67,86.42,1,1304,59,22.1,6.58,20.15,2008,150000,325000
109 | 108,"Styris, SB",3,NZ,DC+,Allrounder,1586,20,4483,79.41,137,44.6,0,131,36,18.71,98.5,3,276,8,34.5,7.67,27,2008,175000,175000
110 | 109,"Symonds, A",3,AUS,DC+,Allrounder,1462,24,5088,92.44,133,44.6,0,974,117,36.07,129.87,41,674,20,33.7,7.7,26.35,2008,250000,1350000
111 | 110,"Taibu, T",2,ZIM,KKR,W. Keeper,1546,1,3393,67.58,2,42,1,31,15,10.33,119.23,0,0,0,0,0,0,2008,125000,125000
112 | 111,"Taylor, LRPL",2,NZ,RCB+,Batsman,2742,2,3185,81.77,0,0,1,895,81,27.97,130.28,45,24,0,0,12,0,2008,400000,1000000
113 | 112,"Tendulkar, SR",3,IND,MI,Batsman,15470,45,18426,86.23,154,52.2,1,2047,100,37.91,119.22,24,58,0,0,9.67,0,2011,400000,1800000
114 | 113,"Tiwary, MK",2,IND,DD+,Batsman,0,0,165,75.68,1,60,0,969,75,31.26,113.33,22,45,1,45,11.25,24,2008,100000,675000
115 | 114,"Tiwary, SS",1,IND,MI+,Batsman,0,0,49,87.5,0,0,0,836,42,25.33,119.6,32,0,0,0,0,0,2011,100000,1600000
116 | 115,"Tyagi, S",1,IND,CSK,Bowler,0,0,1,50,3,55,0,3,3,3,0.75,0,295,6,49.17,8.55,34.83,2011,50000,240000
117 | 116,Umar Gul,2,PAK,KKR,Bowler,541,157,368,69.04,154,32.2,0,39,24,13,205.26,5,184,12,15.33,8.17,11.2,2008,150000,150000
118 | 117,"Uthappa, RV",2,IND,RCB+,Batsman,0,0,786,91.92,0,0,0,1538,69,26.98,126.17,59,0,0,0,0,0,2008,200000,800000
119 | 118,"Vaas, WPUJC",3,SL,DC,Bowler,3089,355,2025,72.52,400,39.4,0,81,20,10.13,110.96,3,355,18,19.72,7.55,15.67,2008,200000,200000
120 | 119,Van der Merwe,2,SA,RCB+,Allrounder,0,0,39,95.12,17,41.4,0,137,35,15.22,118.1,8,427,18,23.72,6.83,20.94,2011,50000,50000
121 | 120,"Venugopal Rao, Y",2,IND,DC+,Batsman,0,0,218,60.05,0,0,0,914,71,22.29,118.24,37,321,6,53.5,9.44,34,2011,100000,700000
122 | 121,"Vettori, DL",2,NZ,DD+,Allrounder,4486,359,2105,81.93,282,45.7,1,121,29,15.13,107.08,2,878,28,31.36,6.81,27.75,2008,250000,625000
123 | 122,"Vinay Kumar, R",2,IND,RCB+,Bowler,11,1,43,43.87,28,35.3,0,217,25,9.43,104.83,5,1664,61,27.28,8.24,19.87,2011,100000,475000
124 | 123,"Warne, SK",3,AUS,RR,Bowler,3154,708,1018,72.04,293,36.3,1,198,34,9.9,92.52,6,1447,57,25.39,7.27,20.95,2008,450000,450000
125 | 124,"Warner, DA",1,AUS,DD,Batsman,483,2,876,85.79,0,0,0,1025,109,27.7,135.76,44,0,0,0,0,0,2011,200000,750000
126 | 125,"White, CL",2,AUS,RCB+,Batsman,146,5,2037,80.48,12,27.5,1,745,78,31.04,132.09,29,70,0,0,14,0,2008,100000,500000
127 | 126,"Yadav, AS",2,IND,DC,Batsman,0,0,0,0,0,0,0,49,16,9.8,125.64,2,0,0,0,0,0,2010,50000,750000
128 | 127,Younis Khan,2,PAK,RR,Batsman,6398,7,6814,75.78,3,86.6,1,3,3,3,42.85,0,0,0,0,0,0,2008,225000,225000
129 | 128,Yuvraj Singh,2,IND,KXIP+,Batsman,1775,9,8051,87.58,109,44.3,1,1237,66,26.32,131.88,67,569,23,24.74,7.02,21.13,2011,400000,1800000
130 | 129,Zaheer Khan,2,IND,MI+,Bowler,1114,288,790,73.55,278,35.4,0,99,23,9.9,91.67,1,1783,65,27.43,7.75,21.26,2008,200000,450000
131 | 130,"Zoysa, DNT",2,SL,DC,Bowler,288,64,343,95.81,108,39.4,0,11,10,11,122.22,0,99,2,49.5,9,33,2008,100000,110000
--------------------------------------------------------------------------------
/Supervised-learning/Study-dataset.csv:
--------------------------------------------------------------------------------
1 | studytime,score
2 | 1.8108904159657686,18.544139500848544
3 | 3.827500072434707,36.77996397201773
4 | 3.061978796339918,31.07859184607669
5 | 2.595304694689628,16.015202373891817
6 | 1.0460652415485279,9.362292976297718
7 | 1.0459808211767092,12.245371069325824
8 | 0.7032926425886981,14.42239664959456
9 | 3.531616510212273,32.72481401075449
10 | 2.6039025411012306,21.99655739654637
11 | 2.9782540222861593,27.27375500493891
12 | 0.5720457300353086,10.297467888863457
13 | 3.89468448256698,40.59060037396822
14 | 3.413549242801476,31.486691409179564
15 | 1.2431868873739664,14.998206039306444
16 | 1.136387385224852,11.849261598988722
17 | 1.1419157844870185,16.26238279753463
18 | 1.564847850358382,12.138213034197058
19 | 2.336647510712832,21.72816437413948
20 | 2.0118075652474054,18.157534886813266
21 | 1.5193019906931466,7.8754451662708735
22 | 2.641485131528328,27.89545270060616
23 | 0.9882285122821464,11.187561483720911
24 | 1.5225062698732637,15.25062998194494
25 | 1.782266451527921,16.649728848403477
26 | 2.0962449447596256,13.885595737344186
27 | 3.2481158648755475,30.37793203492868
28 | 1.1988582375542591,10.275009792908744
29 | 2.2998205344476403,18.986818998368307
30 | 2.5734509910171486,24.928081351841442
31 | 0.662576444519992,8.646018729272612
32 | 2.6264069816550344,35.694999322602996
33 | 1.0968344329055204,11.841233393214399
34 | 0.7276805754484783,8.564557708098604
35 | 3.8210993803866664,37.83876422503583
36 | 3.8797121157609578,29.203265081114374
37 | 3.329390718407614,33.161337806830055
38 | 1.5661481921067975,15.962632970773106
39 | 0.8418523990223435,20.73473455264987
40 | 2.894815592792549,27.986351104019878
41 | 2.0405337280886044,21.913073992554107
42 | 0.9271338219567259,9.097779371041042
43 | 2.2331191853894454,16.48780166579679
44 | 0.6203598239032644,11.917712311607747
45 | 3.6826214072757373,40.585879236191246
46 | 1.4057299356000592,18.012459091215824
47 | 2.818827995238937,23.641342678415675
48 | 1.5909887663129383,22.923859217809877
49 | 2.320238074122338,16.193125427261975
50 | 2.4134859777014785,27.069145246016138
51 | 1.1469905943393446,22.422184072443336
52 | 3.893546197175955,33.98278034610611
53 | 3.212964881763901,29.29816016962515
54 | 3.788246295474662,38.380719780184826
55 | 3.631895726496771,33.80157899438671
56 | 2.592649925838798,18.173182103057314
57 | 3.726559822580909,37.60841309983923
58 | 0.8097237571817182,2.7857190031866574
59 | 1.1859400184670081,14.227362337845989
60 | 0.6582955111868832,1.9858339406998171
61 | 1.6386561576714251,24.136233601801948
62 | 1.860370513913187,14.687438677450682
63 | 1.4497216112086357,12.88690853105798
64 | 3.4005812820317525,38.073398907165874
65 | 1.7486366434275624,11.332044852105849
66 | 1.4832707839058328,15.970007512078976
67 | 2.3994362910538696,30.53007668195084
68 | 0.9932347874116693,1.8949317013105542
69 | 3.3076894326391386,34.0000636190529
70 | 0.7609272528791979,8.908686500034097
71 | 3.9541042781018105,43.45015713990466
72 | 3.202856692538301,25.8438133709926
73 | 1.1955048853696035,5.352765788274652
74 | 0.5193274099326084,7.802981927410572
75 | 3.3541149995919195,35.02607336208513
76 | 2.97400070346666,30.99247128639598
77 | 3.0515250881434555,32.24749192891943
78 | 3.19944621340081,28.594338526115646
79 | 0.7591562810693162,8.75283129649818
80 | 1.754630049904954,19.011662865542945
81 | 0.905541708337954,5.4836599932477
82 | 3.5208619905645775,44.53749246136956
83 | 2.6815434438964525,29.184599043523463
84 | 1.6581430869842722,10.624913383829476
85 | 0.7224542260010827,10.507310303179976
86 | 1.5884381260048177,11.010972908911569
87 | 1.6381416270936147,20.316839289648406
88 | 3.0536216241832244,36.32919413686926
89 | 2.731451149743246,23.211099905673912
90 | 3.605244599017143,40.86932663639304
91 | 2.1527522380668227,23.591427015350718
92 | 0.918579860784056,13.29609940781301
93 | 2.9963567552804826,39.44753246607456
94 | 3.162747670159141,30.40053612157706
95 | 2.4644701914932368,20.87602109314492
96 | 3.1983851298409633,27.536279150282017
97 | 2.2282845872753674,18.203794447926484
98 | 2.329564902836979,22.910140481299266
99 | 1.9963935642549238,21.66969551663246
100 | 0.5889669436043332,7.273123432693428
101 | 0.8776199944765656,12.912116189945774
102 |
--------------------------------------------------------------------------------
/Supervised-learning/User_Data.csv:
--------------------------------------------------------------------------------
1 | User ID,Gender,Age,EstimatedSalary,Purchased
2 | 15624510,Male,19,19000,0
3 | 15810944,Male,35,20000,0
4 | 15668575,Female,26,43000,0
5 | 15603246,Female,27,57000,0
6 | 15804002,Male,19,76000,0
7 | 15728773,Male,27,58000,0
8 | 15598044,Female,27,84000,0
9 | 15694829,Female,32,150000,1
10 | 15600575,Male,25,33000,0
11 | 15727311,Female,35,65000,0
12 | 15570769,Female,26,80000,0
13 | 15606274,Female,26,52000,0
14 | 15746139,Male,20,86000,0
15 | 15704987,Male,32,18000,0
16 | 15628972,Male,18,82000,0
17 | 15697686,Male,29,80000,0
18 | 15733883,Male,47,25000,1
19 | 15617482,Male,45,26000,1
20 | 15704583,Male,46,28000,1
21 | 15621083,Female,48,29000,1
22 | 15649487,Male,45,22000,1
23 | 15736760,Female,47,49000,1
24 | 15714658,Male,48,41000,1
25 | 15599081,Female,45,22000,1
26 | 15705113,Male,46,23000,1
27 | 15631159,Male,47,20000,1
28 | 15792818,Male,49,28000,1
29 | 15633531,Female,47,30000,1
30 | 15744529,Male,29,43000,0
31 | 15669656,Male,31,18000,0
32 | 15581198,Male,31,74000,0
33 | 15729054,Female,27,137000,1
34 | 15573452,Female,21,16000,0
35 | 15776733,Female,28,44000,0
36 | 15724858,Male,27,90000,0
37 | 15713144,Male,35,27000,0
38 | 15690188,Female,33,28000,0
39 | 15689425,Male,30,49000,0
40 | 15671766,Female,26,72000,0
41 | 15782806,Female,27,31000,0
42 | 15764419,Female,27,17000,0
43 | 15591915,Female,33,51000,0
44 | 15772798,Male,35,108000,0
45 | 15792008,Male,30,15000,0
46 | 15715541,Female,28,84000,0
47 | 15639277,Male,23,20000,0
48 | 15798850,Male,25,79000,0
49 | 15776348,Female,27,54000,0
50 | 15727696,Male,30,135000,1
51 | 15793813,Female,31,89000,0
52 | 15694395,Female,24,32000,0
53 | 15764195,Female,18,44000,0
54 | 15744919,Female,29,83000,0
55 | 15671655,Female,35,23000,0
56 | 15654901,Female,27,58000,0
57 | 15649136,Female,24,55000,0
58 | 15775562,Female,23,48000,0
59 | 15807481,Male,28,79000,0
60 | 15642885,Male,22,18000,0
61 | 15789109,Female,32,117000,0
62 | 15814004,Male,27,20000,0
63 | 15673619,Male,25,87000,0
64 | 15595135,Female,23,66000,0
65 | 15583681,Male,32,120000,1
66 | 15605000,Female,59,83000,0
67 | 15718071,Male,24,58000,0
68 | 15679760,Male,24,19000,0
69 | 15654574,Female,23,82000,0
70 | 15577178,Female,22,63000,0
71 | 15595324,Female,31,68000,0
72 | 15756932,Male,25,80000,0
73 | 15726358,Female,24,27000,0
74 | 15595228,Female,20,23000,0
75 | 15782530,Female,33,113000,0
76 | 15592877,Male,32,18000,0
77 | 15651983,Male,34,112000,1
78 | 15746737,Male,18,52000,0
79 | 15774179,Female,22,27000,0
80 | 15667265,Female,28,87000,0
81 | 15655123,Female,26,17000,0
82 | 15595917,Male,30,80000,0
83 | 15668385,Male,39,42000,0
84 | 15709476,Male,20,49000,0
85 | 15711218,Male,35,88000,0
86 | 15798659,Female,30,62000,0
87 | 15663939,Female,31,118000,1
88 | 15694946,Male,24,55000,0
89 | 15631912,Female,28,85000,0
90 | 15768816,Male,26,81000,0
91 | 15682268,Male,35,50000,0
92 | 15684801,Male,22,81000,0
93 | 15636428,Female,30,116000,0
94 | 15809823,Male,26,15000,0
95 | 15699284,Female,29,28000,0
96 | 15786993,Female,29,83000,0
97 | 15709441,Female,35,44000,0
98 | 15710257,Female,35,25000,0
99 | 15582492,Male,28,123000,1
100 | 15575694,Male,35,73000,0
101 | 15756820,Female,28,37000,0
102 | 15766289,Male,27,88000,0
103 | 15593014,Male,28,59000,0
104 | 15584545,Female,32,86000,0
105 | 15675949,Female,33,149000,1
106 | 15672091,Female,19,21000,0
107 | 15801658,Male,21,72000,0
108 | 15706185,Female,26,35000,0
109 | 15789863,Male,27,89000,0
110 | 15720943,Male,26,86000,0
111 | 15697997,Female,38,80000,0
112 | 15665416,Female,39,71000,0
113 | 15660200,Female,37,71000,0
114 | 15619653,Male,38,61000,0
115 | 15773447,Male,37,55000,0
116 | 15739160,Male,42,80000,0
117 | 15689237,Male,40,57000,0
118 | 15679297,Male,35,75000,0
119 | 15591433,Male,36,52000,0
120 | 15642725,Male,40,59000,0
121 | 15701962,Male,41,59000,0
122 | 15811613,Female,36,75000,0
123 | 15741049,Male,37,72000,0
124 | 15724423,Female,40,75000,0
125 | 15574305,Male,35,53000,0
126 | 15678168,Female,41,51000,0
127 | 15697020,Female,39,61000,0
128 | 15610801,Male,42,65000,0
129 | 15745232,Male,26,32000,0
130 | 15722758,Male,30,17000,0
131 | 15792102,Female,26,84000,0
132 | 15675185,Male,31,58000,0
133 | 15801247,Male,33,31000,0
134 | 15725660,Male,30,87000,0
135 | 15638963,Female,21,68000,0
136 | 15800061,Female,28,55000,0
137 | 15578006,Male,23,63000,0
138 | 15668504,Female,20,82000,0
139 | 15687491,Male,30,107000,1
140 | 15610403,Female,28,59000,0
141 | 15741094,Male,19,25000,0
142 | 15807909,Male,19,85000,0
143 | 15666141,Female,18,68000,0
144 | 15617134,Male,35,59000,0
145 | 15783029,Male,30,89000,0
146 | 15622833,Female,34,25000,0
147 | 15746422,Female,24,89000,0
148 | 15750839,Female,27,96000,1
149 | 15749130,Female,41,30000,0
150 | 15779862,Male,29,61000,0
151 | 15767871,Male,20,74000,0
152 | 15679651,Female,26,15000,0
153 | 15576219,Male,41,45000,0
154 | 15699247,Male,31,76000,0
155 | 15619087,Female,36,50000,0
156 | 15605327,Male,40,47000,0
157 | 15610140,Female,31,15000,0
158 | 15791174,Male,46,59000,0
159 | 15602373,Male,29,75000,0
160 | 15762605,Male,26,30000,0
161 | 15598840,Female,32,135000,1
162 | 15744279,Male,32,100000,1
163 | 15670619,Male,25,90000,0
164 | 15599533,Female,37,33000,0
165 | 15757837,Male,35,38000,0
166 | 15697574,Female,33,69000,0
167 | 15578738,Female,18,86000,0
168 | 15762228,Female,22,55000,0
169 | 15614827,Female,35,71000,0
170 | 15789815,Male,29,148000,1
171 | 15579781,Female,29,47000,0
172 | 15587013,Male,21,88000,0
173 | 15570932,Male,34,115000,0
174 | 15794661,Female,26,118000,0
175 | 15581654,Female,34,43000,0
176 | 15644296,Female,34,72000,0
177 | 15614420,Female,23,28000,0
178 | 15609653,Female,35,47000,0
179 | 15594577,Male,25,22000,0
180 | 15584114,Male,24,23000,0
181 | 15673367,Female,31,34000,0
182 | 15685576,Male,26,16000,0
183 | 15774727,Female,31,71000,0
184 | 15694288,Female,32,117000,1
185 | 15603319,Male,33,43000,0
186 | 15759066,Female,33,60000,0
187 | 15814816,Male,31,66000,0
188 | 15724402,Female,20,82000,0
189 | 15571059,Female,33,41000,0
190 | 15674206,Male,35,72000,0
191 | 15715160,Male,28,32000,0
192 | 15730448,Male,24,84000,0
193 | 15662067,Female,19,26000,0
194 | 15779581,Male,29,43000,0
195 | 15662901,Male,19,70000,0
196 | 15689751,Male,28,89000,0
197 | 15667742,Male,34,43000,0
198 | 15738448,Female,30,79000,0
199 | 15680243,Female,20,36000,0
200 | 15745083,Male,26,80000,0
201 | 15708228,Male,35,22000,0
202 | 15628523,Male,35,39000,0
203 | 15708196,Male,49,74000,0
204 | 15735549,Female,39,134000,1
205 | 15809347,Female,41,71000,0
206 | 15660866,Female,58,101000,1
207 | 15766609,Female,47,47000,0
208 | 15654230,Female,55,130000,1
209 | 15794566,Female,52,114000,0
210 | 15800890,Female,40,142000,1
211 | 15697424,Female,46,22000,0
212 | 15724536,Female,48,96000,1
213 | 15735878,Male,52,150000,1
214 | 15707596,Female,59,42000,0
215 | 15657163,Male,35,58000,0
216 | 15622478,Male,47,43000,0
217 | 15779529,Female,60,108000,1
218 | 15636023,Male,49,65000,0
219 | 15582066,Male,40,78000,0
220 | 15666675,Female,46,96000,0
221 | 15732987,Male,59,143000,1
222 | 15789432,Female,41,80000,0
223 | 15663161,Male,35,91000,1
224 | 15694879,Male,37,144000,1
225 | 15593715,Male,60,102000,1
226 | 15575002,Female,35,60000,0
227 | 15622171,Male,37,53000,0
228 | 15795224,Female,36,126000,1
229 | 15685346,Male,56,133000,1
230 | 15691808,Female,40,72000,0
231 | 15721007,Female,42,80000,1
232 | 15794253,Female,35,147000,1
233 | 15694453,Male,39,42000,0
234 | 15813113,Male,40,107000,1
235 | 15614187,Male,49,86000,1
236 | 15619407,Female,38,112000,0
237 | 15646227,Male,46,79000,1
238 | 15660541,Male,40,57000,0
239 | 15753874,Female,37,80000,0
240 | 15617877,Female,46,82000,0
241 | 15772073,Female,53,143000,1
242 | 15701537,Male,42,149000,1
243 | 15736228,Male,38,59000,0
244 | 15780572,Female,50,88000,1
245 | 15769596,Female,56,104000,1
246 | 15586996,Female,41,72000,0
247 | 15722061,Female,51,146000,1
248 | 15638003,Female,35,50000,0
249 | 15775590,Female,57,122000,1
250 | 15730688,Male,41,52000,0
251 | 15753102,Female,35,97000,1
252 | 15810075,Female,44,39000,0
253 | 15723373,Male,37,52000,0
254 | 15795298,Female,48,134000,1
255 | 15584320,Female,37,146000,1
256 | 15724161,Female,50,44000,0
257 | 15750056,Female,52,90000,1
258 | 15609637,Female,41,72000,0
259 | 15794493,Male,40,57000,0
260 | 15569641,Female,58,95000,1
261 | 15815236,Female,45,131000,1
262 | 15811177,Female,35,77000,0
263 | 15680587,Male,36,144000,1
264 | 15672821,Female,55,125000,1
265 | 15767681,Female,35,72000,0
266 | 15600379,Male,48,90000,1
267 | 15801336,Female,42,108000,1
268 | 15721592,Male,40,75000,0
269 | 15581282,Male,37,74000,0
270 | 15746203,Female,47,144000,1
271 | 15583137,Male,40,61000,0
272 | 15680752,Female,43,133000,0
273 | 15688172,Female,59,76000,1
274 | 15791373,Male,60,42000,1
275 | 15589449,Male,39,106000,1
276 | 15692819,Female,57,26000,1
277 | 15727467,Male,57,74000,1
278 | 15734312,Male,38,71000,0
279 | 15764604,Male,49,88000,1
280 | 15613014,Female,52,38000,1
281 | 15759684,Female,50,36000,1
282 | 15609669,Female,59,88000,1
283 | 15685536,Male,35,61000,0
284 | 15750447,Male,37,70000,1
285 | 15663249,Female,52,21000,1
286 | 15638646,Male,48,141000,0
287 | 15734161,Female,37,93000,1
288 | 15631070,Female,37,62000,0
289 | 15761950,Female,48,138000,1
290 | 15649668,Male,41,79000,0
291 | 15713912,Female,37,78000,1
292 | 15586757,Male,39,134000,1
293 | 15596522,Male,49,89000,1
294 | 15625395,Male,55,39000,1
295 | 15760570,Male,37,77000,0
296 | 15566689,Female,35,57000,0
297 | 15725794,Female,36,63000,0
298 | 15673539,Male,42,73000,1
299 | 15705298,Female,43,112000,1
300 | 15675791,Male,45,79000,0
301 | 15747043,Male,46,117000,1
302 | 15736397,Female,58,38000,1
303 | 15678201,Male,48,74000,1
304 | 15720745,Female,37,137000,1
305 | 15637593,Male,37,79000,1
306 | 15598070,Female,40,60000,0
307 | 15787550,Male,42,54000,0
308 | 15603942,Female,51,134000,0
309 | 15733973,Female,47,113000,1
310 | 15596761,Male,36,125000,1
311 | 15652400,Female,38,50000,0
312 | 15717893,Female,42,70000,0
313 | 15622585,Male,39,96000,1
314 | 15733964,Female,38,50000,0
315 | 15753861,Female,49,141000,1
316 | 15747097,Female,39,79000,0
317 | 15594762,Female,39,75000,1
318 | 15667417,Female,54,104000,1
319 | 15684861,Male,35,55000,0
320 | 15742204,Male,45,32000,1
321 | 15623502,Male,36,60000,0
322 | 15774872,Female,52,138000,1
323 | 15611191,Female,53,82000,1
324 | 15674331,Male,41,52000,0
325 | 15619465,Female,48,30000,1
326 | 15575247,Female,48,131000,1
327 | 15695679,Female,41,60000,0
328 | 15713463,Male,41,72000,0
329 | 15785170,Female,42,75000,0
330 | 15796351,Male,36,118000,1
331 | 15639576,Female,47,107000,1
332 | 15693264,Male,38,51000,0
333 | 15589715,Female,48,119000,1
334 | 15769902,Male,42,65000,0
335 | 15587177,Male,40,65000,0
336 | 15814553,Male,57,60000,1
337 | 15601550,Female,36,54000,0
338 | 15664907,Male,58,144000,1
339 | 15612465,Male,35,79000,0
340 | 15810800,Female,38,55000,0
341 | 15665760,Male,39,122000,1
342 | 15588080,Female,53,104000,1
343 | 15776844,Male,35,75000,0
344 | 15717560,Female,38,65000,0
345 | 15629739,Female,47,51000,1
346 | 15729908,Male,47,105000,1
347 | 15716781,Female,41,63000,0
348 | 15646936,Male,53,72000,1
349 | 15768151,Female,54,108000,1
350 | 15579212,Male,39,77000,0
351 | 15721835,Male,38,61000,0
352 | 15800515,Female,38,113000,1
353 | 15591279,Male,37,75000,0
354 | 15587419,Female,42,90000,1
355 | 15750335,Female,37,57000,0
356 | 15699619,Male,36,99000,1
357 | 15606472,Male,60,34000,1
358 | 15778368,Male,54,70000,1
359 | 15671387,Female,41,72000,0
360 | 15573926,Male,40,71000,1
361 | 15709183,Male,42,54000,0
362 | 15577514,Male,43,129000,1
363 | 15778830,Female,53,34000,1
364 | 15768072,Female,47,50000,1
365 | 15768293,Female,42,79000,0
366 | 15654456,Male,42,104000,1
367 | 15807525,Female,59,29000,1
368 | 15574372,Female,58,47000,1
369 | 15671249,Male,46,88000,1
370 | 15779744,Male,38,71000,0
371 | 15624755,Female,54,26000,1
372 | 15611430,Female,60,46000,1
373 | 15774744,Male,60,83000,1
374 | 15629885,Female,39,73000,0
375 | 15708791,Male,59,130000,1
376 | 15793890,Female,37,80000,0
377 | 15646091,Female,46,32000,1
378 | 15596984,Female,46,74000,0
379 | 15800215,Female,42,53000,0
380 | 15577806,Male,41,87000,1
381 | 15749381,Female,58,23000,1
382 | 15683758,Male,42,64000,0
383 | 15670615,Male,48,33000,1
384 | 15715622,Female,44,139000,1
385 | 15707634,Male,49,28000,1
386 | 15806901,Female,57,33000,1
387 | 15775335,Male,56,60000,1
388 | 15724150,Female,49,39000,1
389 | 15627220,Male,39,71000,0
390 | 15672330,Male,47,34000,1
391 | 15668521,Female,48,35000,1
392 | 15807837,Male,48,33000,1
393 | 15592570,Male,47,23000,1
394 | 15748589,Female,45,45000,1
395 | 15635893,Male,60,42000,1
396 | 15757632,Female,39,59000,0
397 | 15691863,Female,46,41000,1
398 | 15706071,Male,51,23000,1
399 | 15654296,Female,50,20000,1
400 | 15755018,Male,36,33000,0
401 | 15594041,Female,49,36000,1
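The columns above (Gender, Age, EstimatedSalary, Purchased) are the kind of inputs used in the classification notebooks. A minimal sketch of loading and modelling this file is shown below; the relative path, the feature choice, and the 75/25 split are assumptions for illustration only.

import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

# Load the dataset (path assumed relative to the repository root)
df = pd.read_csv("Supervised-learning/User_Data.csv")
X = df[["Age", "EstimatedSalary"]]   # numeric predictors
y = df["Purchased"]                  # binary target (0/1)

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0)

# Salaries dwarf ages, so standardise before fitting
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

clf = LogisticRegression().fit(X_train, y_train)
print("test accuracy:", accuracy_score(y_test, clf.predict(X_test)))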
--------------------------------------------------------------------------------
/Supervised-learning/Weight_height.csv:
--------------------------------------------------------------------------------
1 | Weight,Height
2 | 45,120
3 | 58,125
4 | 48,123
5 | 60,145
6 | 70,160
7 | 78,162
8 | 80,163
9 | 90,175
10 | 95,182
11 | 78,170
12 | 82,176
13 | 95,182
14 | 105,175
15 | 100,183
16 | 85,170
17 | 78,177
18 | 50,140
19 | 65,159
20 | 76,150
21 | 87,167
22 | 45,129
23 | 56,140
24 | 72,160
25 |
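A minimal sketch of a simple linear fit of Height on Weight for the file above; the path is an assumption.

import pandas as pd
from sklearn.linear_model import LinearRegression

df = pd.read_csv("Supervised-learning/Weight_height.csv")
X = df[["Weight"]]   # predictor as a 2-D array
y = df["Height"]     # response

reg = LinearRegression().fit(X, y)
print("slope:", reg.coef_[0], "intercept:", reg.intercept_)
print("R^2:", reg.score(X, y))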
--------------------------------------------------------------------------------
/Supervised-learning/cost-fun.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Supervised-learning/cost-fun.png
--------------------------------------------------------------------------------
/Supervised-learning/curve.csv:
--------------------------------------------------------------------------------
1 | x,y
2 | 2,-1.999618
3 | 2,-1.999618
4 | 8,-3.978312
5 | 9,-1.969175
6 | 10,-0.95777
7 | 10,3.04223
8 | 11,-3.943845
9 | 14,3.11557
10 | 15,0.142095
11 | 21,-2.610555
12 | 25,2.656825
13 | 26,-0.26121
14 | 31,2.251935
15 | 31,2.251935
16 | 35,1.801555
17 | 35,-1.198445
18 | 37,-0.871723
19 | 41,-0.104375
20 | 42,4.112662
21 | 44,7.57874
22 | 47,3.361647
23 | 51,3.572515
24 | 54,10.61473
25 | 56,5.37716
26 | 58,9.196038
27 | 59,12.627275
28 | 63,12.503423
29 | 63,9.503423
30 | 64,12.01152
31 | 71,17.033895
32 | 71,17.033895
33 | 77,23.176157
34 | 77,22.176157
35 | 79,19.709455
36 | 81,24.322385
37 | 83,25.016963
38 | 84,25.8955
39 | 84,22.8955
40 | 85,22.795205
41 | 87,24.659127
42 | 88,31.623848
43 | 88,26.623848
44 | 92,30.707012
45 | 93,37.785133
46 | 94,34.88669
47 | 95,40.011935
48 | 98,37.532318
49 | 99,44.754835
50 | 99,43.754835
51 | 100,43.0023
52 |
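The (x, y) pairs above bend upward rather than following a straight line, which is why a polynomial fit is the natural sketch here. The path and the degree are assumptions.

import pandas as pd
import numpy as np

df = pd.read_csv("Supervised-learning/curve.csv")
# Least-squares quadratic fit; coefficients returned highest degree first
coeffs = np.polyfit(df["x"], df["y"], deg=2)
print("fitted coefficients:", coeffs)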
--------------------------------------------------------------------------------
/Supervised-learning/dataset.csv:
--------------------------------------------------------------------------------
1 | studytime,score
2 | 2,111
3 | 1,107
4 | 3,26
5 | 3,63
6 | 4,7
7 | 2,10
8 | 2,68
9 | 2,55
10 | 3,46
11 | 3,87
12 | 2,96
13 | 1,95
14 | 2,118
15 | 2,93
16 | 3,75
17 | 2,110
18 | 3,100
19 | 3,19
20 | 2,20
21 | 2,74
22 | 4,92
23 | 4,66
24 | 4,10
25 | 3,43
26 | 2,106
27 | 4,30
28 | 1,38
29 | 1,115
30 | 4,66
31 | 2,76
32 | 4,120
33 | 4,6
34 | 2,99
35 | 2,92
36 | 2,115
37 | 3,117
38 | 2,114
39 | 1,36
40 | 2,118
41 | 1,16
42 | 3,77
43 | 1,91
44 | 2,14
45 | 4,105
46 | 2,71
47 | 2,14
48 | 4,119
49 | 4,68
50 | 2,76
51 | 2,12
52 | 4,118
53 | 2,7
54 | 2,83
55 | 2,71
56 | 2,29
57 | 4,68
58 | 3,108
59 | 1,105
60 | 3,3
61 | 1,86
62 | 1,53
63 | 1,10
64 | 1,18
65 | 4,55
66 | 1,39
67 | 1,77
68 | 2,37
69 | 4,27
70 | 1,35
71 | 3,74
72 | 3,30
73 | 2,14
74 | 4,102
75 | 4,70
76 | 1,25
77 | 3,103
78 | 3,3
79 | 1,92
80 | 2,36
81 | 2,92
82 | 3,31
83 | 1,32
84 | 2,54
85 | 3,61
86 | 2,35
87 | 4,39
88 | 4,65
89 | 2,103
90 | 3,119
91 | 4,76
92 | 2,40
93 | 1,57
94 | 2,7
95 | 2,35
96 | 1,18
97 | 3,89
98 | 3,111
99 | 3,38
100 | 1,88
101 | 1,17
102 |
--------------------------------------------------------------------------------
/Supervised-learning/fit-line.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Supervised-learning/fit-line.png
--------------------------------------------------------------------------------
/Supervised-learning/logistic_function_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Supervised-learning/logistic_function_plot.png
--------------------------------------------------------------------------------
/Supervised-learning/ols-method.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Supervised-learning/ols-method.png
--------------------------------------------------------------------------------
/Supervised-learning/pair-plt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/Supervised-learning/pair-plt.png
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | console.log("Hello, World!");
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 1 - Python_Basics.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 1 - Python_Basics.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 10 - Text Analytics.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 10 - Text Analytics.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 2 - Descriptive Analytics.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 2 - Descriptive Analytics.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 3 - Basic Statistical Analysis.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 3 - Basic Statistical Analysis.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 4 - Linear Regression.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 4 - Linear Regression.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 5 - Classification Problems.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 5 - Classification Problems.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 6 - Advanced Machine Learning.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 6 - Advanced Machine Learning.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 7 - Clustering.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 7 - Clustering.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 8 - Forecasting.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 8 - Forecasting.pdf
--------------------------------------------------------------------------------
/ml-data/Codes/Chapter 9 - Recommendation Systems.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/Codes/Chapter 9 - Recommendation Systems.pdf
--------------------------------------------------------------------------------
/ml-data/Country-Data.csv:
--------------------------------------------------------------------------------
1 | Country,Age,Salary,Purchased
2 | France,44,72000,No
3 | Spain,27,48000,Yes
4 | Germany,30,54000,No
5 | Spain,38,61000,No
6 | Germany,40,,Yes
7 | France,35,58000,Yes
8 | Spain,,52000,No
9 | France,48,79000,Yes
10 | Germany,50,83000,No
11 | France,37,67000,Yes
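Two cells in this file are blank (a Salary and an Age), and Country/Purchased are categorical, so a preprocessing sketch is shown below. The path, the mean-imputation strategy, and one-hot encoding are assumptions for illustration.

import pandas as pd
from sklearn.impute import SimpleImputer

df = pd.read_csv("ml-data/Country-Data.csv")

# Fill the missing Age and Salary values with the column means
imputer = SimpleImputer(strategy="mean")
df[["Age", "Salary"]] = imputer.fit_transform(df[["Age", "Salary"]])

# One-hot encode Country and map Purchased to 0/1
df = pd.get_dummies(df, columns=["Country"])
df["Purchased"] = df["Purchased"].map({"No": 0, "Yes": 1})
print(df.head())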
--------------------------------------------------------------------------------
/ml-data/IPL-IMB381IPL2013.csv:
--------------------------------------------------------------------------------
1 | Sl.NO.,PLAYER NAME,AGE,COUNTRY,TEAM,PLAYING ROLE,T-RUNS,T-WKTS,ODI-RUNS-S,ODI-SR-B,ODI-WKTS,ODI-SR-BL,CAPTAINCY EXP,RUNS-S,HS,AVE,SR-B,SIXERS,RUNS-C,WKTS,AVE-BL,ECON,SR-BL,AUCTION YEAR,BASE PRICE,SOLD PRICE
2 | 1,"Abdulla, YA",2,SA,KXIP,Allrounder,0,0,0,0,0,0,0,0,0,0,0,0,307,15,20.47,8.9,13.93,2009,50000,50000
3 | 2,Abdur Razzak,2,BAN,RCB,Bowler,214,18,657,71.41,185,37.6,0,0,0,0,0,0,29,0,0,14.5,0,2008,50000,50000
4 | 3,"Agarkar, AB",2,IND,KKR,Bowler,571,58,1269,80.62,288,32.9,0,167,39,18.56,121.01,5,1059,29,36.52,8.81,24.9,2008,200000,350000
5 | 4,"Ashwin, R",1,IND,CSK,Bowler,284,31,241,84.56,51,36.8,0,58,11,5.8,76.32,0,1125,49,22.96,6.23,22.14,2011,100000,850000
6 | 5,"Badrinath, S",2,IND,CSK,Batsman,63,0,79,45.93,0,0,0,1317,71,32.93,120.71,28,0,0,0,0,0,2011,100000,800000
7 | 6,"Bailey, GJ",2,AUS,CSK,Batsman,0,0,172,72.26,0,0,1,63,48,21,95.45,0,0,0,0,0,0,2009,50000,50000
8 | 7,"Balaji, L",2,IND,CSK+,Bowler,51,27,120,78.94,34,42.5,0,26,15,4.33,72.22,1,1342,52,25.81,7.98,19.4,2011,100000,500000
9 | 8,"Bollinger, DE",2,AUS,CSK,Bowler,54,50,50,92.59,62,31.3,0,21,16,21,165.88,1,693,37,18.73,7.22,15.57,2011,200000,700000
10 | 9,"Botha, J",2,SA,RR,Allrounder,83,17,609,85.77,72,53,1,335,67,30.45,114.73,3,610,19,32.11,6.85,28.11,2011,200000,950000
11 | 10,"Boucher, MV",2,SA,RCB+,W. Keeper,5515,1,4686,84.76,0,0,1,394,50,28.14,127.51,13,0,0,0,0,0,2008,200000,450000
12 | 11,"Bravo, DJ",2,WI,MI+,Allrounder,2200,86,2004,81.39,142,34.1,0,839,70,27.97,127.12,38,1338,47,28.47,8.12,21.11,2011,200000,200000
13 | 12,"Chanderpaul, S",3,WI,RCB,Batsman,9918,9,8778,70.74,14,52.8,1,25,16,8.33,80.64,0,0,0,0,0,0,2008,200000,200000
14 | 13,"Chawla, PP",1,IND,KXIP,Allrounder,5,3,38,65.51,32,41,0,337,24,13.48,113.09,9,1819,73,126.3,38.11,100.2,2008,125000,400000
15 | 14,"de Villiers, AB",2,SA,DD+,W. Keeper,5457,2,4998,93.19,0,0,1,1302,105,34.26,128.53,42,0,0,0,0,0,2008,200000,300000
16 | 15,"Dhawan, S",2,IND,MI+,Batsman,0,0,69,56.09,0,0,0,1540,95,31.43,122.32,36,66,4,16.5,8.25,12,2011,100000,300000
17 | 16,"Dhoni, MS",2,IND,CSK,W. Keeper,3509,0,6773,88.19,1,12,1,1782,70,37.13,136.45,64,0,0,0,0,0,2008,400000,1500000
18 | 17,"Dilshan, TM",2,SL,DD+,Allrounder,4722,32,6455,86.8,67,58.3,1,1077,76,28.34,117.83,24,356,5,71.2,8.07,53,2008,150000,250000
19 | 18,"Dinda, AB",2,IND,KKR+,Bowler,0,0,18,60,5,61.4,0,6,2,1,33.33,0,926,36,25.72,7.29,21.19,2011,100000,375000
20 | 19,"Dravid, RS",3,IND,RCB+,Batsman,13288,1,10889,71.24,4,46.5,1,1703,75,27.92,116.88,23,0,0,0,0,0,2011,400000,500000
21 | 20,"Duminy, J-P",2,SA,MI+,Batsman,654,11,2536,84,25,47.6,0,978,74,36.22,119.27,35,377,10,37.7,7.11,31.8,2009,300000,300000
22 | 21,"Edwards, FH",2,WI,DC,Bowler,380,157,73,45.62,60,35.6,0,4,3,4,80,0,154,5,30.8,6.6,28,2009,150000,150000
23 | 22,"Fernando, CRD",2,SL,MI,Bowler,249,97,239,60.96,187,34.7,0,4,2,0,133.33,0,298,17,17.53,7.64,13.76,2008,150000,150000
24 | 23,"Fleming, SP",3,NZ,CSK,Batsman,7172,0,8037,71.49,1,29,1,196,45,21.77,118.78,3,0,0,0,0,0,2008,350000,350000
25 | 24,"Flintoff, A",2,ENG,CSK,Allrounder,3845,226,3394,88.82,169,33.2,1,62,24,31,116.98,2,105,2,52.5,9.55,33,2009,950000,1550000
26 | 25,"Gambhir, G",2,IND,DD+,Batsman,3712,0,4819,86.17,0,0,1,2065,93,33.31,128.9,32,0,0,0,0,0,2008,220000,725000
27 | 26,"Ganguly, SC",3,IND,KKR+,Batsman,7212,32,11363,73.7,100,45.6,1,1349,91,25.45,106.81,42,363,10,36.3,7.89,27.6,2011,200000,400000
28 | 27,"Gayle, CH",2,WI,KKR+,Allrounder,6373,72,8087,83.95,156,44.4,1,1804,128,50.11,161.79,129,606,13,46.62,8.05,34.85,2008,250000,800000
29 | 28,"Gibbs, HH",3,SA,DC,Batsman,6167,0,8094,83.26,0,0,1,886,69,27.69,109.79,31,0,0,0,0,0,2008,250000,575000
30 | 29,"Gilchrist, AC",3,AUS,DC+,W. Keeper,5570,0,9619,96.94,0,0,1,1775,109,27.73,140.21,86,0,0,0,0,0,2008,300000,700000
31 | 30,"Gony, MS",2,IND,CSK+,Bowler,0,0,0,0,2,39,0,54,15,9,117.39,5,999,30,33.3,8.47,23.6,2011,50000,290000
32 | 31,Harbhajan Singh,2,IND,MI,Bowler,2164,406,1190,80.51,259,46.5,1,430,49,16.54,151.41,22,1469,54,27.2,6.85,23.83,2008,250000,850000
33 | 32,"Harris, RJ",2,AUS,DC+,Bowler,199,46,48,100,44,23.4,0,115,17,10.45,107.48,3,975,44,22.16,7.71,17.27,2011,200000,325000
34 | 33,"Hayden, ML",3,AUS,CSK,Batsman,8625,0,6133,78.96,0,0,0,1107,93,36.9,137.52,44,0,0,0,0,0,2008,225000,375000
35 | 34,"Henderson, T",3,SA,RR,Allrounder,0,0,0,0,0,0,0,11,11,5.5,68.75,1,40,1,40,6.66,36,2009,100000,650000
36 | 35,"Henriques, MC",1,AUS,KKR+,Allrounder,0,0,18,60,1,90,0,49,30,16.33,108.89,1,142,3,47.33,8.82,32.33,2011,50000,50000
37 | 36,"Hodge, BJ",3,AUS,KKR+,Batsman,503,0,575,87.51,1,66,0,1006,73,31.44,121.5,28,300,17,17.65,7.89,13.41,2011,200000,425000
38 | 37,"Hopes, JR",2,AUS,KXIP,Allrounder,0,0,1326,93.71,67,47.1,0,417,71,26.06,136.27,11,548,14,39.14,9.13,25.71,2011,200000,350000
39 | 38,"Hussey, DJ",2,AUS,KKR+,Allrounder,0,0,1488,91.4,18,36.2,0,971,71,26.24,125.78,48,345,6,57.5,8.85,39,2008,100000,625000
40 | 39,"Hussey, MEK",3,AUS,CSK,Batsman,5708,7,5262,86.97,2,117,1,958,116,39.92,120.65,25,0,0,0,0,0,2008,250000,250000
41 | 40,"Jadeja, RA",1,IND,RR+,Allrounder,0,0,860,78.61,57,46.2,0,904,48,23.18,120.86,35,750,26,28.85,7.33,23.65,2011,100000,950000
42 | 41,"Jaffer, W",2,IND,RCB,Allrounder,1944,2,10,43.47,0,0,0,130,50,16.25,107.44,3,0,0,0,0,0,2008,150000,150000
43 | 42,"Jayasuriya, ST",3,SL,MI,Allrounder,6973,98,13430,91.21,323,46,1,768,114,27.43,144.36,39,390,13,30,7.96,22.62,2008,250000,975000
44 | 43,"Jayawardena, DPMD",2,SL,KXIP+,Batsman,10440,6,10596,78.08,7,83.1,1,1471,110,30.65,128.02,33,0,0,0,0,0,2008,250000,475000
45 | 44,"Kaif, M",2,IND,RR+,Batsman,624,0,2753,72.03,0,0,0,259,34,14.39,103.6,6,0,0,0,0,0,2008,125000,675000
46 | 45,"Kallis, JH",3,SA,RCB,Allrounder,12379,276,11498,72.97,270,39.3,1,1965,89,30.7,110.95,37,1713,45,38.07,7.96,28.71,2008,225000,900000
47 | 46,Kamran Akmal,2,PAK,RR,W. Keeper,2648,0,2924,84.31,0,0,0,128,53,25.6,164.1,8,0,0,0,0,0,2008,150000,150000
48 | 47,Kamran Khan,1,IND,RR+,Bowler,0,0,0,0,0,0,0,3,3,3,60,0,224,9,24.89,8.48,17.78,2009,20000,24000
49 | 48,"Karthik, KD",2,IND,DD+,W. Keeper,1000,0,1008,74.5,0,0,0,1231,69,24.14,123.84,28,0,0,0,0,0,2008,200000,525000
50 | 49,"Kartik, M",2,IND,KKR+,Bowler,88,24,126,70.78,37,51.5,0,111,21,18.5,105.71,1,1013,21,48.24,7.02,41.33,2008,200000,425000
51 | 50,"Katich, SM",3,AUS,KXIP,Batsman,4188,21,1324,68.74,0,0,0,241,75,24.1,129.57,8,0,0,0,0,0,2008,200000,200000
52 | 51,"Kohli, V",1,IND,RCB,Batsman,491,0,3590,86.31,2,137,1,1639,73,28.26,119.29,49,345,4,86.25,8.84,58.5,2011,150000,1800000
53 | 52,"Kumar, P",1,IND,RCB+,Bowler,149,27,292,88.21,77,42.1,0,243,34,10.57,114.08,14,1919,53,36.21,7.73,28.11,2011,200000,800000
54 | 53,"Kumble, A",3,IND,RCB,Bowler,2506,619,938,61.06,337,43,1,35,8,11.67,74.47,0,105,2,52.5,9.55,33,2008,250000,500000
55 | 54,"Langeveldt, CK",3,SA,KKR+,Bowler,16,16,73,58.87,100,34.8,0,8,8,4,88.89,1,187,13,14.38,7.19,12,2011,100000,140000
56 | 55,"Laxman, VVS",3,IND,DC+,Batsman,8781,2,2338,71.23,0,0,1,282,52,15.67,105.62,5,0,0,0,0,0,2008,150000,375000
57 | 56,"Lee, B",2,AUS,KXI+,Bowler,1451,310,1100,82.45,377,29.2,0,103,25,11.44,121.18,6,1009,21,48.05,7.56,38.24,2008,300000,900000
58 | 57,"Maharoof, MF",2,SL,DD,Allrounder,556,25,1042,84.44,133,33.3,0,177,39,17.7,143.9,9,520,27,19.26,7.43,15.56,2008,150000,225000
59 | 58,"Malinga, SL",2,SL,MI,Bowler,275,101,327,73.81,185,31.1,0,64,17,5.82,100,4,1381,83,16.64,6.36,15.69,2008,200000,350000
60 | 59,"Mascarenhas, AD",2,ENG,RR+,Allrounder,0,0,245,95.33,13,63.2,0,74,27,8.22,101.37,1,331,19,17.42,7.01,14.95,2011,100000,100000
61 | 60,"Mathews, AD",1,SL,KKR+,Allrounder,1219,7,1447,82.59,42,43,0,376,65,25.07,123.28,12,537,15,35.8,8.2,26.33,2011,300000,950000
62 | 61,"McCullum, BB",2,NZ,KKR+,W. Keeper,3763,0,4511,89.62,0,0,1,1233,158,28.02,123.42,48,0,0,0,0,0,2008,175000,700000
63 | 62,"McDonald, AB",2,AUS,DD+,Allrounder,107,9,0,0,0,0,0,123,33,30.75,125.51,4,244,10,24.4,8.41,17.4,2011,50000,80000
64 | 63,"McGrath, GD",3,AUS,DD,Bowler,641,563,115,48.72,381,34,0,4,4,4,80,0,357,12,29.75,6.61,27,2008,350000,350000
65 | 64,Misbah-ul-Haq,3,PAK,RCB,Batsman,2173,0,2763,75.1,0,0,1,117,47,16.71,144.44,6,0,0,0,0,0,2010,100000,100000
66 | 65,"Mishra, A",2,IND,DD+,Bowler,392,43,5,27.77,19,40.1,0,186,31,10.94,102.2,3,1530,74,20.68,7.11,17.46,2011,100000,300000
67 | 66,"Mithun, A",1,IND,RCB,Bowler,120,9,51,92.72,3,60,0,32,11,8,133.33,1,435,6,72.5,9.89,44,2011,100000,260000
68 | 67,Mohammad Asif,2,PAK,DD,Bowler,141,106,34,34,46,42.1,0,3,3,1.5,50,0,296,8,37,9.25,24,2008,225000,650000
69 | 68,"Morkel, JA",2,SA,CSK,Allrounder,58,1,782,100.25,50,41.4,0,781,71,24.41,146.25,45,1899,69,27.52,8.25,20.01,2008,225000,675000
70 | 69,"Morkel, M",2,SA,RR+,Bowler,555,139,117,75.97,94,28.5,0,60,16,10,111.11,2,884,38,23.26,7.37,18.95,2011,100000,475000
71 | 70,"Muralitharan, M",3,SL,CSK+,Bowler,1261,800,674,77.56,534,35.2,0,20,6,3.33,66.67,0,1395,57,24.47,6.49,22.63,2008,250000,600000
72 | 71,"Nannes, DP",3,AUS,DD+,Bowler,0,0,1,50,1,42,0,4,3,4,30.77,0,627,24,26.13,7.17,21.92,2011,200000,650000
73 | 72,"Nayar, AM",2,IND,MI+,Allrounder,0,0,0,0,0,0,0,563,35,19.41,123.19,19,263,7,37.57,8.74,25.86,2011,50000,800000
74 | 73,"Nehra, A",2,IND,DD+,Bowler,77,44,141,57.31,157,36.6,0,38,22,19,82.61,1,1192,48,24.83,7.57,19.73,2011,200000,850000
75 | 74,"Noffke, AA",2,AUS,RCB,Allrounder,0,0,0,0,1,54,0,9,9,9,90,0,40,1,40,10,24,2010,20000,20000
76 | 75,"Ntini, M",2,SA,CSK,Bowler,699,390,199,66.77,266,32.6,0,11,11,11,61.11,0,242,7,34.57,6.91,30,2008,200000,200000
77 | 76,"Ojha, NV",2,IND,RR+,W. Keeper,0,0,1,14.28,0,0,0,960,94,22.33,117.94,50,0,0,0,0,0,2011,100000,270000
78 | 77,"Ojha, PP",1,IND,DC+,Bowler,70,62,41,43.61,20,41.7,0,10,3,1,30.3,0,1548,69,22.43,7.16,18.8,2011,200000,500000
79 | 78,"Oram, JDP",2,NZ,CSK+,Allrounder,1780,60,2377,87.16,168,39.7,0,106,41,13.25,98.15,5,327,9,36.33,9.26,23.67,2008,200000,675000
80 | 79,Pankaj Singh,2,IND,RCB+,Bowler,0,0,3,100,0,0,0,7,4,3.5,58.33,0,468,11,42.55,9.36,27.27,2011,50000,95000
81 | 80,"Patel, MM",2,IND,RR+,Bowler,60,35,74,66.07,86,36.6,0,39,23,7.8,235.49,0,1504,70,21.49,7.39,17.47,2008,100000,275000
82 | 81,"Patel, PA",2,IND,CSK+,W. Keeper,683,0,736,76.5,0,0,0,912,57,20.27,107.29,13,0,0,0,0,0,2008,150000,325000
83 | 82,"Pathan, IK",2,IND,KXIP+,Allrounder,1105,100,1468,78.96,165,34,0,929,60,23.82,128.31,34,1975,66,29.92,7.74,23.23,2008,200000,925000
84 | 83,"Pathan, YK",2,IND,RR+,Allrounder,0,0,810,113.6,33,45.1,0,1488,100,25.66,149.25,81,1139,36,31.64,7.2,26.36,2008,100000,475000
85 | 84,"Pietersen, KP",2,ENG,RCB+,Batsman,6654,5,4184,86.76,7,57.1,1,634,103,42.27,141.2,30,215,7,30.71,7.41,24.86,2009,1350000,1550000
86 | 85,"Pollock, SM",3,SA,MI,Allrounder,3781,421,3519,86.69,393,39.9,1,147,33,18.37,132.43,8,301,11,27.36,6.54,25,2008,200000,550000
87 | 86,"Pomersbach, LA",2,AUS,KXIP+,Batsman,0,0,0,0,0,0,0,244,79,27.11,130.48,12,0,0,0,0,0,2011,20000,50000
88 | 87,"Ponting, RT",3,AUS,KKR,Batsman,13218,5,13704,80.39,3,50,1,39,20,9.75,73.58,1,0,0,0,0,0,2008,335000,400000
89 | 88,"Powar, RR",2,IND,KXIP+,Bowler,13,6,163,62.69,34,45.1,0,67,28,22.33,104.69,1,527,13,40.54,7.42,32.77,2008,150000,170000
90 | 89,"Raina, SK",1,IND,CSK,Batsman,710,13,3525,92.71,16,61.9,0,2254,98,33.64,139.39,97,678,20,33.9,7.05,28.9,2008,125000,650000
91 | 90,"Ryder, JD",2,NZ,RCB+,Allrounder,1269,5,1100,89.72,11,34.8,0,604,86,21.57,131.88,19,303,8,37.88,7.73,29.5,2009,100000,160000
92 | 91,"Saha, WP",2,IND,KKR+,W. Keeper,74,0,4,80,0,0,0,372,59,28.62,128.28,16,0,0,0,0,0,2011,100000,100000
93 | 92,"Sangakkara, KC",2,SL,KXIP+,W. Keeper,9382,0,10472,75.75,0,0,1,1567,94,27.98,124.76,27,0,0,0,0,0,2008,250000,700000
94 | 93,"Sarwan, RR",2,WI,KXIP,Batsman,5842,23,5644,75.76,16,36.3,0,73,31,18.25,97.33,1,0,0,0,0,0,2008,225000,225000
95 | 94,"Sehwag, V",2,IND,DD,Batsman,8178,40,8090,104.68,95,45.4,1,1879,119,30.31,167.32,79,226,6,37.67,10.56,21.67,2011,400000,1800000
96 | 95,Shahid Afridi,2,PAK,DC,Allrounder,1716,48,7040,113.87,344,43.4,1,81,33,10.12,176.08,6,225,9,25,7.5,20,2008,225000,675000
97 | 96,"Sharma, I",1,IND,KKR+,Bowler,432,133,47,34.05,64,33.6,0,37,9,9.25,80.43,1,1176,36,32.67,7.63,23.61,2008,150000,950000
98 | 97,"Sharma, J",2,IND,CSK,Allrounder,0,0,35,116.66,1,150,0,36,16,9,120,2,419,12,34.92,9.88,21.33,2008,100000,225000
99 | 98,"Sharma, RG",1,IND,DC+,Batsman,0,0,1961,78.85,8,59.1,0,1975,109,31.35,129.17,82,408,14,29.14,8,21.86,2008,150000,750000
100 | 99,Shoaib Akhtar,3,PAK,KKR,Bowler,544,178,394,73.23,247,31.4,0,2,2,2,28.57,0,54,5,10.8,7.71,8.4,2008,250000,425000
101 | 100,Shoaib Malik,2,PAK,DD,Allrounder,1606,21,5253,78.37,139,47.6,1,52,24,13,110.63,0,85,2,42.5,10,25.5,2008,300000,500000
102 | 101,"Silva, LPC",2,SL,DC,Batsman,537,1,1587,70.4,1,42,0,40,23,20,153.84,1,21,0,0,21,0,2008,100000,100000
103 | 102,"Singh, RP",2,IND,DC+,Bowler,116,40,104,42.97,69,37.1,0,52,10,3.47,68.42,1,1892,74,25.57,7.75,19.78,2008,200000,875000
104 | 103,"Smith, DR",2,WI,DC+,Allrounder,320,7,925,97.26,56,44.8,0,439,87,25.82,148.81,24,338,9,37.56,8.14,27.89,2009,100000,100000
105 | 104,"Smith, GC",2,SA,RR+,Batsman,8042,8,6598,81.58,18,57,1,739,91,28.42,110.63,9,0,0,0,0,0,2008,250000,250000
106 | 105,Sohail Tanvir,2,PAK,RR,Bowler,17,5,268,94.03,55,37.4,0,36,13,12,124.13,1,266,22,12.09,6.46,11.2,2008,100000,100000
107 | 106,"Sreesanth, S",2,IND,KXIP+,Bowler,281,87,44,36.36,75,33,0,33,15,11,64.71,0,1031,35,29.46,8.25,21.43,2008,200000,625000
108 | 107,"Steyn, DW",2,SA,RCB+,Bowler,770,272,142,73.57,91,33.7,0,70,13,4.67,86.42,1,1304,59,22.1,6.58,20.15,2008,150000,325000
109 | 108,"Styris, SB",3,NZ,DC+,Allrounder,1586,20,4483,79.41,137,44.6,0,131,36,18.71,98.5,3,276,8,34.5,7.67,27,2008,175000,175000
110 | 109,"Symonds, A",3,AUS,DC+,Allrounder,1462,24,5088,92.44,133,44.6,0,974,117,36.07,129.87,41,674,20,33.7,7.7,26.35,2008,250000,1350000
111 | 110,"Taibu, T",2,ZIM,KKR,W. Keeper,1546,1,3393,67.58,2,42,1,31,15,10.33,119.23,0,0,0,0,0,0,2008,125000,125000
112 | 111,"Taylor, LRPL",2,NZ,RCB+,Batsman,2742,2,3185,81.77,0,0,1,895,81,27.97,130.28,45,24,0,0,12,0,2008,400000,1000000
113 | 112,"Tendulkar, SR",3,IND,MI,Batsman,15470,45,18426,86.23,154,52.2,1,2047,100,37.91,119.22,24,58,0,0,9.67,0,2011,400000,1800000
114 | 113,"Tiwary, MK",2,IND,DD+,Batsman,0,0,165,75.68,1,60,0,969,75,31.26,113.33,22,45,1,45,11.25,24,2008,100000,675000
115 | 114,"Tiwary, SS",1,IND,MI+,Batsman,0,0,49,87.5,0,0,0,836,42,25.33,119.6,32,0,0,0,0,0,2011,100000,1600000
116 | 115,"Tyagi, S",1,IND,CSK,Bowler,0,0,1,50,3,55,0,3,3,3,0.75,0,295,6,49.17,8.55,34.83,2011,50000,240000
117 | 116,Umar Gul,2,PAK,KKR,Bowler,541,157,368,69.04,154,32.2,0,39,24,13,205.26,5,184,12,15.33,8.17,11.2,2008,150000,150000
118 | 117,"Uthappa, RV",2,IND,RCB+,Batsman,0,0,786,91.92,0,0,0,1538,69,26.98,126.17,59,0,0,0,0,0,2008,200000,800000
119 | 118,"Vaas, WPUJC",3,SL,DC,Bowler,3089,355,2025,72.52,400,39.4,0,81,20,10.13,110.96,3,355,18,19.72,7.55,15.67,2008,200000,200000
120 | 119,Van der Merwe,2,SA,RCB+,Allrounder,0,0,39,95.12,17,41.4,0,137,35,15.22,118.1,8,427,18,23.72,6.83,20.94,2011,50000,50000
121 | 120,"Venugopal Rao, Y",2,IND,DC+,Batsman,0,0,218,60.05,0,0,0,914,71,22.29,118.24,37,321,6,53.5,9.44,34,2011,100000,700000
122 | 121,"Vettori, DL",2,NZ,DD+,Allrounder,4486,359,2105,81.93,282,45.7,1,121,29,15.13,107.08,2,878,28,31.36,6.81,27.75,2008,250000,625000
123 | 122,"Vinay Kumar, R",2,IND,RCB+,Bowler,11,1,43,43.87,28,35.3,0,217,25,9.43,104.83,5,1664,61,27.28,8.24,19.87,2011,100000,475000
124 | 123,"Warne, SK",3,AUS,RR,Bowler,3154,708,1018,72.04,293,36.3,1,198,34,9.9,92.52,6,1447,57,25.39,7.27,20.95,2008,450000,450000
125 | 124,"Warner, DA",1,AUS,DD,Batsman,483,2,876,85.79,0,0,0,1025,109,27.7,135.76,44,0,0,0,0,0,2011,200000,750000
126 | 125,"White, CL",2,AUS,RCB+,Batsman,146,5,2037,80.48,12,27.5,1,745,78,31.04,132.09,29,70,0,0,14,0,2008,100000,500000
127 | 126,"Yadav, AS",2,IND,DC,Batsman,0,0,0,0,0,0,0,49,16,9.8,125.64,2,0,0,0,0,0,2010,50000,750000
128 | 127,Younis Khan,2,PAK,RR,Batsman,6398,7,6814,75.78,3,86.6,1,3,3,3,42.85,0,0,0,0,0,0,2008,225000,225000
129 | 128,Yuvraj Singh,2,IND,KXIP+,Batsman,1775,9,8051,87.58,109,44.3,1,1237,66,26.32,131.88,67,569,23,24.74,7.02,21.13,2011,400000,1800000
130 | 129,Zaheer Khan,2,IND,MI+,Bowler,1114,288,790,73.55,278,35.4,0,99,23,9.9,91.67,1,1783,65,27.43,7.75,21.26,2008,200000,450000
131 | 130,"Zoysa, DNT",2,SL,DC,Bowler,288,64,343,95.81,108,39.4,0,11,10,11,122.22,0,99,2,49.5,9,33,2008,100000,110000
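A minimal sketch of loading this auction dataset and inspecting a few of its columns; the path and the column selection are assumptions, not taken from the notebooks.

import pandas as pd

ipl = pd.read_csv("ml-data/IPL-IMB381IPL2013.csv")
cols = ["PLAYER NAME", "AGE", "PLAYING ROLE", "SIXERS", "SR-B", "SOLD PRICE"]
print(ipl[cols].head())
print(ipl["SOLD PRICE"].describe())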
--------------------------------------------------------------------------------
/ml-data/Income Data.csv:
--------------------------------------------------------------------------------
1 | income,age
2 | 41100.0,48.75
3 | 54100.0,28.1
4 | 47800.0,46.75
5 | 19100.0,40.25
6 | 18200.0,35.8
7 | 19800.0,41.45
8 | 51500.0,31.55
9 | 39900.0,48.2
10 | 13000.0,36.8
11 | 31900.0,43.05
12 | 51700.0,29.7
13 | 56700.0,29.25
14 | 58800.0,29.65
15 | 46500.0,47.2
16 | 55000.0,28.35
17 | 18200.0,41.15
18 | 46400.0,45.15
19 | 46300.0,44.3
20 | 22100.0,46.65
21 | 53200.0,29.0
22 | 16700.0,40.2
23 | 46000.0,44.8
24 | 53200.0,29.2
25 | 41200.0,47.3
26 | 8000.0,36.5
27 | 13200.0,38.95
28 | 40600.0,43.7
29 | 12500.0,38.35
30 | 56700.0,29.85
31 | 56900.0,30.8
32 | 23900.0,38.3
33 | 40500.0,48.65
34 | 47300.0,43.3
35 | 55900.0,29.7
36 | 57100.0,28.3
37 | 6100.0,37.3
38 | 56500.0,29.35
39 | 27300.0,30.65
40 | 41700.0,47.6
41 | 52400.0,30.1
42 | 18800.0,43.55
43 | 56100.0,30.1
44 | 40200.0,45.65
45 | 53900.0,28.0
46 | 54300.0,29.0
47 | 20900.0,39.95
48 | 56100.0,28.5
49 | 17000.0,28.95
50 | 27600.0,41.35
51 | 54400.0,28.35
52 | 15700.0,40.55
53 | 55200.0,31.75
54 | 15000.0,44.15
55 | 25400.0,39.8
56 | 53300.0,28.75
57 | 41100.0,46.6
58 | 43700.0,48.1
59 | 23200.0,43.25
60 | 3200.0,39.65
61 | 50900.0,49.45
62 | 53600.0,29.15
63 | 59500.0,29.9
64 | 42100.0,46.6
65 | 11500.0,44.15
66 | 57200.0,29.25
67 | 3700.0,40.45
68 | 44600.0,44.2
69 | 54800.0,29.85
70 | 42200.0,47.85
71 | 54500.0,29.9
72 | 28000.0,37.4
73 | 30400.0,45.6
74 | 3100.0,42.6
75 | 25900.0,39.2
76 | 55300.0,30.55
77 | 45600.0,48.45
78 | 56100.0,29.4
79 | 16100.0,36.15
80 | 43300.0,46.05
81 | 45000.0,47.1
82 | 9900.000000000004,37.5
83 | 48400.0,47.15
84 | 58300.0,28.8
85 | 39500.0,45.6
86 | 41800.0,49.6
87 | 54300.0,48.4
88 | 52700.0,29.0
89 | 9600.0,42.4
90 | 44500.0,49.95
91 | 41500.0,45.15
92 | 27400.0,38.75
93 | 58500.0,30.35
94 | 8900.0,41.05
95 | 50700.0,46.1
96 | 56600.0,28.900000000000002
97 | 55300.0,28.8
98 | 39700.0,48.45
99 | 52000.0,29.85
100 | 8300.0,34.1
101 | 43300.0,43.75
102 | 56900.0,29.35
103 | 26000.0,44.4
104 | 44800.0,50.1
105 | 57000.0,28.3
106 | 49200.0,47.85
107 | 48600.0,46.2
108 | 12900.0,40.7
109 | 46200.0,47.95
110 | 54100.0,30.45
111 | 52900.0,28.15
112 | 40600.0,47.9
113 | 22200.0,40.9
114 | 53400.0,49.2
115 | 45300.0,47.3
116 | 17900.0,38.95
117 | 17000.0,38.05
118 | 40300.0,46.6
119 | 52200.0,48.35
120 | 51000.0,28.45
121 | 56900.0,29.05
122 | 52200.0,29.9
123 | 4100.0,41.0
124 | 54200.0,31.0
125 | 53700.0,29.1
126 | 56400.0,30.15
127 | 18400.0,36.75
128 | 54600.0,29.95
129 | 54700.0,29.2
130 | 53600.0,28.0
131 | 5099.999999999995,40.0
132 | 23000.0,40.55
133 | 17300.0,37.55
134 | 42200.0,42.05
135 | 10800.0,37.55
136 | 17000.0,35.55
137 | 54800.0,29.6
138 | 19400.0,40.35
139 | 42100.0,45.8
140 | 18000.0,44.9
141 | 21400.0,36.5
142 | 54800.0,29.65
143 | 40100.0,48.3
144 | 55200.0,29.1
145 | 46600.0,43.45
146 | 25800.0,40.55
147 | 17600.0,38.75
148 | 39800.0,48.05
149 | 21700.0,45.7
150 | 22900.0,36.2
151 | 54200.0,28.95
152 | 42700.0,44.15
153 | 21200.0,35.35
154 | 42700.0,45.65
155 | 57900.0,30.55
156 | 18200.0,36.4
157 | 15900.0,37.95
158 | 55000.0,28.75
159 | 56600.0,29.6
160 | 43100.0,48.95
161 | 54800.0,27.849999999999998
162 | 37900.0,50.4
163 | 49700.0,48.9
164 | 55300.0,30.5
165 | 41300.0,50.35
166 | 26200.0,41.65
167 | 52300.0,43.8
168 | 48200.0,45.2
169 | 47100.0,46.1
170 | 41500.0,43.05
171 | 22200.0,41.75
172 | 60100.0,29.45
173 | 44900.0,46.15
174 | 26300.0,44.55
175 | 55700.0,30.0
176 | 53500.0,30.7
177 | 58500.0,27.4
178 | 42200.0,45.8
179 | 56900.0,29.5
180 | 42800.0,50.4
181 | 50700.0,50.3
182 | 56100.0,28.599999999999998
183 | 25500.0,39.6
184 | 46300.0,44.35
185 | 45300.0,49.45
186 | 44300.0,47.35
187 | 41200.0,42.55
188 | 55800.0,29.15
189 | 22900.0,33.2
190 | 43200.0,45.9
191 | 25300.0,39.85
192 | 44600.0,47.4
193 | 46000.0,49.15
194 | 55600.0,27.849999999999998
195 | 54100.0,28.7
196 | 15100.0,40.2
197 | 44400.0,45.2
198 | 57700.0,29.05
199 | 16400.0,38.85
200 | 47200.0,48.35
201 | 12599.999999999995,39.3
202 | 55300.0,29.2
203 | 18800.0,39.9
204 | 49300.0,44.05
205 | 11800.0,42.35
206 | 55500.0,29.2
207 | 25100.0,43.2
208 | 38800.0,47.4
209 | 14600.0,40.3
210 | 43000.0,50.2
211 | 55500.0,29.65
212 | 45500.0,45.7
213 | 43400.0,46.85
214 | 17800.0,37.1
215 | 54000.0,28.05
216 | 54200.0,31.15
217 | 52900.0,31.15
218 | 53200.0,30.25
219 | 38200.0,47.45
220 | 25800.0,30.3
221 | 57400.0,29.8
222 | 47300.0,43.3
223 | 50200.0,30.1
224 | 56900.0,30.2
225 | 57100.0,29.85
226 | 45400.0,47.3
227 | 5600.0,37.7
228 | 16400.0,31.9
229 | 43100.0,45.0
230 | 53800.0,43.6
231 | 33800.0,39.45
232 | 40100.0,50.4
233 | 54400.0,29.45
234 | 54600.0,30.1
235 | 41300.0,45.35
236 | 54300.0,29.1
237 | 12300.0,42.45
238 | 10400.0,32.7
239 | 26300.0,30.0
240 | 40800.0,43.45
241 | 43500.0,46.65
242 | 55500.0,29.85
243 | 20000.0,42.3
244 | 23500.0,41.15
245 | 16200.0,48.05
246 | 10299.999999999996,36.9
247 | 56800.0,27.9
248 | 31900.0,42.0
249 | 13400.0,38.1
250 | 45400.0,47.8
251 | 57400.0,29.85
252 | 57100.0,29.0
253 | 47500.0,47.35
254 | 51900.0,29.0
255 | 52000.0,29.95
256 | 22300.0,40.1
257 | 39600.0,47.05
258 | 52500.0,29.55
259 | 28300.0,39.8
260 | 48600.0,46.2
261 | 49500.0,46.15
262 | 14300.0,39.2
263 | 49300.0,44.35
264 | 29000.0,36.5
265 | 16500.0,34.65
266 | 34600.0,47.85
267 | 22400.0,39.45
268 | 42000.0,46.2
269 | 55300.0,30.65
270 | 54400.0,29.25
271 | 55100.0,30.35
272 | 55400.0,29.4
273 | 48200.0,45.05
274 | 47800.0,48.15
275 | 42900.0,46.55
276 | 48600.0,46.7
277 | 52500.0,49.5
278 | 21100.0,33.35
279 | 15100.0,37.8
280 | 54900.0,27.8
281 | 49500.0,47.15
282 | 47700.0,46.8
283 | 54200.0,29.1
284 | 55500.0,28.75
285 | 56400.0,26.7
286 | 21100.0,39.4
287 | 57300.0,30.65
288 | 14100.0,35.7
289 | 13100.0,40.4
290 | 54900.0,29.6
291 | 55700.0,30.05
292 | 18300.0,39.2
293 | 26300.0,38.65
294 | 30300.0,41.55
295 | 60200.0,28.45
296 | 46900.0,46.2
297 | 38300.0,46.1
298 | 27400.0,44.15
299 | 54200.0,28.25
300 | 41800.0,44.85
301 | 38600.0,39.35
302 |
--------------------------------------------------------------------------------
/ml-data/MBA-Salary.csv:
--------------------------------------------------------------------------------
1 | S. No.,Percentage in Grade 10,Salary
2 | 1,62,270000
3 | 2,76.33,200000
4 | 3,72,240000
5 | 4,60,250000
6 | 5,61,180000
7 | 6,55,300000
8 | 7,70,260000
9 | 8,68,235000
10 | 9,82.8,425000
11 | 10,59,240000
12 | 11,58,250000
13 | 12,60,180000
14 | 13,66,428000
15 | 14,83,450000
16 | 15,68,300000
17 | 16,37.33,240000
18 | 17,79,252000
19 | 18,68.4,280000
20 | 19,70,231000
21 | 20,59,224000
22 | 21,63,120000
23 | 22,50,260000
24 | 23,69,300000
25 | 24,52,120000
26 | 25,49,120000
27 | 26,64.6,250000
28 | 27,50,180000
29 | 28,74,218000
30 | 29,58,360000
31 | 30,67,150000
32 | 31,75,250000
33 | 32,60,200000
34 | 33,55,300000
35 | 34,78,330000
36 | 35,50.08,265000
37 | 36,56,340000
38 | 37,68,177600
39 | 38,52,236000
40 | 39,54,265000
41 | 40,52,200000
42 | 41,76,393000
43 | 42,64.8,360000
44 | 43,74.4,300000
45 | 44,74.5,250000
46 | 45,73.5,360000
47 | 46,57.58,180000
48 | 47,68,180000
49 | 48,69,270000
50 | 49,66,240000
51 | 50,60.8,300000
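A minimal sketch of regressing Salary on the Grade-10 percentage for this file; the path and the use of statsmodels (rather than scikit-learn) are assumptions.

import pandas as pd
import statsmodels.api as sm

df = pd.read_csv("ml-data/MBA-Salary.csv")
X = sm.add_constant(df["Percentage in Grade 10"])  # add the intercept term
y = df["Salary"]

model = sm.OLS(y, X).fit()
print(model.summary())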
--------------------------------------------------------------------------------
/ml-data/beer.csv:
--------------------------------------------------------------------------------
1 | name,calories,sodium,alcohol,cost
2 | Budweiser,144,15,4.7,0.43
3 | Schlitz,151,19,4.9,0.43
4 | Lowenbrau,157,15,0.9,0.48
5 | Kronenbourg,170,7,5.2,0.73
6 | Heineken,152,11,5.0,0.77
7 | Old_Milwaukee,145,23,4.6,0.28
8 | Augsberger,175,24,5.5,0.40
9 | Srohs_Bohemian_Style,149,27,4.7,0.42
10 | Miller_Lite,99,10,4.3,0.43
11 | Budweiser_Light,113,8,3.7,0.40
12 | Coors,140,18,4.6,0.44
13 | Coors_Light,102,15,4.1,0.46
14 | Michelob_Light,135,11,4.2,0.50
15 | Becks,150,19,4.7,0.76
16 | Kirin,149,6,5.0,0.79
17 | Pabst_Extra_Light,68,15,2.3,0.38
18 | Hamms,139,19,4.4,0.43
19 | Heilemans_Old_Style,144,24,4.9,0.43
20 | Olympia_Goled_Light,72,6,2.9,0.46
21 | Schlitz_Light,97,7,4.2,0.47
22 |
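The four numeric columns lend themselves to clustering; a k-means sketch is below. The choice of 3 clusters, the scaling step, and the path are assumptions.

import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans

df = pd.read_csv("ml-data/beer.csv")
features = StandardScaler().fit_transform(
    df[["calories", "sodium", "alcohol", "cost"]])

km = KMeans(n_clusters=3, n_init=10, random_state=42).fit(features)
df["cluster"] = km.labels_
print(df[["name", "cluster"]].sort_values("cluster"))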
--------------------------------------------------------------------------------
/ml-data/bollywood.csv:
--------------------------------------------------------------------------------
1 | SlNo,Release Date,MovieName,ReleaseTime,Genre,Budget,BoxOfficeCollection,YoutubeViews,YoutubeLikes,YoutubeDislikes
2 | 1,18-Apr-14,2 States,LW,Romance,36,104,8576361,26622,2527
3 | 2,4-Jan-13,Table No. 21,N,Thriller ,10,12,1087320,1129,137
4 | 3,18-Jul-14,Amit Sahni Ki List,N,Comedy,10,4,572336,586,54
5 | 4,4-Jan-13,Rajdhani Express,N, Drama ,7,0.35,42626,86,19
6 | 5,4-Jul-14,Bobby Jasoos,N,Comedy,18,10.8,3113427,4512,1224
7 | 6,30-May-14,Citylights,HS, Drama ,7,35,1076591,1806,84
8 | 7,19-Sep-14,Daawat-E-Ishq,N,Comedy,30,24.6,3905050,8315,1373
9 | 8,11-Jan-13,Matru Ki Bijlee Ka Mandola,N,Comedy,33,40,2435283,4326,647
10 | 9,10-Jan-14,Dedh Ishqiya,LW,Comedy,31,27,2333067,2436,591
11 | 10,11-Jan-13,Gangoobai,N, Drama ,2,0.01,4354,1,1
12 | 11,28-Mar-14,Dishkiyaoon,N,Action ,28,6,2397647,3308,465
13 | 12,18-Jan-13,Inkaar,N,Romance,5,10.25,1077723,897,114
14 | 13,27-Jun-14,Ek Villain,HS,Romance,35,105.5,4518748,13558,1094
15 | 14,18-Jan-13,Mumbai Mirror,N,Action ,4,1.8,235767,192,164
16 | 15,8-Aug-14,Entertainment,N,Comedy,60,163,6336058,20472,2843
17 | 16,18-Jan-13,Bandook,N,Action,4,0.09,28391,8,14
18 | 17,25-Jan-13,Race 2,LW,Thriller ,60,162,7641333,12617,2138
19 | 18,6-Jun-14,Filmistaan,HS,Comedy,10,6,989096,1608,173
20 | 19,12-Sep-14,Finding Fanny,N,Comedy,15,58.9,6144142,13013,2746
21 | 20,13-Jun-14,Fugly,HS,Comedy,10,12.6,2343109,2963,556
22 | 21,1-Feb-13,David,N,Thriller ,10,3.65,839270,1542,95
23 | 22,7-Mar-14,Gulaab Gang,N,Action ,27,14.3,4687259,13219,1605
24 | 23,14-Feb-14,Gunday,N,Action,50,76.7,3503887,8383,1419
25 | 24,2-Oct-14,Haider,FS, Drama ,24,69,7048104,23590,2443
26 | 25,21-Nov-14,Happy Ending,N,Romance,48,21.5,5192338,6928,1120
27 | 26,8-Feb-13,Special 26,N, Drama ,42,103,1829417,6248,568
28 | 27,24-Oct-14,Happy New Year,LW, Drama ,125,383,6395202,37585,10269
29 | 28,8-Feb-13,ABCD _ Any Body Can Dance,N, Drama ,12,44,4042091,10609,788
30 | 29,18-Jul-14,Hate Story 2,N,Thriller,12,27.16,13798789,15708,5226
31 | 30,9-May-14,Hawaa Hawaai,HS, Drama ,11,10,764725,1377,94
32 | 31,7-Feb-14,Heartless,N,Thriller,12,1.5,4823892,5797,227
33 | 32,15-Feb-13,Murder 3,FS,Thriller,12,25,2249055,1879,747
34 | 33,23-May-14,Heropanti,HS,Romance,21,65,4005695,8189,1814
35 | 34,22-Feb-13,Kai Po Che!,N, Drama ,30,62.2,1888754,5686,512
36 | 35,21-Feb-14,Highway,N, Drama ,30,27.25,3043012,9100,971
37 | 36,22-Feb-13,Zilla Ghaziabad,N,Action,36,16,1551786,1753,619
38 | 37,6-Jun-14,Holiday,HS,Action,50,129,6841635,14182,1997
39 | 38,11-Jul-14,Humpty Sharma Ki Dulhania,N,Romance,20,130,6604595,14535,2208
40 | 39,1-Mar-13,The Attacks of 26/11,N,Thriller,30,32,1041892,1911,344
41 | 40,20-Jun-14,Humshakals,HS,Comedy,75,63.7,6280868,1,1
42 | 41,8-Mar-13,"Saheb, Biwi Aur Gangster Returns",FS, Drama ,15,22,1176596,1037,189
43 | 42,24-Jan-14,Jai Ho,N,Action,120,111,868743,2783,404
44 | 43,19-Sep-14,Khoobsurat,N,Romance,12,39.4,5454349,12548,2038
45 | 44,25-Jul-14,Kick,N,Action,100,377,18986221,101275,10066
46 | 45,15-Mar-13,3G,N,Thriller,5,5.82,1200152,1385,345
47 | 46,14-Nov-14,Kill Dil,N,Romance,35,36,2375050,4358,555
48 | 47,15-Mar-13,Mere Dad Ki Maruti,N,Comedy,5,11,569711,1226,133
49 | 48,15-Mar-13,Jolly LLB,N,Comedy,10,32.7,1741424,3024,181
50 | 49,9-May-14,Koyelaanchal,HS,Action,8,2,591838,347,31
51 | 50,2-May-14,Kya Dilli Kya Lahore,HS, Drama ,7,0.5,492344,2018,64
52 | 51,4-Jul-14,Lekar Hum Deewana Dil,N,Romance,16,2.5,3998331,2307,497
53 | 52,22-Mar-13,Aatma,N,Thriller,8,8.78,841891,997,306
54 | 53,4-Apr-14,Main Tera Hero,LW,Comedy,40,55,4903941,12657,2139
55 | 54,22-Mar-13,Sona Spa,N,Thriller,10,0.24,79430,61,112
56 | 55,29-Mar-13,Himmatwala,N,Action,50,100,2501277,1,1
57 | 56,9-May-14,Manjunath,HS, Drama ,3,1.75,370106,991,35
58 | 57,22-Aug-14,Mardaani,N, Drama ,15,36,4376650,14408,1035
59 | 58,5-Apr-13,Chashme Baddoor,N,Comedy,20,42,166066,2897,283
60 | 59,5-Sep-14,Mary Kom,N, Drama ,15,104,6086811,26560,1647
61 | 60,12-Apr-13,Nautanki Saala,LW,Comedy,9,21.5,1208841,1442,342
62 | 61,9-May-14,Mastram,HS,Comedy,2,3,3557585,3927,1364
63 | 62,12-Apr-13,Commando-A One Man Army,LW,Action,22,21,3162465,7519,562
64 | 63,19-Apr-13,Ek Thi Daayan,FS,Thriller,24,45,1837437,3296,613
65 | 64,17-Jan-14,Miss Lovely,N, Drama ,5,0.7,808439,328,95
66 | 65,26-Apr-13,Aashiqui 2,N,Romance,12,110,2926673,4944,628
67 | 66,28-Mar-14,O Teri,N,Comedy,21,3.75,1511318,3004,278
68 | 67,31-Jan-14,One by Two,N,Romance,12,2.5,1016858,1081,212
69 | 68,2-May-14,Purani Jeans,HS,Romance,10,1.1,1537063,822,229
70 | 69,3-May-13,Shootout at Wadala,N,Thriller,45,75,769365,784,126
71 | 70,7-Mar-14,Queen,N, Drama ,25,61,2981381,5339,443
72 | 71,3-May-13,Bombay Talkies,N, Drama ,6,14.3,1528538,2766,328
73 | 72,21-Mar-14,Ragini MMS 2,N,Thriller,18,50,5416213,6807,2717
74 | 73,10-May-13,Go Goa Gone,N,Comedy,16,28.2,4321162,10126,964
75 | 74,29-Aug-14,Raja Natwarlal,N,Thriller,35,29.6,4050407,8197,1155
76 | 75,25-Apr-14,Revolver Rani,N, Drama ,24,10,1744602,3013,392
77 | 76,28-Feb-14,Shaadi Ke Side Effects,N,Comedy,50,37.8,4590539,9021,1465
78 | 77,17-May-13,Aurangzeb,N, Drama ,20,23,1901503,3394,754
79 | 78,3-Jan-14,Sholay 3D,N,Action,25,11,1001670,2110,268
80 | 79,15-Aug-14,Singham Returns,LW,Action,90,200,8287587,20011,2899
81 | 80,7-Nov-14,The Shaukeens,N,Comedy,35,32.5,3395361,8395,1204
82 | 81,24-May-13,Ishkq in Paris,LW,Romance,15,2.13,630506,463,83
83 | 82,28-Nov-14,Ungli,N, Drama ,5,13.8,2763615,6907,489
84 | 83,31-May-13,Yeh Jawaani Hai Deewani,N,Romance,70,311,5123217,13826,1640
85 | 84,10-Jan-14,Yaariyan,LW,Romance,19,40,3331082,4853,767
86 | 85,7-Jun-13,Yamla Pagla Deewana 2,N,Comedy,24,55,2775792,5457,681
87 | 86,9-May-14,Yeh Hai Bakrapur,HS,Comedy,5,2,1257416,758,923
88 | 87,28-Mar-14,Youngistaan,N, Drama ,26,6.75,603710,1082,177
89 | 88,14-Jun-13,Fukrey,N,Comedy,5,36.2,227912,222,39
90 | 89,28-Nov-14,Zid,N,Thriller,20,14.05,6698987,6750,2234
91 | 90,19-Dec-14,PK,HS, Drama ,85,735,13270623,43767,5693
92 | 91,21-Jun-13,Raanjhanaa,N,Romance,35,100,5470632,12413,1018
93 | 92,5-Dec-14,Action Jackson,N,Action,90,57.45,13279636,19240,3859
94 | 93,2-Oct-14,Bang Bang,FS,Action,140,340,15889811,61805,6283
95 | 94,21-Jun-13,Shortcut Romeo,N,Thriller,15,2.58,1131573,913,255
96 | 95,12-Sep-14,Creature,N,Thriller,24,20,2876498,5844,1537
97 | 96,28-Jun-13,Ghanchakkar,N,Comedy,30,38,2248046,5108,614
98 | 97,2-Oct-13,Besharam,LW,Comedy,85,35,5319606,10253,8165
99 | 98,5-Jul-13,Lootera,N,Romance,27,28,2581449,4416,819
100 | 99,18-Jul-14,Pizza 3D,N,Thriller,10,4.35,838943,1116,153
101 | 100,16-May-14,The Xpose,HS,Thriller,16,29,3707889,3512,1292
102 | 101,5-Jul-13,Policegiri,N,Action,30,16.8,1754266,1674,420
103 | 102,12-Jul-13,Bhaag Milkha Bhaag,N, Drama ,30,164,2635390,4622,456
104 | 103,25-Apr-14,Kaanchi,N, Drama ,31,4,2360968,2807,595
105 | 104,25-Apr-14,Samrat and Co.,N,Thriller,18,2,371133,532,180
106 | 105,12-Jul-13,B.A. Pass,N, Drama ,2,5.9,2240961,842,348
107 | 106,19-Jul-13,Ramaiya Vastavaiya,N,Romance,35,30,3788488,4111,687
108 | 107,14-Mar-14,Bewakoofiyan,FS,Romance,22,14,2850376,4764,1068
109 | 108,19-Jul-13,D Day,N,Action,30,24,1939868,1804,224
110 | 109,21-Feb-14,Darr@ The Mall,N,Thriller,15,4.5,1679872,255,51
111 | 110,26-Jul-13,Issaq,N,Romance,10,5,68511,76,16
112 | 111,7-Feb-14,Hasee To Phansee,N,Romance,27,35.5,2543707,6390,975
113 | 112,26-Jul-13,Bajatey Raho,N,Comedy,5,10,462722,513,77
114 | 113,3-Jan-14,Mr Joe B Carvalho,N,Comedy,15,4,268289,139,40
115 | 114,26-Jul-13,Nasha,N,Thriller,4,8,613448,288,188
116 | 115,11-Apr-14,Bhootnath Returns,LW,Thriller,29,53,3049275,10759,657
117 | 116,9-Aug-13,Chennai Express,FS,Comedy,75,395,1882346,7954,1593
118 | 117,15-Aug-13,Once Upon Ay Time In Mumbai Dobaara!,FS,Thriller,100,80,3077135,9200,2821
119 | 118,23-Aug-13,Madras Cafe,N,Thriller,35,42.7,1709945,2974,594
120 | 119,30-Aug-13,Satyagraha,N, Drama ,50,60,1422568,2736,394
121 | 120,1-Nov-13,Krrish 3,FS,Action,115,300,23171067,72070,11888
122 | 121,7-Mar-14,Total Siyappa,N, Drama ,10,12,1719399,3656,548
123 | 122,8-Nov-13,Satya 2,FS,Thriller,15,1.1,110315,89,26
124 | 123,15-Nov-13,Rajjo,FS,Romance,12,2,1718619,1606,1083
125 | 124,22-Nov-13,Gori Tere Pyaar Mein,N,Romance,30,12.65,3799253,8559,1871
126 | 125,29-Nov-13,Bullett Raja,N,Comedy,50,40,4015674,8084,2129
127 | 126,11-Oct-13,War Chhod Na Yaar,FS,Comedy,14,7.5,40450,33,7
128 | 127,11-Oct-13,Baat Ban Gayi,FS,Comedy,4,1.5,23186,9,4
129 | 128,16-Oct-13,Boss,FS, Drama ,50,45,6464704,22977,3396
130 | 129,18-Oct-13,Shahid,FS, Drama ,6,40,1148516,3155,396
131 | 130,25-Oct-13,Mickey Virus,N,Comedy,11,6,335807,574,129
132 | 131,6-Sep-13,Zanjeer,LW,Action,75,18.2,2539431,5634,922
133 | 132,6-Sep-13,Shuddh Desi Romance,LW,Romance,25,55,5142,1,1
134 | 133,13-Sep-13,Grand Masti,LW,Comedy,35,298,1795640,3015,721
135 | 134,16-May-14,Children Of War,HS, Drama ,12,22,577569,1640,156
136 | 135,20-Sep-13,Phata Poster Nikla Hero,N, Drama ,40,34,5176897,10672,1392
137 | 136,20-Sep-13,The Lunchbox,N, Drama ,10,85,1064854,4054,285
138 | 137,9-Jan-15,Tevar,N,Action,40,56,6132407,11241,1729
139 | 138,16-Jan-15,Alone,N,Thriller,18,25,8788913,18907,2940
140 | 139,23-Jan-15,Baby,N,Action,60,125,7968156,29246,2426
141 | 140,30-Jan-15,Hawaizaada,N, Drama ,25,30.25,2368404,8619,539
142 | 141,30-Jan-15,Khamoshiyan,N,Thriller,11,14.02,3094001,4599,997
143 | 142,6-Feb-15,Shamitabh,N, Drama ,40,38,2105508,5599,677
144 | 143,13-Feb-15,Roy,FS,Romance,40,58,7687797,18974,3229
145 | 144,20-Feb-15,Badlapur,FS,Action,23,77,4550051,10602,893
146 | 145,27-Feb-15,Dum Laga Ke Haisha,N,Comedy,15,30,3250917,8185,615
147 | 146,13-Mar-15,NH10,N,Thriller,13,32.1,5592977,15464,1513
148 | 147,20-Mar-15,Dilliwali Zaalim Girlfriend,N,Comedy,32,12,2316047,4289,807
149 | 148,20-Mar-15,Hunterrr,N,Comedy,5,11.89,4674795,3706,762
150 | 149,23-May-14,Kochadaiiyaan,HS,Action,150,120,4740727,13466,2649
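A minimal descriptive-analytics sketch for this file: average box-office collection per genre. The path is an assumption, and the Genre values carry stray spaces (e.g. " Drama "), so they are stripped first.

import pandas as pd

df = pd.read_csv("ml-data/bollywood.csv")
df["Genre"] = df["Genre"].str.strip()   # " Drama " -> "Drama"
print(df.groupby("Genre")["BoxOfficeCollection"].mean().round(1))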
--------------------------------------------------------------------------------
/ml-data/bollywoodmovies.csv:
--------------------------------------------------------------------------------
1 | production_cost
2 | 601
3 | 627
4 | 330
5 | 364
6 | 562
7 | 353
8 | 583
9 | 254
10 | 528
11 | 470
12 | 125
13 | 60
14 | 101
15 | 110
16 | 60
17 | 252
18 | 281
19 | 227
20 | 484
21 | 402
22 | 408
23 | 601
24 | 593
25 | 729
26 | 402
27 | 530
28 | 708
29 | 599
30 | 439
31 | 762
32 | 292
33 | 636
34 | 444
35 | 286
36 | 636
37 | 667
38 | 252
39 | 335
40 | 457
41 | 632
--------------------------------------------------------------------------------
/ml-data/breakups.csv:
--------------------------------------------------------------------------------
1 | Before_Breakup,After_Breakup
2 | 470,408
3 | 354,439
4 | 496,321
5 | 351,437
6 | 349,335
7 | 449,344
8 | 378,318
9 | 359,492
10 | 469,531
11 | 329,417
12 | 389,358
13 | 497,391
14 | 493,398
15 | 268,394
16 | 445,508
17 | 287,399
18 | 338,345
19 | 271,341
20 | 412,326
21 | 335,467
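A minimal sketch of a paired t-test on the Before/After columns above; the path and the two-sided test are assumptions.

import pandas as pd
from scipy import stats

df = pd.read_csv("ml-data/breakups.csv")
t_stat, p_value = stats.ttest_rel(df["Before_Breakup"], df["After_Breakup"])
print(f"t = {t_stat:.3f}, p = {p_value:.3f}")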
--------------------------------------------------------------------------------
/ml-data/con-new.csv:
--------------------------------------------------------------------------------
1 | Country,Age,Salary,Purchased
2 | India,38,68000,No
3 | France,43,45000,yes
4 | Germany,30,54000,No
5 | France,48,65000,No
6 | Germany,40,,Yes
7 | India,35,58000,Yes
8 | Germany,,53000,No
9 | France,49,79000,Yes
10 | India,50,88000,No
11 | France,37,77000,Yes
--------------------------------------------------------------------------------
/ml-data/country.csv:
--------------------------------------------------------------------------------
1 | Country,Corruption_Index,Gini_Index
2 | Hong Kong,77,53.7
3 | South Korea,53,30.2
4 | China,40,46.2
5 | Italy,47,32.7
6 | Mongolia,38,36.5
7 | Austria,75,27.6
8 | Norway,85,23.5
9 | UK,81,31.6
10 | Canada,82,33.7
11 | Germany,81,30.7
12 | Sweden,88,25.4
13 | Denmark,90,27.5
14 | France,69,30.1
15 | United States,74,40.8
16 | Russia ,29,40.1
17 | Portugal,62,34.2
18 | Romania,48,34
19 | Argentina,36,42.7
20 | Greece,44,34.2
21 | Thailand ,35,39.4
--------------------------------------------------------------------------------
/ml-data/curve.csv:
--------------------------------------------------------------------------------
1 | x,y
2 | 2,-1.999618
3 | 2,-1.999618
4 | 8,-3.978312
5 | 9,-1.969175
6 | 10,-0.95777
7 | 10,3.04223
8 | 11,-3.943845
9 | 14,3.11557
10 | 15,0.142095
11 | 21,-2.610555
12 | 25,2.656825
13 | 26,-0.26121
14 | 31,2.251935
15 | 31,2.251935
16 | 35,1.801555
17 | 35,-1.198445
18 | 37,-0.871723
19 | 41,-0.104375
20 | 42,4.112662
21 | 44,7.57874
22 | 47,3.361647
23 | 51,3.572515
24 | 54,10.61473
25 | 56,5.37716
26 | 58,9.196038
27 | 59,12.627275
28 | 63,12.503423
29 | 63,9.503423
30 | 64,12.01152
31 | 71,17.033895
32 | 71,17.033895
33 | 77,23.176157
34 | 77,22.176157
35 | 79,19.709455
36 | 81,24.322385
37 | 83,25.016963
38 | 84,25.8955
39 | 84,22.8955
40 | 85,22.795205
41 | 87,24.659127
42 | 88,31.623848
43 | 88,26.623848
44 | 92,30.707012
45 | 93,37.785133
46 | 94,34.88669
47 | 95,40.011935
48 | 98,37.532318
49 | 99,44.754835
50 | 99,43.754835
51 | 100,43.0023
52 |
--------------------------------------------------------------------------------
/ml-data/customerspends.csv:
--------------------------------------------------------------------------------
1 | Customer,Apparel,Beauty and Healthcare
2 | 1,21.1,0.7
3 | 2,15.23,5.5
4 | 3,5.22,18.6
5 | 4,31.1,1.8
6 | 5,6.12,21.5
7 | 6,14.5,8.2
8 | 7,8.5,16.2
9 | 8,26.5,2.2
10 | 9,4.34,17.7
11 | 10,13.75,7.3
12 | 11,5.2,16.2
13 | 12,14.2,2.9
14 | 13,4.4,19.4
15 | 14,4.25,15.5
16 | 15,22.3,0.9
17 | 16,7.9,18.8
18 | 17,13.4,4.2
19 | 18,30.6,1.9
20 | 19,14.4,6.28
21 | 20,6.25,9.98
--------------------------------------------------------------------------------
/ml-data/forecast.xls:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/forecast.xls
--------------------------------------------------------------------------------
/ml-data/healthdrink.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/healthdrink.xlsx
--------------------------------------------------------------------------------
/ml-data/onestop.csv:
--------------------------------------------------------------------------------
1 | discount_0,discount_10,discount_20
2 | 39,34,42
3 | 32,41,43
4 | 25,45,44
5 | 25,39,46
6 | 37,38,41
7 | 28,33,52
8 | 26,35,43
9 | 26,41,42
10 | 40,47,50
11 | 29,34,41
12 | 37,47,41
13 | 34,44,47
14 | 28,46,55
15 | 36,38,55
16 | 38,42,47
17 | 38,33,48
18 | 34,37,41
19 | 31,45,42
20 | 39,38,45
21 | 36,44,48
22 | 34,38,40
23 | 25,35,50
24 | 33,34,52
25 | 26,34,43
26 | 33,37,47
27 | 26,39,55
28 | 26,34,49
29 | 27,34,46
30 | 32,36,55
31 | 40,41,42
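
The three columns look like independent discount groups, so a one-way ANOVA is a natural check; a sketch assuming the path ml-data/onestop.csv.

import pandas as pd
from scipy import stats

df = pd.read_csv("ml-data/onestop.csv")
f_stat, p_value = stats.f_oneway(df["discount_0"], df["discount_10"], df["discount_20"])
print(f"F = {f_stat:.2f}, p = {p_value:.4f}")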
--------------------------------------------------------------------------------
/ml-data/passport.csv:
--------------------------------------------------------------------------------
1 | processing_time
2 | 16.00
3 | 16.00
4 | 30.00
5 | 37.00
6 | 25.00
7 | 22.00
8 | 19.00
9 | 35.00
10 | 27.00
11 | 32.00
12 | 34.00
13 | 28.00
14 | 24.00
15 | 35.00
16 | 24.00
17 | 21.00
18 | 32.00
19 | 29.00
20 | 24.00
21 | 35.00
22 | 28.00
23 | 29.00
24 | 18.00
25 | 31.00
26 | 28.00
27 | 33.00
28 | 32.00
29 | 24.00
30 | 25.00
31 | 22.00
32 | 21.00
33 | 27.00
34 | 41.00
35 | 23.00
36 | 23.00
37 | 16.00
38 | 24.00
39 | 38.00
40 | 26.00
41 | 28.00
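
A one-sample t-test sketch for this single column, assuming ml-data/passport.csv as the path; the 30-day benchmark is purely illustrative.

import pandas as pd
from scipy import stats

df = pd.read_csv("ml-data/passport.csv")
t_stat, p_value = stats.ttest_1samp(df["processing_time"], popmean=30)
print("mean:", df["processing_time"].mean())
print("t =", t_stat, "p =", p_value)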
--------------------------------------------------------------------------------
/ml-data/snd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/snd.png
--------------------------------------------------------------------------------
/ml-data/store.xls:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/arunp77/Machine-Learning/a75290e6472db2455bd6c750657df1208a2c34d0/ml-data/store.xls
--------------------------------------------------------------------------------
/ml-data/trainingscores.csv:
--------------------------------------------------------------------------------
1 | Student,Score Before Training,Score After Training
2 | 1,30,77
3 | 2,57,79
4 | 3,97,46
5 | 4,94,44
6 | 5,WC,89
7 | 6,26,84
8 | 7,39,89
9 | 8,32,84
10 | 9,41,74
11 | 10,45,81
12 | 11,95,44
13 | 12,39,86
14 | 13,89,45
15 | 14,40,56
16 | 15,79,80
17 | 16,40,75
18 | 17,14,63
19 | 18,44,28
20 | 19,22,88
21 | 20,45,61
22 | 21,59,50
23 | 22,73,76
24 | 23,45,100
25 | 24,57,32
26 | 25,80,79
27 | 26,70,74
28 | 27,44,38
29 | 28,52,53
30 | 29,45,46
31 | 30,53,42
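
Because row 6 holds the non-numeric value "WC", a paired before/after comparison needs a coercion step first; a sketch assuming ml-data/trainingscores.csv as the path.

import pandas as pd
from scipy import stats

df = pd.read_csv("ml-data/trainingscores.csv")
before = pd.to_numeric(df["Score Before Training"], errors="coerce")  # "WC" becomes NaN
after = df["Score After Training"]
mask = before.notna()
t_stat, p_value = stats.ttest_rel(before[mask], after[mask])
print("t =", t_stat, "p =", p_value)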
--------------------------------------------------------------------------------
/ml-data/vimana.csv:
--------------------------------------------------------------------------------
1 | Month,demand
2 | 1,457
3 | 2,439
4 | 3,404
5 | 4,392
6 | 5,403
7 | 6,371
8 | 7,382
9 | 8,358
10 | 9,594
11 | 10,482
12 | 11,574
13 | 12,704
14 | 13,486
15 | 14,509
16 | 15,537
17 | 16,407
18 | 17,523
19 | 18,363
20 | 19,479
21 | 20,516
22 | 21,656
23 | 22,558
24 | 23,647
25 | 24,864
26 | 25,610
27 | 26,677
28 | 27,609
29 | 28,673
30 | 29,400
31 | 30,443
32 | 31,503
33 | 32,688
34 | 33,602
35 | 34,629
36 | 35,823
37 | 36,671
38 | 37,487
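
A simple baseline for this monthly demand series, assuming ml-data/vimana.csv as the path; the 12-month window is an assumption, not something the file dictates.

import pandas as pd

df = pd.read_csv("ml-data/vimana.csv")
df["demand_ma12"] = df["demand"].rolling(window=12).mean()  # trailing 12-month moving average
print(df.tail())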
--------------------------------------------------------------------------------
/ml-data/wsb.csv:
--------------------------------------------------------------------------------
1 | Month,Sale Quantity,Promotion Expenses,Competition Promotion
2 | 1,3002666,105,1
3 | 2,4401553,145,0
4 | 3,3205279,118,1
5 | 4,4245349,130,0
6 | 5,3001940,98,1
7 | 6,4377766,156,0
8 | 7,2798343,98,1
9 | 8,4303668,144,0
10 | 9,2958185,112,1
11 | 10,3623386,120,0
12 | 11,3279115,125,0
13 | 12,2843766,102,1
14 | 13,4447581,160,0
15 | 14,3675305,130,0
16 | 15,3477156,130,0
17 | 16,3720794,140,0
18 | 17,3834086,167,1
19 | 18,3888913,148,1
20 | 19,3871342,150,1
21 | 20,3679862,129,0
22 | 21,3358242,120,0
23 | 22,3361488,122,0
24 | 23,3670362,135,0
25 | 24,3123966,110,1
26 | 25,4634047,165,0
27 | 26,3772879,129,1
28 | 27,3187110,120,1
29 | 28,3093683,112,1
30 | 29,4557363,162,0
31 | 30,3816956,140,1
32 | 31,4410887,160,0
33 | 32,3694713,139,0
34 | 33,3822669,141,1
35 | 34,3689286,136,0
36 | 35,3728654,130,1
37 | 36,4732677,168,0
38 | 37,3216483,121,1
39 | 38,3453239,128,0
40 | 39,5431651,170,0
41 | 40,4241851,160,0
42 | 41,3909887,151,1
43 | 42,3216438,120,1
44 | 43,4222005,152,0
45 | 44,3621034,125,0
46 | 45,5162201,170,0
47 | 46,4627177,160,0
48 | 47,4623945,168,0
49 | 48,4599368,166,0
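
Sale Quantity against Promotion Expenses and Competition Promotion is a natural multiple-regression setup; a sketch assuming ml-data/wsb.csv as the path and an ordinary least squares fit via statsmodels.

import pandas as pd
import statsmodels.api as sm

df = pd.read_csv("ml-data/wsb.csv")
X = sm.add_constant(df[["Promotion Expenses", "Competition Promotion"]])
model = sm.OLS(df["Sale Quantity"], X).fit()
print(model.summary())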
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "machine-learning",
3 | "version": "1.0.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "machine-learning",
9 | "version": "1.0.0",
10 | "license": "MIT"
11 | }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "machine-learning",
3 | "version": "1.0.0",
4 | "description": "This Repository contains the Machine learning related files with fundamental theory is included in the package.",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "exit 0"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "git+https://github.com/arunp77/Machine-Learning.git"
12 | },
13 | "keywords": [
14 | "Machine-learning"
15 | ],
16 | "author": "Arun Kumar Pandey",
17 | "license": "MIT",
18 | "bugs": {
19 | "url": "https://github.com/arunp77/Machine-Learning/issues"
20 | },
21 | "homepage": "https://github.com/arunp77/Machine-Learning#readme",
22 | "publishConfig": {
23 | "@arunp77:registry": "https://npm.pkg.github.com/"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------