├── .gitignore ├── 00_CourseIntro ├── 00_SelfPresentation.pptx ├── 10_CourseScope.pptx ├── 20_HowDoProgrammingLanguagesWork.pptx ├── 30_WhatPythonIsAndWhyYouShouldLearnIt.pptx ├── 40_UseCases.pptx ├── 50_DesignPhilosopy.pptx ├── 60_PackageManager.pptx ├── 99_Motivation.pptx └── motivation.py ├── 10_Basics ├── 000_SystemSetup │ └── SystemSetup.pptx ├── 010_data_types │ └── data_types.py ├── 020_flow_control │ ├── 20_conditional_expression.py │ ├── 30_dice.py │ └── 40_weather.py ├── 030_lists_loops_sets │ ├── 20_list_acronyms.py │ ├── 30_sets_intro.py │ ├── 50_loops_intro.py │ ├── 60_loop_exercises.py │ ├── 70_exercise_100doors.py │ ├── 80_exercise_fizzbuzz.py │ ├── 90_fibonacci_exercise.py │ └── 90_fibonacci_solution.py ├── 035_dictionaries │ ├── 20_acronyms_dict.py │ ├── 30exercise_dicts_general.py │ ├── 40_exercise_iss_location.py │ └── 40_solution_iss_location.py ├── 040_functions │ ├── 20_functions_intro.py │ ├── 21_exercise_clickbait.py │ ├── 22_exercise_prime_number.py │ ├── 23_exercises_functions.py │ ├── 25_exercise_caesar.py │ ├── 26_exercise_palindrome.py │ ├── 30_functions_args_kwargs.py │ ├── 35_kwargs.py │ └── 50_lambda_funcs.py ├── 050_file_handling │ ├── 20_os_intro.py │ ├── 30_working_dirs.py │ ├── 40_pickle_save_load.py │ ├── 50_zip_file.py │ ├── 60_exercise_file_handling.py │ ├── my_archive.zip │ ├── my_list.pkl │ └── work_files │ │ ├── 1.csv │ │ ├── 13.csv │ │ ├── 144.csv │ │ ├── 2.csv │ │ ├── 21.csv │ │ ├── 3.csv │ │ ├── 34.csv │ │ ├── 5.csv │ │ ├── 55.csv │ │ ├── 8.csv │ │ └── 89.csv └── 060_numerical_calc │ ├── 20_numpy_arrays.py │ ├── 30_numpy_operations.py │ ├── 40_numpy_ufuncs.py │ ├── 50_numpy_1_exercise.py │ ├── 50_numpy_1_solution.py │ ├── 60_numpy_2_exercise.py │ └── 60_numpy_2_solution.py ├── 20_DataHandling ├── 010_data_handling │ ├── 10_pandas_start.py │ ├── 20_filtering.py │ ├── 25_eda.py │ ├── 30_modify.py │ ├── 60_plotting.py │ └── factbook.csv ├── 020_data_im_export │ ├── export.py │ ├── languages.json │ └── languages.xlsx ├── 030_data_visualisation │ ├── Diamonds.csv │ ├── altair_intro.py │ ├── dash │ │ ├── Google_Stock_Price_Test.csv │ │ ├── Google_Stock_Price_Train.csv │ │ ├── helloWorld.py │ │ └── lineGraph.py │ ├── folium_intro.py │ ├── ggplot_intro.py │ ├── matplotlib_intro.py │ ├── seaborn_intro.py │ └── temp-plot.html └── 040_data_manipulation │ ├── 00_DataAggregation.pdf │ ├── 10_grouping.py │ ├── 15_DataReshaping.pdf │ ├── 20_reshape.py │ ├── 25_JoiningDatasets.pdf │ ├── 30_join.py │ ├── data │ ├── df_cut_ideal_premium.csv │ └── diamonds.csv │ ├── diamonds_cut.png │ ├── diamonds_exercise.py │ ├── diamonds_solution.py │ ├── exercise_manipulation.py │ ├── exercise_manipulation_solution.py │ ├── file_example_XLS_10.xls │ ├── file_example_XLS_10.xlsx │ └── mito.py ├── 30_object_oriented_prog ├── 010_intro.py ├── 020_carddeck.py ├── 030_inheritance.py ├── 040_stats_exercise.py └── 040_stats_solution.py ├── 40_MachineLearning ├── 00_Introduction │ ├── 10_HighLevelCourseOverview.pptx │ ├── 20_AIOverview.pptx │ ├── 30_MachineLearning101.pptx │ └── 40_Models.pptx ├── 10_Regression │ ├── 025_univariate_interactive.py │ ├── 030_univariate_regression.py │ ├── 040_hubble_exercise.py │ ├── 040_hubble_solution.py │ ├── 060_polynomial_regression.py │ └── 080_multivariate_regression.py ├── 20_ModelPrepAndEval │ ├── 025_train_val_test_interactive.txt │ ├── 030_train_val_test_split.py │ └── 050_resampling_techniques.py ├── 30_Regularization │ └── 020_regularization.py ├── 40_Classification │ ├── 040_ROCCurve_Interactive.txt │ ├── 110_logistic_regression.py │ ├── 
210_decision_trees.py │ ├── 310_random_forest.py │ ├── 410_svm.py │ ├── 510_ensemble_xbg.py │ └── diabetes.png ├── 50_AssociationRules │ └── 30_apriori_intro.py ├── 60_Clustering │ ├── 30_kmeans_lab.py │ ├── 40_kmeans_exercise.py │ ├── 50_kmeans_solution.py │ ├── 70_hierarchical_lab.py │ └── 90_dbscan_lab.py ├── 70_DimensionalityReduction │ ├── 20_fa_lab.py │ ├── 40_PCA_Lab.py │ ├── 50_pca_exercise.py │ ├── 60_pca_solution.py │ ├── 80_t_SNE_lab.py │ ├── 90_capstone_clust_dimred.py │ └── data │ │ └── marketing_campaign.csv ├── 80_ReinforcementLearning │ ├── 30_ucb_interactive.py │ └── 40_ucb_lab.py └── data │ ├── Hubble.csv │ ├── OnlineRetail.xlsx │ ├── Starwars.csv │ ├── diabetes.csv │ ├── direct_marketing.csv │ ├── housing.csv │ └── winequality-red.csv ├── Overview_PythonUltimate.xlsx └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /00_CourseIntro/00_SelfPresentation.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/00_SelfPresentation.pptx -------------------------------------------------------------------------------- /00_CourseIntro/10_CourseScope.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/10_CourseScope.pptx -------------------------------------------------------------------------------- /00_CourseIntro/20_HowDoProgrammingLanguagesWork.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/20_HowDoProgrammingLanguagesWork.pptx -------------------------------------------------------------------------------- /00_CourseIntro/30_WhatPythonIsAndWhyYouShouldLearnIt.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/30_WhatPythonIsAndWhyYouShouldLearnIt.pptx -------------------------------------------------------------------------------- /00_CourseIntro/40_UseCases.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/40_UseCases.pptx -------------------------------------------------------------------------------- /00_CourseIntro/50_DesignPhilosopy.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/50_DesignPhilosopy.pptx -------------------------------------------------------------------------------- /00_CourseIntro/60_PackageManager.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/60_PackageManager.pptx -------------------------------------------------------------------------------- /00_CourseIntro/99_Motivation.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/00_CourseIntro/99_Motivation.pptx -------------------------------------------------------------------------------- 
/00_CourseIntro/motivation.py: -------------------------------------------------------------------------------- 1 | #%% 2 | from transformers import pipeline 3 | # %% 4 | gpt2_generator = pipeline('text-generation', model='gpt2') 5 | 6 | #%% 7 | sentences = gpt2_generator("bert and lea are ", do_sample=True, top_k=5, temperature=0.9, max_length=180, num_return_sequences=3) 8 | # %% 9 | sentences 10 | # %% 11 | -------------------------------------------------------------------------------- /10_Basics/000_SystemSetup/SystemSetup.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/10_Basics/000_SystemSetup/SystemSetup.pptx -------------------------------------------------------------------------------- /10_Basics/010_data_types/data_types.py: -------------------------------------------------------------------------------- 1 | # %% numbers 2 | value_int = 1 # int 3 | value_float = 1.5 # float 4 | 5 | # %% type conversion 6 | int(value_float) 7 | 8 | # %% strings 9 | my_first_name = 'Bert' 10 | print(my_first_name) 11 | 12 | my_last_name = 'Gollnick' 13 | 14 | # %% string concatenation 15 | # print(my_first_name + my_last_name) 16 | print(my_first_name + ' ' + my_last_name) 17 | print(f"{my_first_name} {my_last_name}") 18 | 19 | # %% multiple assignments 20 | x, y = 8, 5 21 | 22 | -------------------------------------------------------------------------------- /10_Basics/020_flow_control/20_conditional_expression.py: -------------------------------------------------------------------------------- 1 | #%% reminder: conditional statement 2 | my_condition = False 3 | if my_condition: 4 | print('true') 5 | else: 6 | print('false') 7 | 8 | # alternatively with conditional expression: 9 | "true" if my_condition else "false" 10 | # %% 11 | -------------------------------------------------------------------------------- /10_Basics/020_flow_control/30_dice.py: -------------------------------------------------------------------------------- 1 | #%% learnings: 2 | # - if, else 3 | # - f-string 4 | 5 | # %% package import 6 | import random 7 | 8 | # %% 9 | roll = random.randint(1, 6) 10 | # %% 11 | guess = int(input('guess dice roll:')) 12 | 13 | if guess == roll: 14 | print('match') 15 | else: 16 | print(f'Wrong.
You guessed {guess}, dice rolled {roll}') 17 | 18 | # %% -------------------------------------------------------------------------------- /10_Basics/020_flow_control/40_weather.py: -------------------------------------------------------------------------------- 1 | # Learnings: 2 | # comparators in Python 3 | # <, <=, ==, >=, >, != 4 | 5 | # %% 6 | temperature = -30 # deg C 7 | 8 | if temperature > 30: 9 | # all indented statements are code block 10 | # usually: 4 spaces 11 | print('stay inside') 12 | elif temperature < 0: 13 | print('stay inside') 14 | else: 15 | print('go outside') 16 | 17 | # %% combined conditional statement 18 | if temperature < 0 or temperature > 30: 19 | print('stay inside') 20 | else: 21 | print('go outside') 22 | 23 | 24 | # %% not-keyword 25 | temperature = 25 26 | forecast = 'sunny' 27 | if temperature > 10 and not forecast == 'rain': 28 | print('go outside') 29 | else: 30 | print('stay inside') 31 | # %% boolean variable 32 | is_raining = True 33 | if is_raining: 34 | print('stay inside') 35 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/20_list_acronyms.py: -------------------------------------------------------------------------------- 1 | # %% Learnings: 2 | # create list from scratch 3 | # append to list 4 | # for loop over list 5 | 6 | # %% create a list one-shot 7 | 8 | acronyms = ['Bob', 'Alice', 'Eve'] 9 | 10 | # %% create a list from scratch and add values 11 | acronyms = [] 12 | acronyms.append('Bob') 13 | acronyms.append('Alice') 14 | acronyms.append('Eve') 15 | 16 | 17 | acronyms 18 | # %% 19 | acronyms.remove('Eve') 20 | acronyms 21 | # %% check if value exists in list 22 | search_name = 'Alice' 23 | if search_name in acronyms: 24 | print(f'{search_name} is in list') 25 | 26 | # %% task: print each name in separate line 27 | for acronym in acronyms: 28 | print(acronym) 29 | 30 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/30_sets_intro.py: -------------------------------------------------------------------------------- 1 | # %% set creation 2 | s1 = set([1,2,3,4,5,6]) 3 | print(s1) 4 | 5 | s2 = set([1,2,2,3,4,4,5,6,6]) 6 | print(s2) 7 | 8 | s3 = set([3,4,5,6,6,6,1,1,2]) 9 | print(s3) 10 | 11 | # %% 12 | s4 = {"apple", "orange", "banana"} 13 | print(s4) 14 | 15 | s4.add('pineapple') 16 | print(s4) 17 | # %% set operations 18 | s5 = {1,2,3,4} 19 | s6 = {3,4,5,6} 20 | 21 | # union 22 | print(s5 | s6) 23 | print(s5.union(s6)) 24 | 25 | # %% intersection 26 | print(s5 & s6) 27 | print(s5.intersection(s6)) 28 | 29 | # %% difference 30 | print(s5 - s6) 31 | print(s5.difference(s6)) 32 | 33 | # %% issubset 34 | print(s5 <= s6) 35 | print(s5.issubset(s6)) 36 | 37 | s7 = {1,2,3} 38 | s8 = {1,2,3,4,5} 39 | 40 | print(s7 <= s8) 41 | print(s7.issubset(s8)) 42 | 43 | # %% 44 | my_set = {'Bob', 'Alice', 'Eve', 'Bob'} 45 | my_set 46 | # %% 47 | 48 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/50_loops_intro.py: -------------------------------------------------------------------------------- 1 | # %% 2 | for char in 'Hamburg': 3 | print(char) 4 | # %% 5 | for i in range(1, 10): 6 | print(i) 7 | # %% 8 | for i in range(10): 9 | print(i) 10 | 11 | # %% changed range start, end, step 12 | for i in range(1, 11, 2): 13 | print(i) 14 | 15 | # %% negative range 16 | for i in range(3, 0, -1): 17 | print(i) 18 | # %% iterate over list 19 | names = ['Albert', 'Leonardo', 
'Alan'] 20 | for name in names: 21 | print(name) 22 | 23 | # %% Countdown 24 | for i in range(10, -1, -1): 25 | print(i) 26 | if i==0: 27 | print('TakeOff') 28 | # %% loop with else 29 | for i in range(10, 0, -1): 30 | print(i) 31 | else: # executed when loop finishes normally 32 | print('TakeOff') 33 | 34 | # %% for loop with if / else 35 | for i in range(2, 8): 36 | if i % 2 == 0: 37 | print(f'{i} is even') 38 | else: 39 | print(f'{i} is odd') 40 | # %% exit the loop early with "break" 41 | values = [0, 12, 23, 42, 54, 99] 42 | for i in values: 43 | if i == 42: 44 | break 45 | else: 46 | print(i) 47 | else: 48 | print('loop finished normally') 49 | 50 | # %% skip the current iteration and jump to next one 51 | for char in "Hamburg": 52 | if char == 'b': 53 | continue 54 | else: 55 | print(char) 56 | 57 | # %% 58 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/60_loop_exercises.py: -------------------------------------------------------------------------------- 1 | # %% E1: sum all expenses from list of expenses 2 | # sample: expenses = [1, 5.5, 8, 25, 3] 3 | # expected result: total: 42.5 4 | 5 | expenses = [1, 5.5, 8, 25, 3] 6 | total = 0 7 | for i in expenses: 8 | total += i 9 | 10 | print(f'total: {total}') 11 | 12 | # %% show all expenses in separate line 13 | for i in range(len(expenses)): 14 | print(expenses[i]) 15 | 16 | # %% 17 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/70_exercise_100doors.py: -------------------------------------------------------------------------------- 1 | #%% 2 | #%% packages 3 | import pandas as pd 4 | import numpy as np 5 | 6 | #%% init the doors list (all doors closed) 7 | cnt_doors = 101 8 | doors = [False] * cnt_doors 9 | # %% 10 | for i in range(1, cnt_doors): 11 | for j in range(i, cnt_doors, i): 12 | doors[j] = not doors[j] # switch the door 13 | # %% check open doors 14 | for i in range(1, cnt_doors): 15 | if doors[i] == True: 16 | print(i) 17 | 18 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/80_exercise_fizzbuzz.py: -------------------------------------------------------------------------------- 1 | #%% 2 | count_fizzbuzz = 0 3 | for i in range(1, 101): 4 | if i % 3 == 0 and i % 5 == 0: 5 | print("fizzbuzz") 6 | count_fizzbuzz += 1 7 | elif i % 3 == 0: 8 | print("fizz") 9 | elif i % 5 == 0: 10 | print("buzz") 11 | else: 12 | print(i) 13 | # %% 14 | count_fizzbuzz 15 | # %% 16 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/90_fibonacci_exercise.py: -------------------------------------------------------------------------------- 1 | #%% Fibonacci Sequence 2 | # -------------------- 3 | # Do you want to find out how many rabbits there are after x generations? Fibonacci found the mathematical number series that describes it. This series can be used for modeling other growth processes as well, e.g. plants. Besides this, we learn about its connection to the Golden Ratio, and a lot about handling vectors. 4 | 5 | # First you should know what this series looks like and how it is calculated. The first six elements are 6 | # 1, 1, 2, 3, 5, 8, … 7 | 8 | 9 | #%% Task: Create a function which returns the first n Fibonacci numbers!
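# Example (based on the sequence above): fibonacci(6) should return [1, 1, 2, 3, 5, 8]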
10 | 11 | -------------------------------------------------------------------------------- /10_Basics/030_lists_loops_sets/90_fibonacci_solution.py: -------------------------------------------------------------------------------- 1 | #%% Fibonacci Sequence 2 | # -------------------- 3 | # Do you want to find out how many rabbits there are after x generations? Fibonacci found the mathematical number series that describes it. This series can be used for modeling other growth processes as well, e.g. plants. Besides this, we learn about its connection to the Golden Ratio, and a lot about handling vectors. 4 | 5 | # First you should know what this series looks like and how it is calculated. The first six elements are 6 | # 1, 1, 2, 3, 5, 8, … 7 | 8 | 9 | #%% Task: Create a function which returns the first n Fibonacci numbers! 10 | 11 | def fibonacci(element_nr = 10): 12 | fib = [1, 1] 13 | for i in range(3, element_nr+1): 14 | next_element = fib[-1] + fib[-2] 15 | fib.append(next_element) 16 | return fib 17 | 18 | fibonacci(4) 19 | # %% 20 | -------------------------------------------------------------------------------- /10_Basics/035_dictionaries/20_acronyms_dict.py: -------------------------------------------------------------------------------- 1 | # %% 2 | # can hold everything 3 | # key: value 4 | 5 | # %% creation in one shot 6 | acronyms = { 7 | 'LIFE': 'Learning is fun and exciting', 8 | 'MATH': 'Mental abuse to humans', 9 | 'LIVE': 'Learning Important Values Everyday' 10 | } 11 | 12 | # %% sequential creation 13 | acronyms = {} 14 | acronyms['LIFE'] = 'Learning is fun and exciting' 15 | print(acronyms) 16 | del acronyms['LIFE'] 17 | acronyms 18 | # %% accessing non-existing key -> key error 19 | acronyms['NON'] 20 | 21 | 22 | # %% avoid key error with get 23 | definition = acronyms.get('NON') # returns None instead of error 24 | # None...absence of value, evaluates to False in conditional 25 | 26 | # %% 27 | if definition: 28 | print(definition) 29 | else: 30 | print('no such key') 31 | # %% 32 | for key, value in acronyms.items(): 33 | print(f'{key} has value: {value}') 34 | # %% 35 | -------------------------------------------------------------------------------- /10_Basics/035_dictionaries/30exercise_dicts_general.py: -------------------------------------------------------------------------------- 1 | #%% E1: create a dictionary and add a key 2 | # Sample: {'name': 'your name', 'age': 43} 3 | # Expected Result: {'name': 'your name', 'age': 43, 'speaks_english': True} 4 | 5 | #%% E2: Write a script to create a dictionary where keys are numbers between 1 and 15 (both included) and values are square of keys.
6 | # expected result: {1: 1, 2: 4, 3: 9, 4: 16, 5: 25, 6: 36, 7: 49, 8: 64, 9: 81, 10: 100, 11: 121, 12: 144, 13: 169, 14: 196, 15: 225} 7 | 8 | #%% E3: multiply all values from the dictionary 9 | # sample: {'key1': 10, 'key2': 2, 'key3': 10} 10 | # expected result: 200 11 | 12 | #%% E4: find out whether a dictionary is empty 13 | -------------------------------------------------------------------------------- /10_Basics/035_dictionaries/40_exercise_iss_location.py: -------------------------------------------------------------------------------- 1 | # %% 2 | import requests 3 | # %% 4 | api_endpoint = 'http://api.open-notify.org/iss-now.json' 5 | 6 | 7 | # %% JSON 8 | # JavaScript Object Notation 9 | response = requests.get(api_endpoint).json() 10 | print(response) 11 | 12 | #%% Exercise 1: 13 | # extract the timestamp 14 | 15 | # %% Exercise 2: 16 | # extract the longitude of ISS 17 | 18 | #%% Exercise 3: (Bonus) 19 | # convert timestamp (Unix Epoch) to readable format 20 | -------------------------------------------------------------------------------- /10_Basics/035_dictionaries/40_solution_iss_location.py: -------------------------------------------------------------------------------- 1 | # %% 2 | import requests 3 | # %% 4 | api_endpoint = 'http://api.open-notify.org/iss-now.json' 5 | 6 | 7 | # %% JSON 8 | # JavaScript Object Notation 9 | response = requests.get(api_endpoint).json() 10 | print(response) 11 | 12 | #%% Exercise 1: 13 | # extract the timestamp 14 | response['timestamp'] 15 | # %% Exercise 2: 16 | # extract the longitude of ISS 17 | response['iss_position']['longitude'] 18 | #%% Exercise 3: (Bonus) 19 | # convert timestamp (Unix Epoch) to readable format 20 | import datetime 21 | datetime.datetime.fromtimestamp(1662480881) -------------------------------------------------------------------------------- /10_Basics/040_functions/20_functions_intro.py: -------------------------------------------------------------------------------- 1 | #%% most simple function 2 | def hello_world(): 3 | print('Hello World') 4 | 5 | hello_world() 6 | # %% typically with return statement 7 | def hello_world2(): 8 | return 'Hello World' 9 | 10 | x = hello_world2() 11 | x 12 | 13 | # %% with arguments 14 | def hello_world3(person): 15 | return f"Hello {person}" 16 | 17 | hello_world3(12) 18 | 19 | 20 | # %% with arguments and type hints 21 | def hello_world4(person: str) -> str: 22 | return f"Hello {person}" 23 | 24 | hello_world4('Lea') 25 | # %% 26 | -------------------------------------------------------------------------------- /10_Basics/040_functions/21_exercise_clickbait.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import random 3 | #%% 4 | def clickBaitGenereator(): 5 | PERSONS = ['Germans', 'Bavarians', 'Italians'] 6 | NOUNS = ['Cats', 'Dogs', 'Diets', 'Robots'] 7 | return f"What {random.choice(PERSONS)} don\'t want you to know about {random.choice(NOUNS)}!" 
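# possible (random) output, e.g.: "What Bavarians don't want you to know about Robots!"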
8 | 9 | #%% test 10 | clickBaitGenereator() 11 | # %% 12 | -------------------------------------------------------------------------------- /10_Basics/040_functions/22_exercise_prime_number.py: -------------------------------------------------------------------------------- 1 | # %% 2 | # prime if it has exactly two factors: 1 and the number itself (n) 3 | 4 | #%% Function 5 | def isPrime(value): 6 | if value < 2: # 0 and 1 are not prime 7 | return False 8 | for i in range(2, value): 9 | if (value % i) == 0: 10 | return False 11 | return True 12 | 13 | # %% Test 14 | isPrime(15) 15 | # %% -------------------------------------------------------------------------------- /10_Basics/040_functions/23_exercises_functions.py: -------------------------------------------------------------------------------- 1 | # %% EXERCISE 1 2 | def name_age(name, age): 3 | return f"{name} is {age} years old." 4 | 5 | #%% TEST 1 6 | name_age('Kiki', 3) 7 | # %% 8 | #%% EXERCISE 2 9 | def multiply(l): 10 | product = 1 11 | for i in l: 12 | product *= i 13 | return product 14 | 15 | #%% TEST 2 16 | my_list = [2, 4, 8] 17 | multiply(my_list) 18 | 19 | #%% EXERCISE 3 20 | def currencyConversion(value, ratio = 1.1): 21 | return value * ratio 22 | # %% TEST 3 23 | currencyConversion(value = 20, ratio = 1.5) 24 | 25 | # %% 26 | currencyConversion(value = 20) 27 | # %% EXERCISE 4 28 | def simpleMath(a, b): 29 | sum = a + b 30 | diff = a - b 31 | mul = a * b 32 | div = a / b 33 | return sum, diff, mul, div 34 | 35 | # %% TEST 4 36 | simpleMath(12, 5) 37 | 38 | #%% EXERCISE 5 39 | def generateList(start, end): 40 | return list(range(start, end+1)) 41 | # %% 42 | generateList(start = 10, end =20) 43 | # %% EXERCISE 6 44 | def findMaxVal(l): 45 | return max(l) 46 | 47 | 48 | # %% 49 | findMaxVal([2, 8, 10, -5, 11, 3]) 50 | # %% EXERCISE 7 51 | def odd_or_even(val): 52 | return 'even' if val % 2 == 0 else 'odd' 53 | 54 | # %% TEST 7 55 | odd_or_even(7) 56 | # %% EXERCISE 8 57 | import math 58 | def rotor_area(val): 59 | return math.pi * val**2 60 | # %% 61 | rotor_area(82) 62 | # %% 63 | import pandas as pd 64 | df = pd.DataFrame({'Student': ['Stuart', 'Bob', 'Kevin'], 65 | 'Sport': [2, 3, 3], 'Art': [4, 2,1]}) 66 | # %% 67 | -------------------------------------------------------------------------------- /10_Basics/040_functions/25_exercise_caesar.py: -------------------------------------------------------------------------------- 1 | #%% 2 | # wikipedia: In cryptography, a Caesar cipher, also known as Caesar's cipher, the shift cipher, Caesar's code or Caesar shift, is one of the simplest and most widely known encryption techniques. It is a type of substitution cipher in which each letter in the plaintext is replaced by a letter some fixed number of positions down the alphabet. For example, with a left shift of 3, D would be replaced by A, E would become B, and so on. The method is named after Julius Caesar, who used it in his private correspondence. 3 | 4 | def caesar(text, offset = 3, mode='encrypt'): 5 | """Encrypt or decrypt a Caesar cipher 6 | 7 | Args: 8 | text (str): The text to en/decrypt 9 | offset (int, optional): The offset in the alphabet. Defaults to 3. 10 | mode (str, optional): can be either 'encrypt' or 'decrypt'. Defaults to 'encrypt'.
11 | 12 | Returns: 13 | str: encrypted/decrypted text 14 | """ 15 | ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 16 | result = '' 17 | text = text.upper() 18 | 19 | for i in range(len(text)): 20 | letter_pos_in_alphabet = ALPHABET.index(text[i]) 21 | 22 | letter_pos_with_offset = (letter_pos_in_alphabet + offset) % 26 if mode=='encrypt' else (letter_pos_in_alphabet - offset) % 26 23 | result = f"{result}{ALPHABET[letter_pos_with_offset]}" 24 | return result 25 | #%% test encrypt 26 | caesar(text='hello', offset=3, mode='encrypt') 27 | # %% 28 | caesar(text='KHOOR', offset = 3, mode='decrypt') 29 | # %% 30 | -------------------------------------------------------------------------------- /10_Basics/040_functions/26_exercise_palindrome.py: -------------------------------------------------------------------------------- 1 | # %% Intro 2 | # A palindrome is a word identical forwards and backwards 3 | # examples: civic, deed, rotor 4 | # special case: 'race car' is a palindrome if space is not regarded 5 | 6 | 7 | # %% Function 8 | import re 9 | def isPalindrome(word): 10 | forward = ''.join(char for char in word.lower() if char.isalpha()) 11 | print(forward) 12 | backward = forward[::-1] 13 | return forward == backward 14 | # %% Test 15 | isPalindrome('Race cars') 16 | # %% 17 | -------------------------------------------------------------------------------- /10_Basics/040_functions/30_functions_args_kwargs.py: -------------------------------------------------------------------------------- 1 | #%% positional arguments args 2 | # args is convention 3 | # args is a tuple passing the function parameters 4 | 5 | 6 | def my_sum(*args): 7 | print(args) 8 | 9 | # %% 10 | import numpy as np 11 | def my_sum(*args): 12 | return np.sum(args) 13 | 14 | #%% test it 15 | my_sum(1,5) 16 | 17 | # %% kwargs 18 | def pass_kwargs(**kwargs): 19 | print(f"kwargs:, {kwargs}") 20 | for kw in kwargs: 21 | print(kw, '-', kwargs[kw]) 22 | 23 | pass_kwargs(a=5, b=15, c=25) 24 | 25 | # %% kwargs 26 | def colors(green, red, **kwargs): 27 | print(f'green: {green}') 28 | print(f'red: {red}') 29 | 30 | kw = {'green': 1, 'red': 2} 31 | colors(**kw) 32 | 33 | 34 | # %% 35 | -------------------------------------------------------------------------------- /10_Basics/040_functions/35_kwargs.py: -------------------------------------------------------------------------------- 1 | #%% kwargs - keyword arguments 2 | # parameters passed as dictionary 3 | # 4 | #%% 5 | def my_volume(**kwargs): 6 | print(kwargs) 7 | 8 | my_volume(height=5, width=2, depth = 3) 9 | 10 | # %% 11 | def convert_usd_to_aud(amount, rate=0.75): 12 | return amount / rate 13 | 14 | # %% 15 | # 3. Create a new function convert_and_sum_list which will take a list of amounts, convert them to AUD, and return the sum: 16 | 17 | def convert_and_sum_list(usd_list, rate=0.75): 18 | total = 0 19 | for amount in usd_list: 20 | total += convert_usd_to_aud(amount, rate=rate) 21 | return total 22 | 23 | print(convert_and_sum_list([1, 3])) 24 | 25 | # %% 26 | # Note that the function convert_and_sum_list didn’t need the rate argument. It simply needed to pass it through to the convert_usd_to_aud function. Imagine that instead of one argument, we had 10 that needed to be passed through. There will be a lot of unnecessary code. Instead, we will use the kwargs dictionary. 27 | 28 | # 5. 
Add the following function to conversion.py: 29 | def convert_and_sum_list_kwargs(usd_list, **kwargs): 30 | total = 0 31 | for amount in usd_list: 32 | total += convert_usd_to_aud(amount, **kwargs) 33 | return total 34 | 35 | print(convert_and_sum_list_kwargs([1, 3], rate=0.8)) -------------------------------------------------------------------------------- /10_Basics/040_functions/50_lambda_funcs.py: -------------------------------------------------------------------------------- 1 | #%% lambda function 2 | # can be used as anonymous function 3 | # less overhead, e.g. no name 4 | # keyword lambda 5 | # bound variable 6 | # body 7 | 8 | 9 | #%% one argument 10 | multiply_by_ten = (lambda x: x * 10) 11 | multiply_by_ten(5) 12 | 13 | # %% multiple arguments 14 | x = lambda a, b : a / b 15 | x(15, 3) 16 | 17 | 18 | # %% can be used as anonymous function 19 | def myfunc(n): 20 | return lambda a : a * n 21 | 22 | mydoubler = myfunc(2) 23 | mydoubler(10) 24 | 25 | # %% can be combined with filter 26 | # example: keep only numbers divisible by 5 without remainder 27 | l = [1, 2, 5, 10] 28 | list(filter(lambda x: (x % 5 == 0), l)) 29 | 30 | # %% 31 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/20_os_intro.py: -------------------------------------------------------------------------------- 1 | #%% 2 | import os 3 | 4 | #%% get cwd 5 | os.getcwd() 6 | # %% get folder content 7 | os.listdir('.') 8 | # %% create subfolder 9 | subfolder_name = 'my_subfolder' 10 | subfolder_exists = os.path.exists(subfolder_name) 11 | if not subfolder_exists: 12 | os.makedirs(subfolder_name) 13 | # %% 14 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/30_working_dirs.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import os 3 | 4 | #%% current working directory 5 | os.getcwd() # get current working dir 6 | # os.chdir() # change current working dir 7 | # %% create a subfolder if it does not exist 8 | subfolder_name = 'work_files' 9 | subfolder_exists = os.path.exists(subfolder_name) 10 | if not subfolder_exists: 11 | os.makedirs(subfolder_name) 12 | # %% 13 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/40_pickle_save_load.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pickle 3 | 4 | #%% data to be stored 5 | my_list = [0, 5, -10] 6 | 7 | #%% functions for storing / loading data 8 | def save_obj(obj, file_path): 9 | with open(file_path, 'wb') as f: 10 | pickle.dump(obj, f) 11 | 12 | def load_obj(file_path): 13 | with open(file_path, 'rb') as f: 14 | return pickle.load(f) 15 | 16 | #%% Test it 17 | save_obj(my_list, 'my_list.pkl') 18 | # %% 19 | load_obj('my_list.pkl') -------------------------------------------------------------------------------- /10_Basics/050_file_handling/50_zip_file.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | from zipfile import ZipFile 3 | 4 | #%% 5 | with ZipFile('my_archive.zip', mode='w') as archive: 6 | archive.write('30_working_dirs.py') # the file must exist in the current working directory 7 | 8 | # %% 9 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/60_exercise_file_handling.py: -------------------------------------------------------------------------------- 1 | #%% TASKS: 2 | # - check your current working
directory 3 | # - change it if necessary 4 | # - create subfolder 'fibonaccy' if it does not exist 5 | # - create a file {x.csv} for the first 10 Fibonacci numbers with x being the number 6 | # Help: each Fibonacci number is the sum of its two predecessors, e.g. 1, 1, 2, 3, 5, 8, 13, ... 7 | # Result files: 1.csv, 2.csv, 3.csv, ... 8 | 9 | #%% 10 | import os 11 | import pandas as pd 12 | 13 | # get first ten fibonacci numbers 14 | def create_fibonacci_list(): 15 | fib = [1,1] 16 | for i in range(10): 17 | fib.append(fib[-2]+fib[-1]) 18 | return fib 19 | 20 | fib_numbers = create_fibonacci_list() 21 | fib_numbers 22 | 23 | 24 | #%% create files 25 | def create_files(): 26 | sub_folder = 'work_files' 27 | if not os.path.exists(sub_folder): 28 | os.makedirs(sub_folder) 29 | 30 | for f in fib_numbers: 31 | pd.DataFrame({}).to_csv(f"{sub_folder}/{f}.csv") 32 | 33 | # %% list files 34 | files_in_folder = os.listdir('./work_files') 35 | files_in_folder 36 | # %% extract parts of filename 37 | # extract the numbers from each filename 38 | [int(os.path.splitext(f)[0]) for f in files_in_folder] 39 | # %% 40 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/my_archive.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/10_Basics/050_file_handling/my_archive.zip -------------------------------------------------------------------------------- /10_Basics/050_file_handling/my_list.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/10_Basics/050_file_handling/my_list.pkl -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/1.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/13.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/144.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/2.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/21.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/3.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/34.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/5.csv: 
-------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/55.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/8.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/050_file_handling/work_files/89.csv: -------------------------------------------------------------------------------- 1 | "" 2 | -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/20_numpy_arrays.py: -------------------------------------------------------------------------------- 1 | #%% package 2 | import numpy as np 3 | 4 | #%% create array from list 5 | my_list = [0, 1, 2, 3, 4] 6 | arr = np.array(my_list) 7 | arr 8 | #%% create array 9 | arr = np.arange(5) # starts with 0 10 | arr 11 | # %% 12 | arr = np.arange(2,5) # specify start and end 13 | arr 14 | 15 | #%% 16 | arr = np.arange(-2,15, 1.5) # specify start and end 17 | arr 18 | 19 | # %% create array from list 20 | np.array([2, 5, 8], dtype=np.float64) 21 | 22 | # %% 2D 23 | my_matrix = [1, 2],[3,4] 24 | arr_2d = np.array(my_matrix) 25 | arr_2d 26 | # %% built-in functionality 27 | #%% dimension of array 28 | arr.shape # shape of array 29 | 30 | #%% reshape 31 | arr_2d.shape 32 | # %% 33 | np.arange(4).reshape((2,2)) 34 | # %% 35 | -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/30_numpy_operations.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | 4 | #%% 5 | my_matrix = [1, 2],[3,4] 6 | arr_2d = np.array(my_matrix) 7 | arr_2d.reshape((1,4)) 8 | # %% 9 | # np.ones((3, 3)) 10 | # np.zeros((3, 3)) 11 | diagonal_ones = np.eye(3) 12 | 13 | # %% change values based on value 14 | diagonal_twos = diagonal_ones 15 | # diagonal_twos[diagonal_twos == 1] = 2 16 | diagonal_twos[diagonal_twos < 10] = 2 17 | diagonal_twos 18 | # %% change values based on position 19 | diagonal_twos[1:] = 5 # change second to last row 20 | diagonal_twos 21 | # %% change columns 22 | diagonal_twos[: , 2] = 10 23 | diagonal_twos 24 | # %% last row 25 | diagonal_twos[-1: , :] = 100 26 | diagonal_twos 27 | 28 | # %% view vs. 
copy 29 | # view 30 | diagonal_ones = np.eye(3) 31 | diagonal_view = diagonal_ones.view() # affects both objects 32 | diagonal_copy = diagonal_ones.copy() 33 | 34 | diagonal_view[:, -1] = -50 35 | print(f"diagonal_view: {diagonal_view}") 36 | print(f"original: {diagonal_ones}") # affected as well 37 | print(f"diagonal_copy: {diagonal_copy}") 38 | 39 | 40 | # %% operations 41 | diagonal_ones[0].sum(axis=0) 42 | # %% 43 | diagonal_ones.prod() 44 | # %% 45 | diagonal_ones.mean() 46 | # %% 47 | diagonal_ones.min() 48 | # %% 49 | diagonal_ones.max() 50 | # %% 51 | diagonal_ones.argmax() 52 | # %% 53 | diagonal_ones.argmin() 54 | # %% array flatten 55 | my_arr = np.array([[1,2, 3],[4,5,6],[7,8,9]]) 56 | flat_arr = my_arr.reshape(my_arr.size) 57 | # %% 58 | my_arr.flatten() 59 | # %% transpose 60 | my_arr.T 61 | # %% linspace 62 | # number with equal distance 63 | np.linspace(0, 20, 5) 64 | 65 | # %% random 66 | # create array of shape and provide values with uniform distribution from 0 to 1 67 | np.random.rand(2) 68 | 69 | #%% max, min, argmin and argmax 70 | arr_random = np.random.rand(20) 71 | 72 | # %% 73 | # arr_random.min() # get min value 74 | # arr_random.max() # get max value 75 | arr_random.argmax() # get position of max value 76 | arr_random.argmin() # get position of min value 77 | 78 | 79 | # %% 80 | arr = np.arange(20) 81 | arr 82 | # %% 83 | arr + arr 84 | # %% 85 | arr * arr 86 | # %% division by zero replaced by nan (no error) 87 | arr / arr 88 | # %% joining arrays (1D) 89 | a1 = np.array([0, 1, 2]) 90 | a2 = np.array([3, 4, 5]) 91 | np.concatenate((a1, a2)) 92 | # %% joining arrays (2D) 93 | a3 = np.array([[1, 2], [3, 4]]) 94 | a4 = np.array([[5, 6], [7, 8]]) 95 | 96 | np.concatenate((a3, a4), axis=1).shape 97 | # %% 98 | np.concatenate((a3, a4), axis=0).shape 99 | # %% concatenates arrays along new axis 100 | np.stack((a3, a4), axis=1).shape 101 | # %% search with where 102 | a5 = np.arange(20) 103 | np.where(a5 > 10) 104 | 105 | # %% sort descending 106 | -np.sort(-a5) 107 | # %% 108 | -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/40_numpy_ufuncs.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | from timeit import timeit 3 | import numpy as np 4 | import time 5 | #%% 6 | #%%timeit 7 | x = [0, 1, 2, 3, 4, 5, 6, 7, 8] 8 | y = [4, 5, 6, 7, 8, 9, 0, 1, 2] 9 | z = [] 10 | 11 | for i, j in zip(x, y): 12 | 13 | z.append(i + j) 14 | print(z) 15 | # %% 16 | #%%timeit 17 | x = [0, 1, 2, 3, 4, 5, 6, 7, 8] 18 | y = [4, 5, 6, 7, 8, 9, 0, 1, 2] 19 | z = np.add(x, y) 20 | # %% 21 | -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/50_numpy_1_exercise.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | # %% E1: 4 | # - create a matrix with dimension 10x10 5 | # - fill it with values from 0 to 99 6 | # - write a function which takes a value as input and returns the number (if it is a square-number), returns 0 if it is not a square number 7 | # - replace all values which are not square-numbers, with 0 8 | -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/50_numpy_1_solution.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | # %% E1: 4 | # - create a matrix with dimension 10x10 5 | # - fill it with 
values from 0 to 99 6 | # - replace all values which are not square-numbers, with 0 7 | 8 | arr = np.arange(100).reshape((10, 10)) 9 | arr 10 | 11 | 12 | #%% 13 | def is_number_square(val): 14 | sqrt_val = np.sqrt(val) 15 | if (sqrt_val - int(sqrt_val) == 0): 16 | return val 17 | else: 18 | return 0 19 | 20 | # %% 21 | row_id = 0 22 | for x in np.arange(10): 23 | for y in np.arange(10): 24 | arr[x, y] = is_number_square(arr[x, y]) 25 | 26 | arr -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/60_numpy_2_exercise.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | 4 | 5 | # %% A1. create an array with dim (10, 10) and values from 1 to 100 with two for loops 6 | # hint: initialize the array with zeros to begin with 7 | A1 = np.zeros((10, 10)) 8 | 9 | for i in np.arange(10): 10 | for j in np.arange(10): 11 | A1[i, j] = (i+1)*(j+1) 12 | 13 | A1 14 | 15 | # %% A2. create the same array with just one for loop 16 | A2 = np.zeros((10, 10)) 17 | for i in np.arange(10): 18 | A2[i, :] = (i+1) * (np.arange(10) + 1) 19 | 20 | A2 21 | 22 | 23 | # %% A3. create the array with for loop 24 | A3 = np.arange(1, 101).reshape(10, 10) 25 | A3 26 | 27 | # %% 28 | -------------------------------------------------------------------------------- /10_Basics/060_numerical_calc/60_numpy_2_solution.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | 4 | 5 | # %% A1. create an array with dim (10, 10) and values from 1 to 100 with two for loops 6 | # hint: initialize the array with zeros to begin with 7 | A1 = np.zeros((10, 10)) 8 | 9 | for i in np.arange(10): 10 | for j in np.arange(10): 11 | A1[i, j] = (i+1)*(j+1) 12 | 13 | A1 14 | 15 | # %% A2. create the same array with just one for loop 16 | A2 = np.zeros((10, 10)) 17 | for i in np.arange(10): 18 | A2[i, :] = (i+1) * (np.arange(10) + 1) 19 | 20 | A2 21 | 22 | 23 | # %% A3. 
create the array with for loop 24 | A3 = np.arange(1, 101).reshape(10, 10) 25 | A3 26 | 27 | # %% 28 | -------------------------------------------------------------------------------- /20_DataHandling/010_data_handling/10_pandas_start.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | 5 | #%% Series 6 | my_series = pd.Series(np.arange(3), index=['a', 'b', 'c']) 7 | # %% 8 | # my_series[0] # access via index 9 | my_series['a'] # access via label 10 | 11 | #%% create dataframe 12 | # from dict 13 | my_dict = {'name': ['Bob', 'Stuart', 'Kevin'], 'grades': [1, 2, 3]} 14 | df = pd.DataFrame(my_dict) 15 | df 16 | # %% 17 | # from np array 18 | # df = pd.DataFrame(np.random.rand(2, 2), columns=['height', 'width']) 19 | # df.columns 20 | 21 | # now set/change column names after creation 22 | df = pd.DataFrame(np.random.rand(2, 2)) 23 | df.columns = ['height', 'width'] 24 | df 25 | # %% 26 | 27 | 28 | # %% 29 | -------------------------------------------------------------------------------- /20_DataHandling/010_data_handling/20_filtering.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | 4 | #%% 5 | # source: https://perso.telecom-paristech.fr/eagan/class/igr204/datasets 6 | file_path = 'factbook.csv' 7 | df = pd.read_csv(file_path, sep=';', skiprows=[1]) 8 | 9 | df 10 | # %% Data Filtering 11 | # select columns 12 | # select columns like df['Country] 13 | type(df['Country']) # returns Series 14 | type(df[['Country']]) # returns DataFrame 15 | # %% 16 | # 1. find number of unique countries 17 | len(pd.unique(df['Country'])) 18 | # %% 19 | # select rows 20 | # 2. get all countries with more than 1E6 people 21 | df[df['Population']>1E6] 22 | # %% 23 | df['Population']>1E6 24 | # %% 25 | 26 | df[['Country', 'Population']].shape 27 | 28 | 29 | # %% 30 | (df['Population']>1E6).value_counts() 31 | # %% 32 | df[df['Population']>1E6] 33 | # %% loc 34 | df.loc[1:3, ['Country']] 35 | # %% iloc 36 | df.iloc[10:20, -4:] # row 10 to 19, last four cols 37 | 38 | # integer location 39 | df.iloc[0,0] # get the first 40 | 41 | # %% 42 | df.iloc[1, :] # 2nd row 43 | 44 | # %% 45 | df.iloc[:, -1] # last column 46 | 47 | # %% perform multiplication 48 | df['Population'] /1000 * df['Birth rate(births/1000 population)'] 49 | # %% find the largest country in the world 50 | df['Population'].sort_values(ascending=False) 51 | # %% 52 | df.loc[[49], :] # manual approach 53 | # %% 54 | df.loc[[df['Population'].idxmax()], :] 55 | # %% 56 | -------------------------------------------------------------------------------- /20_DataHandling/010_data_handling/25_eda.py: -------------------------------------------------------------------------------- 1 | # %% [markdown] 2 | # Import **pandas** as the required package for working with dataframes. 3 | 4 | # %% 5 | import pandas as pd 6 | 7 | # A dataframe is a multi-dimensional table with rows and columns. 8 | 9 | # %% Creating a Dataframe 10 | # Usually you import a dataframe from a file, a SQL server, or a web-resource. But here I will show you how to create a dataframe from scratch. 11 | # 12 | # You can create a dataframe based on lists, tuples, arrays. Here we develop it based on a dictionary. 13 | 14 | # %% 15 | data = { 16 | 'A': [1,2,3], 17 | 'B': [4,5,6], 18 | 'C': [7,8,9] 19 | } 20 | df = pd.DataFrame(data=data) 21 | df 22 | 23 | # A dataframe has columns (here: A, B, C), and rows. 
The index is created automatically and starts at 0. 24 | 25 | # # Import and Export of Dataframes 26 | 27 | # You can export a dataframe into different formats like Excel, JSON, ... Here I export it to a CSV file. 28 | 29 | # %% 30 | filename = 'df.csv' 31 | df.to_csv(filename, index=False) 32 | 33 | # Similarly the dataframe can be imported with **pandas**. There are many different read-functions to import from different formats. 34 | 35 | # %% 36 | df = pd.read_csv(filename) 37 | 38 | # # Exploratory Data Analysis 39 | 40 | # %% [markdown] 41 | # You can explore the data with *head()* to see the first observations. If you are interested in the last observations go with *tail()*. The argument refers to the number of observations to be shown. 42 | 43 | # %% 44 | df.head(2) 45 | 46 | # %% [markdown] 47 | # Statistical properties are shown with the *describe()* method. 48 | 49 | # %% 50 | df.describe() 51 | 52 | # %% [markdown] 53 | # A general summary of the dataframe is provided by the *info()* method. 54 | 55 | # %% 56 | df.info() 57 | 58 | # %% [markdown] 59 | # Often you are interested in getting the number of rows and columns. You can get this with the shape property. 60 | 61 | # %% 62 | df.shape 63 | 64 | # %% [markdown] 65 | # The column-names are stored in the property *columns*. 66 | 67 | # %% 68 | df.columns -------------------------------------------------------------------------------- /20_DataHandling/010_data_handling/30_modify.py: -------------------------------------------------------------------------------- 1 | # %% [markdown] 2 | # Import **pandas** as the required package for working with dataframes. 3 | 4 | # %% 5 | import pandas as pd 6 | 7 | # A dataframe is a multi-dimensional table with rows and columns. 8 | 9 | # %% Creating a Dataframe 10 | # Usually you import a dataframe from a file, a SQL server, or a web-resource. But here I will show you how to create a dataframe from scratch. 11 | # 12 | # You can create a dataframe based on lists, tuples, arrays. Here we develop it based on a dictionary. 13 | 14 | # %% 15 | data = { 16 | 'A': [1,2,3], 17 | 'B': [4,5,6], 18 | 'C': [7,8,9] 19 | } 20 | df = pd.DataFrame(data=data) 21 | df 22 | 23 | # A dataframe has columns (here: A, B, C), and rows. The index is created automatically and starts at 0. 24 | 25 | # # Import and Export of Dataframes 26 | 27 | # You can export a dataframe into different formats like Excel, JSON, ... Here I export it to a CSV file. 28 | 29 | # # Adding/Modifying Columns 30 | 31 | # %% 32 | df['D'] = list(range(10,13)) 33 | df 34 | 35 | # %% [markdown] 36 | # # Delete Rows or Columns 37 | 38 | # %% [markdown] 39 | # If you want to delete a column use the method *drop()* and specify the column name. The argument axis needs to be 1 for columns. With inplace set to true the dataframe is directly modified. 40 | 41 | # %% 42 | df.drop('C', axis=1, inplace=True) 43 | df 44 | 45 | # %% [markdown] 46 | # Similarly you can delete rows by specifying the index of the row, the axis is 0 for rows and inplace is set to true to change the dataframe directly. 47 | 48 | # %% 49 | df.drop(1, axis=0, inplace=True) 50 | df 51 | 52 | # %% [markdown] 53 | # # Apply a lambda function to a column 54 | 55 | # %% [markdown] 56 | # You can also apply a specific function to a column.
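# %% [markdown]
# For instance, the next cell applies a small lambda element-wise to column A and stores the result in a new column E.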
57 | 58 | # %% 59 | my_func = lambda x: x + 2 60 | 61 | df['E'] = df['A'].apply(my_func) 62 | df 63 | 64 | # %% [markdown] 65 | # # Reshape your dataframe structure 66 | 67 | # %% [markdown] 68 | # You can reshape your dataframe structure from wide data to tidy data and vice versa. We are starting with wide-data. 69 | 70 | -------------------------------------------------------------------------------- /20_DataHandling/010_data_handling/60_plotting.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | 5 | #%% 6 | df = pd.DataFrame({'language': ['R', 'Python', 'SQL', 'R', 'R', 'Python', 'Python'], 7 | 'year': [2020, 2020, 2020, 2021, 2022, 2022, 2022], 8 | 'users': [1E6, 2E6, 0.5E6, 1.1E6, 1.2E6, 2.2E6, 2.4E6]}) 9 | df 10 | # %% 11 | df['users'].plot() 12 | # %% 13 | import matplotlib.pyplot as plt 14 | plt.plot(df.loc[df['language']=='Python', 'users']) 15 | plt.plot(df.loc[df['language']=='R', 'users']) 16 | plt.plot(df.loc[df['language']=='SQL', 'users']) 17 | plt.show() 18 | # %% 19 | -------------------------------------------------------------------------------- /20_DataHandling/020_data_im_export/export.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | 5 | #%% 6 | df = pd.DataFrame({'language': ['R', 'Python', 'SQL', 'R', 'R', 'Python', 'Python'], 7 | 'year': [2020, 2020, 2020, 2021, 2022, 2022, 2022], 8 | 'users': [1E6, 2E6, 0.5E6, 1.1E6, 1.2E6, 2.2E6, 2.4E6]}) 9 | df 10 | # %% 11 | df.to_clipboard() 12 | # %% 13 | df.to_excel('languages.xlsx') 14 | # %% 15 | df.to_excel('languages.xlsx', index=False) 16 | 17 | #%% excel with multiple sheets 18 | excel_writer = pd.ExcelWriter('languages.xlsx', engine='xlsxwriter') 19 | df.to_excel(excel_writer, sheet_name='basic', index=False) 20 | df.to_excel(excel_writer, sheet_name='advanced', index=False) 21 | # conditional formatting 22 | sheet = excel_writer.sheets['advanced'] 23 | cell_range = 'C1:C8' 24 | sheet.conditional_format(cell_range, {'type': '2_color_scale', 25 | 'min_value': '1E6', 26 | 'max_value': '3E6'}) 27 | 28 | 29 | excel_writer.save() 30 | # %% 31 | df.to_json('languages.json') 32 | # %% 33 | -------------------------------------------------------------------------------- /20_DataHandling/020_data_im_export/languages.json: -------------------------------------------------------------------------------- 1 | {"language":{"0":"R","1":"Python","2":"SQL","3":"R","4":"R","5":"Python","6":"Python"},"year":{"0":2020,"1":2020,"2":2020,"3":2021,"4":2022,"5":2022,"6":2022},"users":{"0":1000000.0,"1":2000000.0,"2":500000.0,"3":1100000.0,"4":1200000.0,"5":2200000.0,"6":2400000.0}} -------------------------------------------------------------------------------- /20_DataHandling/020_data_im_export/languages.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/020_data_im_export/languages.xlsx -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/altair_intro.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import seaborn as sns 3 | import altair as alt 4 | import pandas as pd 5 | 6 | #%% 7 | titanic = sns.load_dataset("titanic") 8 | 9 | 
alt.Chart(titanic).mark_bar().encode( 10 | alt.X('class'), 11 | y='count()' 12 | ) 13 | # %% 14 | filename = "Diamonds.csv" 15 | diamonds = pd.read_csv(filename) 16 | # %% 17 | alt.Chart(diamonds[:100]).mark_point().encode( 18 | x='x', 19 | y='y', 20 | color='cut', 21 | size='price' 22 | ) 23 | # %% 24 | -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/dash/Google_Stock_Price_Test.csv: -------------------------------------------------------------------------------- 1 | Date,Open,High,Low,Close,Volume 2 | 1/3/2017,778.81,789.63,775.8,786.14,"1,657,300" 3 | 1/4/2017,788.36,791.34,783.16,786.9,"1,073,000" 4 | 1/5/2017,786.08,794.48,785.02,794.02,"1,335,200" 5 | 1/6/2017,795.26,807.9,792.2,806.15,"1,640,200" 6 | 1/9/2017,806.4,809.97,802.83,806.65,"1,272,400" 7 | 1/10/2017,807.86,809.13,803.51,804.79,"1,176,800" 8 | 1/11/2017,805,808.15,801.37,807.91,"1,065,900" 9 | 1/12/2017,807.14,807.39,799.17,806.36,"1,353,100" 10 | 1/13/2017,807.48,811.22,806.69,807.88,"1,099,200" 11 | 1/17/2017,807.08,807.14,800.37,804.61,"1,362,100" 12 | 1/18/2017,805.81,806.21,800.99,806.07,"1,294,400" 13 | 1/19/2017,805.12,809.48,801.8,802.17,"919,300" 14 | 1/20/2017,806.91,806.91,801.69,805.02,"1,670,000" 15 | 1/23/2017,807.25,820.87,803.74,819.31,"1,963,600" 16 | 1/24/2017,822.3,825.9,817.82,823.87,"1,474,000" 17 | 1/25/2017,829.62,835.77,825.06,835.67,"1,494,500" 18 | 1/26/2017,837.81,838,827.01,832.15,"2,973,900" 19 | 1/27/2017,834.71,841.95,820.44,823.31,"2,965,800" 20 | 1/30/2017,814.66,815.84,799.8,802.32,"3,246,600" 21 | 1/31/2017,796.86,801.25,790.52,796.79,"2,160,600" -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/dash/helloWorld.py: -------------------------------------------------------------------------------- 1 | import dash 2 | from dash import html 3 | 4 | app = dash.Dash(__name__) 5 | 6 | app.layout = html.H3("Hello World!") 7 | 8 | if __name__ == "__main__": 9 | app.run_server(debug=True) -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/dash/lineGraph.py: -------------------------------------------------------------------------------- 1 | #%% 2 | import dash 3 | from dash import html 4 | from dash import dcc 5 | import plotly.express as px 6 | import pandas as pd 7 | 8 | #%% Data Prep 9 | # source: https://www.kaggle.com/datasets/medharawat/google-stock-price?resource=download&select=Google_Stock_Price_Train.csv 10 | df = pd.read_csv("Google_Stock_Price_Train.csv") 11 | 12 | #%% Visualisation 13 | fig = px.line(df, x="Date", y="Open", title="Google Stock") 14 | fig 15 | #%% 16 | app = dash.Dash(__name__) 17 | app.title = "Google Stock" 18 | 19 | app.layout = html.Div( 20 | id="app-container", 21 | children=[ 22 | html.H1("Google Stock"), 23 | html.P("[USD]"), 24 | dcc.Graph(figure=fig) 25 | ] 26 | ) 27 | 28 | if __name__ == "__main__": 29 | app.run_server(debug=True) 30 | # %% 31 | -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/folium_intro.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | import folium 5 | 6 | 7 | # %% 8 | df = pd.DataFrame({'city': ['Hamburg', 'Berlin', 'Munich'], 9 | 'lat': [53.5511, 52.5200, 48.1351], 10 | 'long': [9.9937, 13.4050, 11.5820] 11 | }) 12 | 13 | # calculate median of given lat 
long coords 14 | center = [np.median(df['lat']), np.median(df['long'])] 15 | # %% 16 | m = folium.Map(location=center, zoom_start=5) 17 | m 18 | # %% add all cities as markers to map 19 | for i in range(len(df)): 20 | folium.CircleMarker(location=[df.loc[i, 'lat'], df.loc[i, 'long']], tooltip=df.loc[i, 'city'], color='blue').add_to(m) 21 | m 22 | # %% 23 | -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/ggplot_intro.py: -------------------------------------------------------------------------------- 1 | # %% packages 2 | import pandas as pd 3 | from plotnine import * 4 | 5 | # %% 6 | filename = "Diamonds.csv" 7 | diamonds = pd.read_csv(filename) 8 | 9 | # %% EDA 10 | diamonds.head() 11 | 12 | # %% One Variable 13 | 14 | # %% Discrete Feature 15 | (ggplot(data=diamonds) 16 | + aes(x = 'cut') 17 | + geom_bar() 18 | ) 19 | 20 | # %% Continuous Feature 21 | (ggplot(diamonds) + 22 | aes(x = 'price') + 23 | geom_density() 24 | ) 25 | 26 | # %% 27 | (ggplot(diamonds) + 28 | aes(x = 'price') + 29 | geom_area(stat='bin') 30 | ) 31 | 32 | # %% [markdown] 33 | # # Two Variables 34 | 35 | # %% Continuous X, Continous Y 36 | (ggplot(data=diamonds[diamonds['cut'].isin(['Fair', 'Premium'])]) + 37 | aes(x='x', y='y', color='price', size ='carat') + 38 | geom_point() + 39 | facet_grid(['color','cut']) 40 | #scale_y_log10() 41 | ) 42 | 43 | # %% Continuous X, Discrete Y 44 | (ggplot(data=diamonds) + 45 | aes(x='price', y='clarity') + 46 | geom_bin2d() 47 | ) 48 | 49 | # %% Discrete X, Continuous Y 50 | (ggplot(data=diamonds) + 51 | aes(x='clarity', y='price') + 52 | geom_jitter(alpha=.2) 53 | ) 54 | 55 | # %% 56 | (ggplot(data=diamonds) + 57 | aes(x='clarity', y='price') + 58 | geom_violin() 59 | ) 60 | 61 | # %% 62 | (ggplot(data=diamonds) + 63 | aes(x='clarity', y='price') + 64 | geom_boxplot() 65 | ) 66 | 67 | # %% Discrete X, Discrete Y 68 | (ggplot(data=diamonds) + 69 | aes(x='clarity', y='cut') + 70 | geom_jitter(alpha=.2) 71 | ) 72 | 73 | # %% 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /20_DataHandling/030_data_visualisation/matplotlib_intro.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import matplotlib.pyplot as plt 3 | import pandas as pd 4 | import numpy as np 5 | 6 | #%% 1D data -> automatic x labels 7 | plt.plot([0, 2, 5, 6]) 8 | plt.title('my first plot') 9 | plt.xlabel('automatically generated') 10 | plt.show() 11 | 12 | 13 | #%% 2D data 14 | plt.plot([1, 3, 5, 7], [-10, 5, 25, 20]) 15 | 16 | #%% formatting 17 | # color and line type: default b-, 18 | plt.plot([1, 3, 5, 7], [-10, 5, 25, 20], 'go') 19 | plt.axis([0, 10, -15, 30]) # [xmin, xmax, ymin, ymax] 20 | 21 | #%% multiple series 22 | my_range = np.arange(0, 10, 1) 23 | # plt.plot(my_range, my_range) 24 | # plt.plot(my_range, my_range, 'b-') 25 | plt.plot(my_range, my_range, 'b-o', my_range, my_range**0.5, 'g-s', my_range, my_range**1.5, 'r-^') 26 | 27 | 28 | #%% Data Import 29 | filename = "Diamonds.csv" 30 | diamonds = pd.read_csv(filename) 31 | 32 | # %% additional dimensions 33 | diamonds['volume'] = diamonds['x'] * diamonds['y'] * diamonds['z'] 34 | plt.scatter(data=diamonds.iloc[:100, :], x='carat', y = 'depth', s='price', c='volume') 35 | plt.xlabel('carat') 36 | plt.ylabel('depth') 37 | plt.colorbar(label='volume') 38 | plt.show() 39 | 40 | # %% 41 | -------------------------------------------------------------------------------- 
/20_DataHandling/030_data_visualisation/seaborn_intro.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import seaborn as sns 3 | import pandas as pd 4 | 5 | # %% 6 | filename = "Diamonds.csv" 7 | diamonds = pd.read_csv(filename) 8 | diamonds['volume'] = diamonds['x'] * diamonds['y'] * diamonds['z'] 9 | #%% 10 | sns.set_theme(style='darkgrid') 11 | s= sns.relplot(data=diamonds.iloc[1500:2000, :], x='carat', y = 'price', size='depth', hue='cut', style='clarity') 12 | s.set(ylim = (3000, 3150)) 13 | # %% 14 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/00_DataAggregation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/040_data_manipulation/00_DataAggregation.pdf -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/10_grouping.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | 5 | #%% 6 | df = pd.DataFrame({'language': ['R', 'Python', 'SQL', 'R', 'R', 'Python', 'Python'], 7 | 'year': [2020, 2020, 2020, 2021, 2022, 2022, 2022], 8 | 'users': [1E6, 2E6, 0.5E6, 1.1E6, 1.2E6, 2.2E6, 2.4E6]}) 9 | df 10 | 11 | # %% understand grouping 12 | for name, group in df.groupby('language'): 13 | mean_val = group['users'].mean() 14 | print(f"{name}: {mean_val}") 15 | 16 | # %% group on a column and average over all other cols 17 | # df.groupby('language').mean() 18 | df.groupby('language').agg(np.mean) 19 | # %% group on a column, average over a specific column 20 | df.groupby('language')['users'].mean() 21 | # %% group by multiple columns 22 | df.groupby(['language', 'year'])['users'].mean() 23 | # %% several aggregation functions 24 | df.groupby('language').agg({'users': ['mean', 'min', 'max', 'sum']}) 25 | 26 | # %% 27 | df.groupby('language').agg([np.mean, np.sum]) 28 | 29 | # %% 30 | import pandas as pd 31 | data = {'group_col': ['A', 'B', 'C', 'A', 'B', 'A'], 32 | 'value_col': [0, -2, 5, 2, 2, 4]} 33 | df = pd.DataFrame(data) 34 | df 35 | 36 | 37 | #%% 38 | 39 | # %% 40 | for name, group in df.groupby('group_col'): 41 | group_values = group['value_col'].tolist() 42 | print(f"{name}: {group_values}") 43 | # %% 44 | df.groupby('group_col').agg({'value_col': [np.mean, np.sum]}) 45 | 46 | # %% 47 | df.groupby('group_col').size() 48 | # %% 49 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/15_DataReshaping.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/040_data_manipulation/15_DataReshaping.pdf -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/20_reshape.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | 4 | # %% 5 | data = { 6 | 'student': ['Stuart', 'Bob', 'Kevin'], 7 | 'math': [2,3,3], 8 | 'sport': [3,1,2], 9 | 'art': [4,2,1] 10 | 11 | } 12 | df_wide = pd.DataFrame(data=data) 13 | df_wide 14 | 15 | # %% test 16 | df_long = df_wide.melt(id_vars= ['student'], 17 | 
var_name = 'subject', 18 | value_name = 'grade') 19 | df_long 20 | 21 | #%% value_vars can be used as filter 22 | # here: subject 'art' is not converted 23 | df_long = df_wide.melt(id_vars= ['student'], 24 | var_name = 'subject', 25 | value_name = 'grade', value_vars=['math', 'sport']) 26 | df_long 27 | 28 | # %% 29 | df_wide2 = df_long.pivot(index='student', columns='subject', values='grade', ) 30 | df_wide2 31 | 32 | 33 | # %% index names 34 | # df_wide does not have an index name, df_wide2 got one created 35 | # df_wide.index 36 | df_wide2.index 37 | 38 | # similar for column, df_wide2 got a name 39 | 40 | #%% create graph 41 | # barplot uses wide format 42 | df_wide2.plot.bar() 43 | 44 | #%% 45 | df_wide_wo_index = df_long.pivot(index='student', columns='subject', values='grade', ).reset_index().rename_axis(None, axis=1) 46 | df_wide_wo_index 47 | 48 | #%% 49 | df_wide2.index 50 | 51 | # %% 52 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/25_JoiningDatasets.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/040_data_manipulation/25_JoiningDatasets.pdf -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/30_join.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | 5 | #%% prepare dataframes 6 | # use main characters of animated films 7 | 8 | minions = pd.DataFrame({ 9 | 'student': ['Stuart', 'Bob', 'Kevin', 'Gru'], 10 | 'art': [4,2,1, 2] 11 | 12 | }) 13 | print(f"minions:\n {minions}") 14 | 15 | despicable_me = pd.DataFrame({ 16 | 'student': ['Agnes', 'Margo', 'Edith', 'Gru'], 17 | 'sport': [1,2,2, 3] 18 | 19 | }) 20 | print(f"despicable me:\n {despicable_me}") 21 | 22 | 23 | 24 | 25 | 26 | 27 | frozen = pd.DataFrame({ 28 | 'student': ['Anna', 'Elsa', 'Olaf'], 29 | 'art': [4,2,1] 30 | }) 31 | print(f"frozen:\n {frozen}") 32 | 33 | simpsons = pd.DataFrame({ 34 | 'student': ['Bart', 'Lisa'], 35 | 'math': [5,1], 36 | 'sport': [1,5] 37 | 38 | }) 39 | print(f"simpsons:\n {simpsons}") 40 | 41 | #%% left join 42 | # how: left, right, inner, outer 43 | # minions.merge(right=despicable_me, how='right', on='student', indicator=True) 44 | minions.merge(right=despicable_me, how='right', left_on='student', right_on='student', indicator=True) 45 | 46 | #%% join via index 47 | despicable_me_index = despicable_me.copy() 48 | despicable_me_index.index = despicable_me_index['student'] 49 | despicable_me_index.drop(columns=['student'], axis=1, inplace=True) 50 | print(f"despicable me index:\n {despicable_me_index}") 51 | 52 | minions.merge(right=despicable_me_index, how='right', left_on='student', right_index=True, indicator=True).reset_index(drop=True) 53 | 54 | 55 | 56 | # %% append rows 57 | minions.append(frozen) 58 | # %% alternatively 59 | pd.concat([minions, frozen]) 60 | # %% append columns 61 | pd.concat([minions, simpsons]) 62 | 63 | # %% 64 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/diamonds_cut.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/040_data_manipulation/diamonds_cut.png -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/diamonds_exercise.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | 3 | #%% 0. add required packages at the top of this script 4 | 5 | # %% 1. import dataset from subfolder "data" 6 | 7 | # %% 2. how many rows and columns does it have? 8 | 9 | # %% 3. which columns does it have? 10 | 11 | # %% 4. There is a column named 'Unnamed: 0'. Please delete it. 12 | 13 | # %% 5. which levels does column cut have? 14 | 15 | # %% 6. create a barplot for diamonds and their cuts 16 | #%% 7. save the graph as a png file 17 | 18 | # %% 8. find out what the median price per cut is. 19 | 20 | # %% 9. Create two filtered dataframes which only have the cut 'Ideal' and 'Premium'. Store them in the dataframes 'df_cut_ideal' and 'df_cut_premium' 21 | 22 | # %% 10. stack the dataframes together to get a combined version df_cut_ideal_premium 23 | 24 | # %% 11. save the dataframe to a csv file in subfolder data: 'df_cut_ideal_premium.csv' 25 | 26 | # %% 27 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/diamonds_solution.py: -------------------------------------------------------------------------------- 1 | #%% 2 | import pandas as pd 3 | import numpy as np 4 | from plotnine import ggplot, aes, geom_bar, labs, ggsave 5 | #%% 0. add required packages at the top of this script 6 | 7 | # %% 1. import dataset from subfolder "data" 8 | df = pd.read_csv('data/diamonds.csv', index_col=None) 9 | # %% 2. how many rows and columns does it have? 10 | df.shape 11 | 12 | # %% 3. which columns does it have? 13 | df.columns 14 | 15 | # %% 4. There is a column named 'Unnamed: 0'. Please delete it. 16 | df.drop(columns='Unnamed: 0', inplace=True) 17 | 18 | # %% 5. which levels does column cut have? 19 | df['cut'].unique() 20 | 21 | # %% 6. create a barplot for diamonds and their cuts 22 | g = ggplot(data = df) + aes('cut') + geom_bar() + labs(x = 'Cut', y = 'Count [-]', title="Diamond Cut Counts") 23 | g 24 | #%% 7. save the graph as a png file 25 | ggsave(g, 'diamonds_cut.png') 26 | 27 | # %% 8. find out what the median price per cut is. 28 | df.groupby('cut')['price'].agg(np.median) 29 | 30 | # %% 9. Create two filtered dataframes which only have the cut 'Ideal' and 'Premium'. Store them in the dataframes 'df_cut_ideal' and 'df_cut_premium' 31 | df_cut_ideal = df[df['cut'] == 'Ideal'] 32 | df_cut_premium = df[df['cut'] == 'Premium'] 33 | 34 | # %% 10. stack the dataframes together to get a combined version df_cut_ideal_premium 35 | df_cut_ideal_premium = pd.concat([df_cut_ideal, df_cut_premium]) 36 | 37 | # %% 11.
save the dataframe to a csv file in subfolder data: 'df_cut_ideal_premium.csv' 38 | df_cut_ideal_premium.to_csv('data/df_cut_ideal_premium.csv') 39 | 40 | # %% 41 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/exercise_manipulation.py: -------------------------------------------------------------------------------- 1 | #%% package import 2 | import numpy as np 3 | import pandas as pd 4 | import matplotlib.pyplot as plt 5 | from sklearn.datasets import fetch_openml 6 | 7 | #%% Data Source: OpenML 'house_prices' (Ames house prices; note that the link below describes the California Housing loader, which is not what is fetched here) 8 | # https://scikit-learn.org/stable/modules/generated/sklearn.datasets.fetch_california_housing.html#sklearn.datasets.fetch_california_housing 9 | 10 | #%% Data Import 11 | X, y = fetch_openml(name="house_prices", as_frame=True, return_X_y=True)# %% 12 | 13 | #%% 1. Combine X and y 14 | # X covers all house information, y covers the price. 15 | # Please combine both into one dataframe called housing; the values from y should then be called 'price' 16 | # Hint: you don't need to use pd.merge 17 | # Hint2: consider that X and y have different types 18 | housing = X.copy() 19 | housing['price'] = y 20 | 21 | #%% 2. make yourself familiar with the columns of the dataframe 22 | 23 | 24 | 25 | # %% 3. group by 'OverallQual' and get the min, max, and mean price for the different conditions 26 | 27 | 28 | # %% 4. find the unique values represented in 'Street' and create a dataframe for each street 29 | 30 | 31 | # %% 5. check the number of rows for both dataframes 32 | 33 | 34 | # %% 6. combine both dataframes to one new dataframe 'housing_combined' and check the number of rows 35 | 36 | 37 | # %% 7. use the shown housing_types dataframe and convert it to a wide dataframe 38 | # the columns shall represent the 'GarageType' and the indices the 'BldgType' 39 | housing_types = housing.groupby(['BldgType', 'GarageType']).agg({'price': np.mean}).reset_index() 40 | 41 | # %% 42 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/exercise_manipulation_solution.py: -------------------------------------------------------------------------------- 1 | #%% package import 2 | import numpy as np 3 | import pandas as pd 4 | import matplotlib.pyplot as plt 5 | from sklearn.datasets import fetch_openml 6 | 7 | #%% Data Source: OpenML 'house_prices' (Ames house prices; note that the link below describes the California Housing loader, which is not what is fetched here) 8 | # https://scikit-learn.org/stable/modules/generated/sklearn.datasets.fetch_california_housing.html#sklearn.datasets.fetch_california_housing 9 | 10 | #%% Data Import 11 | X, y = fetch_openml(name="house_prices", as_frame=True, return_X_y=True)# %% 12 | 13 | #%% 1. Combine X and y 14 | # X covers all house information, y covers the price. 15 | # Please combine both into one dataframe called housing; the values from y should then be called 'price' 16 | # Hint: you don't need to use pd.merge 17 | # Hint2: consider that X and y have different types 18 | housing = X.copy() 19 | housing['price'] = y 20 | 21 | #%% 2. make yourself familiar with the columns of the dataframe 22 | housing.columns 23 | 24 | 25 | # %% 3. group by 'OverallQual' and get the min, max, and mean price for the different conditions 26 | housing.groupby('OverallQual').agg({'price': [np.min, np.max, np.mean]}) 27 | # %% 4. find the unique values represented in 'Street' and create a dataframe for each street 28 | housing_pave = housing[housing['Street'] == 'Pave'] 29 | housing_grvl = housing[housing['Street'] == 'Grvl'] 30 | 31 | 32 | # %% 5.
check the number of rows for both dataframes 33 | print(housing_pave.shape) 34 | print(housing_grvl.shape) 35 | 36 | # %% 6. combine both dataframes to one new dataframe 'housing_combined' and check the number of rows 37 | housing_combined = pd.concat([housing_pave, housing_grvl]) 38 | housing_combined.shape 39 | 40 | # %% 7. use the shown housing_types dataframe and convert it to a wide dataframe 41 | # the columns shall represent the 'GarageType' and the indices the 'BldgType' 42 | housing_types = housing.groupby(['BldgType', 'GarageType']).agg({'price': np.mean}).reset_index() 43 | 44 | housing_types.pivot(index = 'BldgType', columns='GarageType', values='price') 45 | # %% 46 | -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/file_example_XLS_10.xls: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/040_data_manipulation/file_example_XLS_10.xls -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/file_example_XLS_10.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/20_DataHandling/040_data_manipulation/file_example_XLS_10.xlsx -------------------------------------------------------------------------------- /20_DataHandling/040_data_manipulation/mito.py: -------------------------------------------------------------------------------- 1 | # installation 2 | # !pip install mitoinstaller 3 | # !python -m mitoinstaller install 4 | 5 | #%% load package 6 | import mitosheet 7 | 8 | mitosheet.sheet() 9 | # %% 10 | -------------------------------------------------------------------------------- /30_object_oriented_prog/010_intro.py: -------------------------------------------------------------------------------- 1 | #%% class definition 2 | class Pet: 3 | pass 4 | # %% instantiate an object 5 | kiki = Pet() 6 | # %% 7 | print(kiki) 8 | # %% 9 | class Pet: 10 | def __init__(self, name, species): 11 | self.name = name 12 | self.species = species 13 | 14 | pet_one = Pet("Kiki", "dog") 15 | pet_two = Pet("Bubbles", "cat") 16 | # %% 17 | pet_one.name 18 | # %% Object Methods 19 | class Pet: 20 | def __init__(self, name, species): 21 | self.name = name 22 | self.species = species 23 | 24 | def hello(self): 25 | print(f"Hello! 
My name is {self.name} and I am a {self.species}") 26 | 27 | pet_one = Pet("Kiki", "dog") 28 | pet_two = Pet("Bubbles", "cat") 29 | 30 | # %% 31 | pet_one.hello() 32 | # %% 33 | pet_new = Pet('Waldo', 'dog') 34 | pet_new2 = Pet('Waldo', 'dog') 35 | -------------------------------------------------------------------------------- /30_object_oriented_prog/020_carddeck.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import random 3 | 4 | #%% class definition 5 | class Deck: 6 | """Class for Card Deck 7 | 8 | """ 9 | SUIT = ["♣", "♠", "♦", "♥"] # unicode \u2660 - \u2663 10 | 11 | RANK = [str(n) for n in range(7, 11)] + ["J", "Q", "K", "A"] 12 | 13 | def __init__(self): 14 | self.cards = [] 15 | for suit in self.SUIT: 16 | for rank in self.RANK: 17 | self.cards.append(f"{suit} {rank}") 18 | 19 | def shuffle(self): 20 | random.shuffle(self.cards) 21 | 22 | def draw(self): 23 | return self.cards.pop() 24 | 25 | def deal(self, num_hands, num_cards): 26 | self.hands = [] 27 | for i in range(num_hands): 28 | self.hands.append([]) 29 | for j in range(num_cards): 30 | self.hands[i].append(self.draw()) 31 | return self.hands 32 | 33 | def __str__(self): 34 | return f"Deck of {self.cards}" 35 | 36 | def __repr__(self): 37 | # __repr__ must return a string, not None 38 | return f"Player Hands: {self.hands}; Deck: {self.cards}" 39 | 40 | def __len__(self): 41 | return len(self.cards) 42 | 43 | 44 | 45 | 46 | #%% instantiate an object 47 | deck = Deck() 48 | deck.cards 49 | 50 | # %% 51 | deck.deal(num_cards=8, num_hands=2) 52 | 53 | # %% repr and __str__ 54 | # __str__ invoked by print or str(object) 55 | # str made for users 56 | # return string object 57 | # __repr__ invoked by repr and returns object in string format 58 | # repr made for developers -------------------------------------------------------------------------------- /30_object_oriented_prog/030_inheritance.py: -------------------------------------------------------------------------------- 1 | #%% 2 | class Pet: 3 | is_human = False # class attribute, identical for all instances 4 | 5 | def __init__(self, name): 6 | self.name = name 7 | 8 | def hello(self): 9 | print(f"Hello! My name is {self.name}") 10 | 11 | 12 | class Dog(Pet): 13 | def __init__(self, name, breed): 14 | super().__init__(name) 15 | self.species = 'dog' 16 | self.breed = breed 17 | 18 | def hello(self): 19 | print(f"Hello!
My name is {self.name}, I am a dog of type {self.breed}") 20 | 21 | #%% instantiation 22 | waldo = Dog(name="Waldo", breed="wiener dog") 23 | waldo.hello() 24 | # %% 25 | waldo 26 | # %% 27 | -------------------------------------------------------------------------------- /30_object_oriented_prog/040_stats_exercise.py: -------------------------------------------------------------------------------- 1 | #%% 2 | import numpy as np 3 | 4 | 5 | #%% 6 | heights = np.random.randint(low= 0, high=250, size = 50) 7 | 8 | 9 | # %% Task: create a class that calculates count, sum, min, max, range, mean, and median of a given list 10 | # Implement a method "overview" which shows all results at once 11 | # %% 12 | -------------------------------------------------------------------------------- /30_object_oriented_prog/040_stats_solution.py: -------------------------------------------------------------------------------- 1 | #%% 2 | import numpy as np 3 | 4 | 5 | #%% 6 | heights = np.random.randint(low= 0, high=250, size = 50) 7 | 8 | 9 | # %% Task: create a class that calculates count, sum, min, max, range, mean, and median of a given list 10 | # Implement a method "overview" which shows all results at once 11 | class Stats: 12 | def __init__(self, l) -> None: 13 | self.l = l 14 | 15 | def count(self): 16 | return len(self.l) 17 | 18 | def sum(self): 19 | return np.sum(self.l) 20 | 21 | def min(self): 22 | return np.min(self.l) 23 | 24 | def max(self): 25 | return np.max(self.l) 26 | 27 | def range(self): 28 | return np.ptp(self.l) 29 | 30 | def mean(self): 31 | return np.mean(self.l) 32 | 33 | def median(self): 34 | return np.median(self.l) 35 | 36 | def overview(self): 37 | print(f"Count: {self.count()}") 38 | print(f"Sum: {self.sum()}") 39 | print(f"Min: {self.min()}") 40 | print(f"Max: {self.max()}") 41 | print(f"Range: {self.range()}") 42 | print(f"Mean: {self.mean()}") 43 | print(f"Median: {self.median()}") 44 | 45 | # Test 46 | data = Stats(heights) 47 | data.overview() 48 | 49 | # %% 50 | -------------------------------------------------------------------------------- /40_MachineLearning/00_Introduction/10_HighLevelCourseOverview.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/00_Introduction/10_HighLevelCourseOverview.pptx -------------------------------------------------------------------------------- /40_MachineLearning/00_Introduction/20_AIOverview.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/00_Introduction/20_AIOverview.pptx -------------------------------------------------------------------------------- /40_MachineLearning/00_Introduction/30_MachineLearning101.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/00_Introduction/30_MachineLearning101.pptx -------------------------------------------------------------------------------- /40_MachineLearning/00_Introduction/40_Models.pptx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/00_Introduction/40_Models.pptx -------------------------------------------------------------------------------- /40_MachineLearning/10_Regression/025_univariate_interactive.py: -------------------------------------------------------------------------------- 1 | # go to http://gollnickdata.com//2019/01/09/univariate-regression/ -------------------------------------------------------------------------------- /40_MachineLearning/10_Regression/030_univariate_regression.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | from plotnine import ggplot, aes, geom_point, geom_smooth, geom_text 5 | from sklearn.linear_model import LinearRegression 6 | from sklearn.metrics import r2_score 7 | # %% Introduction 8 | # Based on Star Wars characters, we will predict a person's weight from their height. 9 | 10 | # What are the independent and dependent variables? Well, the height is more or less defined in our genes, but our weight can be influenced. So I use height as the independent variable and mass as the dependent variable. 11 | 12 | #%% Data Import 13 | starwars = pd.read_csv('../data/Starwars.csv') 14 | starwars.head() 15 | # %% Shape 16 | starwars.shape 17 | 18 | # %% visualise the height and mass 19 | def plot_height_mass(df): 20 | g = (ggplot(df) 21 | + aes(x='height', y = 'mass') 22 | + geom_point() 23 | + geom_smooth(method = 'lm') 24 | ) 25 | return g 26 | 27 | plot_height_mass(starwars) 28 | 29 | 30 | # %% outlier 31 | starwars[starwars['mass']>=1000] 32 | 33 | # %% filter outlier 34 | starwars_filt = starwars[starwars['mass']<1000] 35 | plot_height_mass(starwars_filt) 36 | # %% 37 | starwars_filt.shape 38 | # %% Modeling 39 | X_train = np.array(starwars_filt['height']).reshape(-1, 1) 40 | y_train = np.array(starwars_filt['mass']).reshape(-1, 1) 41 | 42 | # %% 43 | regressor = LinearRegression() 44 | regressor.fit(X_train, y_train) 45 | # %% Create Predictions 46 | y_pred = regressor.predict(X_train).reshape(-1) 47 | starwars_filt['y_pred'] = y_pred 48 | # %% visualise result 49 | (ggplot(starwars_filt) 50 | + aes(x='height', y = 'mass', label = 'name') 51 | + geom_point() 52 | + geom_text() 53 | + geom_smooth(method = 'lm') 54 | + geom_point(aes(y='y_pred'), color = 'red') 55 | ) 56 | # %% calculate metrics 57 | coefficient_of_determination = r2_score(y_train, y_pred) 58 | coefficient_of_determination 59 | 60 | # %% 61 | -------------------------------------------------------------------------------- /40_MachineLearning/10_Regression/040_hubble_exercise.py: -------------------------------------------------------------------------------- 1 | # %% Hubble Exercise 2 | # In this tutorial you will take a look at measurements of Hubble (the telescope). Besides taking beautiful pictures, it measured speed and distance of supernovae. Similar data was used in 1929 by Hubble (the person), and he found that there is a linear relationship. 3 | 4 | 5 | # He discovered that galaxies appear to move away. This can be visualised with the red-shift of spectral lines. This observation was the first indication that the universe is expanding. 6 | 7 | # You will create a linear model based on the observations and create predictions. 8 | 9 | # Steps: 10 | # 0. Import required packages 11 | # 1. Import the data 12 | # 2. visualise the data for better understanding 13 | # 3. Create a linear regression model 14 | # 4.
create predictions 15 | # 5. calculate the $R^2$ metric 16 | # 6. Bonus: calculate the Hubble Constant: $H_0=\frac{v}{D}$ -------------------------------------------------------------------------------- /40_MachineLearning/10_Regression/040_hubble_solution.py: -------------------------------------------------------------------------------- 1 | # %% Hubble Exercise 2 | # In this tutorial you will take a look at measurements of Hubble (the telescope). Besides taking beautiful pictures, it measured speed and distance of supernovae. Similar data was used in 1929 by Hubble (the person), and he found that there is a linear relationship. 3 | 4 | 5 | # He discovered that galaxies appear to move away. This can be visualised with the red-shift of spectral lines. This observation was the first indication that the universe is expanding. 6 | 7 | # You will create a linear model based on the observations and create predictions. 8 | 9 | # Steps: 10 | #%% 0. Import required packages 11 | import numpy as np 12 | import pandas as pd 13 | from plotnine import ggplot, aes, geom_point, geom_smooth 14 | from sklearn.linear_model import LinearRegression 15 | from sklearn.metrics import r2_score 16 | 17 | #%% 1. Import the data 18 | hubble = pd.read_csv('../data/Hubble.csv') 19 | hubble.head() 20 | #%% 2. visualise the data for better understanding 21 | (ggplot(hubble) 22 | + aes(x='v', y='D') 23 | + geom_point() 24 | + geom_smooth(method='lm', color ='blue', se = False) 25 | ) 26 | 27 | #%% 3. Create a linear regression model 28 | X_train = np.array(hubble['v']).reshape(-1, 1) 29 | y_train = np.array(hubble['D']).reshape(-1, 1) 30 | 31 | regressor = LinearRegression() 32 | regressor.fit(X_train, y_train) 33 | #%% 4. create predictions 34 | hubble['y_pred'] = regressor.predict(X_train) 35 | 36 | #%% 5. calculate the $R^2$ metric 37 | coefficient_of_determination = r2_score(y_train, hubble['y_pred']) 38 | coefficient_of_determination 39 | 40 | #%% 6.
Bonus: calculate the Hubble Constant: $H_0=\frac{v}{D}$ 41 | hubble['H0'] = hubble['v'] / hubble['D'] 42 | np.mean(hubble['H0']) 43 | # %% 44 | # You can compare this to the most recent observed values, as shown in [this](https://en.wikipedia.org/wiki/Hubble%27s_law) Wikipedia article -------------------------------------------------------------------------------- /40_MachineLearning/10_Regression/060_polynomial_regression.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | import pandas as pd 4 | from plotnine import ggplot, aes, geom_point, geom_line 5 | from sklearn.linear_model import LinearRegression 6 | from sklearn.metrics import r2_score 7 | from sklearn.preprocessing import PolynomialFeatures 8 | import random 9 | # %% Data Preparation 10 | sample_data = pd.DataFrame(np.arange(-20,40, 0.5), columns=['x']) 11 | sample_data['y'] = 50 + 0.25 * (sample_data['x']-5)**3 12 | sample_data['y_noise'] = sample_data['y'] + np.random.normal(100, 500, sample_data.shape[0]) 13 | 14 | # %% 15 | (ggplot(sample_data) 16 | + aes(x = 'x', y = 'y_noise') 17 | + geom_point() 18 | + geom_line(aes(y ='y'), color ='red') 19 | ) 20 | 21 | #%% Model 22 | X_train = np.array(sample_data['x']).reshape(-1, 1) 23 | y_train = np.array(sample_data['y_noise']).reshape(-1, 1) 24 | 25 | # prepare the features 26 | degree = 2 27 | poly_feat = PolynomialFeatures(degree=degree) 28 | x_poly = poly_feat.fit_transform(X_train) 29 | 30 | # fit the model 31 | model = LinearRegression() 32 | model.fit(x_poly, y_train) 33 | 34 | # create predictions 35 | sample_data['y_poly_pred'] = model.predict(x_poly) 36 | 37 | # visualise results 38 | (ggplot(sample_data) 39 | + aes(x = 'x', y = 'y_noise') 40 | + geom_point() 41 | + geom_line(aes(y ='y'), color ='red') 42 | + geom_line(aes(y ='y_poly_pred'), color='green') 43 | ) 44 | # %% Calculate R**2 score 45 | r2 = r2_score(sample_data['y_noise'], sample_data['y_poly_pred']) 46 | r2 47 | 48 | # %% Adjusted R**2 49 | # calculate the adjusted R2, which is better suited here (it adds a cost for too many parameters) 50 | 51 | # $R^2_{adj}=1-(1-R^2)\cdot\frac{n-1}{n-p-1}$ 52 | 53 | p = degree # nr of independent variables 54 | n = sample_data.shape[0] # nr of observations 55 | adj_r2 = 1 - (1-r2)*(n-1)/(n-p-1) 56 | adj_r2 57 | 58 | # %% 59 | -------------------------------------------------------------------------------- /40_MachineLearning/10_Regression/080_multivariate_regression.py: -------------------------------------------------------------------------------- 1 | #%% Data Understanding 2 | 3 | # Wine variants of Portuguese "Vinho Verde" are analysed with regard to their chemical properties. Ultimately, we are interested in how these chemical properties influence wine quality. 4 | 5 | 6 | # These are our independent variables: 7 | 8 | # 1. fixed acidity 9 | # 2. volatile acidity 10 | # 3. citric acid 11 | # 4. residual sugar 12 | # 5. chlorides 13 | # 6. free sulfur dioxide 14 | # 7. total sulfur dioxide 15 | # 8. density 16 | # 9. pH 17 | # 10. sulphates 18 | # 11. alcohol 19 | 20 | # This is our dependent variable: 21 | 22 | # 12.
quality (score between 0 and 10) 23 | 24 | #%% Packages 25 | import numpy as np 26 | import pandas as pd 27 | from sklearn.linear_model import LinearRegression 28 | from sklearn.metrics import r2_score 29 | import seaborn as sns 30 | import matplotlib.pyplot as plt 31 | # %% 32 | wine = pd.read_csv('../data/winequality-red.csv', sep=';') 33 | wine.head() 34 | # %% 35 | wine.describe() 36 | 37 | # %% visualise the data 38 | sns.pairplot(wine.iloc[:, 7:12], hue='quality') 39 | plt.show() 40 | # %% calculate correlation matrix 41 | wine.corr() 42 | 43 | #%% Modeling 44 | X_train = np.array(wine.loc[:, wine.columns != 'quality']) 45 | y_train = np.array(wine['quality']).reshape(-1, 1) 46 | # %% 47 | regressor = LinearRegression() 48 | regressor.fit(X_train, y_train) 49 | # %% create predictions 50 | wine['quality_pred'] = regressor.predict(X_train) 51 | 52 | #%% 53 | sns.scatterplot(data=wine, x='quality', y='quality_pred') 54 | plt.show() 55 | # %% calculate metric 56 | r2_score(wine['quality'], wine['quality_pred']) 57 | 58 | # %% 59 | -------------------------------------------------------------------------------- /40_MachineLearning/20_ModelPrepAndEval/025_train_val_test_interactive.txt: -------------------------------------------------------------------------------- 1 | go to http://gollnickdata.com/2019/01/10/train-validation-test-split/ -------------------------------------------------------------------------------- /40_MachineLearning/20_ModelPrepAndEval/030_train_val_test_split.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | from sklearn.model_selection import train_test_split 5 | # %% 6 | df = pd.DataFrame(np.arange(1500).reshape((500, 3)), columns=['x1', 'x2', 'y']) 7 | df.head() 8 | # %% Train / Test Split 9 | X, y = df.loc[:, df.columns != 'y'], df['y'] # separate independent (X) and dependent (y) features 10 | 11 | # %% 12 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2) 13 | # %% 14 | print(f"X_train shape: {X_train.shape}\nX_test shape: {X_test.shape}\ny_train shape: {y_train.shape}\ny_test shape: {y_test.shape}") 15 | # %% 16 | -------------------------------------------------------------------------------- /40_MachineLearning/20_ModelPrepAndEval/050_resampling_techniques.py: -------------------------------------------------------------------------------- 1 | # 1. Introduction 2 | # 3 | # In this tutorial, different resampling methods are presented: the hold-out method, k-fold cross validation, and leave-one-out cross validation. We start with the bias-variance tradeoff and then analyse the different techniques in a real example on wine-quality data. 4 | # 5 | # 2. Bias-Variance Tradeoff 6 | # 7 | # In machine learning, two errors should be minimized at the same time: bias and variance. 8 | # 9 | # Bias: an error caused by false assumptions in the algorithm; high bias might cause underfitting. Variance: an error stemming from sensitivity to the training data; high variance might cause overfitting. In that case, noise in the training data is modeled rather than the underlying relationship. Both errors cannot be minimized at the same time, so a reasonable tradeoff is required. 10 | # 11 | # 3. Resampling Methods 12 | # 13 | # There are different resampling methods. In this post, hold-out, k-fold cross validation and leave-one-out cross validation are presented.
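# (Hedged aside, not part of the original script: a minimal numeric sketch of the
# bias-variance tradeoff from section 2, on synthetic data. A degree-1 polynomial
# tends to underfit (high bias) while a degree-15 polynomial tends to overfit
# (high variance); compare the train vs. test R2 values printed below.)
# %%
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(42)
x_toy = rng.uniform(-3, 3, size=(60, 1))                      # one noisy feature
y_toy = np.sin(x_toy).ravel() + rng.normal(0, 0.3, size=60)   # non-linear target
x_tr, x_te, y_tr, y_te = train_test_split(x_toy, y_toy, test_size=0.3, random_state=0)
for deg in (1, 15):
    toy_model = make_pipeline(PolynomialFeatures(deg), LinearRegression())
    toy_model.fit(x_tr, y_tr)
    print(f"degree {deg}: train R2 {toy_model.score(x_tr, y_tr):.2f}, "
          f"test R2 {toy_model.score(x_te, y_te):.2f}")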
14 | # 15 | # 16 | # 3.1 Holdout Method 17 | # 18 | # The easiest approach is to take a certain ratio of the data, e.g. 80 % for training, and the remaining 20 % for testing. A problem might be that it is not affordable to hold back a subset for validation. Also, the particular split might have a negative impact on the validation error. 19 | # 20 | # Some methods have been developed to overcome these limitations. 21 | # 22 | # 3.2 k-Fold Cross Validation (CV) 23 | # 24 | # This method is a generalisation of the hold-out method. The data is randomly split into k folds, typically 10. Let's assume 10 folds for now. Folds 2 to 10 are used for training the model, and the remaining first fold for validating it. Now the process is repeated, but this time the second fold is used for validation and folds 1 and 3 to 10 are used for training. This process is repeated k times. 25 | # 26 | # The final predictor is the average of the models. 27 | # 28 | # 3.3 Leave-One-Out Cross Validation (LOOCV) 29 | # 30 | # This method uses (n-1) observations for training and 1 observation for validation, and the process is repeated n times. It is numerically very costly and also prone to overfitting. 31 | # 32 | 33 | #%% Packages 34 | 35 | # %% 36 | import numpy as np 37 | import pandas as pd 38 | from sklearn.linear_model import LinearRegression 39 | from sklearn.metrics import r2_score 40 | from sklearn.model_selection import train_test_split, KFold, LeaveOneOut, cross_val_score 41 | import seaborn as sns 42 | 43 | #%% Data Understanding 44 | # 45 | 46 | # Wine variants of Portuguese “Vinho Verde” are analysed with regard to their chemical properties. Ultimately, we are interested in how these chemical properties influence wine quality. 47 | # 48 | # These are our independent variables: 49 | # 50 | # 1. fixed acidity 51 | # 2. volatile acidity 52 | # 3. citric acid 53 | # 4. residual sugar 54 | # 5. chlorides 55 | # 6. free sulfur dioxide 56 | # 7. total sulfur dioxide 57 | # 8. density 58 | # 9. pH 59 | # 10. sulphates 60 | # 11. alcohol 61 | # 62 | # This is our dependent variable: 63 | # 64 | # 12.
quality (score between 0 and 10) 65 | # 66 | 67 | # %% [markdown] 68 | # # Data Import 69 | 70 | # %% 71 | wine = pd.read_csv('../data/winequality-red.csv', sep=';') 72 | 73 | # %% 74 | X, y = np.array(wine.loc[:, wine.columns != 'quality']), np.array(wine.loc[:, wine.columns == 'quality']) 75 | 76 | # %% 77 | print(X.shape, y.shape) 78 | 79 | 80 | #%% Train / Test Split 81 | 82 | # %% 83 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2) 84 | 85 | regressor = LinearRegression() 86 | regressor.fit(X_train, y_train) 87 | y_pred = regressor.predict(X_test) 88 | r2_score(y_true=y_test, y_pred=y_pred) 89 | # run this several times to show how much the results vary 90 | 91 | 92 | #%% K-Fold Cross Validation 93 | # %% 94 | kf = KFold(n_splits=10, shuffle=True) # 10-fold CV 95 | kf.get_n_splits(X) 96 | print(kf) 97 | 98 | # %% 99 | scores = [] # initialise the individual scores 100 | model = LinearRegression() 101 | for train_index, test_index in kf.split(X): 102 | #print("TRAIN:", train_index, "TEST:", test_index) 103 | X_train, X_test, y_train, y_test = X[train_index], X[test_index], y[train_index], y[test_index] 104 | model.fit(X_train, y_train) 105 | scores.append(model.score(X_test, y_test)) 106 | print(scores, np.median(scores)) 107 | # without shuffle=True the folds would simply follow the row order --> keep shuffle=True (as above) and also specify random_state for reproducibility 108 | 109 | # Often you are only interested in the cross-validation score, so you can calculate just that. 110 | 111 | # %% 112 | # if only the aggregated result is requested (here: the median of the fold scores): 113 | np.median(cross_val_score(model, X, y, cv=10, scoring='r2')) 114 | 115 | 116 | #%% Leave One Out CV 117 | y_preds = [] # initialise the individual predictions 118 | y_act = [] 119 | model = LinearRegression() 120 | 121 | loocv = LeaveOneOut() 122 | loocv.get_n_splits(X) 123 | 124 | for train_index, test_index in loocv.split(X): 125 | #print("TRAIN:", train_index, "TEST:", test_index) 126 | X_train, X_test, y_train, y_test = X[train_index], X[test_index], y[train_index], y[test_index] 127 | model.fit(X_train, y_train) 128 | y_preds.append(model.predict(X_test)[0][0]) 129 | y_act.append(y_test[0][0]) 130 | y_preds[0:10] # get the first results 131 | 132 | # %% 133 | df = pd.DataFrame({'y_act': y_act, 'y_preds': y_preds}) 134 | 135 | 136 | # %% plot the correlation 137 | sns.regplot(data=df, x='y_act', y='y_preds', scatter_kws={'alpha': 0.1}) 138 | 139 | 140 | 141 | # %% 142 | -------------------------------------------------------------------------------- /40_MachineLearning/30_Regularization/020_regularization.py: -------------------------------------------------------------------------------- 1 | # %% packages 2 | import numpy as np 3 | import pandas as pd 4 | import seaborn as sns 5 | from sklearn.model_selection import train_test_split 6 | from sklearn.linear_model import LinearRegression, Ridge, Lasso, ElasticNet 7 | from sklearn.metrics import mean_squared_error 8 | from sklearn.pipeline import Pipeline 9 | from sklearn.preprocessing import StandardScaler, PolynomialFeatures 10 | # %% Data Import 11 | col = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT', 'MEDV'] 12 | housing = pd.read_csv('../data/housing.csv', delim_whitespace=True, names=col) 13 | 14 | 15 | print("Boston housing dataset has {} data points with {} variables each.".format(*housing.shape)) 16 | # %% Data Prep 17 | # The Boston Housing dataset is used for predicting housing prices.
Among its independent features are: 18 | 19 | # - RM...average number of rooms per dwelling 20 | # - LSTAT...% lower status of the population 21 | # - PTRATIO...pupil-teacher ratio by town 22 | 23 | # The dependent feature is: 24 | 25 | # - MEDV...Median value of owner-occupied homes in $1000's 26 | 27 | #%% separate independent / dependent features 28 | X = housing.drop('MEDV', axis=1) 29 | y = housing['MEDV'] 30 | # %% train / test split 31 | X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=123, test_size=0.4) 32 | # %% Modeling 33 | lr_model = LinearRegression() 34 | lr_model.fit(X_train, y_train) 35 | 36 | print('Training score: {}'.format(lr_model.score(X_train, y_train))) 37 | print('Test score: {}'.format(lr_model.score(X_test, y_test))) 38 | # %% 2nd order modeling to improve results 39 | steps = [ 40 | ('scalar', StandardScaler()), 41 | ('poly', PolynomialFeatures(degree=2)), 42 | ('model', LinearRegression()) 43 | ] 44 | 45 | pipeline = Pipeline(steps) 46 | 47 | pipeline.fit(X_train, y_train) 48 | 49 | print('Training score: {}'.format(pipeline.score(X_train, y_train))) 50 | print('Test score: {}'.format(pipeline.score(X_test, y_test))) 51 | 52 | # This already improved the performance a lot. 53 | 54 | # %% Ridge Regression (L2 Regularization) 55 | steps = [ 56 | ('scalar', StandardScaler()), 57 | ('poly', PolynomialFeatures(degree=2)), 58 | ('model', Ridge(alpha=10, fit_intercept=True)) 59 | ] 60 | 61 | ridge_pipe = Pipeline(steps) 62 | ridge_pipe.fit(X_train, y_train) 63 | 64 | print('Training Score: {}'.format(ridge_pipe.score(X_train, y_train))) 65 | print('Test Score: {}'.format(ridge_pipe.score(X_test, y_test))) 66 | 67 | # %% Lasso Regression (L1 Regularization) 68 | steps = [ 69 | ('scalar', StandardScaler()), 70 | ('poly', PolynomialFeatures(degree=2)), 71 | ('model', Lasso(alpha=0.3, fit_intercept=True)) 72 | ] 73 | 74 | lasso_pipe = Pipeline(steps) 75 | 76 | lasso_pipe.fit(X_train, y_train) 77 | 78 | print('Training score: {}'.format(lasso_pipe.score(X_train, y_train))) 79 | print('Test score: {}'.format(lasso_pipe.score(X_test, y_test))) 80 | 81 | # %% 82 | # Here, Ridge regression gives the better result. -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/040_ROCCurve_Interactive.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/40_Classification/040_ROCCurve_Interactive.txt -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/110_logistic_regression.py: -------------------------------------------------------------------------------- 1 | #%% Logistic Regression 2 | 3 | # The dataset is provided by the UCI Machine Learning Repository and deals with direct marketing of a bank. The target variable describes whether a customer subscribed (1) to a deposit or not (0).
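# (Hedged aside, not part of the original script: with an imbalanced 0/1 target,
# plain accuracy is misleading — always predicting the majority class already
# scores high. A minimal sketch of that majority-class baseline, which the
# script compares against further below:)
# %%
import numpy as np
y_toy = np.array([0] * 90 + [1] * 10)            # toy target: 90 % "no", 10 % "yes"
print(max(np.mean(y_toy), 1 - np.mean(y_toy)))   # 0.9 accuracy without learning anything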
4 | 5 | 6 | # %% Packages 7 | import numpy as np 8 | import pandas as pd 9 | from sklearn.preprocessing import LabelEncoder, StandardScaler 10 | from sklearn.pipeline import Pipeline 11 | from sklearn.linear_model import LogisticRegression 12 | from sklearn.model_selection import train_test_split 13 | from sklearn.metrics import confusion_matrix, accuracy_score, classification_report 14 | 15 | from plotnine import ggplot, aes, geom_bar, labs 16 | 17 | #%% Data Preparation 18 | 19 | banking = pd.read_csv("../data/direct_marketing.csv", sep=';') 20 | 21 | #%% 22 | banking.describe() 23 | 24 | #%% 25 | banking.head() 26 | 27 | #%% Target Variable 28 | (ggplot(data=banking) + 29 | aes(x='y') + 30 | geom_bar() + 31 | labs(title = "Target Variable Count", y = "Count", x = "Target Variable") 32 | ) 33 | # %% Filter Data 34 | 35 | # The new object banking_numerical only holds these numerical columns: 36 | 37 | cols_to_keep = ['age','balance','day', 'campaign', 'previous'] 38 | banking_numerical = banking[cols_to_keep] 39 | y = banking['y'].apply(lambda x: 1 if x == 'yes' else 0).tolist() 40 | 41 | # %% transform categorical data into numerical 42 | banking_cat = pd.get_dummies(banking[['default', 'housing', 'loan','marital', 'education', 'job', 'poutcome']]) 43 | 44 | #%% combine numeric and categorical columns 45 | X = pd.concat([banking_numerical, banking_cat], axis=1) 46 | 47 | 48 | 49 | #%% Perform Train / Test Split 50 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) 51 | 52 | #%% Set up Model Pipeline with StandardScaler, LogisticRegression 53 | steps = [ 54 | ('scaler', StandardScaler()), 55 | ('log_reg', LogisticRegression()) 56 | ] 57 | 58 | pipeline = Pipeline(steps) 59 | 60 | clf = pipeline.fit(X_train, y_train) 61 | 62 | 63 | #%% Calculate Predictions on Test data 64 | y_pred = clf.predict(X_test) 65 | 66 | 67 | #%% Calculate Baseline Classifier 68 | 1 - np.sum(y_test) / len(y_test) # accuracy of always predicting the majority class 'no' 69 | 70 | #%% Calculate Confusion Matrix on Test data 71 | confusion_matrix(y_true=y_test, y_pred=y_pred) 72 | 73 | #%% Compare our Model Accuracy to Baseline Model Accuracy 74 | accuracy_score(y_test, y_pred) 75 | # %% 76 | print(classification_report(y_test, y_pred)) 77 | # %% 78 | -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/210_decision_trees.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | import pandas as pd 4 | from sklearn.model_selection import train_test_split 5 | from sklearn.pipeline import Pipeline 6 | from sklearn.preprocessing import StandardScaler 7 | from sklearn.tree import DecisionTreeClassifier 8 | from sklearn.metrics import confusion_matrix, accuracy_score 9 | import seaborn as sns 10 | import matplotlib.pyplot as plt 11 | #%% Data Understanding 12 | # This dataset is originally from the National Institute of Diabetes and Digestive and Kidney Diseases. The objective of the dataset is to diagnostically predict whether or not a patient has diabetes, based on certain diagnostic measurements included in the dataset. Several constraints were placed on the selection of these instances from a larger database. In particular, all patients here are females at least 21 years old of Pima Indian heritage.
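# (Hedged aside, not part of the original script: the DecisionTreeClassifier used
# below chooses splits that minimise node impurity — by default the Gini impurity,
# gini = 1 - sum_k p_k^2 over the class shares p_k. A minimal sketch:)
# %%
import numpy as np

def gini(labels):
    """Gini impurity of a collection of class labels."""
    _, counts = np.unique(labels, return_counts=True)
    p = counts / counts.sum()
    return 1 - np.sum(p ** 2)

print(gini([0, 0, 0, 1]))  # 0.375 -> fairly pure node
print(gini([0, 1, 0, 1]))  # 0.5   -> maximally impure for two classes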
13 | 14 | # %% Data Import 15 | diabetes = pd.read_csv('../data/diabetes.csv') 16 | 17 | # %% Exploratory Data Analysis 18 | diabetes.head() 19 | 20 | # %% 21 | diabetes.describe() 22 | # %% correlation matrix 23 | corr = diabetes.corr() 24 | # generate mask for the upper triangle 25 | mask = np.zeros_like(corr, dtype=bool) # np.bool was removed in newer NumPy versions; the builtin bool works everywhere 26 | mask[np.triu_indices_from(mask)] = True 27 | # Generate a custom diverging colormap 28 | cmap = sns.diverging_palette(220, 10, as_cmap=True) 29 | 30 | sns.heatmap(corr, mask=mask, cmap=cmap, vmin=-1, vmax=1, center=0, linewidths=.5) 31 | plt.show() 32 | # %% Train / Test Split 33 | X = diabetes.drop(['Outcome'], axis=1) 34 | y = diabetes['Outcome'] 35 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=123) 36 | # %% Modeling 37 | steps = [ 38 | ('scaler', StandardScaler()), 39 | ('decision_tree', DecisionTreeClassifier()) 40 | ] 41 | 42 | pipeline = Pipeline(steps) 43 | 44 | # train the Decision Tree 45 | clf = pipeline.fit(X_train, y_train) 46 | 47 | # prediction for Test data 48 | y_pred = clf.predict(X_test) 49 | 50 | # %% Model Evaluation 51 | cm = confusion_matrix(y_test, y_pred) 52 | cm 53 | 54 | # %% 55 | accuracy_score(y_true=y_test, y_pred=y_pred) 56 | # %% Visualise Decision Tree 57 | from sklearn import tree 58 | tree.plot_tree(clf.named_steps['decision_tree']) # plot_tree needs the fitted estimator, not the whole Pipeline 59 | # %% 60 | plt.show() 61 | -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/310_random_forest.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | import pandas as pd 4 | from sklearn.model_selection import train_test_split 5 | from sklearn.pipeline import Pipeline 6 | from sklearn.preprocessing import StandardScaler 7 | from sklearn.ensemble import RandomForestClassifier 8 | from sklearn.metrics import confusion_matrix, accuracy_score 9 | import seaborn as sns 10 | import matplotlib.pyplot as plt 11 | #%% Data Understanding 12 | # This dataset is originally from the National Institute of Diabetes and Digestive and Kidney Diseases. The objective of the dataset is to diagnostically predict whether or not a patient has diabetes, based on certain diagnostic measurements included in the dataset. Several constraints were placed on the selection of these instances from a larger database. In particular, all patients here are females at least 21 years old of Pima Indian heritage.
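# (Hedged aside, not part of the original script: with bootstrap=True below, each
# tree is trained on a bootstrap sample drawn with replacement, so on average only
# about 1 - 1/e ≈ 63 % of the rows reach any single tree. A minimal sketch:)
# %%
import numpy as np
rng = np.random.default_rng(0)
n = 10_000
boot_idx = rng.integers(0, n, size=n)  # bootstrap indices, drawn with replacement
print(len(np.unique(boot_idx)) / n)    # ~0.632 unique rows per bootstrap sample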
13 | 14 | # %% Data Import 15 | diabetes = pd.read_csv('../data/diabetes.csv') 16 | 17 | # %% Exploratory Data Analysis 18 | diabetes.head() 19 | 20 | # %% 21 | diabetes.describe() 22 | # %% correlation matrix 23 | corr = diabetes.corr() 24 | # generate mask for the upper triangle 25 | mask = np.zeros_like(corr, dtype=bool) # np.bool was removed in newer NumPy versions; the builtin bool works everywhere 26 | mask[np.triu_indices_from(mask)] = True 27 | # Generate a custom diverging colormap 28 | cmap = sns.diverging_palette(220, 10, as_cmap=True) 29 | 30 | sns.heatmap(corr, mask=mask, cmap=cmap, vmin=-1, vmax=1, center=0, linewidths=.5) 31 | plt.show() 32 | 33 | # %% Train / Test Split 34 | X = diabetes.drop(['Outcome'], axis=1) 35 | y = diabetes['Outcome'] 36 | X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=123) 37 | 38 | # %% Modeling 39 | steps = [ 40 | ('scaler', StandardScaler()), 41 | ('random_forest', RandomForestClassifier(n_estimators = 1000, random_state = 42, bootstrap=True)) 42 | ] 43 | 44 | pipeline = Pipeline(steps) 45 | 46 | # train the Random Forest 47 | clf = pipeline.fit(X_train, y_train) 48 | 49 | # prediction for Test data 50 | y_pred = clf.predict(X_test) 51 | 52 | #%% 53 | y_pred_class = [round(i, 0) for i in y_pred] 54 | cm = confusion_matrix(y_test, y_pred_class) 55 | cm 56 | # %% 57 | accuracy_score(y_true=y_test, y_pred=y_pred_class) * 100 58 | # %% Variable Importance 59 | importances = list(RandomForestClassifier(n_estimators = 1000, random_state = 42, bootstrap=True).fit(X_train, y_train).feature_importances_) # note: this refits a fresh forest; clf.named_steps['random_forest'].feature_importances_ would reuse the one trained above 60 | 61 | # %% 62 | feature_names = X_train.columns 63 | feature_names 64 | 65 | # create a list with feature importances 66 | feature_importance = pd.DataFrame([(feature_names, round(importances, 2)) for feature_names, importances in zip(feature_names, importances)], 67 | columns=['feature', 'importance']) 68 | 69 | # sort by importance, most important feature first 70 | feature_importance = feature_importance.sort_values(by=['importance'], ascending=False) 71 | 72 | g = sns.barplot(data=feature_importance, x='feature', y='importance') 73 | g.set_xticklabels(labels=feature_importance['feature'], rotation=90) # set x tick-labels vertically 74 | feature_importance 75 | plt.show() 76 | 77 | # %% 78 | -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/410_svm.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import numpy as np 3 | import pandas as pd 4 | import random 5 | 6 | from sklearn.model_selection import train_test_split 7 | from sklearn.pipeline import Pipeline 8 | from sklearn.preprocessing import StandardScaler 9 | from sklearn.ensemble import RandomForestRegressor 10 | from sklearn.metrics import confusion_matrix, accuracy_score 11 | from sklearn.svm import SVC # SVMs for Classification 12 | 13 | # Visualisation 14 | import seaborn as sns 15 | import matplotlib.pyplot as plt 16 | # %% Data Prep 17 | x = random.sample(population=set(np.linspace(start=-10, stop=10, num=100000)), k=1000) 18 | y = random.sample(population=set(np.linspace(start=-10, stop=10, num=100000)), k=1000) 19 | z = [(x**2 + y**2) for x, y in zip(x, y)] 20 | 21 | df = pd.DataFrame(list(zip(x, y, z)), columns=['x', 'y', 'z']) 22 | df['class'] = [1 if i<50 else 0 for i in df['z']] 23 | # %% 24 | sns.scatterplot(data=df, x='x', y='y', hue='class') 25 | plt.show() 26 | # %% Separate Independent / Dependent Features 27 | X = df[['x', 'y', 'z']] 28 | y = df['class'] 29 | 30 | #%% Train Test Split 31 |
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=12) 32 | # %% Modeling 33 | clf = SVC(kernel='rbf') 34 | clf.fit(X_train, y_train) 35 | 36 | # %% create preds 37 | y_test_pred = clf.predict(X_test) 38 | 39 | df_test = pd.DataFrame(X_test) 40 | df_test['y_test_pred'] = y_test_pred 41 | df_test['y_test'] = y_test 42 | # %% 43 | sns.scatterplot(data=df_test, x='x', y='y', hue='y_test_pred') 44 | plt.show() 45 | # %% 46 | confusion_matrix(y_test, y_test_pred) 47 | 48 | # %% 49 | accuracy_score(y_true=y_test, y_pred=y_test_pred) 50 | # %% 51 | -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/510_ensemble_xbg.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | from sklearn.metrics import classification_report 5 | from sklearn.model_selection import train_test_split 6 | from sklearn.datasets import make_blobs 7 | from sklearn.ensemble import GradientBoostingClassifier 8 | 9 | import seaborn as sns 10 | import matplotlib.pyplot as plt 11 | # %% Data Prep 12 | X, y = make_blobs(random_state=0) 13 | # %% 14 | df = pd.DataFrame(X) 15 | df.columns = ['x1', 'x2'] 16 | df['y_true'] = y 17 | 18 | sns.scatterplot(data=df, x='x1', y='x2', hue='y_true') 19 | plt.show() 20 | # %% Train / Test Split 21 | X_train, X_test, y_train, y_test = train_test_split( 22 | X, y, test_size=0.33, random_state=42) 23 | # %% Modeling 24 | clf = GradientBoostingClassifier(n_estimators=100, learning_rate=1.0, max_depth=1, random_state=0).fit(X_train, y_train) 25 | # %% Model Evaluation 26 | clf.score(X_test, y_test) 27 | # %% 28 | -------------------------------------------------------------------------------- /40_MachineLearning/40_Classification/diabetes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/40_Classification/diabetes.png -------------------------------------------------------------------------------- /40_MachineLearning/50_AssociationRules/30_apriori_intro.py: -------------------------------------------------------------------------------- 1 | # %% Packages 2 | import pandas as pd 3 | import numpy as np 4 | from mlxtend.frequent_patterns import apriori, association_rules 5 | from plotnine import ggplot, aes, theme, geom_col, element_text 6 | 7 | # %% Data Preparation 8 | # open the file and import all transactions 9 | 10 | retail = pd.read_excel("../data/OnlineRetail.xlsx") 11 | retail.head() 12 | # %% 13 | retail.columns 14 | 15 | # %% Data Cleaning 16 | # stripping extra spaces in the description 17 | retail['Description'] = retail['Description'].str.strip() 18 | 19 | 20 | # %% 21 | retail.shape 22 | 23 | # %% Dropping the rows without any invoice number 24 | retail.dropna(subset=['InvoiceNo'], axis=0, inplace=True) 25 | retail['InvoiceNo'] = retail['InvoiceNo'].astype('str') 26 | retail.shape 27 | 28 | # %% Modeling 29 | # Create a basket (sparse matrix with transactions x items) 30 | basket_germany = (retail[retail['Country'] == 'Germany'].groupby(['InvoiceNo', 'Description'])['Quantity'].sum().unstack().reset_index().fillna(0).set_index('InvoiceNo') 31 | ) 32 | basket_germany 33 | # %% 34 | def hot_encode(x): 35 | # one-hot encoding: any purchased quantity becomes 1, everything else 0 36 | if x >= 1: 37 | return 1 38 | return 0 39 | # %% 40 | basket_germany = 
basket_germany.applymap(hot_encode) 41 | # %% 42 | freq_items = pd.DataFrame(apriori(basket_germany, min_support=0.05, use_colnames=True)).sort_values('support', ascending=False) 43 | freq_items 44 | # %% 45 | (ggplot(data=freq_items[:40], mapping=aes(x='itemsets', y='support')) + geom_col() + theme(axis_text=element_text(rotation=90, hjust=1))) 46 | # %% 47 | rules = association_rules(freq_items, metric='lift', min_threshold=1) 48 | rules 49 | # %% 50 | # all itemsets with >= 2 items and a support of at least 10 % 51 | freq_items['length'] = freq_items['itemsets'].apply(lambda x: len(x)) 52 | # %% 53 | freq_items[(freq_items['length'] >= 2) & (freq_items['support'] >= 0.1)] # both comparisons need parentheses, because & binds tighter than >= 54 | # %% 55 | -------------------------------------------------------------------------------- /40_MachineLearning/60_Clustering/30_kmeans_lab.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | # data handling 3 | import numpy as np 4 | import pandas as pd 5 | 6 | # modeling 7 | from sklearn.cluster import KMeans 8 | 9 | # prepare sample data 10 | from sklearn.datasets import make_blobs 11 | 12 | # data visualisation 13 | from plotnine import ggplot, aes, geom_point, labs 14 | # %% Data Prep 15 | X, y_true = make_blobs(n_samples=1000, 16 | centers=3, 17 | cluster_std=1, 18 | random_state=123) 19 | 20 | df = pd.DataFrame(X) 21 | df.columns = ['x', 'y'] 22 | df['y_true'] = [str(i) for i in y_true.tolist()] 23 | 24 | (ggplot(df) + 25 | aes(x='x', y='y', color='y_true') + 26 | geom_point() + 27 | labs(x='x', y='y', color='target') 28 | ) 29 | 30 | #%% Modeling 31 | kmeans = KMeans(n_clusters=3) 32 | kmeans.fit(X) 33 | 34 | # get cluster centers 35 | centers = pd.DataFrame(kmeans.cluster_centers_) 36 | centers.columns = ['x', 'y'] 37 | 38 | # create predictions 39 | df['y_kmeans'] = kmeans.predict(X) 40 | df['y_kmeans'] = df['y_kmeans'].astype("category") 41 | 42 | # visualise results 43 | (ggplot(data=df, mapping=aes(x='x', y='y')) + 44 | geom_point(mapping=aes(color='y_kmeans')) + 45 | geom_point(data=centers, mapping=aes(x='x', y='y'), color='red', size=2) + 46 | labs(x='x', y='y', color='target') 47 | ) 48 | # %% 49 | -------------------------------------------------------------------------------- /40_MachineLearning/60_Clustering/40_kmeans_exercise.py: -------------------------------------------------------------------------------- 1 | # 1. Import required packages. 2 | # %% 3 | # place the code here 4 | 5 | # # Data Preparation 6 | 7 | # 2. Choose the same data as in the Lab-lecture and prepare the dataframe "df" 8 | 9 | # %% 10 | # place the code here 11 | 12 | # We can clearly see two different patterns - an inner ring and an outer ring. The task is to find an algorithm that can distinguish both classes. 13 | 14 | # %% 15 | # place the code here 16 | 17 | # 3. Check visually that the data was created successfully 18 | 19 | # %% 20 | # place the code here 21 | 22 | # # Modeling 23 | 24 | # 4. Perform the clustering based on the KMeans algorithm. 25 | 26 | # %% 27 | # place the code here 28 | 29 | # %% 30 | # place the code here 31 | 32 | # %% 33 | # place the code here 34 | 35 | # 5. Create a graph that shows the result. 36 | 37 | # %% 38 | # place the code here 39 | 40 | 41 | -------------------------------------------------------------------------------- /40_MachineLearning/60_Clustering/50_kmeans_solution.py: -------------------------------------------------------------------------------- 1 | 2 | #%% Packages 3 | # 1. Import required packages.
4 | import numpy as np 5 | import pandas as pd 6 | import random 7 | from sklearn.cluster import DBSCAN, KMeans 8 | 9 | # visualisation 10 | from plotnine import * 11 | 12 | 13 | # Data Preparation 14 | #%% 2. Choose the same data as in the Lab-lecture and prepare the dataframe "df" 15 | num_points = 4000 16 | x = random.sample(population=list(np.linspace(start=-10, stop=10, num=num_points)), k=num_points) # sample from a list; random.sample no longer accepts sets 17 | y = random.sample(population=list(np.linspace(start=-10, stop=10, num=num_points)), k=num_points) 18 | z = [(x**2 + y**2) for x, y in zip(x, y)] 19 | 20 | df = pd.DataFrame(list(zip(x, y, z)), columns=['x', 'y', 'z']) 21 | df['class'] = [1 if ((i < 10) | (80 < i < 100)) else 0 for i in df['z']] 22 | 23 | 24 | # We can clearly see two different patterns - an inner ring and an outer ring. The task is to find an algorithm that can distinguish both classes. 25 | df = df[df['class']==1] # filter for class 1 26 | df = df.drop(['z', 'class'], axis=1) # drop columns that are no longer needed 27 | 28 | 29 | #%% 3. Check visually that the data was created successfully 30 | 31 | (ggplot(data=df) + 32 | aes(x='x', y='y') + 33 | geom_point() 34 | ) 35 | 36 | # # Modeling 37 | #%% 4. Perform the clustering based on the KMeans algorithm. 38 | clustering = KMeans(n_clusters=2).fit(df) 39 | 40 | df['labels'] = clustering.labels_ 41 | df['labels'] = df['labels'].astype("category") 42 | 43 | #%% 5. Create a graph that shows the result. 44 | (ggplot(data=df) + 45 | aes(x='x', y='y', color='labels') + 46 | geom_point() 47 | ) 48 | 49 | 50 | # KMeans does not distinguish the two ring-shaped classes very well. -------------------------------------------------------------------------------- /40_MachineLearning/60_Clustering/70_hierarchical_lab.py: -------------------------------------------------------------------------------- 1 | # %% Data Preparation 2 | import numpy as np 3 | import pandas as pd 4 | import random 5 | 6 | # Modeling 7 | from scipy.cluster.hierarchy import dendrogram, linkage 8 | from sklearn.cluster import AgglomerativeClustering 9 | 10 | # Visualisation 11 | from matplotlib import pyplot as plt 12 | from plotnine import ggplot, aes, geom_point, geom_text 13 | 14 | # %% Data Preparation 15 | num_points = 10 16 | x = random.sample(population=list(np.linspace(start=0, stop=10, num=num_points)), k=num_points) 17 | y = random.sample(population=list(np.linspace(start=0, stop=10, num=num_points)), k=num_points) 18 | labels = range(1, num_points+1) 19 | df = pd.DataFrame(list(zip(x, y, labels)), columns=['x', 'y', 'point_labels']) 20 | 21 | 22 | # %% 23 | (ggplot(data=df) 24 | + aes(x='x', y='y', label='point_labels') 25 | + geom_point(size=0) 26 | + geom_text(size=20) 27 | ) 28 | 29 | # %% Modeling 30 | X = np.array(df[['x', 'y']]) 31 | X 32 | 33 | linked = linkage(X, 'single') 34 | 35 | plt.figure(figsize=(10, 7)) 36 | dendrogram(linked, 37 | orientation='top', 38 | labels=labels, 39 | distance_sort='descending', 40 | show_leaf_counts=True) 41 | plt.show() 42 | 43 | 44 | # %% Agglomerative Clustering 45 | cluster = AgglomerativeClustering(n_clusters=3, linkage='ward') # ward linkage implies euclidean distances 46 | cluster.fit_predict(X) 47 | 48 | 49 | # %% 50 | df['class_labels'] = cluster.labels_ # assign the result to a new column 51 | df['class_labels'] = df['class_labels'].astype('category') 52 | (ggplot(data=df) 53 | + aes(x='x', y='y', color='class_labels', label='point_labels') 54 | + geom_point(size=0) 55 | + geom_text(size=20) 56 | ) 57 | 58 | 59 | 60 | 61 | 62 | # %% 63 |
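# As a sketch for comparison (reusing X, labels, linkage, dendrogram and plt from above): the dendrogram above uses single linkage, while the AgglomerativeClustering step uses ward linkage, so a ward dendrogram makes the three-cluster cut easier to inspect.
# %%
linked_ward = linkage(X, 'ward')  # ward linkage, matching the clustering step above
plt.figure(figsize=(10, 7))
dendrogram(linked_ward, orientation='top', labels=labels, distance_sort='descending', show_leaf_counts=True)
plt.show()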
-------------------------------------------------------------------------------- /40_MachineLearning/60_Clustering/90_dbscan_lab.py: -------------------------------------------------------------------------------- 1 | #%% Packages 2 | import numpy as np 3 | import pandas as pd 4 | import random 5 | from sklearn.cluster import DBSCAN, KMeans 6 | # visualisation 7 | from plotnine import ggplot, aes, geom_point 8 | 9 | # %% Data Preparation 10 | 11 | num_points = 4000 12 | x = random.sample(population=list(np.linspace(start=-10, stop=10, num=num_points)), k=num_points) 13 | y = random.sample(population=list(np.linspace(start=-10, stop=10, num=num_points)), k=num_points) 14 | z = [(x**2 + y**2) for x, y in zip(x, y)] 15 | 16 | df = pd.DataFrame(list(zip(x, y, z)), columns=['x', 'y', 'z']) 17 | df['class'] = [1 if ((i < 10) | (80 < i < 100)) else 0 for i in df['z']] 18 | 19 | # We can clearly see two different patterns - an inner ring and an outer ring. The task is to find an algorithm that can distinguish both classes. 20 | 21 | df = df[df['class']==1] # filter for class 1 22 | df = df.drop(['z', 'class'], axis=1) # drop columns that are no longer needed 23 | 24 | 25 | # %% Visualise the prepared data 26 | (ggplot(data=df) + 27 | aes(x='x', y='y') + 28 | geom_point() 29 | ) 30 | 31 | # %% Modeling 32 | # clustering = KMeans(n_clusters=2).fit(df) 33 | clustering = DBSCAN(eps=3, min_samples=2).fit(df) 34 | 35 | df['labels'] = [str(i) for i in clustering.labels_] 36 | 37 | (ggplot(data=df) + 38 | aes(x='x', y='y', color='labels') + 39 | geom_point() 40 | ) 41 | 42 | 43 | # Now the two different classes are separated nicely. 44 | -------------------------------------------------------------------------------- /40_MachineLearning/70_DimensionalityReduction/20_fa_lab.py: -------------------------------------------------------------------------------- 1 | # %% Factor Analysis 2 | #%% Packages 3 | import pandas as pd 4 | import numpy as np 5 | from sklearn import datasets 6 | from sklearn.preprocessing import StandardScaler 7 | from sklearn.pipeline import Pipeline 8 | from sklearn.decomposition import FactorAnalysis 9 | from plotnine import ggplot, aes, geom_point, scale_color_discrete, labs 10 | from factor_analyzer.factor_analyzer import calculate_bartlett_sphericity, calculate_kmo 11 | from factor_analyzer import FactorAnalyzer 12 | import matplotlib.pyplot as plt 13 | 14 | #%% Data Preparation 15 | # We will work with the **iris** dataset. It is shipped with *sklearn*.
16 | 17 | iris = datasets.load_iris() 18 | 19 | X = iris.data 20 | y = iris.target 21 | 22 | 23 | # %% 24 | iris.target_names 25 | 26 | 27 | # %% 28 | X = pd.DataFrame(X, columns=['sepal_length', 'sepal_width', 'petal_length', 'petal_width']) 29 | 30 | print(X.head()) # head of dataframe 31 | print(X.columns) # columns 32 | print(X.shape) # object shape 33 | 34 | # %% Factor Analysis 35 | 36 | #%% Test for Factorability 37 | print('Bartlett-sphericity Chi-square: {}'.format(calculate_bartlett_sphericity(X)[0])) 38 | print('Bartlett-sphericity P-value: {}'.format(calculate_bartlett_sphericity(X)[1])) 39 | 40 | kmo_all, kmo_model = calculate_kmo(X) 41 | print('KMO score: {}'.format(kmo_model)) 42 | 43 | # a KMO score of around 0.6 or higher indicates that the data is suitable for factor analysis 44 | 45 | #%% Number of Factors 46 | fa = FactorAnalyzer() 47 | fa.fit(X) 48 | ev, v = fa.get_eigenvalues() 49 | plt.plot(range(1, X.shape[1]+1), ev) 50 | plt.show() 51 | # %% Modeling 52 | steps = [ 53 | ('scalar', StandardScaler()), 54 | ('fa', FactorAnalysis(n_components=2, random_state=123)) 55 | ] 56 | 57 | pipeline = Pipeline(steps) 58 | 59 | factors = pipeline.fit_transform(X) 60 | 61 | # %% [markdown] 62 | # We want to have a target vector with names instead of numbers. To achieve this, we need to create a mapping, and then use a list comprehension to create a new list *y_strings*. 63 | 64 | # %% 65 | mapping = {0: 'setosa', 1: 'versicolor', 2: 'virginica'} 66 | y_strings = [mapping[val] for val in y] 67 | 68 | 69 | 70 | # %% 71 | factors_df = pd.DataFrame(factors, columns=['F1', 'F2']) 72 | factors_df['target'] = y_strings 73 | 74 | 75 | # %% 76 | (ggplot(data=factors_df) 77 | + aes(x='F1', y='F2', color='target') 78 | + geom_point() 79 | + labs(x='Factor 1', y='Factor 2', title='Factor Analysis for Iris Dataset') 80 | + scale_color_discrete(name='Iris Class') 81 | ) 82 | 83 | 84 | # %% 85 | factors 86 | 87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /40_MachineLearning/70_DimensionalityReduction/40_PCA_Lab.py: -------------------------------------------------------------------------------- 1 | # %% Required Packages 2 | import numpy as np 3 | import pandas as pd 4 | from sklearn import datasets 5 | from sklearn.pipeline import Pipeline 6 | from sklearn.preprocessing import StandardScaler 7 | from sklearn.decomposition import PCA 8 | from sklearn.model_selection import train_test_split 9 | 10 | from plotnine import ggplot, aes, geom_point, labs, theme_bw 11 | import seaborn as sns 12 | import matplotlib.pyplot as plt 13 | # %% PCA for Data Visualisation 14 | # PCA can be used to reduce the number of dimensions, so that you can see differences in a lower dimension. If the dimension is two or three you can plot the result. 15 | # %% Data Preparation 16 | # We will work with the **iris** dataset. It is shipped with *sklearn*. 17 | 18 | iris = datasets.load_iris() 19 | 20 | 21 | # %% 22 | iris_data = iris.data 23 | 24 | 25 | # %% 26 | X = pd.DataFrame(iris_data, columns=['sepal_length', 'sepal_width', 'petal_length', 'petal_width']) 27 | X.head() 28 | 29 | 30 | # %% 31 | y = iris.target 32 | 33 | #%% Modeling 34 | # Standardisation of data, followed by direct implementation of PCA. PCA is performed to reduce the number of dimensions from 4 to 2. 35 | # 36 | # Scaling of the data is relevant, because features can be of very different ranges. A quick look at the feature ranges (next cell) illustrates this.
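# A short sketch (reusing the iris dataframe X defined above) of the range differences that motivate scaling:
# %%
X.describe().loc[['min', 'max']]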
37 | pca = PCA(n_components=2) 38 | 39 | steps = [ 40 | ('scalar', StandardScaler()), 41 | ('pca', pca) 42 | ] 43 | 44 | pipeline = Pipeline(steps) 45 | 46 | prin_comp = pipeline.fit_transform(X) 47 | 48 | #%% PCA Loadings 49 | pca_loadings = pca.components_.T * np.sqrt(pca.explained_variance_) 50 | pca_loading_matrix = pd.DataFrame(pca_loadings, columns = 51 | ['PC{}'.format(i) for i in range(1, 3)], 52 | index=iris.feature_names 53 | ) 54 | sns.heatmap(pca_loading_matrix, cmap=sns.diverging_palette(200, 30, n=200), annot=True) 55 | plt.show() 56 | # %% 57 | prin_df = pd.DataFrame(data=prin_comp, 58 | columns=['PC1', 'PC2']) 59 | 60 | y_df = pd.DataFrame([str(i) for i in y], columns=['y']) 61 | 62 | prin_df = pd.concat([prin_df, y_df], axis=1) 63 | 64 | 65 | # %% 66 | (ggplot(data=prin_df) 67 | + aes(x='PC1', y='PC2') 68 | + geom_point(aes(color='y')) 69 | + labs(title='Principal Component Analysis for Iris Dataset') 70 | + theme_bw() 71 | ) 72 | 73 | # %% [markdown] 74 | # The classes are now much easier to separate, even though we reduced the dimensions from four to two. 75 | # %% [markdown] 76 | # The explained variance can be extracted from the pca object. 77 | 78 | # %% 79 | pca.explained_variance_ratio_ 80 | 81 | # %% [markdown] 82 | # # PCA for Speeding up ML 83 | # %% [markdown] 84 | # Import the data for Mnist. It is part of *sklearn*. 85 | 86 | # %% 87 | mnist = datasets.load_digits() 88 | 89 | 90 | # %% 91 | #images_and_labels = list(zip(mnist.images, mnist.target)) 92 | 93 | 94 | # %% 95 | n_samples = len(mnist.images) 96 | X = mnist.images.reshape((n_samples, -1)) 97 | y = mnist.target 98 | 99 | 100 | # %% 101 | X.shape, y.shape 102 | 103 | # %% [markdown] 104 | # The data is split into training and test sets. 105 | 106 | # %% 107 | X_train, X_test, y_train, y_test = train_test_split(mnist.data, mnist.target, test_size=0.2, random_state=0) 108 | 109 | 110 | # %% 111 | X_train.shape 112 | 113 | 114 | # %% [markdown] 115 | # **fit()** calculates mean and standard deviation. We need to ensure that these values are derived only from the training data, but that the transformations are applied to both training and test data. 116 | 117 | # %% 118 | scaler = StandardScaler() 119 | pca = PCA(n_components=2) 120 | steps = [ 121 | ('scalar', scaler), 122 | ('pca', pca) 123 | ] 124 | 125 | pipeline = Pipeline(steps) 126 | 127 | X_train_res = pipeline.fit_transform(X_train) 128 | X_test_res = pipeline.transform(X_test) 129 | 130 | # %% [markdown] 131 | # Now we will create a graph that shows PC1 and PC2. The colors indicate the classes (digits). We do this for the training and test data. 132 | # %% [markdown] 133 | # First, the training data. 134 | 135 | # %% 136 | prin_df = pd.DataFrame(data=X_train_res, columns=['PC1', 'PC2']) 137 | y_df = pd.DataFrame(y_train, columns=['y']) 138 | y_df['y'] = y_df['y'].astype('category') 139 | prin_df = pd.concat([prin_df, y_df], axis=1) 140 | 141 | (ggplot(data=prin_df) 142 | + aes(x='PC1', y='PC2') 143 | + geom_point(aes(color='y')) 144 | + labs(title='Principal Component Analysis for Mnist Train-Dataset') 145 | + theme_bw() 146 | ) 147 | 148 | # %% [markdown] 149 | # Now the test data.
150 | 151 | # %% 152 | prin_df = pd.DataFrame(data=X_test_res, columns=['PC1', 'PC2']) 153 | y_df = pd.DataFrame(y_test, columns=['y']) 154 | y_df['y'] = y_df['y'].astype('category') 155 | prin_df = pd.concat([prin_df, y_df], axis=1) 156 | 157 | (ggplot(data=prin_df) 158 | + aes(x='PC1', y='PC2') 159 | + geom_point(aes(color='y')) 160 | + labs(title='Principal Component Analysis for Mnist Test-Dataset') 161 | + theme_bw() 162 | ) 163 | 164 | 165 | # %% 166 | 167 | 168 | 169 | -------------------------------------------------------------------------------- /40_MachineLearning/70_DimensionalityReduction/50_pca_exercise.py: -------------------------------------------------------------------------------- 1 | # Intro: You will use the "digits" dataset and apply PCA to it 2 | 3 | 4 | #%% Packages 5 | # 1. Import required packages 6 | # Data Preparation 7 | 8 | # Modeling 9 | 10 | # Visualisation 11 | 12 | 13 | # # Data Preparation 14 | 15 | 16 | #%% 2. Load required data 17 | 18 | 19 | # # Modeling 20 | 21 | #%% 3. Apply PCA and reduce the number of dimensions to two! 22 | 23 | 24 | #%% 4. Visualise the result with a graphic framework of your choice. 25 | 26 | -------------------------------------------------------------------------------- /40_MachineLearning/70_DimensionalityReduction/60_pca_solution.py: -------------------------------------------------------------------------------- 1 | # Intro: You will use the "digits" dataset and apply PCA to it 2 | 3 | 4 | #%% Packages 5 | # 1. Import required packages 6 | # Data Preparation 7 | import numpy as np 8 | import pandas as pd 9 | from sklearn.datasets import load_digits 10 | 11 | # Modeling 12 | from sklearn.pipeline import Pipeline 13 | from sklearn.preprocessing import StandardScaler 14 | from sklearn.decomposition import PCA 15 | from sklearn.model_selection import train_test_split 16 | 17 | # Visualisation 18 | from plotnine import * 19 | 20 | 21 | # # Data Preparation 22 | 23 | 24 | #%% 2. Load required data 25 | digits = load_digits() 26 | digits_data = digits.data 27 | digits_data.shape 28 | 29 | 30 | # # Modeling 31 | 32 | #%% 3. Apply PCA and reduce the number of dimensions to two! 33 | pca = PCA(2) # project from 64 to 2 dimensions 34 | projected = pca.fit_transform(digits_data) 35 | print("Shape of Digits: %s" % str(digits_data.shape)) 36 | print("Shape of Projection: %s" % str(projected.shape)) 37 | 38 | df_proj = pd.DataFrame(projected, columns=['PC1', 'PC2']) 39 | df_proj['true_label'] = pd.Categorical(digits.target) 40 | 41 | 42 | #%% 4. Visualise the result with a graphic framework of your choice. 43 | 44 | (ggplot(data=df_proj) 45 | + aes(x='PC1', y='PC2', color='true_label') 46 | + geom_point() 47 | ) 48 | 49 | 50 | -------------------------------------------------------------------------------- /40_MachineLearning/70_DimensionalityReduction/80_t_SNE_lab.py: -------------------------------------------------------------------------------- 1 | # %% tSNE 2 | # # Introduction 3 | # %% [markdown] 4 | # t-Distributed Stochastic Neighbor Embedding (t-SNE) is an unsupervised, non-linear technique primarily used for data exploration and visualizing high-dimensional data. In simpler terms, t-SNE gives you a feel or intuition of how the data is arranged in a high-dimensional space. It was developed by Laurens van der Maaten and Geoffrey Hinton in 2008. 
[source](https://towardsdatascience.com/an-introduction-to-t-sne-with-python-example-5a3a293108d1) 5 | # %% [markdown] 6 | # # Packages 7 | 8 | # %% 9 | import pandas as pd 10 | from plotnine import ggplot, aes, geom_point # ggplot 11 | from sklearn import datasets # digits dataset 12 | from sklearn.manifold import TSNE 13 | import seaborn as sns 14 | import matplotlib.pyplot as plt 15 | # %% [markdown] 16 | # # Data Preparation 17 | # %% [markdown] 18 | # We will use the MNIST-style handwritten digits dataset shipped with sklearn. 19 | 20 | # %% 21 | digits = datasets.load_digits() 22 | n_samples = len(digits.images) 23 | X = digits.images.reshape((n_samples, -1)) 24 | y = digits.target 25 | X.shape, y.shape 26 | 27 | #%% 28 | digits.images.shape 29 | sns.heatmap(digits.images[0]) 30 | plt.show() 31 | 32 | 33 | # %% 34 | digits_embedded = TSNE(n_components=2).fit_transform(X) 35 | 36 | 37 | # %% 38 | df = pd.DataFrame(digits_embedded, columns=['Component1', 'Component2']) 39 | df['target'] = pd.Categorical(y) 40 | 41 | 42 | # %% 43 | (ggplot(data=df) 44 | + aes(x='Component1', y='Component2', color='target') 45 | + geom_point() 46 | ) 47 | 48 | # t-SNE is able to correctly separate the digits 49 | 50 | 51 | 52 | 53 | # %% 54 | -------------------------------------------------------------------------------- /40_MachineLearning/70_DimensionalityReduction/90_capstone_clust_dimred.py: -------------------------------------------------------------------------------- 1 | #%% Data Description 2 | # 3 | # source: https://www.kaggle.com/datasets/imakash3011/customer-personality-analysis 4 | # 5 | # Customer Personality Analysis is a detailed analysis of a company’s ideal customers. It helps a business to better understand its customers and makes it easier for them to modify products according to the specific needs, behaviors and concerns of different types of customers. 6 | # Customer personality analysis helps a business to modify its product based on its target customers from different types of customer segments. For example, instead of spending money to market a new product to every customer in the company’s database, a company can analyze which customer segment is most likely to buy the product and then market the product only on that particular segment.
7 | 8 | # Attributes 9 | 10 | # People 11 | 12 | # ID: Customer's unique identifier 13 | # Year_Birth: Customer's birth year 14 | # Education: Customer's education level 15 | # Marital_Status: Customer's marital status 16 | # Income: Customer's yearly household income 17 | # Kidhome: Number of children in customer's household 18 | # Teenhome: Number of teenagers in customer's household 19 | # Dt_Customer: Date of customer's enrollment with the company 20 | # Recency: Number of days since customer's last purchase 21 | # Complain: 1 if the customer complained in the last 2 years, 0 otherwise 22 | # Products 23 | 24 | # MntWines: Amount spent on wine in last 2 years 25 | # MntFruits: Amount spent on fruits in last 2 years 26 | # MntMeatProducts: Amount spent on meat in last 2 years 27 | # MntFishProducts: Amount spent on fish in last 2 years 28 | # MntSweetProducts: Amount spent on sweets in last 2 years 29 | # MntGoldProds: Amount spent on gold in last 2 years 30 | # Promotion 31 | 32 | # NumDealsPurchases: Number of purchases made with a discount 33 | # AcceptedCmp1: 1 if customer accepted the offer in the 1st campaign, 0 otherwise 34 | # AcceptedCmp2: 1 if customer accepted the offer in the 2nd campaign, 0 otherwise 35 | # AcceptedCmp3: 1 if customer accepted the offer in the 3rd campaign, 0 otherwise 36 | # AcceptedCmp4: 1 if customer accepted the offer in the 4th campaign, 0 otherwise 37 | # AcceptedCmp5: 1 if customer accepted the offer in the 5th campaign, 0 otherwise 38 | # Response: 1 if customer accepted the offer in the last campaign, 0 otherwise 39 | # Place 40 | 41 | # NumWebPurchases: Number of purchases made through the company’s website 42 | # NumCatalogPurchases: Number of purchases made using a catalogue 43 | # NumStorePurchases: Number of purchases made directly in stores 44 | # NumWebVisitsMonth: Number of visits to company’s website in the last month 45 | # Target 46 | # Need to perform clustering to summarize customer segments. 47 | 48 | #%% packages 49 | import pandas as pd 50 | import numpy as np 51 | import seaborn as sns 52 | import matplotlib.pyplot as plt 53 | from sklearn.pipeline import Pipeline 54 | from sklearn.preprocessing import StandardScaler 55 | from sklearn.preprocessing import LabelEncoder 56 | from sklearn.decomposition import PCA 57 | from sklearn.cluster import KMeans 58 | from yellowbrick.cluster import KElbowVisualizer 59 | #%% Data Import 60 | #-------------- 61 | df_raw = pd.read_csv('data/marketing_campaign.csv', sep='\t') 62 | # %% Exploratory Data Analysis 63 | #----------------------------- 64 | # shape of dataframe 65 | df_raw.shape 66 | 67 | # %% Head of Data 68 | df_raw.head() 69 | 70 | #%% get more info about data 71 | df_raw.info() 72 | # %% Clean the data by deleting rows with NaNs 73 | df_filt = df_raw.copy().dropna() 74 | df_filt.shape 75 | # %% Feature Engineering 76 | #----------------------- 77 | 78 | # %% create column 'Age' from birth year 79 | df_filt['Age'] = 2022 - df_filt['Year_Birth'] 80 | 81 | # %% create column 'MoneyTotal' from columns starting with Mnt...
82 | df_filt['MoneyTotal'] = df_filt["MntWines"] + df_filt["MntFruits"] + df_filt["MntMeatProducts"] + df_filt["MntFishProducts"] + df_filt["MntSweetProducts"] + df_filt["MntGoldProds"] 83 | # %% create column 'KidsTotal' from 'Kidhome' and 'Teenhome' 84 | df_filt['KidsTotal'] = df_filt['Kidhome'] + df_filt['Teenhome'] 85 | # %% create column 'IsParent' 86 | # hint: use list comprehension or np.where() 87 | df_filt['IsParent'] = [1 if i > 0 else 0 for i in df_filt['KidsTotal']] 88 | 89 | 90 | #%% column Education 91 | def EducationToNum(education): 92 | if education == 'Basic': 93 | return 0 94 | elif education == '2n Cycle': 95 | return 1 96 | elif education == 'Graduation': 97 | return 2 98 | elif education == 'Master': 99 | return 3 100 | elif education == 'PhD': 101 | return 4 102 | 103 | df_filt['EducationNum'] = df_filt['Education'].apply(EducationToNum) 104 | 105 | #%% Feature Correlation 106 | cols_to_plot = ['MoneyTotal', 'KidsTotal', 'Income', 'Age', 'IsParent'] 107 | sns.pairplot(df_filt[cols_to_plot], hue='IsParent') 108 | plt.show() 109 | # %% Can you spot outliers? If so, filter these 110 | sns.displot(df_filt, x='Age', kind='kde') 111 | plt.show() 112 | df_filt = df_filt[df_filt['Age'] < 85] 113 | 114 | # %% 115 | sns.displot(df_filt, x='Income', kind='kde') 116 | plt.show() 117 | df_filt = df_filt[df_filt['Income'] < 200000] 118 | 119 | #%% delete columns that are not needed any more 120 | df_filt.drop(columns=['ID', 'Z_CostContact', 'Education'], inplace=True) 121 | 122 | # %% Heatmap showing feature correlation 123 | cols_to_plot = ['MoneyTotal', 'KidsTotal', 'Income', 'Age', 'IsParent', 'MntWines', 'MntFruits', 124 | 'MntMeatProducts', 'MntFishProducts', 'MntSweetProducts', 125 | 'MntGoldProds', 'NumDealsPurchases', 'NumWebPurchases'] 126 | corr = df_filt[cols_to_plot].corr() 127 | plt.figure(figsize=(10, 10)) 128 | sns.heatmap(corr, annot=True, center=0) 129 | plt.show() 130 | #%% 131 | cols_to_keep = ['Income', 'NumWebPurchases', 'NumCatalogPurchases', 132 | 'NumStorePurchases', 'NumWebVisitsMonth', 'Age', 'MoneyTotal', 'KidsTotal', 133 | 'IsParent', 'EducationNum'] 134 | 135 | # %% Modeling 136 | #------------ 137 | #%% Scaling 138 | pca = PCA(n_components=3) 139 | steps = [ 140 | ('scalar', StandardScaler()), 141 | ('pca', pca) 142 | ] 143 | 144 | 145 | 146 | pipeline = Pipeline(steps) 147 | 148 | prin_comp = pipeline.fit_transform(df_filt[cols_to_keep]) 149 | 150 | #%% PCA 151 | pca_loadings = pca.components_.T * np.sqrt(pca.explained_variance_) 152 | pca_loading_matrix = pd.DataFrame(pca_loadings, columns = 153 | ['PC{}'.format(i) for i in range(1, 4)], index=df_filt[cols_to_keep].columns 154 | ) 155 | sns.heatmap(pca_loading_matrix, cmap=sns.diverging_palette(200, 30, n=200), annot=True) 156 | plt.show() 157 | # %% 158 | prin_df = pd.DataFrame(data=prin_comp, 159 | columns=['PC1', 'PC2', 'PC3']) 160 | 161 | 162 | #%% Elbow Method 163 | elbow = KElbowVisualizer(KMeans(), k=10) 164 | elbow.fit(prin_df) 165 | elbow.show() 166 | 167 | # %% Kmeans clustering 168 | clustering = KMeans(n_clusters=4).fit(prin_comp) 169 | 170 | # %% 171 | df_filt['cluster'] = clustering.labels_ 172 | df_filt['cluster'] = df_filt['cluster'].astype("category") 173 | # %% Result Evaluation 174 | #--------------------- 175 | 176 | sns.countplot(x=df_filt['cluster']) 177 | plt.show() 178 | # %% 179 | sns.scatterplot(data=df_filt, x='Income', y='MoneyTotal', hue='cluster', alpha=.2) 180 | plt.show() 181 | # %% 182 | sns.violinplot(data=df_filt, x='cluster', y='MoneyTotal') 183 | plt.show()
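# As a closing sketch (reusing df_filt from the cells above), the average feature values per cluster give a quick profile of each customer segment:
# %%
cluster_profile = df_filt.groupby('cluster')[['Income', 'MoneyTotal', 'Age', 'KidsTotal', 'IsParent']].mean().round(1)
cluster_profile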
184 | # %% 185 | -------------------------------------------------------------------------------- /40_MachineLearning/80_ReinforcementLearning/30_ucb_interactive.py: -------------------------------------------------------------------------------- 1 | # go to http://gollnickdata.com/2019/01/18/upper-confidence-bound/ 2 | -------------------------------------------------------------------------------- /40_MachineLearning/80_ReinforcementLearning/40_ucb_lab.py: -------------------------------------------------------------------------------- 1 | #%% packages 2 | import pandas as pd 3 | import numpy as np 4 | from plotnine import ggplot, aes, geom_violin, geom_point 5 | import math 6 | # %% bandit returns 7 | # The designer of the bandits built fixed average and variation parameters into the machines. 8 | A_mean = .97 9 | B_mean = .98 10 | C_mean = 1.1 11 | D_mean = .99 12 | E_mean = .96 13 | A_sd = 0.05 14 | B_sd = 0.15 15 | C_sd = 0.10 16 | D_sd = 0.11 17 | E_sd = 0.08 18 | 19 | bandit_names = ["A", "B", "C", "D", "E"] 20 | bandit_design = pd.DataFrame({'Bandit': bandit_names, 'Mean': [A_mean, B_mean, C_mean, D_mean, E_mean], 'SD': [A_sd, B_sd, C_sd, D_sd, E_sd]}) 21 | 22 | n_pts = 1000 23 | 24 | bandits = pd.DataFrame({'A': np.random.normal(A_mean, A_sd, n_pts), 'B': np.random.normal(B_mean, B_sd, n_pts), 'C': np.random.normal(C_mean, C_sd, n_pts), 'D': np.random.normal(D_mean, D_sd, n_pts), 'E': np.random.normal(E_mean, E_sd, n_pts)}) 25 | # %% 26 | bandits_long = bandits.melt() 27 | 28 | ggplot(data=bandits_long) + aes(x='variable', y='value', fill='variable') + geom_violin() 29 | 30 | # %% Exploration / Exploitation with Time 31 | # This information is only known to the designer, not to the player. The player could perform a fixed set of runs, e.g. 1000 runs per machine, and would find out that machine C has the highest mean return. But this strategy is very costly. 32 | 33 | # We decide to play a fixed number of rounds. In each round we choose which machine to play based on the knowledge gathered so far.
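# A minimal sketch of the selection rule implemented in the loop below, with toy numbers (reusing the math import above): each machine gets the score ucb = r + sqrt(2 * ln(n) / N), where r is its average reward so far, N its play count, and n the total number of rounds; the machine with the highest score is played next.
# %%
n_total = 100             # total rounds played so far (toy value)
r_avg, n_plays = 0.9, 10  # average reward and play count of one machine (toy values)
r_avg + math.sqrt(2 * math.log(n_total) / n_plays)  # its upper confidence bound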
34 | 35 | df_round = pd.DataFrame({'Bandit': bandit_names}) 36 | df_round['N'] = 1 # number of times the slot machine was played 37 | df_round['R'] = 1 # sum of rewards 38 | df_round['r'] = 1 # average rewards 39 | df_round['delta'] = 1E4 # confidence interval 40 | df_round['upper_conf_bound'] = 1E10 # r + delta 41 | 42 | max_it = 10000 # maximum number of iterations 43 | 44 | df_iterations = pd.DataFrame({'n': range(1, max_it)}) 45 | df_iterations['SelectedBandit'] = np.nan 46 | df_iterations['UCB'] = np.nan 47 | df_iterations['r'] = np.nan 48 | df_iterations['delta'] = np.nan 49 | 50 | 51 | # %% iterate for each round 52 | for n in range(2, max_it+1): 53 | if n == 2: 54 | bandit_chosen_name = 'A' 55 | bandit_chosen_nr = np.where(df_round['Bandit'] == bandit_chosen_name)[0][0] 56 | 57 | # increase nr of runs per selected machine 58 | df_round.loc[bandit_chosen_nr, 'N'] = df_round.loc[bandit_chosen_nr, 'N'] + 1 59 | 60 | # get mean/sd of selected machine 61 | current_bandit_mean = bandit_design.loc[bandit_chosen_nr, 'Mean'] 62 | current_bandit_sd = bandit_design.loc[bandit_chosen_nr, 'SD'] 63 | # calculate current return 64 | current_return = np.random.binomial(n=1, p=current_bandit_mean/2) * 2 65 | 66 | # calculate sum of returns 67 | df_round.loc[bandit_chosen_nr, 'R'] = df_round.loc[bandit_chosen_nr, 'R'] + current_return 68 | 69 | # calculate average returns 70 | df_round.loc[bandit_chosen_nr, 'r'] = df_round.loc[bandit_chosen_nr, 'R'] / df_round.loc[bandit_chosen_nr, 'N'] 71 | 72 | # calculate confidence interval 73 | df_round.loc[bandit_chosen_nr, 'delta'] = np.sqrt(2*math.log(n) / df_round.loc[bandit_chosen_nr, 'N']) 74 | 75 | # calculate upper confidence bound 76 | df_round.loc[bandit_chosen_nr, 'upper_conf_bound'] = df_round.loc[bandit_chosen_nr, 'r'] + df_round.loc[bandit_chosen_nr, 'delta'] 77 | 78 | # store selection in df_iterations 79 | df_iterations.loc[n, 'SelectedBandit'] = bandit_chosen_name 80 | df_iterations.loc[n, 'UCB'] = df_round.loc[bandit_chosen_nr, 'upper_conf_bound'] 81 | df_iterations.loc[n, 'r'] = df_round.loc[bandit_chosen_nr, 'r'] 82 | df_iterations.loc[n, 'delta'] = df_round.loc[bandit_chosen_nr, 'delta'] 83 | 84 | # define bandit for next run 85 | max_value = max(df_round['upper_conf_bound']) 86 | bandit_chosen_nr = df_round['upper_conf_bound'].tolist().index(max_value) 87 | bandit_chosen_name = bandit_design.loc[bandit_chosen_nr, 'Bandit'] 88 | 89 | df_round 90 | # %% visualise progress over time 91 | ggplot(data = df_iterations[['n', 'SelectedBandit', 'r']].dropna()) + aes(x='n', y='r', color='SelectedBandit') + geom_point() 92 | 93 | # %% 94 | -------------------------------------------------------------------------------- /40_MachineLearning/data/Hubble.csv: -------------------------------------------------------------------------------- 1 | "v","D","sigma" 2 | 9065,134.7,2.3 3 | 12012,158.9,3.1 4 | 15055,198.6,2.8 5 | 16687,238.9,2.8 6 | 9801,117.1,3.4 7 | 4124,56,2.9 8 | 13707,183.9,3.1 9 | 7880,121.5,2.2 10 | 22426,274.6,3.4 11 | 7765,102.1,2.7 12 | 4227,58,2.4 13 | 30253,467,2.4 14 | 18212,262.2,2.9 15 | 5935,88.6,2.1 16 | 10696,151.4,2.4 17 | 13518,202.5,2.3 18 | 17371,235.9,2.6 19 | 12871,176.8,2.6 20 | 5434,77.9,2.4 21 | 23646,309.5,2.6 22 | 26318,391.5,3.1 23 | 18997,280.1,2.8 24 | 21190,303.4,2.4 25 | 15567,236.1,2.1 26 | 15002,215.4,2.4 27 | 8604,119.7,2.9 28 | 14764,202.3,2.7 29 | 5424,71.8,3.1 30 | 7241,96.7,2.6 31 | 8691,127.8,2.7 32 | 4847,66.8,2.5 33 | 10715,149.9,2.6 34 | 14634,185.6,2.7 35 | 6673,82.4,2.8 36 | 9024,136,2.5 37 |
10446,132.7,2.7 38 | -------------------------------------------------------------------------------- /40_MachineLearning/data/OnlineRetail.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/40_MachineLearning/data/OnlineRetail.xlsx -------------------------------------------------------------------------------- /40_MachineLearning/data/Starwars.csv: -------------------------------------------------------------------------------- 1 | "height","mass","name" 2 | 172,77,"Luke Skywalker" 3 | 167,75,"C-3PO" 4 | 96,32,"R2-D2" 5 | 202,136,"Darth Vader" 6 | 150,49,"Leia Organa" 7 | 178,120,"Owen Lars" 8 | 165,75,"Beru Whitesun lars" 9 | 97,32,"R5-D4" 10 | 183,84,"Biggs Darklighter" 11 | 182,77,"Obi-Wan Kenobi" 12 | 188,84,"Anakin Skywalker" 13 | 180,NA,"Wilhuff Tarkin" 14 | 228,112,"Chewbacca" 15 | 180,80,"Han Solo" 16 | 173,74,"Greedo" 17 | 175,1358,"Jabba Desilijic Tiure" 18 | 170,77,"Wedge Antilles" 19 | 180,110,"Jek Tono Porkins" 20 | 66,17,"Yoda" 21 | 170,75,"Palpatine" 22 | 183,78.2,"Boba Fett" 23 | 200,140,"IG-88" 24 | 190,113,"Bossk" 25 | 177,79,"Lando Calrissian" 26 | 175,79,"Lobot" 27 | 180,83,"Ackbar" 28 | 150,NA,"Mon Mothma" 29 | NA,NA,"Arvel Crynyd" 30 | 88,20,"Wicket Systri Warrick" 31 | 160,68,"Nien Nunb" 32 | 193,89,"Qui-Gon Jinn" 33 | 191,90,"Nute Gunray" 34 | 170,NA,"Finis Valorum" 35 | 196,66,"Jar Jar Binks" 36 | 224,82,"Roos Tarpals" 37 | 206,NA,"Rugor Nass" 38 | 183,NA,"Ric Olie" 39 | 137,NA,"Watto" 40 | 112,40,"Sebulba" 41 | 183,NA,"Quarsh Panaka" 42 | 163,NA,"Shmi Skywalker" 43 | 175,80,"Darth Maul" 44 | 180,NA,"Bib Fortuna" 45 | 178,55,"Ayla Secura" 46 | 94,45,"Dud Bolt" 47 | 122,NA,"Gasgano" 48 | 163,65,"Ben Quadinaros" 49 | 188,84,"Mace Windu" 50 | 198,82,"Ki-Adi-Mundi" 51 | 196,87,"Kit Fisto" 52 | 171,NA,"Eeth Koth" 53 | 184,50,"Adi Gallia" 54 | 188,NA,"Saesee Tiin" 55 | 264,NA,"Yarael Poof" 56 | 188,80,"Plo Koon" 57 | 196,NA,"Mas Amedda" 58 | 185,85,"Gregar Typho" 59 | 157,NA,"Corde" 60 | 183,NA,"Cliegg Lars" 61 | 183,80,"Poggle the Lesser" 62 | 170,56.2,"Luminara Unduli" 63 | 166,50,"Barriss Offee" 64 | 165,NA,"Dorme" 65 | 193,80,"Dooku" 66 | 191,NA,"Bail Prestor Organa" 67 | 183,79,"Jango Fett" 68 | 168,55,"Zam Wesell" 69 | 198,102,"Dexter Jettster" 70 | 229,88,"Lama Su" 71 | 213,NA,"Taun We" 72 | 167,NA,"Jocasta Nu" 73 | 79,15,"Ratts Tyerell" 74 | 96,NA,"R4-P17" 75 | 193,48,"Wat Tambor" 76 | 191,NA,"San Hill" 77 | 178,57,"Shaak Ti" 78 | 216,159,"Grievous" 79 | 234,136,"Tarfful" 80 | 188,79,"Raymus Antilles" 81 | 178,48,"Sly Moore" 82 | 206,80,"Tion Medon" 83 | NA,NA,"Finn" 84 | NA,NA,"Rey" 85 | NA,NA,"Poe Dameron" 86 | NA,NA,"BB8" 87 | NA,NA,"Captain Phasma" 88 | 165,45,"Padme Amidala" 89 | -------------------------------------------------------------------------------- /40_MachineLearning/data/diabetes.csv: -------------------------------------------------------------------------------- 1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome 2 | 6,148,72,35,0,33.6,0.627,50,1 3 | 1,85,66,29,0,26.6,0.351,31,0 4 | 8,183,64,0,0,23.3,0.672,32,1 5 | 1,89,66,23,94,28.1,0.167,21,0 6 | 0,137,40,35,168,43.1,2.288,33,1 7 | 5,116,74,0,0,25.6,0.201,30,0 8 | 3,78,50,32,88,31,0.248,26,1 9 | 10,115,0,0,0,35.3,0.134,29,0 10 | 2,197,70,45,543,30.5,0.158,53,1 11 | 8,125,96,0,0,0,0.232,54,1 12 | 4,110,92,0,0,37.6,0.191,30,0 13 | 10,168,74,0,0,38,0.537,34,1 14 | 10,139,80,0,0,27.1,1.441,57,0 15 | 
1,189,60,23,846,30.1,0.398,59,1 16 | 5,166,72,19,175,25.8,0.587,51,1 17 | 7,100,0,0,0,30,0.484,32,1 18 | 0,118,84,47,230,45.8,0.551,31,1 19 | 7,107,74,0,0,29.6,0.254,31,1 20 | 1,103,30,38,83,43.3,0.183,33,0 21 | 1,115,70,30,96,34.6,0.529,32,1 22 | 3,126,88,41,235,39.3,0.704,27,0 23 | 8,99,84,0,0,35.4,0.388,50,0 24 | 7,196,90,0,0,39.8,0.451,41,1 25 | 9,119,80,35,0,29,0.263,29,1 26 | 11,143,94,33,146,36.6,0.254,51,1 27 | 10,125,70,26,115,31.1,0.205,41,1 28 | 7,147,76,0,0,39.4,0.257,43,1 29 | 1,97,66,15,140,23.2,0.487,22,0 30 | 13,145,82,19,110,22.2,0.245,57,0 31 | 5,117,92,0,0,34.1,0.337,38,0 32 | 5,109,75,26,0,36,0.546,60,0 33 | 3,158,76,36,245,31.6,0.851,28,1 34 | 3,88,58,11,54,24.8,0.267,22,0 35 | 6,92,92,0,0,19.9,0.188,28,0 36 | 10,122,78,31,0,27.6,0.512,45,0 37 | 4,103,60,33,192,24,0.966,33,0 38 | 11,138,76,0,0,33.2,0.42,35,0 39 | 9,102,76,37,0,32.9,0.665,46,1 40 | 2,90,68,42,0,38.2,0.503,27,1 41 | 4,111,72,47,207,37.1,1.39,56,1 42 | 3,180,64,25,70,34,0.271,26,0 43 | 7,133,84,0,0,40.2,0.696,37,0 44 | 7,106,92,18,0,22.7,0.235,48,0 45 | 9,171,110,24,240,45.4,0.721,54,1 46 | 7,159,64,0,0,27.4,0.294,40,0 47 | 0,180,66,39,0,42,1.893,25,1 48 | 1,146,56,0,0,29.7,0.564,29,0 49 | 2,71,70,27,0,28,0.586,22,0 50 | 7,103,66,32,0,39.1,0.344,31,1 51 | 7,105,0,0,0,0,0.305,24,0 52 | 1,103,80,11,82,19.4,0.491,22,0 53 | 1,101,50,15,36,24.2,0.526,26,0 54 | 5,88,66,21,23,24.4,0.342,30,0 55 | 8,176,90,34,300,33.7,0.467,58,1 56 | 7,150,66,42,342,34.7,0.718,42,0 57 | 1,73,50,10,0,23,0.248,21,0 58 | 7,187,68,39,304,37.7,0.254,41,1 59 | 0,100,88,60,110,46.8,0.962,31,0 60 | 0,146,82,0,0,40.5,1.781,44,0 61 | 0,105,64,41,142,41.5,0.173,22,0 62 | 2,84,0,0,0,0,0.304,21,0 63 | 8,133,72,0,0,32.9,0.27,39,1 64 | 5,44,62,0,0,25,0.587,36,0 65 | 2,141,58,34,128,25.4,0.699,24,0 66 | 7,114,66,0,0,32.8,0.258,42,1 67 | 5,99,74,27,0,29,0.203,32,0 68 | 0,109,88,30,0,32.5,0.855,38,1 69 | 2,109,92,0,0,42.7,0.845,54,0 70 | 1,95,66,13,38,19.6,0.334,25,0 71 | 4,146,85,27,100,28.9,0.189,27,0 72 | 2,100,66,20,90,32.9,0.867,28,1 73 | 5,139,64,35,140,28.6,0.411,26,0 74 | 13,126,90,0,0,43.4,0.583,42,1 75 | 4,129,86,20,270,35.1,0.231,23,0 76 | 1,79,75,30,0,32,0.396,22,0 77 | 1,0,48,20,0,24.7,0.14,22,0 78 | 7,62,78,0,0,32.6,0.391,41,0 79 | 5,95,72,33,0,37.7,0.37,27,0 80 | 0,131,0,0,0,43.2,0.27,26,1 81 | 2,112,66,22,0,25,0.307,24,0 82 | 3,113,44,13,0,22.4,0.14,22,0 83 | 2,74,0,0,0,0,0.102,22,0 84 | 7,83,78,26,71,29.3,0.767,36,0 85 | 0,101,65,28,0,24.6,0.237,22,0 86 | 5,137,108,0,0,48.8,0.227,37,1 87 | 2,110,74,29,125,32.4,0.698,27,0 88 | 13,106,72,54,0,36.6,0.178,45,0 89 | 2,100,68,25,71,38.5,0.324,26,0 90 | 15,136,70,32,110,37.1,0.153,43,1 91 | 1,107,68,19,0,26.5,0.165,24,0 92 | 1,80,55,0,0,19.1,0.258,21,0 93 | 4,123,80,15,176,32,0.443,34,0 94 | 7,81,78,40,48,46.7,0.261,42,0 95 | 4,134,72,0,0,23.8,0.277,60,1 96 | 2,142,82,18,64,24.7,0.761,21,0 97 | 6,144,72,27,228,33.9,0.255,40,0 98 | 2,92,62,28,0,31.6,0.13,24,0 99 | 1,71,48,18,76,20.4,0.323,22,0 100 | 6,93,50,30,64,28.7,0.356,23,0 101 | 1,122,90,51,220,49.7,0.325,31,1 102 | 1,163,72,0,0,39,1.222,33,1 103 | 1,151,60,0,0,26.1,0.179,22,0 104 | 0,125,96,0,0,22.5,0.262,21,0 105 | 1,81,72,18,40,26.6,0.283,24,0 106 | 2,85,65,0,0,39.6,0.93,27,0 107 | 1,126,56,29,152,28.7,0.801,21,0 108 | 1,96,122,0,0,22.4,0.207,27,0 109 | 4,144,58,28,140,29.5,0.287,37,0 110 | 3,83,58,31,18,34.3,0.336,25,0 111 | 0,95,85,25,36,37.4,0.247,24,1 112 | 3,171,72,33,135,33.3,0.199,24,1 113 | 8,155,62,26,495,34,0.543,46,1 114 | 1,89,76,34,37,31.2,0.192,23,0 115 | 4,76,62,0,0,34,0.391,25,0 116 | 
7,160,54,32,175,30.5,0.588,39,1 117 | 4,146,92,0,0,31.2,0.539,61,1 118 | 5,124,74,0,0,34,0.22,38,1 119 | 5,78,48,0,0,33.7,0.654,25,0 120 | 4,97,60,23,0,28.2,0.443,22,0 121 | 4,99,76,15,51,23.2,0.223,21,0 122 | 0,162,76,56,100,53.2,0.759,25,1 123 | 6,111,64,39,0,34.2,0.26,24,0 124 | 2,107,74,30,100,33.6,0.404,23,0 125 | 5,132,80,0,0,26.8,0.186,69,0 126 | 0,113,76,0,0,33.3,0.278,23,1 127 | 1,88,30,42,99,55,0.496,26,1 128 | 3,120,70,30,135,42.9,0.452,30,0 129 | 1,118,58,36,94,33.3,0.261,23,0 130 | 1,117,88,24,145,34.5,0.403,40,1 131 | 0,105,84,0,0,27.9,0.741,62,1 132 | 4,173,70,14,168,29.7,0.361,33,1 133 | 9,122,56,0,0,33.3,1.114,33,1 134 | 3,170,64,37,225,34.5,0.356,30,1 135 | 8,84,74,31,0,38.3,0.457,39,0 136 | 2,96,68,13,49,21.1,0.647,26,0 137 | 2,125,60,20,140,33.8,0.088,31,0 138 | 0,100,70,26,50,30.8,0.597,21,0 139 | 0,93,60,25,92,28.7,0.532,22,0 140 | 0,129,80,0,0,31.2,0.703,29,0 141 | 5,105,72,29,325,36.9,0.159,28,0 142 | 3,128,78,0,0,21.1,0.268,55,0 143 | 5,106,82,30,0,39.5,0.286,38,0 144 | 2,108,52,26,63,32.5,0.318,22,0 145 | 10,108,66,0,0,32.4,0.272,42,1 146 | 4,154,62,31,284,32.8,0.237,23,0 147 | 0,102,75,23,0,0,0.572,21,0 148 | 9,57,80,37,0,32.8,0.096,41,0 149 | 2,106,64,35,119,30.5,1.4,34,0 150 | 5,147,78,0,0,33.7,0.218,65,0 151 | 2,90,70,17,0,27.3,0.085,22,0 152 | 1,136,74,50,204,37.4,0.399,24,0 153 | 4,114,65,0,0,21.9,0.432,37,0 154 | 9,156,86,28,155,34.3,1.189,42,1 155 | 1,153,82,42,485,40.6,0.687,23,0 156 | 8,188,78,0,0,47.9,0.137,43,1 157 | 7,152,88,44,0,50,0.337,36,1 158 | 2,99,52,15,94,24.6,0.637,21,0 159 | 1,109,56,21,135,25.2,0.833,23,0 160 | 2,88,74,19,53,29,0.229,22,0 161 | 17,163,72,41,114,40.9,0.817,47,1 162 | 4,151,90,38,0,29.7,0.294,36,0 163 | 7,102,74,40,105,37.2,0.204,45,0 164 | 0,114,80,34,285,44.2,0.167,27,0 165 | 2,100,64,23,0,29.7,0.368,21,0 166 | 0,131,88,0,0,31.6,0.743,32,1 167 | 6,104,74,18,156,29.9,0.722,41,1 168 | 3,148,66,25,0,32.5,0.256,22,0 169 | 4,120,68,0,0,29.6,0.709,34,0 170 | 4,110,66,0,0,31.9,0.471,29,0 171 | 3,111,90,12,78,28.4,0.495,29,0 172 | 6,102,82,0,0,30.8,0.18,36,1 173 | 6,134,70,23,130,35.4,0.542,29,1 174 | 2,87,0,23,0,28.9,0.773,25,0 175 | 1,79,60,42,48,43.5,0.678,23,0 176 | 2,75,64,24,55,29.7,0.37,33,0 177 | 8,179,72,42,130,32.7,0.719,36,1 178 | 6,85,78,0,0,31.2,0.382,42,0 179 | 0,129,110,46,130,67.1,0.319,26,1 180 | 5,143,78,0,0,45,0.19,47,0 181 | 5,130,82,0,0,39.1,0.956,37,1 182 | 6,87,80,0,0,23.2,0.084,32,0 183 | 0,119,64,18,92,34.9,0.725,23,0 184 | 1,0,74,20,23,27.7,0.299,21,0 185 | 5,73,60,0,0,26.8,0.268,27,0 186 | 4,141,74,0,0,27.6,0.244,40,0 187 | 7,194,68,28,0,35.9,0.745,41,1 188 | 8,181,68,36,495,30.1,0.615,60,1 189 | 1,128,98,41,58,32,1.321,33,1 190 | 8,109,76,39,114,27.9,0.64,31,1 191 | 5,139,80,35,160,31.6,0.361,25,1 192 | 3,111,62,0,0,22.6,0.142,21,0 193 | 9,123,70,44,94,33.1,0.374,40,0 194 | 7,159,66,0,0,30.4,0.383,36,1 195 | 11,135,0,0,0,52.3,0.578,40,1 196 | 8,85,55,20,0,24.4,0.136,42,0 197 | 5,158,84,41,210,39.4,0.395,29,1 198 | 1,105,58,0,0,24.3,0.187,21,0 199 | 3,107,62,13,48,22.9,0.678,23,1 200 | 4,109,64,44,99,34.8,0.905,26,1 201 | 4,148,60,27,318,30.9,0.15,29,1 202 | 0,113,80,16,0,31,0.874,21,0 203 | 1,138,82,0,0,40.1,0.236,28,0 204 | 0,108,68,20,0,27.3,0.787,32,0 205 | 2,99,70,16,44,20.4,0.235,27,0 206 | 6,103,72,32,190,37.7,0.324,55,0 207 | 5,111,72,28,0,23.9,0.407,27,0 208 | 8,196,76,29,280,37.5,0.605,57,1 209 | 5,162,104,0,0,37.7,0.151,52,1 210 | 1,96,64,27,87,33.2,0.289,21,0 211 | 7,184,84,33,0,35.5,0.355,41,1 212 | 2,81,60,22,0,27.7,0.29,25,0 213 | 0,147,85,54,0,42.8,0.375,24,0 214 | 
7,179,95,31,0,34.2,0.164,60,0 215 | 0,140,65,26,130,42.6,0.431,24,1 216 | 9,112,82,32,175,34.2,0.26,36,1 217 | 12,151,70,40,271,41.8,0.742,38,1 218 | 5,109,62,41,129,35.8,0.514,25,1 219 | 6,125,68,30,120,30,0.464,32,0 220 | 5,85,74,22,0,29,1.224,32,1 221 | 5,112,66,0,0,37.8,0.261,41,1 222 | 0,177,60,29,478,34.6,1.072,21,1 223 | 2,158,90,0,0,31.6,0.805,66,1 224 | 7,119,0,0,0,25.2,0.209,37,0 225 | 7,142,60,33,190,28.8,0.687,61,0 226 | 1,100,66,15,56,23.6,0.666,26,0 227 | 1,87,78,27,32,34.6,0.101,22,0 228 | 0,101,76,0,0,35.7,0.198,26,0 229 | 3,162,52,38,0,37.2,0.652,24,1 230 | 4,197,70,39,744,36.7,2.329,31,0 231 | 0,117,80,31,53,45.2,0.089,24,0 232 | 4,142,86,0,0,44,0.645,22,1 233 | 6,134,80,37,370,46.2,0.238,46,1 234 | 1,79,80,25,37,25.4,0.583,22,0 235 | 4,122,68,0,0,35,0.394,29,0 236 | 3,74,68,28,45,29.7,0.293,23,0 237 | 4,171,72,0,0,43.6,0.479,26,1 238 | 7,181,84,21,192,35.9,0.586,51,1 239 | 0,179,90,27,0,44.1,0.686,23,1 240 | 9,164,84,21,0,30.8,0.831,32,1 241 | 0,104,76,0,0,18.4,0.582,27,0 242 | 1,91,64,24,0,29.2,0.192,21,0 243 | 4,91,70,32,88,33.1,0.446,22,0 244 | 3,139,54,0,0,25.6,0.402,22,1 245 | 6,119,50,22,176,27.1,1.318,33,1 246 | 2,146,76,35,194,38.2,0.329,29,0 247 | 9,184,85,15,0,30,1.213,49,1 248 | 10,122,68,0,0,31.2,0.258,41,0 249 | 0,165,90,33,680,52.3,0.427,23,0 250 | 9,124,70,33,402,35.4,0.282,34,0 251 | 1,111,86,19,0,30.1,0.143,23,0 252 | 9,106,52,0,0,31.2,0.38,42,0 253 | 2,129,84,0,0,28,0.284,27,0 254 | 2,90,80,14,55,24.4,0.249,24,0 255 | 0,86,68,32,0,35.8,0.238,25,0 256 | 12,92,62,7,258,27.6,0.926,44,1 257 | 1,113,64,35,0,33.6,0.543,21,1 258 | 3,111,56,39,0,30.1,0.557,30,0 259 | 2,114,68,22,0,28.7,0.092,25,0 260 | 1,193,50,16,375,25.9,0.655,24,0 261 | 11,155,76,28,150,33.3,1.353,51,1 262 | 3,191,68,15,130,30.9,0.299,34,0 263 | 3,141,0,0,0,30,0.761,27,1 264 | 4,95,70,32,0,32.1,0.612,24,0 265 | 3,142,80,15,0,32.4,0.2,63,0 266 | 4,123,62,0,0,32,0.226,35,1 267 | 5,96,74,18,67,33.6,0.997,43,0 268 | 0,138,0,0,0,36.3,0.933,25,1 269 | 2,128,64,42,0,40,1.101,24,0 270 | 0,102,52,0,0,25.1,0.078,21,0 271 | 2,146,0,0,0,27.5,0.24,28,1 272 | 10,101,86,37,0,45.6,1.136,38,1 273 | 2,108,62,32,56,25.2,0.128,21,0 274 | 3,122,78,0,0,23,0.254,40,0 275 | 1,71,78,50,45,33.2,0.422,21,0 276 | 13,106,70,0,0,34.2,0.251,52,0 277 | 2,100,70,52,57,40.5,0.677,25,0 278 | 7,106,60,24,0,26.5,0.296,29,1 279 | 0,104,64,23,116,27.8,0.454,23,0 280 | 5,114,74,0,0,24.9,0.744,57,0 281 | 2,108,62,10,278,25.3,0.881,22,0 282 | 0,146,70,0,0,37.9,0.334,28,1 283 | 10,129,76,28,122,35.9,0.28,39,0 284 | 7,133,88,15,155,32.4,0.262,37,0 285 | 7,161,86,0,0,30.4,0.165,47,1 286 | 2,108,80,0,0,27,0.259,52,1 287 | 7,136,74,26,135,26,0.647,51,0 288 | 5,155,84,44,545,38.7,0.619,34,0 289 | 1,119,86,39,220,45.6,0.808,29,1 290 | 4,96,56,17,49,20.8,0.34,26,0 291 | 5,108,72,43,75,36.1,0.263,33,0 292 | 0,78,88,29,40,36.9,0.434,21,0 293 | 0,107,62,30,74,36.6,0.757,25,1 294 | 2,128,78,37,182,43.3,1.224,31,1 295 | 1,128,48,45,194,40.5,0.613,24,1 296 | 0,161,50,0,0,21.9,0.254,65,0 297 | 6,151,62,31,120,35.5,0.692,28,0 298 | 2,146,70,38,360,28,0.337,29,1 299 | 0,126,84,29,215,30.7,0.52,24,0 300 | 14,100,78,25,184,36.6,0.412,46,1 301 | 8,112,72,0,0,23.6,0.84,58,0 302 | 0,167,0,0,0,32.3,0.839,30,1 303 | 2,144,58,33,135,31.6,0.422,25,1 304 | 5,77,82,41,42,35.8,0.156,35,0 305 | 5,115,98,0,0,52.9,0.209,28,1 306 | 3,150,76,0,0,21,0.207,37,0 307 | 2,120,76,37,105,39.7,0.215,29,0 308 | 10,161,68,23,132,25.5,0.326,47,1 309 | 0,137,68,14,148,24.8,0.143,21,0 310 | 0,128,68,19,180,30.5,1.391,25,1 311 | 2,124,68,28,205,32.9,0.875,30,1 312 | 
6,80,66,30,0,26.2,0.313,41,0 313 | 0,106,70,37,148,39.4,0.605,22,0 314 | 2,155,74,17,96,26.6,0.433,27,1 315 | 3,113,50,10,85,29.5,0.626,25,0 316 | 7,109,80,31,0,35.9,1.127,43,1 317 | 2,112,68,22,94,34.1,0.315,26,0 318 | 3,99,80,11,64,19.3,0.284,30,0 319 | 3,182,74,0,0,30.5,0.345,29,1 320 | 3,115,66,39,140,38.1,0.15,28,0 321 | 6,194,78,0,0,23.5,0.129,59,1 322 | 4,129,60,12,231,27.5,0.527,31,0 323 | 3,112,74,30,0,31.6,0.197,25,1 324 | 0,124,70,20,0,27.4,0.254,36,1 325 | 13,152,90,33,29,26.8,0.731,43,1 326 | 2,112,75,32,0,35.7,0.148,21,0 327 | 1,157,72,21,168,25.6,0.123,24,0 328 | 1,122,64,32,156,35.1,0.692,30,1 329 | 10,179,70,0,0,35.1,0.2,37,0 330 | 2,102,86,36,120,45.5,0.127,23,1 331 | 6,105,70,32,68,30.8,0.122,37,0 332 | 8,118,72,19,0,23.1,1.476,46,0 333 | 2,87,58,16,52,32.7,0.166,25,0 334 | 1,180,0,0,0,43.3,0.282,41,1 335 | 12,106,80,0,0,23.6,0.137,44,0 336 | 1,95,60,18,58,23.9,0.26,22,0 337 | 0,165,76,43,255,47.9,0.259,26,0 338 | 0,117,0,0,0,33.8,0.932,44,0 339 | 5,115,76,0,0,31.2,0.343,44,1 340 | 9,152,78,34,171,34.2,0.893,33,1 341 | 7,178,84,0,0,39.9,0.331,41,1 342 | 1,130,70,13,105,25.9,0.472,22,0 343 | 1,95,74,21,73,25.9,0.673,36,0 344 | 1,0,68,35,0,32,0.389,22,0 345 | 5,122,86,0,0,34.7,0.29,33,0 346 | 8,95,72,0,0,36.8,0.485,57,0 347 | 8,126,88,36,108,38.5,0.349,49,0 348 | 1,139,46,19,83,28.7,0.654,22,0 349 | 3,116,0,0,0,23.5,0.187,23,0 350 | 3,99,62,19,74,21.8,0.279,26,0 351 | 5,0,80,32,0,41,0.346,37,1 352 | 4,92,80,0,0,42.2,0.237,29,0 353 | 4,137,84,0,0,31.2,0.252,30,0 354 | 3,61,82,28,0,34.4,0.243,46,0 355 | 1,90,62,12,43,27.2,0.58,24,0 356 | 3,90,78,0,0,42.7,0.559,21,0 357 | 9,165,88,0,0,30.4,0.302,49,1 358 | 1,125,50,40,167,33.3,0.962,28,1 359 | 13,129,0,30,0,39.9,0.569,44,1 360 | 12,88,74,40,54,35.3,0.378,48,0 361 | 1,196,76,36,249,36.5,0.875,29,1 362 | 5,189,64,33,325,31.2,0.583,29,1 363 | 5,158,70,0,0,29.8,0.207,63,0 364 | 5,103,108,37,0,39.2,0.305,65,0 365 | 4,146,78,0,0,38.5,0.52,67,1 366 | 4,147,74,25,293,34.9,0.385,30,0 367 | 5,99,54,28,83,34,0.499,30,0 368 | 6,124,72,0,0,27.6,0.368,29,1 369 | 0,101,64,17,0,21,0.252,21,0 370 | 3,81,86,16,66,27.5,0.306,22,0 371 | 1,133,102,28,140,32.8,0.234,45,1 372 | 3,173,82,48,465,38.4,2.137,25,1 373 | 0,118,64,23,89,0,1.731,21,0 374 | 0,84,64,22,66,35.8,0.545,21,0 375 | 2,105,58,40,94,34.9,0.225,25,0 376 | 2,122,52,43,158,36.2,0.816,28,0 377 | 12,140,82,43,325,39.2,0.528,58,1 378 | 0,98,82,15,84,25.2,0.299,22,0 379 | 1,87,60,37,75,37.2,0.509,22,0 380 | 4,156,75,0,0,48.3,0.238,32,1 381 | 0,93,100,39,72,43.4,1.021,35,0 382 | 1,107,72,30,82,30.8,0.821,24,0 383 | 0,105,68,22,0,20,0.236,22,0 384 | 1,109,60,8,182,25.4,0.947,21,0 385 | 1,90,62,18,59,25.1,1.268,25,0 386 | 1,125,70,24,110,24.3,0.221,25,0 387 | 1,119,54,13,50,22.3,0.205,24,0 388 | 5,116,74,29,0,32.3,0.66,35,1 389 | 8,105,100,36,0,43.3,0.239,45,1 390 | 5,144,82,26,285,32,0.452,58,1 391 | 3,100,68,23,81,31.6,0.949,28,0 392 | 1,100,66,29,196,32,0.444,42,0 393 | 5,166,76,0,0,45.7,0.34,27,1 394 | 1,131,64,14,415,23.7,0.389,21,0 395 | 4,116,72,12,87,22.1,0.463,37,0 396 | 4,158,78,0,0,32.9,0.803,31,1 397 | 2,127,58,24,275,27.7,1.6,25,0 398 | 3,96,56,34,115,24.7,0.944,39,0 399 | 0,131,66,40,0,34.3,0.196,22,1 400 | 3,82,70,0,0,21.1,0.389,25,0 401 | 3,193,70,31,0,34.9,0.241,25,1 402 | 4,95,64,0,0,32,0.161,31,1 403 | 6,137,61,0,0,24.2,0.151,55,0 404 | 5,136,84,41,88,35,0.286,35,1 405 | 9,72,78,25,0,31.6,0.28,38,0 406 | 5,168,64,0,0,32.9,0.135,41,1 407 | 2,123,48,32,165,42.1,0.52,26,0 408 | 4,115,72,0,0,28.9,0.376,46,1 409 | 0,101,62,0,0,21.9,0.336,25,0 410 | 8,197,74,0,0,25.9,1.191,39,1 
411 | 1,172,68,49,579,42.4,0.702,28,1 412 | 6,102,90,39,0,35.7,0.674,28,0 413 | 1,112,72,30,176,34.4,0.528,25,0 414 | 1,143,84,23,310,42.4,1.076,22,0 415 | 1,143,74,22,61,26.2,0.256,21,0 416 | 0,138,60,35,167,34.6,0.534,21,1 417 | 3,173,84,33,474,35.7,0.258,22,1 418 | 1,97,68,21,0,27.2,1.095,22,0 419 | 4,144,82,32,0,38.5,0.554,37,1 420 | 1,83,68,0,0,18.2,0.624,27,0 421 | 3,129,64,29,115,26.4,0.219,28,1 422 | 1,119,88,41,170,45.3,0.507,26,0 423 | 2,94,68,18,76,26,0.561,21,0 424 | 0,102,64,46,78,40.6,0.496,21,0 425 | 2,115,64,22,0,30.8,0.421,21,0 426 | 8,151,78,32,210,42.9,0.516,36,1 427 | 4,184,78,39,277,37,0.264,31,1 428 | 0,94,0,0,0,0,0.256,25,0 429 | 1,181,64,30,180,34.1,0.328,38,1 430 | 0,135,94,46,145,40.6,0.284,26,0 431 | 1,95,82,25,180,35,0.233,43,1 432 | 2,99,0,0,0,22.2,0.108,23,0 433 | 3,89,74,16,85,30.4,0.551,38,0 434 | 1,80,74,11,60,30,0.527,22,0 435 | 2,139,75,0,0,25.6,0.167,29,0 436 | 1,90,68,8,0,24.5,1.138,36,0 437 | 0,141,0,0,0,42.4,0.205,29,1 438 | 12,140,85,33,0,37.4,0.244,41,0 439 | 5,147,75,0,0,29.9,0.434,28,0 440 | 1,97,70,15,0,18.2,0.147,21,0 441 | 6,107,88,0,0,36.8,0.727,31,0 442 | 0,189,104,25,0,34.3,0.435,41,1 443 | 2,83,66,23,50,32.2,0.497,22,0 444 | 4,117,64,27,120,33.2,0.23,24,0 445 | 8,108,70,0,0,30.5,0.955,33,1 446 | 4,117,62,12,0,29.7,0.38,30,1 447 | 0,180,78,63,14,59.4,2.42,25,1 448 | 1,100,72,12,70,25.3,0.658,28,0 449 | 0,95,80,45,92,36.5,0.33,26,0 450 | 0,104,64,37,64,33.6,0.51,22,1 451 | 0,120,74,18,63,30.5,0.285,26,0 452 | 1,82,64,13,95,21.2,0.415,23,0 453 | 2,134,70,0,0,28.9,0.542,23,1 454 | 0,91,68,32,210,39.9,0.381,25,0 455 | 2,119,0,0,0,19.6,0.832,72,0 456 | 2,100,54,28,105,37.8,0.498,24,0 457 | 14,175,62,30,0,33.6,0.212,38,1 458 | 1,135,54,0,0,26.7,0.687,62,0 459 | 5,86,68,28,71,30.2,0.364,24,0 460 | 10,148,84,48,237,37.6,1.001,51,1 461 | 9,134,74,33,60,25.9,0.46,81,0 462 | 9,120,72,22,56,20.8,0.733,48,0 463 | 1,71,62,0,0,21.8,0.416,26,0 464 | 8,74,70,40,49,35.3,0.705,39,0 465 | 5,88,78,30,0,27.6,0.258,37,0 466 | 10,115,98,0,0,24,1.022,34,0 467 | 0,124,56,13,105,21.8,0.452,21,0 468 | 0,74,52,10,36,27.8,0.269,22,0 469 | 0,97,64,36,100,36.8,0.6,25,0 470 | 8,120,0,0,0,30,0.183,38,1 471 | 6,154,78,41,140,46.1,0.571,27,0 472 | 1,144,82,40,0,41.3,0.607,28,0 473 | 0,137,70,38,0,33.2,0.17,22,0 474 | 0,119,66,27,0,38.8,0.259,22,0 475 | 7,136,90,0,0,29.9,0.21,50,0 476 | 4,114,64,0,0,28.9,0.126,24,0 477 | 0,137,84,27,0,27.3,0.231,59,0 478 | 2,105,80,45,191,33.7,0.711,29,1 479 | 7,114,76,17,110,23.8,0.466,31,0 480 | 8,126,74,38,75,25.9,0.162,39,0 481 | 4,132,86,31,0,28,0.419,63,0 482 | 3,158,70,30,328,35.5,0.344,35,1 483 | 0,123,88,37,0,35.2,0.197,29,0 484 | 4,85,58,22,49,27.8,0.306,28,0 485 | 0,84,82,31,125,38.2,0.233,23,0 486 | 0,145,0,0,0,44.2,0.63,31,1 487 | 0,135,68,42,250,42.3,0.365,24,1 488 | 1,139,62,41,480,40.7,0.536,21,0 489 | 0,173,78,32,265,46.5,1.159,58,0 490 | 4,99,72,17,0,25.6,0.294,28,0 491 | 8,194,80,0,0,26.1,0.551,67,0 492 | 2,83,65,28,66,36.8,0.629,24,0 493 | 2,89,90,30,0,33.5,0.292,42,0 494 | 4,99,68,38,0,32.8,0.145,33,0 495 | 4,125,70,18,122,28.9,1.144,45,1 496 | 3,80,0,0,0,0,0.174,22,0 497 | 6,166,74,0,0,26.6,0.304,66,0 498 | 5,110,68,0,0,26,0.292,30,0 499 | 2,81,72,15,76,30.1,0.547,25,0 500 | 7,195,70,33,145,25.1,0.163,55,1 501 | 6,154,74,32,193,29.3,0.839,39,0 502 | 2,117,90,19,71,25.2,0.313,21,0 503 | 3,84,72,32,0,37.2,0.267,28,0 504 | 6,0,68,41,0,39,0.727,41,1 505 | 7,94,64,25,79,33.3,0.738,41,0 506 | 3,96,78,39,0,37.3,0.238,40,0 507 | 10,75,82,0,0,33.3,0.263,38,0 508 | 0,180,90,26,90,36.5,0.314,35,1 509 | 1,130,60,23,170,28.6,0.692,21,0 
510 | 2,84,50,23,76,30.4,0.968,21,0 511 | 8,120,78,0,0,25,0.409,64,0 512 | 12,84,72,31,0,29.7,0.297,46,1 513 | 0,139,62,17,210,22.1,0.207,21,0 514 | 9,91,68,0,0,24.2,0.2,58,0 515 | 2,91,62,0,0,27.3,0.525,22,0 516 | 3,99,54,19,86,25.6,0.154,24,0 517 | 3,163,70,18,105,31.6,0.268,28,1 518 | 9,145,88,34,165,30.3,0.771,53,1 519 | 7,125,86,0,0,37.6,0.304,51,0 520 | 13,76,60,0,0,32.8,0.18,41,0 521 | 6,129,90,7,326,19.6,0.582,60,0 522 | 2,68,70,32,66,25,0.187,25,0 523 | 3,124,80,33,130,33.2,0.305,26,0 524 | 6,114,0,0,0,0,0.189,26,0 525 | 9,130,70,0,0,34.2,0.652,45,1 526 | 3,125,58,0,0,31.6,0.151,24,0 527 | 3,87,60,18,0,21.8,0.444,21,0 528 | 1,97,64,19,82,18.2,0.299,21,0 529 | 3,116,74,15,105,26.3,0.107,24,0 530 | 0,117,66,31,188,30.8,0.493,22,0 531 | 0,111,65,0,0,24.6,0.66,31,0 532 | 2,122,60,18,106,29.8,0.717,22,0 533 | 0,107,76,0,0,45.3,0.686,24,0 534 | 1,86,66,52,65,41.3,0.917,29,0 535 | 6,91,0,0,0,29.8,0.501,31,0 536 | 1,77,56,30,56,33.3,1.251,24,0 537 | 4,132,0,0,0,32.9,0.302,23,1 538 | 0,105,90,0,0,29.6,0.197,46,0 539 | 0,57,60,0,0,21.7,0.735,67,0 540 | 0,127,80,37,210,36.3,0.804,23,0 541 | 3,129,92,49,155,36.4,0.968,32,1 542 | 8,100,74,40,215,39.4,0.661,43,1 543 | 3,128,72,25,190,32.4,0.549,27,1 544 | 10,90,85,32,0,34.9,0.825,56,1 545 | 4,84,90,23,56,39.5,0.159,25,0 546 | 1,88,78,29,76,32,0.365,29,0 547 | 8,186,90,35,225,34.5,0.423,37,1 548 | 5,187,76,27,207,43.6,1.034,53,1 549 | 4,131,68,21,166,33.1,0.16,28,0 550 | 1,164,82,43,67,32.8,0.341,50,0 551 | 4,189,110,31,0,28.5,0.68,37,0 552 | 1,116,70,28,0,27.4,0.204,21,0 553 | 3,84,68,30,106,31.9,0.591,25,0 554 | 6,114,88,0,0,27.8,0.247,66,0 555 | 1,88,62,24,44,29.9,0.422,23,0 556 | 1,84,64,23,115,36.9,0.471,28,0 557 | 7,124,70,33,215,25.5,0.161,37,0 558 | 1,97,70,40,0,38.1,0.218,30,0 559 | 8,110,76,0,0,27.8,0.237,58,0 560 | 11,103,68,40,0,46.2,0.126,42,0 561 | 11,85,74,0,0,30.1,0.3,35,0 562 | 6,125,76,0,0,33.8,0.121,54,1 563 | 0,198,66,32,274,41.3,0.502,28,1 564 | 1,87,68,34,77,37.6,0.401,24,0 565 | 6,99,60,19,54,26.9,0.497,32,0 566 | 0,91,80,0,0,32.4,0.601,27,0 567 | 2,95,54,14,88,26.1,0.748,22,0 568 | 1,99,72,30,18,38.6,0.412,21,0 569 | 6,92,62,32,126,32,0.085,46,0 570 | 4,154,72,29,126,31.3,0.338,37,0 571 | 0,121,66,30,165,34.3,0.203,33,1 572 | 3,78,70,0,0,32.5,0.27,39,0 573 | 2,130,96,0,0,22.6,0.268,21,0 574 | 3,111,58,31,44,29.5,0.43,22,0 575 | 2,98,60,17,120,34.7,0.198,22,0 576 | 1,143,86,30,330,30.1,0.892,23,0 577 | 1,119,44,47,63,35.5,0.28,25,0 578 | 6,108,44,20,130,24,0.813,35,0 579 | 2,118,80,0,0,42.9,0.693,21,1 580 | 10,133,68,0,0,27,0.245,36,0 581 | 2,197,70,99,0,34.7,0.575,62,1 582 | 0,151,90,46,0,42.1,0.371,21,1 583 | 6,109,60,27,0,25,0.206,27,0 584 | 12,121,78,17,0,26.5,0.259,62,0 585 | 8,100,76,0,0,38.7,0.19,42,0 586 | 8,124,76,24,600,28.7,0.687,52,1 587 | 1,93,56,11,0,22.5,0.417,22,0 588 | 8,143,66,0,0,34.9,0.129,41,1 589 | 6,103,66,0,0,24.3,0.249,29,0 590 | 3,176,86,27,156,33.3,1.154,52,1 591 | 0,73,0,0,0,21.1,0.342,25,0 592 | 11,111,84,40,0,46.8,0.925,45,1 593 | 2,112,78,50,140,39.4,0.175,24,0 594 | 3,132,80,0,0,34.4,0.402,44,1 595 | 2,82,52,22,115,28.5,1.699,25,0 596 | 6,123,72,45,230,33.6,0.733,34,0 597 | 0,188,82,14,185,32,0.682,22,1 598 | 0,67,76,0,0,45.3,0.194,46,0 599 | 1,89,24,19,25,27.8,0.559,21,0 600 | 1,173,74,0,0,36.8,0.088,38,1 601 | 1,109,38,18,120,23.1,0.407,26,0 602 | 1,108,88,19,0,27.1,0.4,24,0 603 | 6,96,0,0,0,23.7,0.19,28,0 604 | 1,124,74,36,0,27.8,0.1,30,0 605 | 7,150,78,29,126,35.2,0.692,54,1 606 | 4,183,0,0,0,28.4,0.212,36,1 607 | 1,124,60,32,0,35.8,0.514,21,0 608 | 1,181,78,42,293,40,1.258,22,1 609 | 
1,92,62,25,41,19.5,0.482,25,0 610 | 0,152,82,39,272,41.5,0.27,27,0 611 | 1,111,62,13,182,24,0.138,23,0 612 | 3,106,54,21,158,30.9,0.292,24,0 613 | 3,174,58,22,194,32.9,0.593,36,1 614 | 7,168,88,42,321,38.2,0.787,40,1 615 | 6,105,80,28,0,32.5,0.878,26,0 616 | 11,138,74,26,144,36.1,0.557,50,1 617 | 3,106,72,0,0,25.8,0.207,27,0 618 | 6,117,96,0,0,28.7,0.157,30,0 619 | 2,68,62,13,15,20.1,0.257,23,0 620 | 9,112,82,24,0,28.2,1.282,50,1 621 | 0,119,0,0,0,32.4,0.141,24,1 622 | 2,112,86,42,160,38.4,0.246,28,0 623 | 2,92,76,20,0,24.2,1.698,28,0 624 | 6,183,94,0,0,40.8,1.461,45,0 625 | 0,94,70,27,115,43.5,0.347,21,0 626 | 2,108,64,0,0,30.8,0.158,21,0 627 | 4,90,88,47,54,37.7,0.362,29,0 628 | 0,125,68,0,0,24.7,0.206,21,0 629 | 0,132,78,0,0,32.4,0.393,21,0 630 | 5,128,80,0,0,34.6,0.144,45,0 631 | 4,94,65,22,0,24.7,0.148,21,0 632 | 7,114,64,0,0,27.4,0.732,34,1 633 | 0,102,78,40,90,34.5,0.238,24,0 634 | 2,111,60,0,0,26.2,0.343,23,0 635 | 1,128,82,17,183,27.5,0.115,22,0 636 | 10,92,62,0,0,25.9,0.167,31,0 637 | 13,104,72,0,0,31.2,0.465,38,1 638 | 5,104,74,0,0,28.8,0.153,48,0 639 | 2,94,76,18,66,31.6,0.649,23,0 640 | 7,97,76,32,91,40.9,0.871,32,1 641 | 1,100,74,12,46,19.5,0.149,28,0 642 | 0,102,86,17,105,29.3,0.695,27,0 643 | 4,128,70,0,0,34.3,0.303,24,0 644 | 6,147,80,0,0,29.5,0.178,50,1 645 | 4,90,0,0,0,28,0.61,31,0 646 | 3,103,72,30,152,27.6,0.73,27,0 647 | 2,157,74,35,440,39.4,0.134,30,0 648 | 1,167,74,17,144,23.4,0.447,33,1 649 | 0,179,50,36,159,37.8,0.455,22,1 650 | 11,136,84,35,130,28.3,0.26,42,1 651 | 0,107,60,25,0,26.4,0.133,23,0 652 | 1,91,54,25,100,25.2,0.234,23,0 653 | 1,117,60,23,106,33.8,0.466,27,0 654 | 5,123,74,40,77,34.1,0.269,28,0 655 | 2,120,54,0,0,26.8,0.455,27,0 656 | 1,106,70,28,135,34.2,0.142,22,0 657 | 2,155,52,27,540,38.7,0.24,25,1 658 | 2,101,58,35,90,21.8,0.155,22,0 659 | 1,120,80,48,200,38.9,1.162,41,0 660 | 11,127,106,0,0,39,0.19,51,0 661 | 3,80,82,31,70,34.2,1.292,27,1 662 | 10,162,84,0,0,27.7,0.182,54,0 663 | 1,199,76,43,0,42.9,1.394,22,1 664 | 8,167,106,46,231,37.6,0.165,43,1 665 | 9,145,80,46,130,37.9,0.637,40,1 666 | 6,115,60,39,0,33.7,0.245,40,1 667 | 1,112,80,45,132,34.8,0.217,24,0 668 | 4,145,82,18,0,32.5,0.235,70,1 669 | 10,111,70,27,0,27.5,0.141,40,1 670 | 6,98,58,33,190,34,0.43,43,0 671 | 9,154,78,30,100,30.9,0.164,45,0 672 | 6,165,68,26,168,33.6,0.631,49,0 673 | 1,99,58,10,0,25.4,0.551,21,0 674 | 10,68,106,23,49,35.5,0.285,47,0 675 | 3,123,100,35,240,57.3,0.88,22,0 676 | 8,91,82,0,0,35.6,0.587,68,0 677 | 6,195,70,0,0,30.9,0.328,31,1 678 | 9,156,86,0,0,24.8,0.23,53,1 679 | 0,93,60,0,0,35.3,0.263,25,0 680 | 3,121,52,0,0,36,0.127,25,1 681 | 2,101,58,17,265,24.2,0.614,23,0 682 | 2,56,56,28,45,24.2,0.332,22,0 683 | 0,162,76,36,0,49.6,0.364,26,1 684 | 0,95,64,39,105,44.6,0.366,22,0 685 | 4,125,80,0,0,32.3,0.536,27,1 686 | 5,136,82,0,0,0,0.64,69,0 687 | 2,129,74,26,205,33.2,0.591,25,0 688 | 3,130,64,0,0,23.1,0.314,22,0 689 | 1,107,50,19,0,28.3,0.181,29,0 690 | 1,140,74,26,180,24.1,0.828,23,0 691 | 1,144,82,46,180,46.1,0.335,46,1 692 | 8,107,80,0,0,24.6,0.856,34,0 693 | 13,158,114,0,0,42.3,0.257,44,1 694 | 2,121,70,32,95,39.1,0.886,23,0 695 | 7,129,68,49,125,38.5,0.439,43,1 696 | 2,90,60,0,0,23.5,0.191,25,0 697 | 7,142,90,24,480,30.4,0.128,43,1 698 | 3,169,74,19,125,29.9,0.268,31,1 699 | 0,99,0,0,0,25,0.253,22,0 700 | 4,127,88,11,155,34.5,0.598,28,0 701 | 4,118,70,0,0,44.5,0.904,26,0 702 | 2,122,76,27,200,35.9,0.483,26,0 703 | 6,125,78,31,0,27.6,0.565,49,1 704 | 1,168,88,29,0,35,0.905,52,1 705 | 2,129,0,0,0,38.5,0.304,41,0 706 | 4,110,76,20,100,28.4,0.118,27,0 707 | 
6,80,80,36,0,39.8,0.177,28,0 708 | 10,115,0,0,0,0,0.261,30,1 709 | 2,127,46,21,335,34.4,0.176,22,0 710 | 9,164,78,0,0,32.8,0.148,45,1 711 | 2,93,64,32,160,38,0.674,23,1 712 | 3,158,64,13,387,31.2,0.295,24,0 713 | 5,126,78,27,22,29.6,0.439,40,0 714 | 10,129,62,36,0,41.2,0.441,38,1 715 | 0,134,58,20,291,26.4,0.352,21,0 716 | 3,102,74,0,0,29.5,0.121,32,0 717 | 7,187,50,33,392,33.9,0.826,34,1 718 | 3,173,78,39,185,33.8,0.97,31,1 719 | 10,94,72,18,0,23.1,0.595,56,0 720 | 1,108,60,46,178,35.5,0.415,24,0 721 | 5,97,76,27,0,35.6,0.378,52,1 722 | 4,83,86,19,0,29.3,0.317,34,0 723 | 1,114,66,36,200,38.1,0.289,21,0 724 | 1,149,68,29,127,29.3,0.349,42,1 725 | 5,117,86,30,105,39.1,0.251,42,0 726 | 1,111,94,0,0,32.8,0.265,45,0 727 | 4,112,78,40,0,39.4,0.236,38,0 728 | 1,116,78,29,180,36.1,0.496,25,0 729 | 0,141,84,26,0,32.4,0.433,22,0 730 | 2,175,88,0,0,22.9,0.326,22,0 731 | 2,92,52,0,0,30.1,0.141,22,0 732 | 3,130,78,23,79,28.4,0.323,34,1 733 | 8,120,86,0,0,28.4,0.259,22,1 734 | 2,174,88,37,120,44.5,0.646,24,1 735 | 2,106,56,27,165,29,0.426,22,0 736 | 2,105,75,0,0,23.3,0.56,53,0 737 | 4,95,60,32,0,35.4,0.284,28,0 738 | 0,126,86,27,120,27.4,0.515,21,0 739 | 8,65,72,23,0,32,0.6,42,0 740 | 2,99,60,17,160,36.6,0.453,21,0 741 | 1,102,74,0,0,39.5,0.293,42,1 742 | 11,120,80,37,150,42.3,0.785,48,1 743 | 3,102,44,20,94,30.8,0.4,26,0 744 | 1,109,58,18,116,28.5,0.219,22,0 745 | 9,140,94,0,0,32.7,0.734,45,1 746 | 13,153,88,37,140,40.6,1.174,39,0 747 | 12,100,84,33,105,30,0.488,46,0 748 | 1,147,94,41,0,49.3,0.358,27,1 749 | 1,81,74,41,57,46.3,1.096,32,0 750 | 3,187,70,22,200,36.4,0.408,36,1 751 | 6,162,62,0,0,24.3,0.178,50,1 752 | 4,136,70,0,0,31.2,1.182,22,1 753 | 1,121,78,39,74,39,0.261,28,0 754 | 3,108,62,24,0,26,0.223,25,0 755 | 0,181,88,44,510,43.3,0.222,26,1 756 | 8,154,78,32,0,32.4,0.443,45,1 757 | 1,128,88,39,110,36.5,1.057,37,1 758 | 7,137,90,41,0,32,0.391,39,0 759 | 0,123,72,0,0,36.3,0.258,52,1 760 | 1,106,76,0,0,37.5,0.197,26,0 761 | 6,190,92,0,0,35.5,0.278,66,1 762 | 2,88,58,26,16,28.4,0.766,22,0 763 | 9,170,74,31,0,44,0.403,43,1 764 | 9,89,62,0,0,22.5,0.142,33,0 765 | 10,101,76,48,180,32.9,0.171,63,0 766 | 2,122,70,27,0,36.8,0.34,27,0 767 | 5,121,72,23,112,26.2,0.245,30,0 768 | 1,126,60,0,0,30.1,0.349,47,1 769 | 1,93,70,31,0,30.4,0.315,23,0 -------------------------------------------------------------------------------- /40_MachineLearning/data/housing.csv: -------------------------------------------------------------------------------- 1 | 0.00632 18.00 2.310 0 0.5380 6.5750 65.20 4.0900 1 296.0 15.30 396.90 4.98 24.00 2 | 0.02731 0.00 7.070 0 0.4690 6.4210 78.90 4.9671 2 242.0 17.80 396.90 9.14 21.60 3 | 0.02729 0.00 7.070 0 0.4690 7.1850 61.10 4.9671 2 242.0 17.80 392.83 4.03 34.70 4 | 0.03237 0.00 2.180 0 0.4580 6.9980 45.80 6.0622 3 222.0 18.70 394.63 2.94 33.40 5 | 0.06905 0.00 2.180 0 0.4580 7.1470 54.20 6.0622 3 222.0 18.70 396.90 5.33 36.20 6 | 0.02985 0.00 2.180 0 0.4580 6.4300 58.70 6.0622 3 222.0 18.70 394.12 5.21 28.70 7 | 0.08829 12.50 7.870 0 0.5240 6.0120 66.60 5.5605 5 311.0 15.20 395.60 12.43 22.90 8 | 0.14455 12.50 7.870 0 0.5240 6.1720 96.10 5.9505 5 311.0 15.20 396.90 19.15 27.10 9 | 0.21124 12.50 7.870 0 0.5240 5.6310 100.00 6.0821 5 311.0 15.20 386.63 29.93 16.50 10 | 0.17004 12.50 7.870 0 0.5240 6.0040 85.90 6.5921 5 311.0 15.20 386.71 17.10 18.90 11 | 0.22489 12.50 7.870 0 0.5240 6.3770 94.30 6.3467 5 311.0 15.20 392.52 20.45 15.00 12 | 0.11747 12.50 7.870 0 0.5240 6.0090 82.90 6.2267 5 311.0 15.20 396.90 13.27 18.90 13 | 0.09378 12.50 7.870 0 0.5240 5.8890 39.00 5.4509 5 311.0 
15.20 390.50 15.71 21.70 14 | 0.62976 0.00 8.140 0 0.5380 5.9490 61.80 4.7075 4 307.0 21.00 396.90 8.26 20.40 15 | 0.63796 0.00 8.140 0 0.5380 6.0960 84.50 4.4619 4 307.0 21.00 380.02 10.26 18.20 16 | 0.62739 0.00 8.140 0 0.5380 5.8340 56.50 4.4986 4 307.0 21.00 395.62 8.47 19.90 17 | 1.05393 0.00 8.140 0 0.5380 5.9350 29.30 4.4986 4 307.0 21.00 386.85 6.58 23.10 18 | 0.78420 0.00 8.140 0 0.5380 5.9900 81.70 4.2579 4 307.0 21.00 386.75 14.67 17.50 19 | 0.80271 0.00 8.140 0 0.5380 5.4560 36.60 3.7965 4 307.0 21.00 288.99 11.69 20.20 20 | 0.72580 0.00 8.140 0 0.5380 5.7270 69.50 3.7965 4 307.0 21.00 390.95 11.28 18.20 21 | 1.25179 0.00 8.140 0 0.5380 5.5700 98.10 3.7979 4 307.0 21.00 376.57 21.02 13.60 22 | 0.85204 0.00 8.140 0 0.5380 5.9650 89.20 4.0123 4 307.0 21.00 392.53 13.83 19.60 23 | 1.23247 0.00 8.140 0 0.5380 6.1420 91.70 3.9769 4 307.0 21.00 396.90 18.72 15.20 24 | 0.98843 0.00 8.140 0 0.5380 5.8130 100.00 4.0952 4 307.0 21.00 394.54 19.88 14.50 25 | 0.75026 0.00 8.140 0 0.5380 5.9240 94.10 4.3996 4 307.0 21.00 394.33 16.30 15.60 26 | 0.84054 0.00 8.140 0 0.5380 5.5990 85.70 4.4546 4 307.0 21.00 303.42 16.51 13.90 27 | 0.67191 0.00 8.140 0 0.5380 5.8130 90.30 4.6820 4 307.0 21.00 376.88 14.81 16.60 28 | 0.95577 0.00 8.140 0 0.5380 6.0470 88.80 4.4534 4 307.0 21.00 306.38 17.28 14.80 29 | 0.77299 0.00 8.140 0 0.5380 6.4950 94.40 4.4547 4 307.0 21.00 387.94 12.80 18.40 30 | 1.00245 0.00 8.140 0 0.5380 6.6740 87.30 4.2390 4 307.0 21.00 380.23 11.98 21.00 31 | 1.13081 0.00 8.140 0 0.5380 5.7130 94.10 4.2330 4 307.0 21.00 360.17 22.60 12.70 32 | 1.35472 0.00 8.140 0 0.5380 6.0720 100.00 4.1750 4 307.0 21.00 376.73 13.04 14.50 33 | 1.38799 0.00 8.140 0 0.5380 5.9500 82.00 3.9900 4 307.0 21.00 232.60 27.71 13.20 34 | 1.15172 0.00 8.140 0 0.5380 5.7010 95.00 3.7872 4 307.0 21.00 358.77 18.35 13.10 35 | 1.61282 0.00 8.140 0 0.5380 6.0960 96.90 3.7598 4 307.0 21.00 248.31 20.34 13.50 36 | 0.06417 0.00 5.960 0 0.4990 5.9330 68.20 3.3603 5 279.0 19.20 396.90 9.68 18.90 37 | 0.09744 0.00 5.960 0 0.4990 5.8410 61.40 3.3779 5 279.0 19.20 377.56 11.41 20.00 38 | 0.08014 0.00 5.960 0 0.4990 5.8500 41.50 3.9342 5 279.0 19.20 396.90 8.77 21.00 39 | 0.17505 0.00 5.960 0 0.4990 5.9660 30.20 3.8473 5 279.0 19.20 393.43 10.13 24.70 40 | 0.02763 75.00 2.950 0 0.4280 6.5950 21.80 5.4011 3 252.0 18.30 395.63 4.32 30.80 41 | 0.03359 75.00 2.950 0 0.4280 7.0240 15.80 5.4011 3 252.0 18.30 395.62 1.98 34.90 42 | 0.12744 0.00 6.910 0 0.4480 6.7700 2.90 5.7209 3 233.0 17.90 385.41 4.84 26.60 43 | 0.14150 0.00 6.910 0 0.4480 6.1690 6.60 5.7209 3 233.0 17.90 383.37 5.81 25.30 44 | 0.15936 0.00 6.910 0 0.4480 6.2110 6.50 5.7209 3 233.0 17.90 394.46 7.44 24.70 45 | 0.12269 0.00 6.910 0 0.4480 6.0690 40.00 5.7209 3 233.0 17.90 389.39 9.55 21.20 46 | 0.17142 0.00 6.910 0 0.4480 5.6820 33.80 5.1004 3 233.0 17.90 396.90 10.21 19.30 47 | 0.18836 0.00 6.910 0 0.4480 5.7860 33.30 5.1004 3 233.0 17.90 396.90 14.15 20.00 48 | 0.22927 0.00 6.910 0 0.4480 6.0300 85.50 5.6894 3 233.0 17.90 392.74 18.80 16.60 49 | 0.25387 0.00 6.910 0 0.4480 5.3990 95.30 5.8700 3 233.0 17.90 396.90 30.81 14.40 50 | 0.21977 0.00 6.910 0 0.4480 5.6020 62.00 6.0877 3 233.0 17.90 396.90 16.20 19.40 51 | 0.08873 21.00 5.640 0 0.4390 5.9630 45.70 6.8147 4 243.0 16.80 395.56 13.45 19.70 52 | 0.04337 21.00 5.640 0 0.4390 6.1150 63.00 6.8147 4 243.0 16.80 393.97 9.43 20.50 53 | 0.05360 21.00 5.640 0 0.4390 6.5110 21.10 6.8147 4 243.0 16.80 396.90 5.28 25.00 54 | 0.04981 21.00 5.640 0 0.4390 5.9980 21.40 6.8147 4 243.0 16.80 396.90 8.43 23.40 55 | 0.01360 
75.00 4.000 0 0.4100 5.8880 47.60 7.3197 3 469.0 21.10 396.90 14.80 18.90 56 | 0.01311 90.00 1.220 0 0.4030 7.2490 21.90 8.6966 5 226.0 17.90 395.93 4.81 35.40 57 | 0.02055 85.00 0.740 0 0.4100 6.3830 35.70 9.1876 2 313.0 17.30 396.90 5.77 24.70 58 | 0.01432 100.00 1.320 0 0.4110 6.8160 40.50 8.3248 5 256.0 15.10 392.90 3.95 31.60 59 | 0.15445 25.00 5.130 0 0.4530 6.1450 29.20 7.8148 8 284.0 19.70 390.68 6.86 23.30 60 | 0.10328 25.00 5.130 0 0.4530 5.9270 47.20 6.9320 8 284.0 19.70 396.90 9.22 19.60 61 | 0.14932 25.00 5.130 0 0.4530 5.7410 66.20 7.2254 8 284.0 19.70 395.11 13.15 18.70 62 | 0.17171 25.00 5.130 0 0.4530 5.9660 93.40 6.8185 8 284.0 19.70 378.08 14.44 16.00 63 | 0.11027 25.00 5.130 0 0.4530 6.4560 67.80 7.2255 8 284.0 19.70 396.90 6.73 22.20 64 | 0.12650 25.00 5.130 0 0.4530 6.7620 43.40 7.9809 8 284.0 19.70 395.58 9.50 25.00 65 | 0.01951 17.50 1.380 0 0.4161 7.1040 59.50 9.2229 3 216.0 18.60 393.24 8.05 33.00 66 | 0.03584 80.00 3.370 0 0.3980 6.2900 17.80 6.6115 4 337.0 16.10 396.90 4.67 23.50 67 | 0.04379 80.00 3.370 0 0.3980 5.7870 31.10 6.6115 4 337.0 16.10 396.90 10.24 19.40 68 | 0.05789 12.50 6.070 0 0.4090 5.8780 21.40 6.4980 4 345.0 18.90 396.21 8.10 22.00 69 | 0.13554 12.50 6.070 0 0.4090 5.5940 36.80 6.4980 4 345.0 18.90 396.90 13.09 17.40 70 | 0.12816 12.50 6.070 0 0.4090 5.8850 33.00 6.4980 4 345.0 18.90 396.90 8.79 20.90 71 | 0.08826 0.00 10.810 0 0.4130 6.4170 6.60 5.2873 4 305.0 19.20 383.73 6.72 24.20 72 | 0.15876 0.00 10.810 0 0.4130 5.9610 17.50 5.2873 4 305.0 19.20 376.94 9.88 21.70 73 | 0.09164 0.00 10.810 0 0.4130 6.0650 7.80 5.2873 4 305.0 19.20 390.91 5.52 22.80 74 | 0.19539 0.00 10.810 0 0.4130 6.2450 6.20 5.2873 4 305.0 19.20 377.17 7.54 23.40 75 | 0.07896 0.00 12.830 0 0.4370 6.2730 6.00 4.2515 5 398.0 18.70 394.92 6.78 24.10 76 | 0.09512 0.00 12.830 0 0.4370 6.2860 45.00 4.5026 5 398.0 18.70 383.23 8.94 21.40 77 | 0.10153 0.00 12.830 0 0.4370 6.2790 74.50 4.0522 5 398.0 18.70 373.66 11.97 20.00 78 | 0.08707 0.00 12.830 0 0.4370 6.1400 45.80 4.0905 5 398.0 18.70 386.96 10.27 20.80 79 | 0.05646 0.00 12.830 0 0.4370 6.2320 53.70 5.0141 5 398.0 18.70 386.40 12.34 21.20 80 | 0.08387 0.00 12.830 0 0.4370 5.8740 36.60 4.5026 5 398.0 18.70 396.06 9.10 20.30 81 | 0.04113 25.00 4.860 0 0.4260 6.7270 33.50 5.4007 4 281.0 19.00 396.90 5.29 28.00 82 | 0.04462 25.00 4.860 0 0.4260 6.6190 70.40 5.4007 4 281.0 19.00 395.63 7.22 23.90 83 | 0.03659 25.00 4.860 0 0.4260 6.3020 32.20 5.4007 4 281.0 19.00 396.90 6.72 24.80 84 | 0.03551 25.00 4.860 0 0.4260 6.1670 46.70 5.4007 4 281.0 19.00 390.64 7.51 22.90 85 | 0.05059 0.00 4.490 0 0.4490 6.3890 48.00 4.7794 3 247.0 18.50 396.90 9.62 23.90 86 | 0.05735 0.00 4.490 0 0.4490 6.6300 56.10 4.4377 3 247.0 18.50 392.30 6.53 26.60 87 | 0.05188 0.00 4.490 0 0.4490 6.0150 45.10 4.4272 3 247.0 18.50 395.99 12.86 22.50 88 | 0.07151 0.00 4.490 0 0.4490 6.1210 56.80 3.7476 3 247.0 18.50 395.15 8.44 22.20 89 | 0.05660 0.00 3.410 0 0.4890 7.0070 86.30 3.4217 2 270.0 17.80 396.90 5.50 23.60 90 | 0.05302 0.00 3.410 0 0.4890 7.0790 63.10 3.4145 2 270.0 17.80 396.06 5.70 28.70 91 | 0.04684 0.00 3.410 0 0.4890 6.4170 66.10 3.0923 2 270.0 17.80 392.18 8.81 22.60 92 | 0.03932 0.00 3.410 0 0.4890 6.4050 73.90 3.0921 2 270.0 17.80 393.55 8.20 22.00 93 | 0.04203 28.00 15.040 0 0.4640 6.4420 53.60 3.6659 4 270.0 18.20 395.01 8.16 22.90 94 | 0.02875 28.00 15.040 0 0.4640 6.2110 28.90 3.6659 4 270.0 18.20 396.33 6.21 25.00 95 | 0.04294 28.00 15.040 0 0.4640 6.2490 77.30 3.6150 4 270.0 18.20 396.90 10.59 20.60 96 | 0.12204 0.00 2.890 0 0.4450 6.6250 
57.80 3.4952 2 276.0 18.00 357.98 6.65 28.40 97 | 0.11504 0.00 2.890 0 0.4450 6.1630 69.60 3.4952 2 276.0 18.00 391.83 11.34 21.40 98 | 0.12083 0.00 2.890 0 0.4450 8.0690 76.00 3.4952 2 276.0 18.00 396.90 4.21 38.70 99 | 0.08187 0.00 2.890 0 0.4450 7.8200 36.90 3.4952 2 276.0 18.00 393.53 3.57 43.80 100 | 0.06860 0.00 2.890 0 0.4450 7.4160 62.50 3.4952 2 276.0 18.00 396.90 6.19 33.20 101 | 0.14866 0.00 8.560 0 0.5200 6.7270 79.90 2.7778 5 384.0 20.90 394.76 9.42 27.50 102 | 0.11432 0.00 8.560 0 0.5200 6.7810 71.30 2.8561 5 384.0 20.90 395.58 7.67 26.50 103 | 0.22876 0.00 8.560 0 0.5200 6.4050 85.40 2.7147 5 384.0 20.90 70.80 10.63 18.60 104 | 0.21161 0.00 8.560 0 0.5200 6.1370 87.40 2.7147 5 384.0 20.90 394.47 13.44 19.30 105 | 0.13960 0.00 8.560 0 0.5200 6.1670 90.00 2.4210 5 384.0 20.90 392.69 12.33 20.10 106 | 0.13262 0.00 8.560 0 0.5200 5.8510 96.70 2.1069 5 384.0 20.90 394.05 16.47 19.50 107 | 0.17120 0.00 8.560 0 0.5200 5.8360 91.90 2.2110 5 384.0 20.90 395.67 18.66 19.50 108 | 0.13117 0.00 8.560 0 0.5200 6.1270 85.20 2.1224 5 384.0 20.90 387.69 14.09 20.40 109 | 0.12802 0.00 8.560 0 0.5200 6.4740 97.10 2.4329 5 384.0 20.90 395.24 12.27 19.80 110 | 0.26363 0.00 8.560 0 0.5200 6.2290 91.20 2.5451 5 384.0 20.90 391.23 15.55 19.40 111 | 0.10793 0.00 8.560 0 0.5200 6.1950 54.40 2.7778 5 384.0 20.90 393.49 13.00 21.70 112 | 0.10084 0.00 10.010 0 0.5470 6.7150 81.60 2.6775 6 432.0 17.80 395.59 10.16 22.80 113 | 0.12329 0.00 10.010 0 0.5470 5.9130 92.90 2.3534 6 432.0 17.80 394.95 16.21 18.80 114 | 0.22212 0.00 10.010 0 0.5470 6.0920 95.40 2.5480 6 432.0 17.80 396.90 17.09 18.70 115 | 0.14231 0.00 10.010 0 0.5470 6.2540 84.20 2.2565 6 432.0 17.80 388.74 10.45 18.50 116 | 0.17134 0.00 10.010 0 0.5470 5.9280 88.20 2.4631 6 432.0 17.80 344.91 15.76 18.30 117 | 0.13158 0.00 10.010 0 0.5470 6.1760 72.50 2.7301 6 432.0 17.80 393.30 12.04 21.20 118 | 0.15098 0.00 10.010 0 0.5470 6.0210 82.60 2.7474 6 432.0 17.80 394.51 10.30 19.20 119 | 0.13058 0.00 10.010 0 0.5470 5.8720 73.10 2.4775 6 432.0 17.80 338.63 15.37 20.40 120 | 0.14476 0.00 10.010 0 0.5470 5.7310 65.20 2.7592 6 432.0 17.80 391.50 13.61 19.30 121 | 0.06899 0.00 25.650 0 0.5810 5.8700 69.70 2.2577 2 188.0 19.10 389.15 14.37 22.00 122 | 0.07165 0.00 25.650 0 0.5810 6.0040 84.10 2.1974 2 188.0 19.10 377.67 14.27 20.30 123 | 0.09299 0.00 25.650 0 0.5810 5.9610 92.90 2.0869 2 188.0 19.10 378.09 17.93 20.50 124 | 0.15038 0.00 25.650 0 0.5810 5.8560 97.00 1.9444 2 188.0 19.10 370.31 25.41 17.30 125 | 0.09849 0.00 25.650 0 0.5810 5.8790 95.80 2.0063 2 188.0 19.10 379.38 17.58 18.80 126 | 0.16902 0.00 25.650 0 0.5810 5.9860 88.40 1.9929 2 188.0 19.10 385.02 14.81 21.40 127 | 0.38735 0.00 25.650 0 0.5810 5.6130 95.60 1.7572 2 188.0 19.10 359.29 27.26 15.70 128 | 0.25915 0.00 21.890 0 0.6240 5.6930 96.00 1.7883 4 437.0 21.20 392.11 17.19 16.20 129 | 0.32543 0.00 21.890 0 0.6240 6.4310 98.80 1.8125 4 437.0 21.20 396.90 15.39 18.00 130 | 0.88125 0.00 21.890 0 0.6240 5.6370 94.70 1.9799 4 437.0 21.20 396.90 18.34 14.30 131 | 0.34006 0.00 21.890 0 0.6240 6.4580 98.90 2.1185 4 437.0 21.20 395.04 12.60 19.20 132 | 1.19294 0.00 21.890 0 0.6240 6.3260 97.70 2.2710 4 437.0 21.20 396.90 12.26 19.60 133 | 0.59005 0.00 21.890 0 0.6240 6.3720 97.90 2.3274 4 437.0 21.20 385.76 11.12 23.00 134 | 0.32982 0.00 21.890 0 0.6240 5.8220 95.40 2.4699 4 437.0 21.20 388.69 15.03 18.40 135 | 0.97617 0.00 21.890 0 0.6240 5.7570 98.40 2.3460 4 437.0 21.20 262.76 17.31 15.60 136 | 0.55778 0.00 21.890 0 0.6240 6.3350 98.20 2.1107 4 437.0 21.20 394.67 16.96 18.10 137 | 0.32264 
0.00 21.890 0 0.6240 5.9420 93.50 1.9669 4 437.0 21.20 378.25 16.90 17.40 138 | 0.35233 0.00 21.890 0 0.6240 6.4540 98.40 1.8498 4 437.0 21.20 394.08 14.59 17.10 139 | 0.24980 0.00 21.890 0 0.6240 5.8570 98.20 1.6686 4 437.0 21.20 392.04 21.32 13.30 140 | 0.54452 0.00 21.890 0 0.6240 6.1510 97.90 1.6687 4 437.0 21.20 396.90 18.46 17.80 141 | 0.29090 0.00 21.890 0 0.6240 6.1740 93.60 1.6119 4 437.0 21.20 388.08 24.16 14.00 142 | 1.62864 0.00 21.890 0 0.6240 5.0190 100.00 1.4394 4 437.0 21.20 396.90 34.41 14.40 143 | 3.32105 0.00 19.580 1 0.8710 5.4030 100.00 1.3216 5 403.0 14.70 396.90 26.82 13.40 144 | 4.09740 0.00 19.580 0 0.8710 5.4680 100.00 1.4118 5 403.0 14.70 396.90 26.42 15.60 145 | 2.77974 0.00 19.580 0 0.8710 4.9030 97.80 1.3459 5 403.0 14.70 396.90 29.29 11.80 146 | 2.37934 0.00 19.580 0 0.8710 6.1300 100.00 1.4191 5 403.0 14.70 172.91 27.80 13.80 147 | 2.15505 0.00 19.580 0 0.8710 5.6280 100.00 1.5166 5 403.0 14.70 169.27 16.65 15.60 148 | 2.36862 0.00 19.580 0 0.8710 4.9260 95.70 1.4608 5 403.0 14.70 391.71 29.53 14.60 149 | 2.33099 0.00 19.580 0 0.8710 5.1860 93.80 1.5296 5 403.0 14.70 356.99 28.32 17.80 150 | 2.73397 0.00 19.580 0 0.8710 5.5970 94.90 1.5257 5 403.0 14.70 351.85 21.45 15.40 151 | 1.65660 0.00 19.580 0 0.8710 6.1220 97.30 1.6180 5 403.0 14.70 372.80 14.10 21.50 152 | 1.49632 0.00 19.580 0 0.8710 5.4040 100.00 1.5916 5 403.0 14.70 341.60 13.28 19.60 153 | 1.12658 0.00 19.580 1 0.8710 5.0120 88.00 1.6102 5 403.0 14.70 343.28 12.12 15.30 154 | 2.14918 0.00 19.580 0 0.8710 5.7090 98.50 1.6232 5 403.0 14.70 261.95 15.79 19.40 155 | 1.41385 0.00 19.580 1 0.8710 6.1290 96.00 1.7494 5 403.0 14.70 321.02 15.12 17.00 156 | 3.53501 0.00 19.580 1 0.8710 6.1520 82.60 1.7455 5 403.0 14.70 88.01 15.02 15.60 157 | 2.44668 0.00 19.580 0 0.8710 5.2720 94.00 1.7364 5 403.0 14.70 88.63 16.14 13.10 158 | 1.22358 0.00 19.580 0 0.6050 6.9430 97.40 1.8773 5 403.0 14.70 363.43 4.59 41.30 159 | 1.34284 0.00 19.580 0 0.6050 6.0660 100.00 1.7573 5 403.0 14.70 353.89 6.43 24.30 160 | 1.42502 0.00 19.580 0 0.8710 6.5100 100.00 1.7659 5 403.0 14.70 364.31 7.39 23.30 161 | 1.27346 0.00 19.580 1 0.6050 6.2500 92.60 1.7984 5 403.0 14.70 338.92 5.50 27.00 162 | 1.46336 0.00 19.580 0 0.6050 7.4890 90.80 1.9709 5 403.0 14.70 374.43 1.73 50.00 163 | 1.83377 0.00 19.580 1 0.6050 7.8020 98.20 2.0407 5 403.0 14.70 389.61 1.92 50.00 164 | 1.51902 0.00 19.580 1 0.6050 8.3750 93.90 2.1620 5 403.0 14.70 388.45 3.32 50.00 165 | 2.24236 0.00 19.580 0 0.6050 5.8540 91.80 2.4220 5 403.0 14.70 395.11 11.64 22.70 166 | 2.92400 0.00 19.580 0 0.6050 6.1010 93.00 2.2834 5 403.0 14.70 240.16 9.81 25.00 167 | 2.01019 0.00 19.580 0 0.6050 7.9290 96.20 2.0459 5 403.0 14.70 369.30 3.70 50.00 168 | 1.80028 0.00 19.580 0 0.6050 5.8770 79.20 2.4259 5 403.0 14.70 227.61 12.14 23.80 169 | 2.30040 0.00 19.580 0 0.6050 6.3190 96.10 2.1000 5 403.0 14.70 297.09 11.10 23.80 170 | 2.44953 0.00 19.580 0 0.6050 6.4020 95.20 2.2625 5 403.0 14.70 330.04 11.32 22.30 171 | 1.20742 0.00 19.580 0 0.6050 5.8750 94.60 2.4259 5 403.0 14.70 292.29 14.43 17.40 172 | 2.31390 0.00 19.580 0 0.6050 5.8800 97.30 2.3887 5 403.0 14.70 348.13 12.03 19.10 173 | 0.13914 0.00 4.050 0 0.5100 5.5720 88.50 2.5961 5 296.0 16.60 396.90 14.69 23.10 174 | 0.09178 0.00 4.050 0 0.5100 6.4160 84.10 2.6463 5 296.0 16.60 395.50 9.04 23.60 175 | 0.08447 0.00 4.050 0 0.5100 5.8590 68.70 2.7019 5 296.0 16.60 393.23 9.64 22.60 176 | 0.06664 0.00 4.050 0 0.5100 6.5460 33.10 3.1323 5 296.0 16.60 390.96 5.33 29.40 177 | 0.07022 0.00 4.050 0 0.5100 6.0200 47.20 3.5549 5 
296.0 16.60 393.23 10.11 23.20 178 | 0.05425 0.00 4.050 0 0.5100 6.3150 73.40 3.3175 5 296.0 16.60 395.60 6.29 24.60 179 | 0.06642 0.00 4.050 0 0.5100 6.8600 74.40 2.9153 5 296.0 16.60 391.27 6.92 29.90 180 | 0.05780 0.00 2.460 0 0.4880 6.9800 58.40 2.8290 3 193.0 17.80 396.90 5.04 37.20 181 | 0.06588 0.00 2.460 0 0.4880 7.7650 83.30 2.7410 3 193.0 17.80 395.56 7.56 39.80 182 | 0.06888 0.00 2.460 0 0.4880 6.1440 62.20 2.5979 3 193.0 17.80 396.90 9.45 36.20 183 | 0.09103 0.00 2.460 0 0.4880 7.1550 92.20 2.7006 3 193.0 17.80 394.12 4.82 37.90 184 | 0.10008 0.00 2.460 0 0.4880 6.5630 95.60 2.8470 3 193.0 17.80 396.90 5.68 32.50 185 | 0.08308 0.00 2.460 0 0.4880 5.6040 89.80 2.9879 3 193.0 17.80 391.00 13.98 26.40 186 | 0.06047 0.00 2.460 0 0.4880 6.1530 68.80 3.2797 3 193.0 17.80 387.11 13.15 29.60 187 | 0.05602 0.00 2.460 0 0.4880 7.8310 53.60 3.1992 3 193.0 17.80 392.63 4.45 50.00 188 | 0.07875 45.00 3.440 0 0.4370 6.7820 41.10 3.7886 5 398.0 15.20 393.87 6.68 32.00 189 | 0.12579 45.00 3.440 0 0.4370 6.5560 29.10 4.5667 5 398.0 15.20 382.84 4.56 29.80 190 | 0.08370 45.00 3.440 0 0.4370 7.1850 38.90 4.5667 5 398.0 15.20 396.90 5.39 34.90 191 | 0.09068 45.00 3.440 0 0.4370 6.9510 21.50 6.4798 5 398.0 15.20 377.68 5.10 37.00 192 | 0.06911 45.00 3.440 0 0.4370 6.7390 30.80 6.4798 5 398.0 15.20 389.71 4.69 30.50 193 | 0.08664 45.00 3.440 0 0.4370 7.1780 26.30 6.4798 5 398.0 15.20 390.49 2.87 36.40 194 | 0.02187 60.00 2.930 0 0.4010 6.8000 9.90 6.2196 1 265.0 15.60 393.37 5.03 31.10 195 | 0.01439 60.00 2.930 0 0.4010 6.6040 18.80 6.2196 1 265.0 15.60 376.70 4.38 29.10 196 | 0.01381 80.00 0.460 0 0.4220 7.8750 32.00 5.6484 4 255.0 14.40 394.23 2.97 50.00 197 | 0.04011 80.00 1.520 0 0.4040 7.2870 34.10 7.3090 2 329.0 12.60 396.90 4.08 33.30 198 | 0.04666 80.00 1.520 0 0.4040 7.1070 36.60 7.3090 2 329.0 12.60 354.31 8.61 30.30 199 | 0.03768 80.00 1.520 0 0.4040 7.2740 38.30 7.3090 2 329.0 12.60 392.20 6.62 34.60 200 | 0.03150 95.00 1.470 0 0.4030 6.9750 15.30 7.6534 3 402.0 17.00 396.90 4.56 34.90 201 | 0.01778 95.00 1.470 0 0.4030 7.1350 13.90 7.6534 3 402.0 17.00 384.30 4.45 32.90 202 | 0.03445 82.50 2.030 0 0.4150 6.1620 38.40 6.2700 2 348.0 14.70 393.77 7.43 24.10 203 | 0.02177 82.50 2.030 0 0.4150 7.6100 15.70 6.2700 2 348.0 14.70 395.38 3.11 42.30 204 | 0.03510 95.00 2.680 0 0.4161 7.8530 33.20 5.1180 4 224.0 14.70 392.78 3.81 48.50 205 | 0.02009 95.00 2.680 0 0.4161 8.0340 31.90 5.1180 4 224.0 14.70 390.55 2.88 50.00 206 | 0.13642 0.00 10.590 0 0.4890 5.8910 22.30 3.9454 4 277.0 18.60 396.90 10.87 22.60 207 | 0.22969 0.00 10.590 0 0.4890 6.3260 52.50 4.3549 4 277.0 18.60 394.87 10.97 24.40 208 | 0.25199 0.00 10.590 0 0.4890 5.7830 72.70 4.3549 4 277.0 18.60 389.43 18.06 22.50 209 | 0.13587 0.00 10.590 1 0.4890 6.0640 59.10 4.2392 4 277.0 18.60 381.32 14.66 24.40 210 | 0.43571 0.00 10.590 1 0.4890 5.3440 100.00 3.8750 4 277.0 18.60 396.90 23.09 20.00 211 | 0.17446 0.00 10.590 1 0.4890 5.9600 92.10 3.8771 4 277.0 18.60 393.25 17.27 21.70 212 | 0.37578 0.00 10.590 1 0.4890 5.4040 88.60 3.6650 4 277.0 18.60 395.24 23.98 19.30 213 | 0.21719 0.00 10.590 1 0.4890 5.8070 53.80 3.6526 4 277.0 18.60 390.94 16.03 22.40 214 | 0.14052 0.00 10.590 0 0.4890 6.3750 32.30 3.9454 4 277.0 18.60 385.81 9.38 28.10 215 | 0.28955 0.00 10.590 0 0.4890 5.4120 9.80 3.5875 4 277.0 18.60 348.93 29.55 23.70 216 | 0.19802 0.00 10.590 0 0.4890 6.1820 42.40 3.9454 4 277.0 18.60 393.63 9.47 25.00 217 | 0.04560 0.00 13.890 1 0.5500 5.8880 56.00 3.1121 5 276.0 16.40 392.80 13.51 23.30 218 | 0.07013 0.00 13.890 0 0.5500 6.6420 
85.10 3.4211 5 276.0 16.40 392.78 9.69 28.70 219 | 0.11069 0.00 13.890 1 0.5500 5.9510 93.80 2.8893 5 276.0 16.40 396.90 17.92 21.50 220 | 0.11425 0.00 13.890 1 0.5500 6.3730 92.40 3.3633 5 276.0 16.40 393.74 10.50 23.00 221 | 0.35809 0.00 6.200 1 0.5070 6.9510 88.50 2.8617 8 307.0 17.40 391.70 9.71 26.70 222 | 0.40771 0.00 6.200 1 0.5070 6.1640 91.30 3.0480 8 307.0 17.40 395.24 21.46 21.70 223 | 0.62356 0.00 6.200 1 0.5070 6.8790 77.70 3.2721 8 307.0 17.40 390.39 9.93 27.50 224 | 0.61470 0.00 6.200 0 0.5070 6.6180 80.80 3.2721 8 307.0 17.40 396.90 7.60 30.10 225 | 0.31533 0.00 6.200 0 0.5040 8.2660 78.30 2.8944 8 307.0 17.40 385.05 4.14 44.80 226 | 0.52693 0.00 6.200 0 0.5040 8.7250 83.00 2.8944 8 307.0 17.40 382.00 4.63 50.00 227 | 0.38214 0.00 6.200 0 0.5040 8.0400 86.50 3.2157 8 307.0 17.40 387.38 3.13 37.60 228 | 0.41238 0.00 6.200 0 0.5040 7.1630 79.90 3.2157 8 307.0 17.40 372.08 6.36 31.60 229 | 0.29819 0.00 6.200 0 0.5040 7.6860 17.00 3.3751 8 307.0 17.40 377.51 3.92 46.70 230 | 0.44178 0.00 6.200 0 0.5040 6.5520 21.40 3.3751 8 307.0 17.40 380.34 3.76 31.50 231 | 0.53700 0.00 6.200 0 0.5040 5.9810 68.10 3.6715 8 307.0 17.40 378.35 11.65 24.30 232 | 0.46296 0.00 6.200 0 0.5040 7.4120 76.90 3.6715 8 307.0 17.40 376.14 5.25 31.70 233 | 0.57529 0.00 6.200 0 0.5070 8.3370 73.30 3.8384 8 307.0 17.40 385.91 2.47 41.70 234 | 0.33147 0.00 6.200 0 0.5070 8.2470 70.40 3.6519 8 307.0 17.40 378.95 3.95 48.30 235 | 0.44791 0.00 6.200 1 0.5070 6.7260 66.50 3.6519 8 307.0 17.40 360.20 8.05 29.00 236 | 0.33045 0.00 6.200 0 0.5070 6.0860 61.50 3.6519 8 307.0 17.40 376.75 10.88 24.00 237 | 0.52058 0.00 6.200 1 0.5070 6.6310 76.50 4.1480 8 307.0 17.40 388.45 9.54 25.10 238 | 0.51183 0.00 6.200 0 0.5070 7.3580 71.60 4.1480 8 307.0 17.40 390.07 4.73 31.50 239 | 0.08244 30.00 4.930 0 0.4280 6.4810 18.50 6.1899 6 300.0 16.60 379.41 6.36 23.70 240 | 0.09252 30.00 4.930 0 0.4280 6.6060 42.20 6.1899 6 300.0 16.60 383.78 7.37 23.30 241 | 0.11329 30.00 4.930 0 0.4280 6.8970 54.30 6.3361 6 300.0 16.60 391.25 11.38 22.00 242 | 0.10612 30.00 4.930 0 0.4280 6.0950 65.10 6.3361 6 300.0 16.60 394.62 12.40 20.10 243 | 0.10290 30.00 4.930 0 0.4280 6.3580 52.90 7.0355 6 300.0 16.60 372.75 11.22 22.20 244 | 0.12757 30.00 4.930 0 0.4280 6.3930 7.80 7.0355 6 300.0 16.60 374.71 5.19 23.70 245 | 0.20608 22.00 5.860 0 0.4310 5.5930 76.50 7.9549 7 330.0 19.10 372.49 12.50 17.60 246 | 0.19133 22.00 5.860 0 0.4310 5.6050 70.20 7.9549 7 330.0 19.10 389.13 18.46 18.50 247 | 0.33983 22.00 5.860 0 0.4310 6.1080 34.90 8.0555 7 330.0 19.10 390.18 9.16 24.30 248 | 0.19657 22.00 5.860 0 0.4310 6.2260 79.20 8.0555 7 330.0 19.10 376.14 10.15 20.50 249 | 0.16439 22.00 5.860 0 0.4310 6.4330 49.10 7.8265 7 330.0 19.10 374.71 9.52 24.50 250 | 0.19073 22.00 5.860 0 0.4310 6.7180 17.50 7.8265 7 330.0 19.10 393.74 6.56 26.20 251 | 0.14030 22.00 5.860 0 0.4310 6.4870 13.00 7.3967 7 330.0 19.10 396.28 5.90 24.40 252 | 0.21409 22.00 5.860 0 0.4310 6.4380 8.90 7.3967 7 330.0 19.10 377.07 3.59 24.80 253 | 0.08221 22.00 5.860 0 0.4310 6.9570 6.80 8.9067 7 330.0 19.10 386.09 3.53 29.60 254 | 0.36894 22.00 5.860 0 0.4310 8.2590 8.40 8.9067 7 330.0 19.10 396.90 3.54 42.80 255 | 0.04819 80.00 3.640 0 0.3920 6.1080 32.00 9.2203 1 315.0 16.40 392.89 6.57 21.90 256 | 0.03548 80.00 3.640 0 0.3920 5.8760 19.10 9.2203 1 315.0 16.40 395.18 9.25 20.90 257 | 0.01538 90.00 3.750 0 0.3940 7.4540 34.20 6.3361 3 244.0 15.90 386.34 3.11 44.00 258 | 0.61154 20.00 3.970 0 0.6470 8.7040 86.90 1.8010 5 264.0 13.00 389.70 5.12 50.00 259 | 0.66351 20.00 3.970 0 0.6470 
7.3330 100.00 1.8946 5 264.0 13.00 383.29 7.79 36.00 260 | 0.65665 20.00 3.970 0 0.6470 6.8420 100.00 2.0107 5 264.0 13.00 391.93 6.90 30.10 261 | 0.54011 20.00 3.970 0 0.6470 7.2030 81.80 2.1121 5 264.0 13.00 392.80 9.59 33.80 262 | 0.53412 20.00 3.970 0 0.6470 7.5200 89.40 2.1398 5 264.0 13.00 388.37 7.26 43.10 263 | 0.52014 20.00 3.970 0 0.6470 8.3980 91.50 2.2885 5 264.0 13.00 386.86 5.91 48.80 264 | 0.82526 20.00 3.970 0 0.6470 7.3270 94.50 2.0788 5 264.0 13.00 393.42 11.25 31.00 265 | 0.55007 20.00 3.970 0 0.6470 7.2060 91.60 1.9301 5 264.0 13.00 387.89 8.10 36.50 266 | 0.76162 20.00 3.970 0 0.6470 5.5600 62.80 1.9865 5 264.0 13.00 392.40 10.45 22.80 267 | 0.78570 20.00 3.970 0 0.6470 7.0140 84.60 2.1329 5 264.0 13.00 384.07 14.79 30.70 268 | 0.57834 20.00 3.970 0 0.5750 8.2970 67.00 2.4216 5 264.0 13.00 384.54 7.44 50.00 269 | 0.54050 20.00 3.970 0 0.5750 7.4700 52.60 2.8720 5 264.0 13.00 390.30 3.16 43.50 270 | 0.09065 20.00 6.960 1 0.4640 5.9200 61.50 3.9175 3 223.0 18.60 391.34 13.65 20.70 271 | 0.29916 20.00 6.960 0 0.4640 5.8560 42.10 4.4290 3 223.0 18.60 388.65 13.00 21.10 272 | 0.16211 20.00 6.960 0 0.4640 6.2400 16.30 4.4290 3 223.0 18.60 396.90 6.59 25.20 273 | 0.11460 20.00 6.960 0 0.4640 6.5380 58.70 3.9175 3 223.0 18.60 394.96 7.73 24.40 274 | 0.22188 20.00 6.960 1 0.4640 7.6910 51.80 4.3665 3 223.0 18.60 390.77 6.58 35.20 275 | 0.05644 40.00 6.410 1 0.4470 6.7580 32.90 4.0776 4 254.0 17.60 396.90 3.53 32.40 276 | 0.09604 40.00 6.410 0 0.4470 6.8540 42.80 4.2673 4 254.0 17.60 396.90 2.98 32.00 277 | 0.10469 40.00 6.410 1 0.4470 7.2670 49.00 4.7872 4 254.0 17.60 389.25 6.05 33.20 278 | 0.06127 40.00 6.410 1 0.4470 6.8260 27.60 4.8628 4 254.0 17.60 393.45 4.16 33.10 279 | 0.07978 40.00 6.410 0 0.4470 6.4820 32.10 4.1403 4 254.0 17.60 396.90 7.19 29.10 280 | 0.21038 20.00 3.330 0 0.4429 6.8120 32.20 4.1007 5 216.0 14.90 396.90 4.85 35.10 281 | 0.03578 20.00 3.330 0 0.4429 7.8200 64.50 4.6947 5 216.0 14.90 387.31 3.76 45.40 282 | 0.03705 20.00 3.330 0 0.4429 6.9680 37.20 5.2447 5 216.0 14.90 392.23 4.59 35.40 283 | 0.06129 20.00 3.330 1 0.4429 7.6450 49.70 5.2119 5 216.0 14.90 377.07 3.01 46.00 284 | 0.01501 90.00 1.210 1 0.4010 7.9230 24.80 5.8850 1 198.0 13.60 395.52 3.16 50.00 285 | 0.00906 90.00 2.970 0 0.4000 7.0880 20.80 7.3073 1 285.0 15.30 394.72 7.85 32.20 286 | 0.01096 55.00 2.250 0 0.3890 6.4530 31.90 7.3073 1 300.0 15.30 394.72 8.23 22.00 287 | 0.01965 80.00 1.760 0 0.3850 6.2300 31.50 9.0892 1 241.0 18.20 341.60 12.93 20.10 288 | 0.03871 52.50 5.320 0 0.4050 6.2090 31.30 7.3172 6 293.0 16.60 396.90 7.14 23.20 289 | 0.04590 52.50 5.320 0 0.4050 6.3150 45.60 7.3172 6 293.0 16.60 396.90 7.60 22.30 290 | 0.04297 52.50 5.320 0 0.4050 6.5650 22.90 7.3172 6 293.0 16.60 371.72 9.51 24.80 291 | 0.03502 80.00 4.950 0 0.4110 6.8610 27.90 5.1167 4 245.0 19.20 396.90 3.33 28.50 292 | 0.07886 80.00 4.950 0 0.4110 7.1480 27.70 5.1167 4 245.0 19.20 396.90 3.56 37.30 293 | 0.03615 80.00 4.950 0 0.4110 6.6300 23.40 5.1167 4 245.0 19.20 396.90 4.70 27.90 294 | 0.08265 0.00 13.920 0 0.4370 6.1270 18.40 5.5027 4 289.0 16.00 396.90 8.58 23.90 295 | 0.08199 0.00 13.920 0 0.4370 6.0090 42.30 5.5027 4 289.0 16.00 396.90 10.40 21.70 296 | 0.12932 0.00 13.920 0 0.4370 6.6780 31.10 5.9604 4 289.0 16.00 396.90 6.27 28.60 297 | 0.05372 0.00 13.920 0 0.4370 6.5490 51.00 5.9604 4 289.0 16.00 392.85 7.39 27.10 298 | 0.14103 0.00 13.920 0 0.4370 5.7900 58.00 6.3200 4 289.0 16.00 396.90 15.84 20.30 299 | 0.06466 70.00 2.240 0 0.4000 6.3450 20.10 7.8278 5 358.0 14.80 368.24 4.97 22.50 300 | 
0.05561 70.00 2.240 0 0.4000 7.0410 10.00 7.8278 5 358.0 14.80 371.58 4.74 29.00 301 | 0.04417 70.00 2.240 0 0.4000 6.8710 47.40 7.8278 5 358.0 14.80 390.86 6.07 24.80 302 | 0.03537 34.00 6.090 0 0.4330 6.5900 40.40 5.4917 7 329.0 16.10 395.75 9.50 22.00 303 | 0.09266 34.00 6.090 0 0.4330 6.4950 18.40 5.4917 7 329.0 16.10 383.61 8.67 26.40 304 | 0.10000 34.00 6.090 0 0.4330 6.9820 17.70 5.4917 7 329.0 16.10 390.43 4.86 33.10 305 | 0.05515 33.00 2.180 0 0.4720 7.2360 41.10 4.0220 7 222.0 18.40 393.68 6.93 36.10 306 | 0.05479 33.00 2.180 0 0.4720 6.6160 58.10 3.3700 7 222.0 18.40 393.36 8.93 28.40 307 | 0.07503 33.00 2.180 0 0.4720 7.4200 71.90 3.0992 7 222.0 18.40 396.90 6.47 33.40 308 | 0.04932 33.00 2.180 0 0.4720 6.8490 70.30 3.1827 7 222.0 18.40 396.90 7.53 28.20 309 | 0.49298 0.00 9.900 0 0.5440 6.6350 82.50 3.3175 4 304.0 18.40 396.90 4.54 22.80 310 | 0.34940 0.00 9.900 0 0.5440 5.9720 76.70 3.1025 4 304.0 18.40 396.24 9.97 20.30 311 | 2.63548 0.00 9.900 0 0.5440 4.9730 37.80 2.5194 4 304.0 18.40 350.45 12.64 16.10 312 | 0.79041 0.00 9.900 0 0.5440 6.1220 52.80 2.6403 4 304.0 18.40 396.90 5.98 22.10 313 | 0.26169 0.00 9.900 0 0.5440 6.0230 90.40 2.8340 4 304.0 18.40 396.30 11.72 19.40 314 | 0.26938 0.00 9.900 0 0.5440 6.2660 82.80 3.2628 4 304.0 18.40 393.39 7.90 21.60 315 | 0.36920 0.00 9.900 0 0.5440 6.5670 87.30 3.6023 4 304.0 18.40 395.69 9.28 23.80 316 | 0.25356 0.00 9.900 0 0.5440 5.7050 77.70 3.9450 4 304.0 18.40 396.42 11.50 16.20 317 | 0.31827 0.00 9.900 0 0.5440 5.9140 83.20 3.9986 4 304.0 18.40 390.70 18.33 17.80 318 | 0.24522 0.00 9.900 0 0.5440 5.7820 71.70 4.0317 4 304.0 18.40 396.90 15.94 19.80 319 | 0.40202 0.00 9.900 0 0.5440 6.3820 67.20 3.5325 4 304.0 18.40 395.21 10.36 23.10 320 | 0.47547 0.00 9.900 0 0.5440 6.1130 58.80 4.0019 4 304.0 18.40 396.23 12.73 21.00 321 | 0.16760 0.00 7.380 0 0.4930 6.4260 52.30 4.5404 5 287.0 19.60 396.90 7.20 23.80 322 | 0.18159 0.00 7.380 0 0.4930 6.3760 54.30 4.5404 5 287.0 19.60 396.90 6.87 23.10 323 | 0.35114 0.00 7.380 0 0.4930 6.0410 49.90 4.7211 5 287.0 19.60 396.90 7.70 20.40 324 | 0.28392 0.00 7.380 0 0.4930 5.7080 74.30 4.7211 5 287.0 19.60 391.13 11.74 18.50 325 | 0.34109 0.00 7.380 0 0.4930 6.4150 40.10 4.7211 5 287.0 19.60 396.90 6.12 25.00 326 | 0.19186 0.00 7.380 0 0.4930 6.4310 14.70 5.4159 5 287.0 19.60 393.68 5.08 24.60 327 | 0.30347 0.00 7.380 0 0.4930 6.3120 28.90 5.4159 5 287.0 19.60 396.90 6.15 23.00 328 | 0.24103 0.00 7.380 0 0.4930 6.0830 43.70 5.4159 5 287.0 19.60 396.90 12.79 22.20 329 | 0.06617 0.00 3.240 0 0.4600 5.8680 25.80 5.2146 4 430.0 16.90 382.44 9.97 19.30 330 | 0.06724 0.00 3.240 0 0.4600 6.3330 17.20 5.2146 4 430.0 16.90 375.21 7.34 22.60 331 | 0.04544 0.00 3.240 0 0.4600 6.1440 32.20 5.8736 4 430.0 16.90 368.57 9.09 19.80 332 | 0.05023 35.00 6.060 0 0.4379 5.7060 28.40 6.6407 1 304.0 16.90 394.02 12.43 17.10 333 | 0.03466 35.00 6.060 0 0.4379 6.0310 23.30 6.6407 1 304.0 16.90 362.25 7.83 19.40 334 | 0.05083 0.00 5.190 0 0.5150 6.3160 38.10 6.4584 5 224.0 20.20 389.71 5.68 22.20 335 | 0.03738 0.00 5.190 0 0.5150 6.3100 38.50 6.4584 5 224.0 20.20 389.40 6.75 20.70 336 | 0.03961 0.00 5.190 0 0.5150 6.0370 34.50 5.9853 5 224.0 20.20 396.90 8.01 21.10 337 | 0.03427 0.00 5.190 0 0.5150 5.8690 46.30 5.2311 5 224.0 20.20 396.90 9.80 19.50 338 | 0.03041 0.00 5.190 0 0.5150 5.8950 59.60 5.6150 5 224.0 20.20 394.81 10.56 18.50 339 | 0.03306 0.00 5.190 0 0.5150 6.0590 37.30 4.8122 5 224.0 20.20 396.14 8.51 20.60 340 | 0.05497 0.00 5.190 0 0.5150 5.9850 45.40 4.8122 5 224.0 20.20 396.90 9.74 19.00 341 | 
0.06151 0.00 5.190 0 0.5150 5.9680 58.50 4.8122 5 224.0 20.20 396.90 9.29 18.70 342 | 0.01301 35.00 1.520 0 0.4420 7.2410 49.30 7.0379 1 284.0 15.50 394.74 5.49 32.70 343 | 0.02498 0.00 1.890 0 0.5180 6.5400 59.70 6.2669 1 422.0 15.90 389.96 8.65 16.50 344 | 0.02543 55.00 3.780 0 0.4840 6.6960 56.40 5.7321 5 370.0 17.60 396.90 7.18 23.90 345 | 0.03049 55.00 3.780 0 0.4840 6.8740 28.10 6.4654 5 370.0 17.60 387.97 4.61 31.20 346 | 0.03113 0.00 4.390 0 0.4420 6.0140 48.50 8.0136 3 352.0 18.80 385.64 10.53 17.50 347 | 0.06162 0.00 4.390 0 0.4420 5.8980 52.30 8.0136 3 352.0 18.80 364.61 12.67 17.20 348 | 0.01870 85.00 4.150 0 0.4290 6.5160 27.70 8.5353 4 351.0 17.90 392.43 6.36 23.10 349 | 0.01501 80.00 2.010 0 0.4350 6.6350 29.70 8.3440 4 280.0 17.00 390.94 5.99 24.50 350 | 0.02899 40.00 1.250 0 0.4290 6.9390 34.50 8.7921 1 335.0 19.70 389.85 5.89 26.60 351 | 0.06211 40.00 1.250 0 0.4290 6.4900 44.40 8.7921 1 335.0 19.70 396.90 5.98 22.90 352 | 0.07950 60.00 1.690 0 0.4110 6.5790 35.90 10.7103 4 411.0 18.30 370.78 5.49 24.10 353 | 0.07244 60.00 1.690 0 0.4110 5.8840 18.50 10.7103 4 411.0 18.30 392.33 7.79 18.60 354 | 0.01709 90.00 2.020 0 0.4100 6.7280 36.10 12.1265 5 187.0 17.00 384.46 4.50 30.10 355 | 0.04301 80.00 1.910 0 0.4130 5.6630 21.90 10.5857 4 334.0 22.00 382.80 8.05 18.20 356 | 0.10659 80.00 1.910 0 0.4130 5.9360 19.50 10.5857 4 334.0 22.00 376.04 5.57 20.60 357 | 8.98296 0.00 18.100 1 0.7700 6.2120 97.40 2.1222 24 666.0 20.20 377.73 17.60 17.80 358 | 3.84970 0.00 18.100 1 0.7700 6.3950 91.00 2.5052 24 666.0 20.20 391.34 13.27 21.70 359 | 5.20177 0.00 18.100 1 0.7700 6.1270 83.40 2.7227 24 666.0 20.20 395.43 11.48 22.70 360 | 4.26131 0.00 18.100 0 0.7700 6.1120 81.30 2.5091 24 666.0 20.20 390.74 12.67 22.60 361 | 4.54192 0.00 18.100 0 0.7700 6.3980 88.00 2.5182 24 666.0 20.20 374.56 7.79 25.00 362 | 3.83684 0.00 18.100 0 0.7700 6.2510 91.10 2.2955 24 666.0 20.20 350.65 14.19 19.90 363 | 3.67822 0.00 18.100 0 0.7700 5.3620 96.20 2.1036 24 666.0 20.20 380.79 10.19 20.80 364 | 4.22239 0.00 18.100 1 0.7700 5.8030 89.00 1.9047 24 666.0 20.20 353.04 14.64 16.80 365 | 3.47428 0.00 18.100 1 0.7180 8.7800 82.90 1.9047 24 666.0 20.20 354.55 5.29 21.90 366 | 4.55587 0.00 18.100 0 0.7180 3.5610 87.90 1.6132 24 666.0 20.20 354.70 7.12 27.50 367 | 3.69695 0.00 18.100 0 0.7180 4.9630 91.40 1.7523 24 666.0 20.20 316.03 14.00 21.90 368 | 13.52220 0.00 18.100 0 0.6310 3.8630 100.00 1.5106 24 666.0 20.20 131.42 13.33 23.10 369 | 4.89822 0.00 18.100 0 0.6310 4.9700 100.00 1.3325 24 666.0 20.20 375.52 3.26 50.00 370 | 5.66998 0.00 18.100 1 0.6310 6.6830 96.80 1.3567 24 666.0 20.20 375.33 3.73 50.00 371 | 6.53876 0.00 18.100 1 0.6310 7.0160 97.50 1.2024 24 666.0 20.20 392.05 2.96 50.00 372 | 9.23230 0.00 18.100 0 0.6310 6.2160 100.00 1.1691 24 666.0 20.20 366.15 9.53 50.00 373 | 8.26725 0.00 18.100 1 0.6680 5.8750 89.60 1.1296 24 666.0 20.20 347.88 8.88 50.00 374 | 11.10810 0.00 18.100 0 0.6680 4.9060 100.00 1.1742 24 666.0 20.20 396.90 34.77 13.80 375 | 18.49820 0.00 18.100 0 0.6680 4.1380 100.00 1.1370 24 666.0 20.20 396.90 37.97 13.80 376 | 19.60910 0.00 18.100 0 0.6710 7.3130 97.90 1.3163 24 666.0 20.20 396.90 13.44 15.00 377 | 15.28800 0.00 18.100 0 0.6710 6.6490 93.30 1.3449 24 666.0 20.20 363.02 23.24 13.90 378 | 9.82349 0.00 18.100 0 0.6710 6.7940 98.80 1.3580 24 666.0 20.20 396.90 21.24 13.30 379 | 23.64820 0.00 18.100 0 0.6710 6.3800 96.20 1.3861 24 666.0 20.20 396.90 23.69 13.10 380 | 17.86670 0.00 18.100 0 0.6710 6.2230 100.00 1.3861 24 666.0 20.20 393.74 21.78 10.20 381 | 88.97620 0.00 
18.100 0 0.6710 6.9680 91.90 1.4165 24 666.0 20.20 396.90 17.21 10.40 382 | 15.87440 0.00 18.100 0 0.6710 6.5450 99.10 1.5192 24 666.0 20.20 396.90 21.08 10.90 383 | 9.18702 0.00 18.100 0 0.7000 5.5360 100.00 1.5804 24 666.0 20.20 396.90 23.60 11.30 384 | 7.99248 0.00 18.100 0 0.7000 5.5200 100.00 1.5331 24 666.0 20.20 396.90 24.56 12.30 385 | 20.08490 0.00 18.100 0 0.7000 4.3680 91.20 1.4395 24 666.0 20.20 285.83 30.63 8.80 386 | 16.81180 0.00 18.100 0 0.7000 5.2770 98.10 1.4261 24 666.0 20.20 396.90 30.81 7.20 387 | 24.39380 0.00 18.100 0 0.7000 4.6520 100.00 1.4672 24 666.0 20.20 396.90 28.28 10.50 388 | 22.59710 0.00 18.100 0 0.7000 5.0000 89.50 1.5184 24 666.0 20.20 396.90 31.99 7.40 389 | 14.33370 0.00 18.100 0 0.7000 4.8800 100.00 1.5895 24 666.0 20.20 372.92 30.62 10.20 390 | 8.15174 0.00 18.100 0 0.7000 5.3900 98.90 1.7281 24 666.0 20.20 396.90 20.85 11.50 391 | 6.96215 0.00 18.100 0 0.7000 5.7130 97.00 1.9265 24 666.0 20.20 394.43 17.11 15.10 392 | 5.29305 0.00 18.100 0 0.7000 6.0510 82.50 2.1678 24 666.0 20.20 378.38 18.76 23.20 393 | 11.57790 0.00 18.100 0 0.7000 5.0360 97.00 1.7700 24 666.0 20.20 396.90 25.68 9.70 394 | 8.64476 0.00 18.100 0 0.6930 6.1930 92.60 1.7912 24 666.0 20.20 396.90 15.17 13.80 395 | 13.35980 0.00 18.100 0 0.6930 5.8870 94.70 1.7821 24 666.0 20.20 396.90 16.35 12.70 396 | 8.71675 0.00 18.100 0 0.6930 6.4710 98.80 1.7257 24 666.0 20.20 391.98 17.12 13.10 397 | 5.87205 0.00 18.100 0 0.6930 6.4050 96.00 1.6768 24 666.0 20.20 396.90 19.37 12.50 398 | 7.67202 0.00 18.100 0 0.6930 5.7470 98.90 1.6334 24 666.0 20.20 393.10 19.92 8.50 399 | 38.35180 0.00 18.100 0 0.6930 5.4530 100.00 1.4896 24 666.0 20.20 396.90 30.59 5.00 400 | 9.91655 0.00 18.100 0 0.6930 5.8520 77.80 1.5004 24 666.0 20.20 338.16 29.97 6.30 401 | 25.04610 0.00 18.100 0 0.6930 5.9870 100.00 1.5888 24 666.0 20.20 396.90 26.77 5.60 402 | 14.23620 0.00 18.100 0 0.6930 6.3430 100.00 1.5741 24 666.0 20.20 396.90 20.32 7.20 403 | 9.59571 0.00 18.100 0 0.6930 6.4040 100.00 1.6390 24 666.0 20.20 376.11 20.31 12.10 404 | 24.80170 0.00 18.100 0 0.6930 5.3490 96.00 1.7028 24 666.0 20.20 396.90 19.77 8.30 405 | 41.52920 0.00 18.100 0 0.6930 5.5310 85.40 1.6074 24 666.0 20.20 329.46 27.38 8.50 406 | 67.92080 0.00 18.100 0 0.6930 5.6830 100.00 1.4254 24 666.0 20.20 384.97 22.98 5.00 407 | 20.71620 0.00 18.100 0 0.6590 4.1380 100.00 1.1781 24 666.0 20.20 370.22 23.34 11.90 408 | 11.95110 0.00 18.100 0 0.6590 5.6080 100.00 1.2852 24 666.0 20.20 332.09 12.13 27.90 409 | 7.40389 0.00 18.100 0 0.5970 5.6170 97.90 1.4547 24 666.0 20.20 314.64 26.40 17.20 410 | 14.43830 0.00 18.100 0 0.5970 6.8520 100.00 1.4655 24 666.0 20.20 179.36 19.78 27.50 411 | 51.13580 0.00 18.100 0 0.5970 5.7570 100.00 1.4130 24 666.0 20.20 2.60 10.11 15.00 412 | 14.05070 0.00 18.100 0 0.5970 6.6570 100.00 1.5275 24 666.0 20.20 35.05 21.22 17.20 413 | 18.81100 0.00 18.100 0 0.5970 4.6280 100.00 1.5539 24 666.0 20.20 28.79 34.37 17.90 414 | 28.65580 0.00 18.100 0 0.5970 5.1550 100.00 1.5894 24 666.0 20.20 210.97 20.08 16.30 415 | 45.74610 0.00 18.100 0 0.6930 4.5190 100.00 1.6582 24 666.0 20.20 88.27 36.98 7.00 416 | 18.08460 0.00 18.100 0 0.6790 6.4340 100.00 1.8347 24 666.0 20.20 27.25 29.05 7.20 417 | 10.83420 0.00 18.100 0 0.6790 6.7820 90.80 1.8195 24 666.0 20.20 21.57 25.79 7.50 418 | 25.94060 0.00 18.100 0 0.6790 5.3040 89.10 1.6475 24 666.0 20.20 127.36 26.64 10.40 419 | 73.53410 0.00 18.100 0 0.6790 5.9570 100.00 1.8026 24 666.0 20.20 16.45 20.62 8.80 420 | 11.81230 0.00 18.100 0 0.7180 6.8240 76.50 1.7940 24 666.0 20.20 48.45 
22.74 8.40 421 | 11.08740 0.00 18.100 0 0.7180 6.4110 100.00 1.8589 24 666.0 20.20 318.75 15.02 16.70 422 | 7.02259 0.00 18.100 0 0.7180 6.0060 95.30 1.8746 24 666.0 20.20 319.98 15.70 14.20 423 | 12.04820 0.00 18.100 0 0.6140 5.6480 87.60 1.9512 24 666.0 20.20 291.55 14.10 20.80 424 | 7.05042 0.00 18.100 0 0.6140 6.1030 85.10 2.0218 24 666.0 20.20 2.52 23.29 13.40 425 | 8.79212 0.00 18.100 0 0.5840 5.5650 70.60 2.0635 24 666.0 20.20 3.65 17.16 11.70 426 | 15.86030 0.00 18.100 0 0.6790 5.8960 95.40 1.9096 24 666.0 20.20 7.68 24.39 8.30 427 | 12.24720 0.00 18.100 0 0.5840 5.8370 59.70 1.9976 24 666.0 20.20 24.65 15.69 10.20 428 | 37.66190 0.00 18.100 0 0.6790 6.2020 78.70 1.8629 24 666.0 20.20 18.82 14.52 10.90 429 | 7.36711 0.00 18.100 0 0.6790 6.1930 78.10 1.9356 24 666.0 20.20 96.73 21.52 11.00 430 | 9.33889 0.00 18.100 0 0.6790 6.3800 95.60 1.9682 24 666.0 20.20 60.72 24.08 9.50 431 | 8.49213 0.00 18.100 0 0.5840 6.3480 86.10 2.0527 24 666.0 20.20 83.45 17.64 14.50 432 | 10.06230 0.00 18.100 0 0.5840 6.8330 94.30 2.0882 24 666.0 20.20 81.33 19.69 14.10 433 | 6.44405 0.00 18.100 0 0.5840 6.4250 74.80 2.2004 24 666.0 20.20 97.95 12.03 16.10 434 | 5.58107 0.00 18.100 0 0.7130 6.4360 87.90 2.3158 24 666.0 20.20 100.19 16.22 14.30 435 | 13.91340 0.00 18.100 0 0.7130 6.2080 95.00 2.2222 24 666.0 20.20 100.63 15.17 11.70 436 | 11.16040 0.00 18.100 0 0.7400 6.6290 94.60 2.1247 24 666.0 20.20 109.85 23.27 13.40 437 | 14.42080 0.00 18.100 0 0.7400 6.4610 93.30 2.0026 24 666.0 20.20 27.49 18.05 9.60 438 | 15.17720 0.00 18.100 0 0.7400 6.1520 100.00 1.9142 24 666.0 20.20 9.32 26.45 8.70 439 | 13.67810 0.00 18.100 0 0.7400 5.9350 87.90 1.8206 24 666.0 20.20 68.95 34.02 8.40 440 | 9.39063 0.00 18.100 0 0.7400 5.6270 93.90 1.8172 24 666.0 20.20 396.90 22.88 12.80 441 | 22.05110 0.00 18.100 0 0.7400 5.8180 92.40 1.8662 24 666.0 20.20 391.45 22.11 10.50 442 | 9.72418 0.00 18.100 0 0.7400 6.4060 97.20 2.0651 24 666.0 20.20 385.96 19.52 17.10 443 | 5.66637 0.00 18.100 0 0.7400 6.2190 100.00 2.0048 24 666.0 20.20 395.69 16.59 18.40 444 | 9.96654 0.00 18.100 0 0.7400 6.4850 100.00 1.9784 24 666.0 20.20 386.73 18.85 15.40 445 | 12.80230 0.00 18.100 0 0.7400 5.8540 96.60 1.8956 24 666.0 20.20 240.52 23.79 10.80 446 | 10.67180 0.00 18.100 0 0.7400 6.4590 94.80 1.9879 24 666.0 20.20 43.06 23.98 11.80 447 | 6.28807 0.00 18.100 0 0.7400 6.3410 96.40 2.0720 24 666.0 20.20 318.01 17.79 14.90 448 | 9.92485 0.00 18.100 0 0.7400 6.2510 96.60 2.1980 24 666.0 20.20 388.52 16.44 12.60 449 | 9.32909 0.00 18.100 0 0.7130 6.1850 98.70 2.2616 24 666.0 20.20 396.90 18.13 14.10 450 | 7.52601 0.00 18.100 0 0.7130 6.4170 98.30 2.1850 24 666.0 20.20 304.21 19.31 13.00 451 | 6.71772 0.00 18.100 0 0.7130 6.7490 92.60 2.3236 24 666.0 20.20 0.32 17.44 13.40 452 | 5.44114 0.00 18.100 0 0.7130 6.6550 98.20 2.3552 24 666.0 20.20 355.29 17.73 15.20 453 | 5.09017 0.00 18.100 0 0.7130 6.2970 91.80 2.3682 24 666.0 20.20 385.09 17.27 16.10 454 | 8.24809 0.00 18.100 0 0.7130 7.3930 99.30 2.4527 24 666.0 20.20 375.87 16.74 17.80 455 | 9.51363 0.00 18.100 0 0.7130 6.7280 94.10 2.4961 24 666.0 20.20 6.68 18.71 14.90 456 | 4.75237 0.00 18.100 0 0.7130 6.5250 86.50 2.4358 24 666.0 20.20 50.92 18.13 14.10 457 | 4.66883 0.00 18.100 0 0.7130 5.9760 87.90 2.5806 24 666.0 20.20 10.48 19.01 12.70 458 | 8.20058 0.00 18.100 0 0.7130 5.9360 80.30 2.7792 24 666.0 20.20 3.50 16.94 13.50 459 | 7.75223 0.00 18.100 0 0.7130 6.3010 83.70 2.7831 24 666.0 20.20 272.21 16.23 14.90 460 | 6.80117 0.00 18.100 0 0.7130 6.0810 84.40 2.7175 24 666.0 20.20 396.90 14.70 
20.00 461 | 4.81213 0.00 18.100 0 0.7130 6.7010 90.00 2.5975 24 666.0 20.20 255.23 16.42 16.40 462 | 3.69311 0.00 18.100 0 0.7130 6.3760 88.40 2.5671 24 666.0 20.20 391.43 14.65 17.70 463 | 6.65492 0.00 18.100 0 0.7130 6.3170 83.00 2.7344 24 666.0 20.20 396.90 13.99 19.50 464 | 5.82115 0.00 18.100 0 0.7130 6.5130 89.90 2.8016 24 666.0 20.20 393.82 10.29 20.20 465 | 7.83932 0.00 18.100 0 0.6550 6.2090 65.40 2.9634 24 666.0 20.20 396.90 13.22 21.40 466 | 3.16360 0.00 18.100 0 0.6550 5.7590 48.20 3.0665 24 666.0 20.20 334.40 14.13 19.90 467 | 3.77498 0.00 18.100 0 0.6550 5.9520 84.70 2.8715 24 666.0 20.20 22.01 17.15 19.00 468 | 4.42228 0.00 18.100 0 0.5840 6.0030 94.50 2.5403 24 666.0 20.20 331.29 21.32 19.10 469 | 15.57570 0.00 18.100 0 0.5800 5.9260 71.00 2.9084 24 666.0 20.20 368.74 18.13 19.10 470 | 13.07510 0.00 18.100 0 0.5800 5.7130 56.70 2.8237 24 666.0 20.20 396.90 14.76 20.10 471 | 4.34879 0.00 18.100 0 0.5800 6.1670 84.00 3.0334 24 666.0 20.20 396.90 16.29 19.90 472 | 4.03841 0.00 18.100 0 0.5320 6.2290 90.70 3.0993 24 666.0 20.20 395.33 12.87 19.60 473 | 3.56868 0.00 18.100 0 0.5800 6.4370 75.00 2.8965 24 666.0 20.20 393.37 14.36 23.20 474 | 4.64689 0.00 18.100 0 0.6140 6.9800 67.60 2.5329 24 666.0 20.20 374.68 11.66 29.80 475 | 8.05579 0.00 18.100 0 0.5840 5.4270 95.40 2.4298 24 666.0 20.20 352.58 18.14 13.80 476 | 6.39312 0.00 18.100 0 0.5840 6.1620 97.40 2.2060 24 666.0 20.20 302.76 24.10 13.30 477 | 4.87141 0.00 18.100 0 0.6140 6.4840 93.60 2.3053 24 666.0 20.20 396.21 18.68 16.70 478 | 15.02340 0.00 18.100 0 0.6140 5.3040 97.30 2.1007 24 666.0 20.20 349.48 24.91 12.00 479 | 10.23300 0.00 18.100 0 0.6140 6.1850 96.70 2.1705 24 666.0 20.20 379.70 18.03 14.60 480 | 14.33370 0.00 18.100 0 0.6140 6.2290 88.00 1.9512 24 666.0 20.20 383.32 13.11 21.40 481 | 5.82401 0.00 18.100 0 0.5320 6.2420 64.70 3.4242 24 666.0 20.20 396.90 10.74 23.00 482 | 5.70818 0.00 18.100 0 0.5320 6.7500 74.90 3.3317 24 666.0 20.20 393.07 7.74 23.70 483 | 5.73116 0.00 18.100 0 0.5320 7.0610 77.00 3.4106 24 666.0 20.20 395.28 7.01 25.00 484 | 2.81838 0.00 18.100 0 0.5320 5.7620 40.30 4.0983 24 666.0 20.20 392.92 10.42 21.80 485 | 2.37857 0.00 18.100 0 0.5830 5.8710 41.90 3.7240 24 666.0 20.20 370.73 13.34 20.60 486 | 3.67367 0.00 18.100 0 0.5830 6.3120 51.90 3.9917 24 666.0 20.20 388.62 10.58 21.20 487 | 5.69175 0.00 18.100 0 0.5830 6.1140 79.80 3.5459 24 666.0 20.20 392.68 14.98 19.10 488 | 4.83567 0.00 18.100 0 0.5830 5.9050 53.20 3.1523 24 666.0 20.20 388.22 11.45 20.60 489 | 0.15086 0.00 27.740 0 0.6090 5.4540 92.70 1.8209 4 711.0 20.10 395.09 18.06 15.20 490 | 0.18337 0.00 27.740 0 0.6090 5.4140 98.30 1.7554 4 711.0 20.10 344.05 23.97 7.00 491 | 0.20746 0.00 27.740 0 0.6090 5.0930 98.00 1.8226 4 711.0 20.10 318.43 29.68 8.10 492 | 0.10574 0.00 27.740 0 0.6090 5.9830 98.80 1.8681 4 711.0 20.10 390.11 18.07 13.60 493 | 0.11132 0.00 27.740 0 0.6090 5.9830 83.50 2.1099 4 711.0 20.10 396.90 13.35 20.10 494 | 0.17331 0.00 9.690 0 0.5850 5.7070 54.00 2.3817 6 391.0 19.20 396.90 12.01 21.80 495 | 0.27957 0.00 9.690 0 0.5850 5.9260 42.60 2.3817 6 391.0 19.20 396.90 13.59 24.50 496 | 0.17899 0.00 9.690 0 0.5850 5.6700 28.80 2.7986 6 391.0 19.20 393.29 17.60 23.10 497 | 0.28960 0.00 9.690 0 0.5850 5.3900 72.90 2.7986 6 391.0 19.20 396.90 21.14 19.70 498 | 0.26838 0.00 9.690 0 0.5850 5.7940 70.60 2.8927 6 391.0 19.20 396.90 14.10 18.30 499 | 0.23912 0.00 9.690 0 0.5850 6.0190 65.30 2.4091 6 391.0 19.20 396.90 12.92 21.20 500 | 0.17783 0.00 9.690 0 0.5850 5.5690 73.50 2.3999 6 391.0 19.20 395.77 15.10 17.50 501 | 
0.22438 0.00 9.690 0 0.5850 6.0270 79.70 2.4982 6 391.0 19.20 396.90 14.33 16.80 502 | 0.06263 0.00 11.930 0 0.5730 6.5930 69.10 2.4786 1 273.0 21.00 391.99 9.67 22.40 503 | 0.04527 0.00 11.930 0 0.5730 6.1200 76.70 2.2875 1 273.0 21.00 396.90 9.08 20.60 504 | 0.06076 0.00 11.930 0 0.5730 6.9760 91.00 2.1675 1 273.0 21.00 396.90 5.64 23.90 505 | 0.10959 0.00 11.930 0 0.5730 6.7940 89.30 2.3889 1 273.0 21.00 393.45 6.48 22.00 506 | 0.04741 0.00 11.930 0 0.5730 6.0300 80.80 2.5050 1 273.0 21.00 396.90 7.88 11.90 507 |
--------------------------------------------------------------------------------
/Overview_PythonUltimate.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rohanmistry231/PythonUltimateMaterial/89cfb9306ba365baddf41cc820d23ec81d997874/Overview_PythonUltimate.xlsx
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PythonUltimateMaterial
2 | Here you can find the material for the course
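
For example, the machine-learning labs use the CSV files bundled under `40_MachineLearning/data`. The following is a minimal loading sketch with pandas, an illustration only (the course scripts may load these files differently): `diabetes.csv` appears to carry a header line ahead of its 768 records, while `housing.csv` is whitespace-separated with no header row, so the conventional Boston-housing column names are supplied by hand here rather than read from the file.

```python
import pandas as pd

# diabetes.csv: comma-separated; assumed to start with a header row,
# followed by 768 records of 9 columns each.
diabetes = pd.read_csv("40_MachineLearning/data/diabetes.csv")

# housing.csv: whitespace-separated, 506 rows, 14 columns, no header.
# The names below are the conventional Boston-housing column names,
# not taken from the file itself.
housing_cols = ["CRIM", "ZN", "INDUS", "CHAS", "NOX", "RM", "AGE",
                "DIS", "RAD", "TAX", "PTRATIO", "B", "LSTAT", "MEDV"]
housing = pd.read_csv("40_MachineLearning/data/housing.csv",
                      sep=r"\s+", header=None, names=housing_cols)

print(diabetes.shape)  # expected (768, 9) if the first line is a header
print(housing.shape)   # expected (506, 14)
```
--------------------------------------------------------------------------------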