├── Intro Pg 1.jpg ├── Intro Pg 2.jpg ├── PGM 7 OUTPUT.docx ├── PGM 9 OUTPUT.PNG ├── trainingexamples.csv ├── p-tennis.csv ├── tennis.csv ├── 3. Candidate-Elimination.ipynb ├── 4. ID3.ipynb ├── 8. S-KNN.ipynb ├── 6. A-NaiveBayes.ipynb ├── iris.data ├── 1. A-star.ipynb ├── VIVA AIML.txt ├── 7. S-EM-Kmeans.ipynb ├── 2. S-AO-star.ipynb ├── 5. BackPropagation.ipynb └── 9. S-LinearRegression.ipynb /Intro Pg 1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afuu21/18CSL76-AIML-Programs/HEAD/Intro Pg 1.jpg -------------------------------------------------------------------------------- /Intro Pg 2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afuu21/18CSL76-AIML-Programs/HEAD/Intro Pg 2.jpg -------------------------------------------------------------------------------- /PGM 7 OUTPUT.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afuu21/18CSL76-AIML-Programs/HEAD/PGM 7 OUTPUT.docx -------------------------------------------------------------------------------- /PGM 9 OUTPUT.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afuu21/18CSL76-AIML-Programs/HEAD/PGM 9 OUTPUT.PNG -------------------------------------------------------------------------------- /trainingexamples.csv: -------------------------------------------------------------------------------- 1 | Sunny,Warm,Normal,Strong,Warm,Same,Yes 2 | Sunny,Warm,High,Strong,Warm,Same,Yes 3 | Rainy,Cold,High,Strong,Warm,Change,No 4 | Sunny,Warm,High,Strong,Cool,Change,Yes 5 | -------------------------------------------------------------------------------- /p-tennis.csv: -------------------------------------------------------------------------------- 1 | Outlook,Temperature,Humidity,Windy,PlayTennis 2 | Sunny,Hot,High,False,No 3 | Sunny,Hot,High,True,No 4 | Overcast,Hot,High,False,Yes 5 | Rainy,Mild,High,False,Yes 6 | Rainy,Cool,Normal,False,Yes 7 | Rainy,Cool,Normal,True,No 8 | Overcast,Cool,Normal,True,Yes 9 | Sunny,Mild,High,False,No 10 | Sunny,Cool,Normal,False,Yes 11 | Rainy,Mild,Normal,False,Yes 12 | Sunny,Mild,Normal,True,Yes 13 | Overcast,Mild,High,True,Yes 14 | Overcast,Hot,Normal,False,Yes 15 | Rainy,Mild,High,True,No -------------------------------------------------------------------------------- /tennis.csv: -------------------------------------------------------------------------------- 1 | ,PlayTennis,Outlook,Temperature,Humidity,Wind 2 | 0,No,Sunny,Hot,High,Weak 3 | 1,No,Sunny,Hot,High,Strong 4 | 2,Yes,Overcast,Hot,High,Weak 5 | 3,Yes,Rain,Mild,High,Weak 6 | 4,Yes,Rain,Cool,Normal,Weak 7 | 5,No,Rain,Cool,Normal,Strong 8 | 6,Yes,Overcast,Cool,Normal,Strong 9 | 7,No,Sunny,Mild,High,Weak 10 | 8,Yes,Sunny,Cool,Normal,Weak 11 | 9,Yes,Rain,Mild,Normal,Weak 12 | 10,Yes,Sunny,Mild,Normal,Strong 13 | 11,Yes,Overcast,Mild,High,Strong 14 | 12,Yes,Overcast,Hot,Normal,Weak 15 | 13,No,Rain,Mild,High,Strong 16 | -------------------------------------------------------------------------------- /3. 
Candidate-Elimination.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 6, 6 | "id": "e3df5056", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "\n", 14 | "Step 1 of Candidate Elimination Algorithm\n", 15 | "['Sunny', 'Warm', 'Normal', 'Strong', 'Warm', 'Same']\n", 16 | "[['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?']]\n", 17 | "\n", 18 | "Step 2 of Candidate Elimination Algorithm\n", 19 | "['Sunny', 'Warm', '?', 'Strong', 'Warm', 'Same']\n", 20 | "[['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?']]\n", 21 | "\n", 22 | "Step 3 of Candidate Elimination Algorithm\n", 23 | "['Sunny', 'Warm', '?', 'Strong', 'Warm', 'Same']\n", 24 | "[['Sunny', '?', '?', '?', '?', '?'], ['?', 'Warm', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', 'Same']]\n", 25 | "\n", 26 | "Step 4 of Candidate Elimination Algorithm\n", 27 | "['Sunny', 'Warm', '?', 'Strong', '?', '?']\n", 28 | "[['Sunny', '?', '?', '?', '?', '?'], ['?', 'Warm', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?'], ['?', '?', '?', '?', '?', '?']]\n", 29 | "\n", 30 | "Final Specific hypothesis:\n", 31 | " ['Sunny', 'Warm', '?', 'Strong', '?', '?']\n", 32 | "\n", 33 | "Final General hypothesis:\n", 34 | " [['Sunny', '?', '?', '?', '?', '?'], ['?', 'Warm', '?', '?', '?', '?']]\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "import csv\n", 40 | "\n", 41 | "with open(\"trainingexamples.csv\") as f:\n", 42 | " csv_file = csv.reader(f)\n", 43 | " data = list(csv_file)\n", 44 | "\n", 45 | " specific = data[0][:-1]\n", 46 | " general = [['?' 
for i in range(len(specific))] for j in range(len(specific))]\n", 47 | "\n", 48 | " for i in data:\n", 49 | " if i[-1] == \"Yes\":\n", 50 | " for j in range(len(specific)):\n", 51 | " if i[j] != specific[j]:\n", 52 | " specific[j] = \"?\"\n", 53 | " general[j][j] = \"?\"\n", 54 | "\n", 55 | " elif i[-1] == \"No\":\n", 56 | " for j in range(len(specific)):\n", 57 | " if i[j] != specific[j]:\n", 58 | " general[j][j] = specific[j]\n", 59 | " else:\n", 60 | " general[j][j] = \"?\"\n", 61 | "\n", 62 | " print(\"\\nStep \" + str(data.index(i)+1) + \" of Candidate Elimination Algorithm\")\n", 63 | " print(specific)\n", 64 | " print(general)\n", 65 | "\n", 66 | " gh = [] # gh = general Hypothesis\n", 67 | " for i in general:\n", 68 | " for j in i:\n", 69 | " if j != '?':\n", 70 | " gh.append(i)\n", 71 | " break\n", 72 | " print(\"\\nFinal Specific hypothesis:\\n\", specific)\n", 73 | " print(\"\\nFinal General hypothesis:\\n\", gh)\n" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "id": "b0b53036", 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [] 83 | } 84 | ], 85 | "metadata": { 86 | "kernelspec": { 87 | "display_name": "Python 3 (ipykernel)", 88 | "language": "python", 89 | "name": "python3" 90 | }, 91 | "language_info": { 92 | "codemirror_mode": { 93 | "name": "ipython", 94 | "version": 3 95 | }, 96 | "file_extension": ".py", 97 | "mimetype": "text/x-python", 98 | "name": "python", 99 | "nbconvert_exporter": "python", 100 | "pygments_lexer": "ipython3", 101 | "version": "3.9.7" 102 | } 103 | }, 104 | "nbformat": 4, 105 | "nbformat_minor": 5 106 | } 107 | -------------------------------------------------------------------------------- /4. ID3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "332c1439", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "List of attribut name\n", 14 | " Outlook Temperature Humidity Windy PlayTennis\n", 15 | "0 0 0 0 False 0\n", 16 | "1 0 0 0 True 0\n", 17 | "2 1 0 0 False 1\n", 18 | "3 2 1 0 False 1\n", 19 | "4 2 2 1 False 1\n", 20 | "5 2 2 1 True 0\n", 21 | "6 1 2 1 True 1\n", 22 | "7 0 1 0 False 0\n", 23 | "8 0 2 1 False 1\n", 24 | "9 2 1 1 False 1\n", 25 | "10 0 1 1 True 1\n", 26 | "11 1 1 0 True 1\n", 27 | "12 1 0 1 False 1\n", 28 | "13 2 1 0 True 0\n", 29 | "The tree structure\n", 30 | "{'Outlook': {0: {'Humidity': {0: 0, 1: 1}},\n", 31 | " 1: 1,\n", 32 | " 2: {'Windy': {False: 1, True: 0}}}}\n" 33 | ] 34 | } 35 | ], 36 | "source": [ 37 | "import pandas as pd\n", 38 | "from pprint import pprint\n", 39 | "from sklearn.feature_selection import mutual_info_classif\n", 40 | "from collections import Counter\n", 41 | "\n", 42 | "def id3(df, target_attribute, attribute_names, default_class=None):\n", 43 | " cnt=Counter(x for x in df[target_attribute])\n", 44 | " if len(cnt)==1:\n", 45 | " return next(iter(cnt))\n", 46 | " \n", 47 | " elif df.empty or (not attribute_names):\n", 48 | " return default_class\n", 49 | "\n", 50 | " else:\n", 51 | " gainz = mutual_info_classif(df[attribute_names],df[target_attribute],discrete_features=True)\n", 52 | " index_of_max=gainz.tolist().index(max(gainz))\n", 53 | " best_attr=attribute_names[index_of_max]\n", 54 | " tree={best_attr:{}}\n", 55 | " remaining_attribute_names=[i for i in attribute_names if i!=best_attr]\n", 56 | " \n", 57 | " for attr_val, data_subset in df.groupby(best_attr):\n", 58 
| " subtree=id3(data_subset, target_attribute, remaining_attribute_names,default_class)\n", 59 | " tree[best_attr][attr_val]=subtree\n", 60 | " \n", 61 | " return tree\n", 62 | " \n", 63 | "\n", 64 | "df=pd.read_csv(\"p-tennis.csv\")\n", 65 | "\n", 66 | "attribute_names=df.columns.tolist()\n", 67 | "print(\"List of attribut name\")\n", 68 | "\n", 69 | "attribute_names.remove(\"PlayTennis\")\n", 70 | "\n", 71 | "for colname in df.select_dtypes(\"object\"):\n", 72 | " df[colname], _ = df[colname].factorize()\n", 73 | " \n", 74 | "print(df)\n", 75 | "\n", 76 | "tree= id3(df,\"PlayTennis\", attribute_names)\n", 77 | "print(\"The tree structure\")\n", 78 | "pprint(tree)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "id": "f5e0af9b", 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "id": "e2c13cbe", 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [] 96 | } 97 | ], 98 | "metadata": { 99 | "kernelspec": { 100 | "display_name": "Python 3 (ipykernel)", 101 | "language": "python", 102 | "name": "python3" 103 | }, 104 | "language_info": { 105 | "codemirror_mode": { 106 | "name": "ipython", 107 | "version": 3 108 | }, 109 | "file_extension": ".py", 110 | "mimetype": "text/x-python", 111 | "name": "python", 112 | "nbconvert_exporter": "python", 113 | "pygments_lexer": "ipython3", 114 | "version": "3.9.7" 115 | } 116 | }, 117 | "nbformat": 4, 118 | "nbformat_minor": 5 119 | } 120 | -------------------------------------------------------------------------------- /8. S-KNN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 7, 6 | "id": "df36cd24", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "Iris Data set loaded...\n", 14 | "Label 0 - setosa\n", 15 | "Label 1 - versicolor\n", 16 | "Label 2 - virginica\n", 17 | "Results of Classification using K-nn with K=5 \n", 18 | " Sample: [6.8 3.2 5.9 2.3] Actual-label: 2 Predicted-label: 2\n", 19 | "Classification Accuracy : 0.9333333333333333\n", 20 | " Sample: [5.8 2.6 4. 1.2] Actual-label: 1 Predicted-label: 1\n", 21 | "Classification Accuracy : 0.9333333333333333\n", 22 | " Sample: [6.3 3.3 4.7 1.6] Actual-label: 1 Predicted-label: 1\n", 23 | "Classification Accuracy : 0.9333333333333333\n", 24 | " Sample: [6.2 3.4 5.4 2.3] Actual-label: 2 Predicted-label: 2\n", 25 | "Classification Accuracy : 0.9333333333333333\n", 26 | " Sample: [7.1 3. 5.9 2.1] Actual-label: 2 Predicted-label: 2\n", 27 | "Classification Accuracy : 0.9333333333333333\n", 28 | " Sample: [5.4 3.7 1.5 0.2] Actual-label: 0 Predicted-label: 0\n", 29 | "Classification Accuracy : 0.9333333333333333\n", 30 | " Sample: [6.3 2.5 4.9 1.5] Actual-label: 1 Predicted-label: 2\n", 31 | "Classification Accuracy : 0.9333333333333333\n", 32 | " Sample: [6.8 3. 5.5 2.1] Actual-label: 2 Predicted-label: 2\n", 33 | "Classification Accuracy : 0.9333333333333333\n", 34 | " Sample: [5.6 2.9 3.6 1.3] Actual-label: 1 Predicted-label: 1\n", 35 | "Classification Accuracy : 0.9333333333333333\n", 36 | " Sample: [5.5 2.3 4. 1.3] Actual-label: 1 Predicted-label: 1\n", 37 | "Classification Accuracy : 0.9333333333333333\n", 38 | " Sample: [5.6 3. 
4.5 1.5] Actual-label: 1 Predicted-label: 1\n", 39 | "Classification Accuracy : 0.9333333333333333\n", 40 | " Sample: [5.8 2.7 5.1 1.9] Actual-label: 2 Predicted-label: 2\n", 41 | "Classification Accuracy : 0.9333333333333333\n", 42 | " Sample: [7.3 2.9 6.3 1.8] Actual-label: 2 Predicted-label: 2\n", 43 | "Classification Accuracy : 0.9333333333333333\n", 44 | " Sample: [5. 3.5 1.6 0.6] Actual-label: 0 Predicted-label: 0\n", 45 | "Classification Accuracy : 0.9333333333333333\n", 46 | " Sample: [6.4 3.2 5.3 2.3] Actual-label: 2 Predicted-label: 2\n", 47 | "Classification Accuracy : 0.9333333333333333\n" 48 | ] 49 | } 50 | ], 51 | "source": [ 52 | "from sklearn.model_selection import train_test_split \n", 53 | "from sklearn.neighbors import KNeighborsClassifier \n", 54 | "from sklearn import datasets\n", 55 | "iris=datasets.load_iris() \n", 56 | "print(\"Iris Data set loaded...\")\n", 57 | "x_train, x_test, y_train, y_test = train_test_split(iris.data,iris.target,test_size=0.1)\n", 58 | "#random_state=0\n", 59 | "for i in range(len(iris.target_names)):\n", 60 | " print(\"Label\", i , \"-\",str(iris.target_names[i]))\n", 61 | "classifier = KNeighborsClassifier(n_neighbors=5)\n", 62 | "classifier.fit(x_train, y_train)\n", 63 | "y_pred=classifier.predict(x_test)\n", 64 | "print(\"Results of Classification using K-nn with K=5 \") \n", 65 | "for r in range(0,len(x_test)):\n", 66 | " print(\" Sample:\", str(x_test[r]), \" Actual-label:\", str(y_test[r]),\" Predicted-label:\", str(y_pred[r]))\n", 67 | "\n", 68 | " print(\"Classification Accuracy :\" , classifier.score(x_test,y_test));\n" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "id": "3c98c5f0", 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "id": "f6c6d09d", 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [] 86 | } 87 | ], 88 | "metadata": { 89 | "kernelspec": { 90 | "display_name": "Python 3 (ipykernel)", 91 | "language": "python", 92 | "name": "python3" 93 | }, 94 | "language_info": { 95 | "codemirror_mode": { 96 | "name": "ipython", 97 | "version": 3 98 | }, 99 | "file_extension": ".py", 100 | "mimetype": "text/x-python", 101 | "name": "python", 102 | "nbconvert_exporter": "python", 103 | "pygments_lexer": "ipython3", 104 | "version": "3.9.7" 105 | } 106 | }, 107 | "nbformat": 4, 108 | "nbformat_minor": 5 109 | } 110 | -------------------------------------------------------------------------------- /6. 
A-NaiveBayes.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "id": "ed9b2665", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "The first 5 Values of data is :\n", 14 | " Outlook Temperature Humidity Windy PlayTennis\n", 15 | "0 Sunny Hot High False No\n", 16 | "1 Sunny Hot High True No\n", 17 | "2 Overcast Hot High False Yes\n", 18 | "3 Rainy Mild High False Yes\n", 19 | "4 Rainy Cool Normal False Yes\n", 20 | "\n", 21 | "The First 5 values of the train data is\n", 22 | " Outlook Temperature Humidity Windy\n", 23 | "0 Sunny Hot High False\n", 24 | "1 Sunny Hot High True\n", 25 | "2 Overcast Hot High False\n", 26 | "3 Rainy Mild High False\n", 27 | "4 Rainy Cool Normal False\n", 28 | "\n", 29 | "The First 5 values of train output is\n", 30 | " 0 No\n", 31 | "1 No\n", 32 | "2 Yes\n", 33 | "3 Yes\n", 34 | "4 Yes\n", 35 | "Name: PlayTennis, dtype: object\n", 36 | "\n", 37 | "Now the Train output is\n", 38 | " Outlook Temperature Humidity Windy\n", 39 | "0 2 1 0 0\n", 40 | "1 2 1 0 1\n", 41 | "2 0 1 0 0\n", 42 | "3 1 2 0 0\n", 43 | "4 1 0 1 0\n", 44 | "\n", 45 | "Now the Train output is\n", 46 | " [0 0 1 1 1 0 1 0 1 1 1 1 1 0]\n", 47 | "Accuracy is: 0.6666666666666666\n" 48 | ] 49 | } 50 | ], 51 | "source": [ 52 | "# import necessary libraries\n", 53 | "import pandas as pd\n", 54 | "from sklearn import tree\n", 55 | "from sklearn.preprocessing import LabelEncoder\n", 56 | "from sklearn.naive_bayes import GaussianNB\n", 57 | "\n", 58 | "# Load Data from CSV\n", 59 | "data = pd.read_csv('p-tennis.csv')\n", 60 | "print(\"The first 5 Values of data is :\\n\", data.head())\n", 61 | "\n", 62 | "# obtain train data and train output\n", 63 | "X = data.iloc[:, :-1]\n", 64 | "print(\"\\nThe First 5 values of the train data is\\n\", X.head())\n", 65 | "\n", 66 | "y = data.iloc[:, -1]\n", 67 | "print(\"\\nThe First 5 values of train output is\\n\", y.head())\n", 68 | "\n", 69 | "# convert them in numbers\n", 70 | "le_outlook = LabelEncoder()\n", 71 | "X.Outlook = le_outlook.fit_transform(X.Outlook)\n", 72 | "\n", 73 | "le_Temperature = LabelEncoder()\n", 74 | "X.Temperature = le_Temperature.fit_transform(X.Temperature)\n", 75 | "\n", 76 | "le_Humidity = LabelEncoder()\n", 77 | "X.Humidity = le_Humidity.fit_transform(X.Humidity)\n", 78 | "\n", 79 | "le_Windy = LabelEncoder()\n", 80 | "X.Windy = le_Windy.fit_transform(X.Windy)\n", 81 | "\n", 82 | "print(\"\\nNow the Train output is\\n\", X.head())\n", 83 | "\n", 84 | "le_PlayTennis = LabelEncoder()\n", 85 | "y = le_PlayTennis.fit_transform(y)\n", 86 | "print(\"\\nNow the Train output is\\n\",y)\n", 87 | "\n", 88 | "from sklearn.model_selection import train_test_split\n", 89 | "X_train, X_test, y_train, y_test = train_test_split(X,y, test_size = 0.20)\n", 90 | "\n", 91 | "classifier = GaussianNB()\n", 92 | "classifier.fit(X_train, y_train)\n", 93 | "\n", 94 | "from sklearn.metrics import accuracy_score\n", 95 | "print(\"Accuracy is:\", accuracy_score(classifier.predict(X_test), y_test))" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "id": "976d4cbd", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [] 105 | } 106 | ], 107 | "metadata": { 108 | "kernelspec": { 109 | "display_name": "Python 3 (ipykernel)", 110 | "language": "python", 111 | "name": "python3" 112 | }, 113 | "language_info": { 114 | "codemirror_mode": { 115 | 
"name": "ipython", 116 | "version": 3 117 | }, 118 | "file_extension": ".py", 119 | "mimetype": "text/x-python", 120 | "name": "python", 121 | "nbconvert_exporter": "python", 122 | "pygments_lexer": "ipython3", 123 | "version": "3.9.7" 124 | } 125 | }, 126 | "nbformat": 4, 127 | "nbformat_minor": 5 128 | } 129 | -------------------------------------------------------------------------------- /iris.data: -------------------------------------------------------------------------------- 1 | 5.1,3.5,1.4,0.2,Iris-setosa 2 | 4.9,3.0,1.4,0.2,Iris-setosa 3 | 4.7,3.2,1.3,0.2,Iris-setosa 4 | 4.6,3.1,1.5,0.2,Iris-setosa 5 | 5.0,3.6,1.4,0.2,Iris-setosa 6 | 5.4,3.9,1.7,0.4,Iris-setosa 7 | 4.6,3.4,1.4,0.3,Iris-setosa 8 | 5.0,3.4,1.5,0.2,Iris-setosa 9 | 4.4,2.9,1.4,0.2,Iris-setosa 10 | 4.9,3.1,1.5,0.1,Iris-setosa 11 | 5.4,3.7,1.5,0.2,Iris-setosa 12 | 4.8,3.4,1.6,0.2,Iris-setosa 13 | 4.8,3.0,1.4,0.1,Iris-setosa 14 | 4.3,3.0,1.1,0.1,Iris-setosa 15 | 5.8,4.0,1.2,0.2,Iris-setosa 16 | 5.7,4.4,1.5,0.4,Iris-setosa 17 | 5.4,3.9,1.3,0.4,Iris-setosa 18 | 5.1,3.5,1.4,0.3,Iris-setosa 19 | 5.7,3.8,1.7,0.3,Iris-setosa 20 | 5.1,3.8,1.5,0.3,Iris-setosa 21 | 5.4,3.4,1.7,0.2,Iris-setosa 22 | 5.1,3.7,1.5,0.4,Iris-setosa 23 | 4.6,3.6,1.0,0.2,Iris-setosa 24 | 5.1,3.3,1.7,0.5,Iris-setosa 25 | 4.8,3.4,1.9,0.2,Iris-setosa 26 | 5.0,3.0,1.6,0.2,Iris-setosa 27 | 5.0,3.4,1.6,0.4,Iris-setosa 28 | 5.2,3.5,1.5,0.2,Iris-setosa 29 | 5.2,3.4,1.4,0.2,Iris-setosa 30 | 4.7,3.2,1.6,0.2,Iris-setosa 31 | 4.8,3.1,1.6,0.2,Iris-setosa 32 | 5.4,3.4,1.5,0.4,Iris-setosa 33 | 5.2,4.1,1.5,0.1,Iris-setosa 34 | 5.5,4.2,1.4,0.2,Iris-setosa 35 | 4.9,3.1,1.5,0.1,Iris-setosa 36 | 5.0,3.2,1.2,0.2,Iris-setosa 37 | 5.5,3.5,1.3,0.2,Iris-setosa 38 | 4.9,3.1,1.5,0.1,Iris-setosa 39 | 4.4,3.0,1.3,0.2,Iris-setosa 40 | 5.1,3.4,1.5,0.2,Iris-setosa 41 | 5.0,3.5,1.3,0.3,Iris-setosa 42 | 4.5,2.3,1.3,0.3,Iris-setosa 43 | 4.4,3.2,1.3,0.2,Iris-setosa 44 | 5.0,3.5,1.6,0.6,Iris-setosa 45 | 5.1,3.8,1.9,0.4,Iris-setosa 46 | 4.8,3.0,1.4,0.3,Iris-setosa 47 | 5.1,3.8,1.6,0.2,Iris-setosa 48 | 4.6,3.2,1.4,0.2,Iris-setosa 49 | 5.3,3.7,1.5,0.2,Iris-setosa 50 | 5.0,3.3,1.4,0.2,Iris-setosa 51 | 7.0,3.2,4.7,1.4,Iris-versicolor 52 | 6.4,3.2,4.5,1.5,Iris-versicolor 53 | 6.9,3.1,4.9,1.5,Iris-versicolor 54 | 5.5,2.3,4.0,1.3,Iris-versicolor 55 | 6.5,2.8,4.6,1.5,Iris-versicolor 56 | 5.7,2.8,4.5,1.3,Iris-versicolor 57 | 6.3,3.3,4.7,1.6,Iris-versicolor 58 | 4.9,2.4,3.3,1.0,Iris-versicolor 59 | 6.6,2.9,4.6,1.3,Iris-versicolor 60 | 5.2,2.7,3.9,1.4,Iris-versicolor 61 | 5.0,2.0,3.5,1.0,Iris-versicolor 62 | 5.9,3.0,4.2,1.5,Iris-versicolor 63 | 6.0,2.2,4.0,1.0,Iris-versicolor 64 | 6.1,2.9,4.7,1.4,Iris-versicolor 65 | 5.6,2.9,3.6,1.3,Iris-versicolor 66 | 6.7,3.1,4.4,1.4,Iris-versicolor 67 | 5.6,3.0,4.5,1.5,Iris-versicolor 68 | 5.8,2.7,4.1,1.0,Iris-versicolor 69 | 6.2,2.2,4.5,1.5,Iris-versicolor 70 | 5.6,2.5,3.9,1.1,Iris-versicolor 71 | 5.9,3.2,4.8,1.8,Iris-versicolor 72 | 6.1,2.8,4.0,1.3,Iris-versicolor 73 | 6.3,2.5,4.9,1.5,Iris-versicolor 74 | 6.1,2.8,4.7,1.2,Iris-versicolor 75 | 6.4,2.9,4.3,1.3,Iris-versicolor 76 | 6.6,3.0,4.4,1.4,Iris-versicolor 77 | 6.8,2.8,4.8,1.4,Iris-versicolor 78 | 6.7,3.0,5.0,1.7,Iris-versicolor 79 | 6.0,2.9,4.5,1.5,Iris-versicolor 80 | 5.7,2.6,3.5,1.0,Iris-versicolor 81 | 5.5,2.4,3.8,1.1,Iris-versicolor 82 | 5.5,2.4,3.7,1.0,Iris-versicolor 83 | 5.8,2.7,3.9,1.2,Iris-versicolor 84 | 6.0,2.7,5.1,1.6,Iris-versicolor 85 | 5.4,3.0,4.5,1.5,Iris-versicolor 86 | 6.0,3.4,4.5,1.6,Iris-versicolor 87 | 6.7,3.1,4.7,1.5,Iris-versicolor 88 | 6.3,2.3,4.4,1.3,Iris-versicolor 89 | 
5.6,3.0,4.1,1.3,Iris-versicolor 90 | 5.5,2.5,4.0,1.3,Iris-versicolor 91 | 5.5,2.6,4.4,1.2,Iris-versicolor 92 | 6.1,3.0,4.6,1.4,Iris-versicolor 93 | 5.8,2.6,4.0,1.2,Iris-versicolor 94 | 5.0,2.3,3.3,1.0,Iris-versicolor 95 | 5.6,2.7,4.2,1.3,Iris-versicolor 96 | 5.7,3.0,4.2,1.2,Iris-versicolor 97 | 5.7,2.9,4.2,1.3,Iris-versicolor 98 | 6.2,2.9,4.3,1.3,Iris-versicolor 99 | 5.1,2.5,3.0,1.1,Iris-versicolor 100 | 5.7,2.8,4.1,1.3,Iris-versicolor 101 | 6.3,3.3,6.0,2.5,Iris-virginica 102 | 5.8,2.7,5.1,1.9,Iris-virginica 103 | 7.1,3.0,5.9,2.1,Iris-virginica 104 | 6.3,2.9,5.6,1.8,Iris-virginica 105 | 6.5,3.0,5.8,2.2,Iris-virginica 106 | 7.6,3.0,6.6,2.1,Iris-virginica 107 | 4.9,2.5,4.5,1.7,Iris-virginica 108 | 7.3,2.9,6.3,1.8,Iris-virginica 109 | 6.7,2.5,5.8,1.8,Iris-virginica 110 | 7.2,3.6,6.1,2.5,Iris-virginica 111 | 6.5,3.2,5.1,2.0,Iris-virginica 112 | 6.4,2.7,5.3,1.9,Iris-virginica 113 | 6.8,3.0,5.5,2.1,Iris-virginica 114 | 5.7,2.5,5.0,2.0,Iris-virginica 115 | 5.8,2.8,5.1,2.4,Iris-virginica 116 | 6.4,3.2,5.3,2.3,Iris-virginica 117 | 6.5,3.0,5.5,1.8,Iris-virginica 118 | 7.7,3.8,6.7,2.2,Iris-virginica 119 | 7.7,2.6,6.9,2.3,Iris-virginica 120 | 6.0,2.2,5.0,1.5,Iris-virginica 121 | 6.9,3.2,5.7,2.3,Iris-virginica 122 | 5.6,2.8,4.9,2.0,Iris-virginica 123 | 7.7,2.8,6.7,2.0,Iris-virginica 124 | 6.3,2.7,4.9,1.8,Iris-virginica 125 | 6.7,3.3,5.7,2.1,Iris-virginica 126 | 7.2,3.2,6.0,1.8,Iris-virginica 127 | 6.2,2.8,4.8,1.8,Iris-virginica 128 | 6.1,3.0,4.9,1.8,Iris-virginica 129 | 6.4,2.8,5.6,2.1,Iris-virginica 130 | 7.2,3.0,5.8,1.6,Iris-virginica 131 | 7.4,2.8,6.1,1.9,Iris-virginica 132 | 7.9,3.8,6.4,2.0,Iris-virginica 133 | 6.4,2.8,5.6,2.2,Iris-virginica 134 | 6.3,2.8,5.1,1.5,Iris-virginica 135 | 6.1,2.6,5.6,1.4,Iris-virginica 136 | 7.7,3.0,6.1,2.3,Iris-virginica 137 | 6.3,3.4,5.6,2.4,Iris-virginica 138 | 6.4,3.1,5.5,1.8,Iris-virginica 139 | 6.0,3.0,4.8,1.8,Iris-virginica 140 | 6.9,3.1,5.4,2.1,Iris-virginica 141 | 6.7,3.1,5.6,2.4,Iris-virginica 142 | 6.9,3.1,5.1,2.3,Iris-virginica 143 | 5.8,2.7,5.1,1.9,Iris-virginica 144 | 6.8,3.2,5.9,2.3,Iris-virginica 145 | 6.7,3.3,5.7,2.5,Iris-virginica 146 | 6.7,3.0,5.2,2.3,Iris-virginica 147 | 6.3,2.5,5.0,1.9,Iris-virginica 148 | 6.5,3.0,5.2,2.0,Iris-virginica 149 | 6.2,3.4,5.4,2.3,Iris-virginica 150 | 5.9,3.0,5.1,1.8,Iris-virginica -------------------------------------------------------------------------------- /1. 
A-star.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "052f0bf1", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "Path found: ['A', 'F', 'G', 'I', 'J']\n" 14 | ] 15 | }, 16 | { 17 | "data": { 18 | "text/plain": [ 19 | "['A', 'F', 'G', 'I', 'J']" 20 | ] 21 | }, 22 | "execution_count": 1, 23 | "metadata": {}, 24 | "output_type": "execute_result" 25 | } 26 | ], 27 | "source": [ 28 | "def aStarAlgo(start_node, stop_node):\n", 29 | " \n", 30 | "\n", 31 | " open_set = set(start_node) # {A}, len{open_set}=1\n", 32 | " closed_set = set()\n", 33 | " g = {} # store the distance from starting node\n", 34 | " parents = {}\n", 35 | " g[start_node] = 0\n", 36 | " parents[start_node] = start_node # parents['A']='A\"\n", 37 | "\n", 38 | " while len(open_set) > 0 :\n", 39 | " n = None\n", 40 | " \n", 41 | " for v in open_set: # v='B'/'F'\n", 42 | " if n == None or g[v] + heuristic(v) < g[n] + heuristic(n):\n", 43 | " n = v # n='A'\n", 44 | "\n", 45 | " if n == stop_node or Graph_nodes[n] == None:\n", 46 | " pass\n", 47 | " else:\n", 48 | " for (m, weight) in get_neighbors(n):\n", 49 | " # nodes 'm' not in first and last set are added to first\n", 50 | " # n is set its parent\n", 51 | " if m not in open_set and m not in closed_set:\n", 52 | " open_set.add(m) # m=B weight=6 {'F','B','A'} len{open_set}=2\n", 53 | " parents[m] = n # parents={'A':A,'B':A} len{parent}=2\n", 54 | " g[m] = g[n] + weight # g={'A':0,'B':6, 'F':3} len{g}=2\n", 55 | "\n", 56 | "\n", 57 | " #for each node m,compare its distance from start i.e g(m) to the \n", 58 | " #from start through n node\n", 59 | " else:\n", 60 | " if g[m] > g[n] + weight:\n", 61 | " #update g(m)\n", 62 | " g[m] = g[n] + weight\n", 63 | " #change parent of m to n\n", 64 | " parents[m] = n\n", 65 | "\n", 66 | " #if m in closed set,remove and add to open\n", 67 | " if m in closed_set:\n", 68 | " closed_set.remove(m)\n", 69 | " open_set.add(m)\n", 70 | "\n", 71 | " if n == None:\n", 72 | " print('Path does not exist!')\n", 73 | " return None\n", 74 | "\n", 75 | " # if the current node is the stop_node\n", 76 | " # then we begin reconstructin the path from it to the start_node\n", 77 | " if n == stop_node:\n", 78 | " path = []\n", 79 | "\n", 80 | " while parents[n] != n:\n", 81 | " path.append(n)\n", 82 | " n = parents[n]\n", 83 | "\n", 84 | " path.append(start_node)\n", 85 | "\n", 86 | " path.reverse()\n", 87 | "\n", 88 | " print('Path found: {}'.format(path))\n", 89 | " return path\n", 90 | "\n", 91 | "\n", 92 | " # remove n from the open_list, and add it to closed_list\n", 93 | " # because all of his neighbors were inspected\n", 94 | " open_set.remove(n)# {'F','B'} len=2\n", 95 | " closed_set.add(n) #{A} len=1\n", 96 | "\n", 97 | " print('Path does not exist!')\n", 98 | " return None\n", 99 | "\n", 100 | "#define fuction to return neighbor and its distance\n", 101 | "#from the passed node\n", 102 | "def get_neighbors(v):\n", 103 | " if v in Graph_nodes:\n", 104 | " return Graph_nodes[v]\n", 105 | " else:\n", 106 | " return None\n", 107 | "#for simplicity we ll consider heuristic distances given\n", 108 | "#and this function returns heuristic distance for all nodes\n", 109 | " \n", 110 | "def heuristic(n):\n", 111 | " H_dist = {\n", 112 | " 'A': 10,\n", 113 | " 'B': 8,\n", 114 | " 'C': 5,\n", 115 | " 'D': 7,\n", 116 | " 'E': 3,\n", 117 | " 'F': 6,\n", 118 | " 'G': 5,\n", 
119 | "        'H': 3,\n", 120 | "        'I': 1,\n", 121 | "        'J': 0\n", 122 | "    }\n", 123 | "\n", 124 | "    return H_dist[n]\n", 125 | "\n", 126 | "#Describe your graph here\n", 127 | "Graph_nodes = {\n", 128 | "    \n", 129 | "    'A': [('B', 6), ('F', 3)],\n", 130 | "    'B': [('C', 3), ('D', 2)],\n", 131 | "    'C': [('D', 1), ('E', 5)],\n", 132 | "    'D': [('C', 1), ('E', 8)],\n", 133 | "    'E': [('I', 5), ('J', 5)],\n", 134 | "    'F': [('G', 1),('H', 7)] ,\n", 135 | "    'G': [('I', 3)],\n", 136 | "    'H': [('I', 2)],\n", 137 | "    'I': [('E', 5), ('J', 3)],\n", 138 | "\n", 139 | "}\n", 140 | "aStarAlgo('A', 'J')" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": null, 146 | "id": "7fbbd940", 147 | "metadata": {}, 148 | "outputs": [], 149 | "source": [] 150 | } 151 | ], 152 | "metadata": { 153 | "kernelspec": { 154 | "display_name": "Python 3 (ipykernel)", 155 | "language": "python", 156 | "name": "python3" 157 | }, 158 | "language_info": { 159 | "codemirror_mode": { 160 | "name": "ipython", 161 | "version": 3 162 | }, 163 | "file_extension": ".py", 164 | "mimetype": "text/x-python", 165 | "name": "python", 166 | "nbconvert_exporter": "python", 167 | "pygments_lexer": "ipython3", 168 | "version": "3.9.7" 169 | } 170 | }, 171 | "nbformat": 4, 172 | "nbformat_minor": 5 173 | } 174 | -------------------------------------------------------------------------------- /VIVA AIML.txt: -------------------------------------------------------------------------------- 1 | Program BASED Questions 2 | 3 | 1. What is A-star? 4 | A star is a luminous ball of gas, mostly hydrogen and helium, held together by its own gravity... nah, it's harder than that... 5 | The A* search algorithm is one of the best-known and most popular techniques used in path-finding and graph traversal. 6 | 7 | 2. What is AO-star? 8 | The AO* algorithm is a knowledge-based search technique, meaning the start state and the goal state are already defined, and the best path is found using heuristics. 9 | 10 | 3. What is Candidate Elimination? 11 | The candidate elimination algorithm incrementally builds the version space given a hypothesis space H and a set E of examples. 12 | 13 | 4. What is Decision Tree or ID3? 14 | A decision tree is a non-parametric supervised learning algorithm, which is utilized for both classification and regression tasks. 15 | 16 | 5. What is Backpropagation? 17 | The backpropagation algorithm in a neural network computes the gradient of the loss function with respect to a single weight by the chain rule. 18 | 19 | 6. What is Naive Bayes? 20 | The Naïve Bayes algorithm is a supervised learning algorithm based on Bayes' theorem, used for solving classification problems. 21 | 22 | 7. What are EM and K-Means? 23 | EM - Expectation-Maximization: the expectation–maximization (EM) algorithm is an iterative method to find (local) maximum-likelihood estimates. 24 | K-Means - K-Means clustering is an unsupervised learning algorithm which groups the unlabeled dataset into different clusters. 25 | These algorithms are used to group the dataset into clusters based on similarities and dissimilarities. 26 | 27 | 8. What is KNN? 28 | K-Nearest Neighbours is an algorithm that stores all the available data and classifies a new data point based on its similarity to the stored examples. 29 | 30 | 9. What is Linear Regression? 31 | Linear Regression is a machine learning algorithm based on supervised learning. It performs a regression task: it models a target prediction value based on independent variables. It is mostly used for finding the relationship between variables and for forecasting.
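Illustrative note: the Linear Regression notebook (program 9) is not reproduced in this section, so the following is only a minimal sketch of an ordinary least-squares fit with scikit-learn. The data, values and variable names below are made up for demonstration and are not taken from the lab program.

import numpy as np
from sklearn.linear_model import LinearRegression

# Hypothetical data: hours studied (x) versus exam score (y)
x = np.array([[1], [2], [3], [4], [5]])    # shape (n_samples, n_features)
y = np.array([52, 57, 61, 68, 73])

model = LinearRegression()
model.fit(x, y)                            # fits y ≈ w*x + b by ordinary least squares

print("Coefficient (slope):", model.coef_[0])
print("Intercept:", model.intercept_)
print("Prediction for x = 6:", model.predict([[6]])[0])

The same construct-fit-predict pattern is what the scikit-learn based notebooks above (KNN, Naive Bayes) follow: build the estimator, call fit() on training data, then call predict() or score() on test data.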
32 | 33 | General VIVA Questions 34 | 1. What is machine learning? 35 | The capability of a machine to imitate intelligent human behavior. 36 | 37 | 2. Define supervised, unsupervised and reinforcement learning. 38 | Supervised learning requires a labeled dataset for training. 39 | Unsupervised learning identifies hidden data patterns from an unlabeled dataset, 40 | while reinforcement learning does not require a pre-existing dataset, as it learns by interacting with the environment. 41 | 42 | 3. What are the examples of Supervised and Unsupervised Learning? 43 | Unsupervised Learning areas of application include market basket analysis, semantic clustering, recommender systems, etc. 44 | The most commonly used Supervised Learning algorithms are decision trees, logistic regression, linear regression and support vector machines. 45 | 46 | 4. What do you mean by hypothesis? 47 | It is defined as the approximate function that best describes the target in supervised machine learning algorithms. 48 | 49 | 5. What is classification? 50 | In machine learning, classification refers to a predictive modeling problem where a class label is predicted for a given example of input data. 51 | 52 | 6. What is clustering? 53 | Grouping unlabeled examples is called clustering. 54 | 55 | 7. Define precision, accuracy and recall. 56 | Accuracy tells you how often the ML model was correct overall. Precision is how good the model is at predicting a specific category. Recall tells you how many times the model was able to detect a specific category. 57 | 58 | 8. Define Entropy. 59 | Entropy, as it relates to machine learning, is a measure of the randomness in the information being processed. 60 | 61 | 9. Define regression. 62 | Regression analysis is a statistical method to model the relationship between a dependent (target) variable and one or more independent (predictor) variables. 63 | 64 | 10. How is KNN different from K-Means clustering? 65 | KNN is a supervised machine learning algorithm used for classification, whereas K-Means is an unsupervised machine learning algorithm used for clustering. 66 | 67 | 11. What is concept learning? 68 | Concept learning describes the process by which experience allows us to partition objects in the world into classes for the purpose of generalization, discrimination, and inference. 69 | 70 | 12. Define specific and general boundary. 71 | The general boundary G, with respect to hypothesis space H and training data D, is the set of maximally general members of H consistent with D. The specific boundary S, with respect to hypothesis space H and training data D, is the set of minimally general (i.e., maximally specific) members of H consistent with D. 72 | 73 | 13. Define target function 74 | A target function, in machine learning, is a method for solving a problem that an AI algorithm parses its training data to find. Once an algorithm finds its target function, that function can be used to predict results (predictive analysis). 75 | 76 | 14. Define Decision tree 77 | A decision tree is a powerful and popular tool for classification and prediction. A decision tree is a flowchart-like tree structure. 78 | 79 | 15. What is ANN? 80 | Artificial Neural Network. It's a network made up of neurons which is artificial. Just kidding: Artificial Neural Networks (ANN) are algorithms inspired by brain function and are used to model complicated patterns and to solve forecasting problems. 81 | 82 | 16.
Explain gradient descent approximation 83 | Gradient descent is an algorithm that numerically estimates where a function outputs its lowest values. 84 | 85 | 17. State Bayes Theorem 86 | Bayes' theorem, named after Thomas Bayes, describes the probability of an event based on prior knowledge of conditions related to the event. 87 | 88 | 18. Define Bayesian Belief Networks 89 | A Bayesian belief network is a key technology for dealing with probabilistic events and for solving problems that involve uncertainty. 90 | 91 | 19. Differentiate between hard and soft clustering 92 | Hard clustering is a method of grouping data items such that each item is assigned to exactly one cluster; K-Means is an example. Soft clustering is a method of grouping data items such that an item can belong to multiple clusters; Fuzzy C-Means (FCM) is an example. 93 | 94 | 20. Define Variance 95 | The variance is a measure of variability. It is calculated by taking the average of squared deviations from the mean. 96 | 97 | 21. What is inductive machine learning? 98 | Inductive Learning Algorithm (ILA) is an iterative and inductive machine learning algorithm used for generating a set of classification rules of the form “IF-THEN” from a set of examples, producing rules at each iteration and appending them to the rule set. 99 | 100 | 22. Why is KNN called lazy learning? 101 | Because it does no training at all when you supply the training data. 102 | 103 | 23. Why is Naive Bayes naive? 104 | Naive Bayes is called naive because it assumes that the input variables are independent of each other. 105 | 106 | 24. Mention classification algorithms 107 | Logistic Regression. 108 | Naive Bayes. 109 | K-Nearest Neighbors. 110 | Decision Tree. 111 | Support Vector Machines. 112 | 113 | 25. Define pruning. 114 | Pruning is the process of removing weight connections in a network to increase inference speed and decrease model storage size. 115 | 116 | 26. Differentiate clustering and classification 117 | Classification is a supervised learning task, whereas clustering is an unsupervised learning task. 118 | 119 | 27. Mention clustering algorithms 120 | Density-based. 121 | Distribution-based. 122 | Centroid-based. 123 | Hierarchical-based. 124 | K-means clustering algorithm. 125 | 126 | 28. Define Bias 127 | Bias is a phenomenon that skews the result of an algorithm in favor of or against an idea. 128 | 129 | 29. Is this much viva qns not enough for you? 130 | 131 | - Prepared with <3 by manoj and afuu. -------------------------------------------------------------------------------- /7. S-EM-Kmeans.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "id": "3d1a2146", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "{'data': array([[5.1, 3.5, 1.4, 0.2],\n", 14 | "       [4.9, 3. , 1.4, 0.2],\n", 15 | "       [4.7, 3.2, 1.3, 0.2],\n", 16 | "       [4.6, 3.1, 1.5, 0.2],\n", 17 | "       [5. , 3.6, 1.4, 0.2],\n", 18 | "       [5.4, 3.9, 1.7, 0.4],\n", 19 | "       [4.6, 3.4, 1.4, 0.3],\n", 20 | "       [5. , 3.4, 1.5, 0.2],\n", 21 | "       [4.4, 2.9, 1.4, 0.2],\n", 22 | "       [4.9, 3.1, 1.5, 0.1],\n", 23 | "       [5.4, 3.7, 1.5, 0.2],\n", 24 | "       [4.8, 3.4, 1.6, 0.2],\n", 25 | "       [4.8, 3. , 1.4, 0.1],\n", 26 | "       [4.3, 3. , 1.1, 0.1],\n", 27 | "       [5.8, 4. 
, 1.2, 0.2],\n", 28 | " [5.7, 4.4, 1.5, 0.4],\n", 29 | " [5.4, 3.9, 1.3, 0.4],\n", 30 | " [5.1, 3.5, 1.4, 0.3],\n", 31 | " [5.7, 3.8, 1.7, 0.3],\n", 32 | " [5.1, 3.8, 1.5, 0.3],\n", 33 | " [5.4, 3.4, 1.7, 0.2],\n", 34 | " [5.1, 3.7, 1.5, 0.4],\n", 35 | " [4.6, 3.6, 1. , 0.2],\n", 36 | " [5.1, 3.3, 1.7, 0.5],\n", 37 | " [4.8, 3.4, 1.9, 0.2],\n", 38 | " [5. , 3. , 1.6, 0.2],\n", 39 | " [5. , 3.4, 1.6, 0.4],\n", 40 | " [5.2, 3.5, 1.5, 0.2],\n", 41 | " [5.2, 3.4, 1.4, 0.2],\n", 42 | " [4.7, 3.2, 1.6, 0.2],\n", 43 | " [4.8, 3.1, 1.6, 0.2],\n", 44 | " [5.4, 3.4, 1.5, 0.4],\n", 45 | " [5.2, 4.1, 1.5, 0.1],\n", 46 | " [5.5, 4.2, 1.4, 0.2],\n", 47 | " [4.9, 3.1, 1.5, 0.2],\n", 48 | " [5. , 3.2, 1.2, 0.2],\n", 49 | " [5.5, 3.5, 1.3, 0.2],\n", 50 | " [4.9, 3.6, 1.4, 0.1],\n", 51 | " [4.4, 3. , 1.3, 0.2],\n", 52 | " [5.1, 3.4, 1.5, 0.2],\n", 53 | " [5. , 3.5, 1.3, 0.3],\n", 54 | " [4.5, 2.3, 1.3, 0.3],\n", 55 | " [4.4, 3.2, 1.3, 0.2],\n", 56 | " [5. , 3.5, 1.6, 0.6],\n", 57 | " [5.1, 3.8, 1.9, 0.4],\n", 58 | " [4.8, 3. , 1.4, 0.3],\n", 59 | " [5.1, 3.8, 1.6, 0.2],\n", 60 | " [4.6, 3.2, 1.4, 0.2],\n", 61 | " [5.3, 3.7, 1.5, 0.2],\n", 62 | " [5. , 3.3, 1.4, 0.2],\n", 63 | " [7. , 3.2, 4.7, 1.4],\n", 64 | " [6.4, 3.2, 4.5, 1.5],\n", 65 | " [6.9, 3.1, 4.9, 1.5],\n", 66 | " [5.5, 2.3, 4. , 1.3],\n", 67 | " [6.5, 2.8, 4.6, 1.5],\n", 68 | " [5.7, 2.8, 4.5, 1.3],\n", 69 | " [6.3, 3.3, 4.7, 1.6],\n", 70 | " [4.9, 2.4, 3.3, 1. ],\n", 71 | " [6.6, 2.9, 4.6, 1.3],\n", 72 | " [5.2, 2.7, 3.9, 1.4],\n", 73 | " [5. , 2. , 3.5, 1. ],\n", 74 | " [5.9, 3. , 4.2, 1.5],\n", 75 | " [6. , 2.2, 4. , 1. ],\n", 76 | " [6.1, 2.9, 4.7, 1.4],\n", 77 | " [5.6, 2.9, 3.6, 1.3],\n", 78 | " [6.7, 3.1, 4.4, 1.4],\n", 79 | " [5.6, 3. , 4.5, 1.5],\n", 80 | " [5.8, 2.7, 4.1, 1. ],\n", 81 | " [6.2, 2.2, 4.5, 1.5],\n", 82 | " [5.6, 2.5, 3.9, 1.1],\n", 83 | " [5.9, 3.2, 4.8, 1.8],\n", 84 | " [6.1, 2.8, 4. , 1.3],\n", 85 | " [6.3, 2.5, 4.9, 1.5],\n", 86 | " [6.1, 2.8, 4.7, 1.2],\n", 87 | " [6.4, 2.9, 4.3, 1.3],\n", 88 | " [6.6, 3. , 4.4, 1.4],\n", 89 | " [6.8, 2.8, 4.8, 1.4],\n", 90 | " [6.7, 3. , 5. , 1.7],\n", 91 | " [6. , 2.9, 4.5, 1.5],\n", 92 | " [5.7, 2.6, 3.5, 1. ],\n", 93 | " [5.5, 2.4, 3.8, 1.1],\n", 94 | " [5.5, 2.4, 3.7, 1. ],\n", 95 | " [5.8, 2.7, 3.9, 1.2],\n", 96 | " [6. , 2.7, 5.1, 1.6],\n", 97 | " [5.4, 3. , 4.5, 1.5],\n", 98 | " [6. , 3.4, 4.5, 1.6],\n", 99 | " [6.7, 3.1, 4.7, 1.5],\n", 100 | " [6.3, 2.3, 4.4, 1.3],\n", 101 | " [5.6, 3. , 4.1, 1.3],\n", 102 | " [5.5, 2.5, 4. , 1.3],\n", 103 | " [5.5, 2.6, 4.4, 1.2],\n", 104 | " [6.1, 3. , 4.6, 1.4],\n", 105 | " [5.8, 2.6, 4. , 1.2],\n", 106 | " [5. , 2.3, 3.3, 1. ],\n", 107 | " [5.6, 2.7, 4.2, 1.3],\n", 108 | " [5.7, 3. , 4.2, 1.2],\n", 109 | " [5.7, 2.9, 4.2, 1.3],\n", 110 | " [6.2, 2.9, 4.3, 1.3],\n", 111 | " [5.1, 2.5, 3. , 1.1],\n", 112 | " [5.7, 2.8, 4.1, 1.3],\n", 113 | " [6.3, 3.3, 6. , 2.5],\n", 114 | " [5.8, 2.7, 5.1, 1.9],\n", 115 | " [7.1, 3. , 5.9, 2.1],\n", 116 | " [6.3, 2.9, 5.6, 1.8],\n", 117 | " [6.5, 3. , 5.8, 2.2],\n", 118 | " [7.6, 3. , 6.6, 2.1],\n", 119 | " [4.9, 2.5, 4.5, 1.7],\n", 120 | " [7.3, 2.9, 6.3, 1.8],\n", 121 | " [6.7, 2.5, 5.8, 1.8],\n", 122 | " [7.2, 3.6, 6.1, 2.5],\n", 123 | " [6.5, 3.2, 5.1, 2. ],\n", 124 | " [6.4, 2.7, 5.3, 1.9],\n", 125 | " [6.8, 3. , 5.5, 2.1],\n", 126 | " [5.7, 2.5, 5. , 2. ],\n", 127 | " [5.8, 2.8, 5.1, 2.4],\n", 128 | " [6.4, 3.2, 5.3, 2.3],\n", 129 | " [6.5, 3. , 5.5, 1.8],\n", 130 | " [7.7, 3.8, 6.7, 2.2],\n", 131 | " [7.7, 2.6, 6.9, 2.3],\n", 132 | " [6. , 2.2, 5. 
, 1.5],\n", 133 | " [6.9, 3.2, 5.7, 2.3],\n", 134 | " [5.6, 2.8, 4.9, 2. ],\n", 135 | " [7.7, 2.8, 6.7, 2. ],\n", 136 | " [6.3, 2.7, 4.9, 1.8],\n", 137 | " [6.7, 3.3, 5.7, 2.1],\n", 138 | " [7.2, 3.2, 6. , 1.8],\n", 139 | " [6.2, 2.8, 4.8, 1.8],\n", 140 | " [6.1, 3. , 4.9, 1.8],\n", 141 | " [6.4, 2.8, 5.6, 2.1],\n", 142 | " [7.2, 3. , 5.8, 1.6],\n", 143 | " [7.4, 2.8, 6.1, 1.9],\n", 144 | " [7.9, 3.8, 6.4, 2. ],\n", 145 | " [6.4, 2.8, 5.6, 2.2],\n", 146 | " [6.3, 2.8, 5.1, 1.5],\n", 147 | " [6.1, 2.6, 5.6, 1.4],\n", 148 | " [7.7, 3. , 6.1, 2.3],\n", 149 | " [6.3, 3.4, 5.6, 2.4],\n", 150 | " [6.4, 3.1, 5.5, 1.8],\n", 151 | " [6. , 3. , 4.8, 1.8],\n", 152 | " [6.9, 3.1, 5.4, 2.1],\n", 153 | " [6.7, 3.1, 5.6, 2.4],\n", 154 | " [6.9, 3.1, 5.1, 2.3],\n", 155 | " [5.8, 2.7, 5.1, 1.9],\n", 156 | " [6.8, 3.2, 5.9, 2.3],\n", 157 | " [6.7, 3.3, 5.7, 2.5],\n", 158 | " [6.7, 3. , 5.2, 2.3],\n", 159 | " [6.3, 2.5, 5. , 1.9],\n", 160 | " [6.5, 3. , 5.2, 2. ],\n", 161 | " [6.2, 3.4, 5.4, 2.3],\n", 162 | " [5.9, 3. , 5.1, 1.8]]), 'target': array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 163 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 164 | " 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", 165 | " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", 166 | " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n", 167 | " 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n", 168 | " 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]), 'frame': None, 'target_names': array(['setosa', 'versicolor', 'virginica'], dtype='cost:\n", 186 | " minimumCost=cost\n", 187 | " costToChildNodeListDict[minimumCost]=nodeList \n", 188 | "# set the Minimum Cost child node/s\n", 189 | " \n", 190 | " \n", 191 | " return minimumCost, costToChildNodeListDict[minimumCost] \n", 192 | " # return Minimum Cost and Minimum Cost child node/s\n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " def aoStar(self, v, backTracking): \n", 197 | "# AO* algorithm for a start node and backTracking status flag\n", 198 | " \n", 199 | " print(\"HEURISTIC VALUES :\", self.H)\n", 200 | " print(\"SOLUTION GRAPH :\", self.solutionGraph)\n", 201 | " print(\"PROCESSING NODE :\", v)\n", 202 | " print(\"--------------------------------------------------------------------\")\n", 203 | " \n", 204 | " if self.getStatus(v) >= 0: \n", 205 | " # if status node v >= 0, compute Minimum Cost nodes of v\n", 206 | " minimumCost, childNodeList = self.computeMinimumCostChildNodes(v)\n", 207 | " self.setHeuristicNodeValue(v, minimumCost)\n", 208 | " self.setStatus(v,len(childNodeList))\n", 209 | " \n", 210 | " solved=True \n", 211 | "# check the Minimum Cost nodes of v are solved \n", 212 | " for childNode in childNodeList:\n", 213 | " self.parent[childNode]=v\n", 214 | " if self.getStatus(childNode)!=-1:\n", 215 | " solved=solved & False\n", 216 | " \n", 217 | " if solved==True: \n", 218 | " # if the Minimum Cost nodes of v are solved, set the current node status as solved(-1)\n", 219 | " self.setStatus(v,-1) \n", 220 | " self.solutionGraph[v]=childNodeList \n", 221 | "# update the solution graph with the solved nodes which may be a part of solution \n", 222 | " \n", 223 | " \n", 224 | " if v!=self.start: \n", 225 | " # check the current node is the start node for backtracking the current node value \n", 226 | " self.aoStar(self.parent[v], True) \n", 227 | "# backtracking the current node value with backtracking status set to true\n", 228 | " 
\n", 229 | " if backTracking==False: \n", 230 | " # check the current call is not for backtracking \n", 231 | " for childNode in childNodeList: \n", 232 | " # for each Minimum Cost child node\n", 233 | " self.setStatus(childNode,0) \n", 234 | "# set the status of child node to 0(needs exploration)\n", 235 | " self.aoStar(childNode, False)\n", 236 | " # Minimum Cost child node is further explored with backtracking status as false\n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | "h1 = {'A': 1, 'B': 6, 'C': 2, 'D': 12, 'E': 2, 'F': 1, 'G': 5, 'H': 7, 'I': 7, 'J': 1, 'T': 3}\n", 241 | "graph1 = {\n", 242 | " 'A': [[('B', 1), ('C', 1)], [('D', 1)]],\n", 243 | " 'B': [[('G', 1)], [('H', 1)]],\n", 244 | " 'C': [[('J', 1)]],\n", 245 | " 'D': [[('E', 1), ('F', 1)]],\n", 246 | " 'G': [[('I', 1)]] \n", 247 | "}\n", 248 | "G1= Graph(graph1, h1, 'A')\n", 249 | "G1.applyAOStar() \n", 250 | "G1.printSolution()\n", 251 | " \n", 252 | "h2 = {'A': 1, 'B': 6, 'C': 12, 'D': 10, 'E': 4, 'F': 4, 'G': 5, 'H': 7} # Heuristic values of Nodes \n", 253 | "graph2 = { # Graph of Nodes and Edges \n", 254 | " 'A': [[('B', 1), ('C', 1)], [('D', 1)]], # Neighbors of Node 'A', B, C & D with repective weights \n", 255 | " 'B': [[('G', 1)], [('H', 1)]], # Neighbors are included in a list of lists\n", 256 | " 'D': [[('E', 1), ('F', 1)]] # Each sublist indicate a \"OR\" node or \"AND\" nodes\n", 257 | "}\n", 258 | " \n", 259 | "G2 = Graph(graph2, h2, 'A') # Instantiate Graph object with graph, heuristic values and start Node\n", 260 | "G2.applyAOStar() # Run the AO* algorithm\n", 261 | "G2.printSolution() # Print the solution graph as output of the AO* algorithm search\n" 262 | ] 263 | }, 264 | { 265 | "cell_type": "code", 266 | "execution_count": null, 267 | "id": "ad614e4a", 268 | "metadata": {}, 269 | "outputs": [], 270 | "source": [] 271 | } 272 | ], 273 | "metadata": { 274 | "kernelspec": { 275 | "display_name": "Python 3 (ipykernel)", 276 | "language": "python", 277 | "name": "python3" 278 | }, 279 | "language_info": { 280 | "codemirror_mode": { 281 | "name": "ipython", 282 | "version": 3 283 | }, 284 | "file_extension": ".py", 285 | "mimetype": "text/x-python", 286 | "name": "python", 287 | "nbconvert_exporter": "python", 288 | "pygments_lexer": "ipython3", 289 | "version": "3.9.7" 290 | } 291 | }, 292 | "nbformat": 4, 293 | "nbformat_minor": 5 294 | } 295 | -------------------------------------------------------------------------------- /5. 
BackPropagation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "e4da963c", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "**********************\n", 14 | "iteration: 0 :::: [[ 0.0720921 -0.94906383]]\n", 15 | "###output######## [[0.9279079 0.94906383]]\n", 16 | "**********************\n", 17 | "iteration: 1 :::: [[ 0.07203592 -0.94865304]]\n", 18 | "###output######## [[0.92796408 0.94865304]]\n", 19 | "**********************\n", 20 | "iteration: 2 :::: [[ 0.07197988 -0.94823618]]\n", 21 | "###output######## [[0.92802012 0.94823618]]\n", 22 | "**********************\n", 23 | "iteration: 3 :::: [[ 0.07192399 -0.94781309]]\n", 24 | "###output######## [[0.92807601 0.94781309]]\n", 25 | "**********************\n", 26 | "iteration: 4 :::: [[ 0.07186825 -0.94738367]]\n", 27 | "###output######## [[0.92813175 0.94738367]]\n", 28 | "**********************\n", 29 | "iteration: 5 :::: [[ 0.07181265 -0.94694777]]\n", 30 | "###output######## [[0.92818735 0.94694777]]\n", 31 | "**********************\n", 32 | "iteration: 6 :::: [[ 0.07175721 -0.94650525]]\n", 33 | "###output######## [[0.92824279 0.94650525]]\n", 34 | "**********************\n", 35 | "iteration: 7 :::: [[ 0.0717019 -0.94605598]]\n", 36 | "###output######## [[0.9282981 0.94605598]]\n", 37 | "**********************\n", 38 | "iteration: 8 :::: [[ 0.07164675 -0.9455998 ]]\n", 39 | "###output######## [[0.92835325 0.9455998 ]]\n", 40 | "**********************\n", 41 | "iteration: 9 :::: [[ 0.07159173 -0.94513658]]\n", 42 | "###output######## [[0.92840827 0.94513658]]\n", 43 | "**********************\n", 44 | "iteration: 10 :::: [[ 0.07153687 -0.94466616]]\n", 45 | "###output######## [[0.92846313 0.94466616]]\n", 46 | "**********************\n", 47 | "iteration: 11 :::: [[ 0.07148214 -0.94418838]]\n", 48 | "###output######## [[0.92851786 0.94418838]]\n", 49 | "**********************\n", 50 | "iteration: 12 :::: [[ 0.07142756 -0.94370307]]\n", 51 | "###output######## [[0.92857244 0.94370307]]\n", 52 | "**********************\n", 53 | "iteration: 13 :::: [[ 0.07137313 -0.94321009]]\n", 54 | "###output######## [[0.92862687 0.94321009]]\n", 55 | "**********************\n", 56 | "iteration: 14 :::: [[ 0.07131884 -0.94270924]]\n", 57 | "###output######## [[0.92868116 0.94270924]]\n", 58 | "**********************\n", 59 | "iteration: 15 :::: [[ 0.07126469 -0.94220037]]\n", 60 | "###output######## [[0.92873531 0.94220037]]\n", 61 | "**********************\n", 62 | "iteration: 16 :::: [[ 0.07121068 -0.94168329]]\n", 63 | "###output######## [[0.92878932 0.94168329]]\n", 64 | "**********************\n", 65 | "iteration: 17 :::: [[ 0.07115681 -0.94115781]]\n", 66 | "###output######## [[0.92884319 0.94115781]]\n", 67 | "**********************\n", 68 | "iteration: 18 :::: [[ 0.07110309 -0.94062375]]\n", 69 | "###output######## [[0.92889691 0.94062375]]\n", 70 | "**********************\n", 71 | "iteration: 19 :::: [[ 0.0710495 -0.94008092]]\n", 72 | "###output######## [[0.9289505 0.94008092]]\n", 73 | "**********************\n", 74 | "iteration: 20 :::: [[ 0.07099606 -0.9395291 ]]\n", 75 | "###output######## [[0.92900394 0.9395291 ]]\n", 76 | "**********************\n", 77 | "iteration: 21 :::: [[ 0.07094276 -0.93896809]]\n", 78 | "###output######## [[0.92905724 0.93896809]]\n", 79 | "**********************\n", 80 | "iteration: 22 :::: [[ 0.0708896 
-0.93839769]]\n", 81 | "###output######## [[0.9291104 0.93839769]]\n", 82 | "**********************\n", 83 | "iteration: 23 :::: [[ 0.07083657 -0.93781767]]\n", 84 | "###output######## [[0.92916343 0.93781767]]\n", 85 | "**********************\n", 86 | "iteration: 24 :::: [[ 0.07078369 -0.93722781]]\n", 87 | "###output######## [[0.92921631 0.93722781]]\n", 88 | "**********************\n", 89 | "iteration: 25 :::: [[ 0.07073094 -0.93662788]]\n", 90 | "###output######## [[0.92926906 0.93662788]]\n", 91 | "**********************\n", 92 | "iteration: 26 :::: [[ 0.07067833 -0.93601765]]\n", 93 | "###output######## [[0.92932167 0.93601765]]\n", 94 | "**********************\n", 95 | "iteration: 27 :::: [[ 0.07062586 -0.93539685]]\n", 96 | "###output######## [[0.92937414 0.93539685]]\n", 97 | "**********************\n", 98 | "iteration: 28 :::: [[ 0.07057353 -0.93476526]]\n", 99 | "###output######## [[0.92942647 0.93476526]]\n", 100 | "**********************\n", 101 | "iteration: 29 :::: [[ 0.07052133 -0.93412259]]\n", 102 | "###output######## [[0.92947867 0.93412259]]\n", 103 | "**********************\n", 104 | "iteration: 30 :::: [[ 0.07046927 -0.9334686 ]]\n", 105 | "###output######## [[0.92953073 0.9334686 ]]\n", 106 | "**********************\n", 107 | "iteration: 31 :::: [[ 0.07041735 -0.932803 ]]\n", 108 | "###output######## [[0.92958265 0.932803 ]]\n", 109 | "**********************\n", 110 | "iteration: 32 :::: [[ 0.07036556 -0.93212552]]\n", 111 | "###output######## [[0.92963444 0.93212552]]\n", 112 | "**********************\n", 113 | "iteration: 33 :::: [[ 0.0703139 -0.93143586]]\n", 114 | "###output######## [[0.9296861 0.93143586]]\n", 115 | "**********************\n", 116 | "iteration: 34 :::: [[ 0.07026238 -0.93073371]]\n", 117 | "###output######## [[0.92973762 0.93073371]]\n", 118 | "**********************\n", 119 | "iteration: 35 :::: [[ 0.070211 -0.93001879]]\n", 120 | "###output######## [[0.929789 0.93001879]]\n", 121 | "**********************\n", 122 | "iteration: 36 :::: [[ 0.07015975 -0.92929075]]\n", 123 | "###output######## [[0.92984025 0.92929075]]\n", 124 | "**********************\n", 125 | "iteration: 37 :::: [[ 0.07010863 -0.92854929]]\n", 126 | "###output######## [[0.92989137 0.92854929]]\n", 127 | "**********************\n", 128 | "iteration: 38 :::: [[ 0.07005764 -0.92779406]]\n", 129 | "###output######## [[0.92994236 0.92779406]]\n", 130 | "**********************\n", 131 | "iteration: 39 :::: [[ 0.07000679 -0.92702471]]\n", 132 | "###output######## [[0.92999321 0.92702471]]\n", 133 | "**********************\n", 134 | "iteration: 40 :::: [[ 0.06995607 -0.92624088]]\n", 135 | "###output######## [[0.93004393 0.92624088]]\n", 136 | "**********************\n", 137 | "iteration: 41 :::: [[ 0.06990548 -0.92544221]]\n", 138 | "###output######## [[0.93009452 0.92544221]]\n", 139 | "**********************\n", 140 | "iteration: 42 :::: [[ 0.06985502 -0.92462832]]\n", 141 | "###output######## [[0.93014498 0.92462832]]\n", 142 | "**********************\n", 143 | "iteration: 43 :::: [[ 0.0698047 -0.92379882]]\n", 144 | "###output######## [[0.9301953 0.92379882]]\n", 145 | "**********************\n", 146 | "iteration: 44 :::: [[ 0.0697545 -0.92295329]]\n", 147 | "###output######## [[0.9302455 0.92295329]]\n", 148 | "**********************\n", 149 | "iteration: 45 :::: [[ 0.06970443 -0.92209133]]\n", 150 | "###output######## [[0.93029557 0.92209133]]\n", 151 | "**********************\n", 152 | "iteration: 46 :::: [[ 0.06965449 -0.9212125 ]]\n", 153 | "###output######## [[0.93034551 
0.9212125 ]]\n", 154 | "**********************\n", 155 | "iteration: 47 :::: [[ 0.06960468 -0.92031637]]\n", 156 | "###output######## [[0.93039532 0.92031637]]\n", 157 | "**********************\n", 158 | "iteration: 48 :::: [[ 0.069555 -0.91940247]]\n", 159 | "###output######## [[0.930445 0.91940247]]\n", 160 | "**********************\n", 161 | "iteration: 49 :::: [[ 0.06950544 -0.91847034]]\n", 162 | "###output######## [[0.93049456 0.91847034]]\n", 163 | "**********************\n", 164 | "iteration: 5951 :::: [[ 0.02082056 -0.02234698]]\n", 165 | "###output######## [[0.97917944 0.02234698]]\n", 166 | "**********************\n", 167 | "iteration: 5952 :::: [[ 0.02081891 -0.02234494]]\n", 168 | "###output######## [[0.97918109 0.02234494]]\n", 169 | "**********************\n", 170 | "iteration: 5953 :::: [[ 0.02081726 -0.0223429 ]]\n", 171 | "###output######## [[0.97918274 0.0223429 ]]\n", 172 | "**********************\n", 173 | "iteration: 5954 :::: [[ 0.02081561 -0.02234086]]\n", 174 | "###output######## [[0.97918439 0.02234086]]\n", 175 | "**********************\n", 176 | "iteration: 5955 :::: [[ 0.02081396 -0.02233882]]\n", 177 | "###output######## [[0.97918604 0.02233882]]\n", 178 | "**********************\n", 179 | "iteration: 5956 :::: [[ 0.02081231 -0.02233678]]\n", 180 | "###output######## [[0.97918769 0.02233678]]\n", 181 | "**********************\n", 182 | "iteration: 5957 :::: [[ 0.02081067 -0.02233475]]\n", 183 | "###output######## [[0.97918933 0.02233475]]\n", 184 | "**********************\n", 185 | "iteration: 5958 :::: [[ 0.02080902 -0.02233271]]\n", 186 | "###output######## [[0.97919098 0.02233271]]\n", 187 | "**********************\n", 188 | "iteration: 5959 :::: [[ 0.02080737 -0.02233067]]\n", 189 | "###output######## [[0.97919263 0.02233067]]\n", 190 | "**********************\n", 191 | "iteration: 5960 :::: [[ 0.02080572 -0.02232864]]\n", 192 | "###output######## [[0.97919428 0.02232864]]\n", 193 | "**********************\n", 194 | "iteration: 5961 :::: [[ 0.02080408 -0.0223266 ]]\n", 195 | "###output######## [[0.97919592 0.0223266 ]]\n", 196 | "**********************\n", 197 | "iteration: 5962 :::: [[ 0.02080243 -0.02232457]]\n", 198 | "###output######## [[0.97919757 0.02232457]]\n", 199 | "**********************\n", 200 | "iteration: 5963 :::: [[ 0.02080079 -0.02232253]]\n", 201 | "###output######## [[0.97919921 0.02232253]]\n", 202 | "**********************\n", 203 | "iteration: 5964 :::: [[ 0.02079914 -0.0223205 ]]\n", 204 | "###output######## [[0.97920086 0.0223205 ]]\n", 205 | "**********************\n", 206 | "iteration: 5965 :::: [[ 0.0207975 -0.02231846]]\n", 207 | "###output######## [[0.9792025 0.02231846]]\n", 208 | "**********************\n", 209 | "iteration: 5966 :::: [[ 0.02079585 -0.02231643]]\n", 210 | "###output######## [[0.97920415 0.02231643]]\n", 211 | "**********************\n", 212 | "iteration: 5967 :::: [[ 0.02079421 -0.0223144 ]]\n", 213 | "###output######## [[0.97920579 0.0223144 ]]\n", 214 | "**********************\n", 215 | "iteration: 5968 :::: [[ 0.02079256 -0.02231237]]\n", 216 | "###output######## [[0.97920744 0.02231237]]\n", 217 | "**********************\n", 218 | "iteration: 5969 :::: [[ 0.02079092 -0.02231034]]\n", 219 | "###output######## [[0.97920908 0.02231034]]\n", 220 | "**********************\n", 221 | "iteration: 5970 :::: [[ 0.02078928 -0.02230831]]\n", 222 | "###output######## [[0.97921072 0.02230831]]\n", 223 | "**********************\n", 224 | "iteration: 5971 :::: [[ 0.02078763 -0.02230628]]\n", 225 | "###output######## 
[[0.97921237 0.02230628]]\n", 226 | "**********************\n", 227 | "iteration: 5972 :::: [[ 0.02078599 -0.02230425]]\n", 228 | "###output######## [[0.97921401 0.02230425]]\n", 229 | "**********************\n", 230 | "iteration: 5973 :::: [[ 0.02078435 -0.02230222]]\n", 231 | "###output######## [[0.97921565 0.02230222]]\n", 232 | "**********************\n", 233 | "iteration: 5974 :::: [[ 0.02078271 -0.02230019]]\n", 234 | "###output######## [[0.97921729 0.02230019]]\n", 235 | "**********************\n", 236 | "iteration: 5975 :::: [[ 0.02078107 -0.02229816]]\n", 237 | "###output######## [[0.97921893 0.02229816]]\n", 238 | "**********************\n", 239 | "iteration: 5976 :::: [[ 0.02077943 -0.02229613]]\n", 240 | "###output######## [[0.97922057 0.02229613]]\n", 241 | "**********************\n", 242 | "iteration: 5977 :::: [[ 0.02077779 -0.02229411]]\n", 243 | "###output######## [[0.97922221 0.02229411]]\n", 244 | "**********************\n", 245 | "iteration: 5978 :::: [[ 0.02077615 -0.02229208]]\n", 246 | "###output######## [[0.97922385 0.02229208]]\n", 247 | "**********************\n", 248 | "iteration: 5979 :::: [[ 0.02077451 -0.02229005]]\n", 249 | "###output######## [[0.97922549 0.02229005]]\n", 250 | "**********************\n", 251 | "iteration: 5980 :::: [[ 0.02077287 -0.02228803]]\n", 252 | "###output######## [[0.97922713 0.02228803]]\n", 253 | "**********************\n", 254 | "iteration: 5981 :::: [[ 0.02077123 -0.022286 ]]\n", 255 | "###output######## [[0.97922877 0.022286 ]]\n", 256 | "**********************\n", 257 | "iteration: 5982 :::: [[ 0.02076959 -0.02228398]]\n", 258 | "###output######## [[0.97923041 0.02228398]]\n", 259 | "**********************\n", 260 | "iteration: 5983 :::: [[ 0.02076795 -0.02228196]]\n", 261 | "###output######## [[0.97923205 0.02228196]]\n", 262 | "**********************\n", 263 | "iteration: 5984 :::: [[ 0.02076631 -0.02227993]]\n", 264 | "###output######## [[0.97923369 0.02227993]]\n", 265 | "**********************\n", 266 | "iteration: 5985 :::: [[ 0.02076468 -0.02227791]]\n", 267 | "###output######## [[0.97923532 0.02227791]]\n", 268 | "**********************\n", 269 | "iteration: 5986 :::: [[ 0.02076304 -0.02227589]]\n", 270 | "###output######## [[0.97923696 0.02227589]]\n", 271 | "**********************\n", 272 | "iteration: 5987 :::: [[ 0.0207614 -0.02227387]]\n", 273 | "###output######## [[0.9792386 0.02227387]]\n", 274 | "**********************\n", 275 | "iteration: 5988 :::: [[ 0.02075977 -0.02227185]]\n", 276 | "###output######## [[0.97924023 0.02227185]]\n", 277 | "**********************\n", 278 | "iteration: 5989 :::: [[ 0.02075813 -0.02226983]]\n", 279 | "###output######## [[0.97924187 0.02226983]]\n", 280 | "**********************\n", 281 | "iteration: 5990 :::: [[ 0.0207565 -0.02226781]]\n", 282 | "###output######## [[0.9792435 0.02226781]]\n", 283 | "**********************\n", 284 | "iteration: 5991 :::: [[ 0.02075486 -0.02226579]]\n", 285 | "###output######## [[0.97924514 0.02226579]]\n", 286 | "**********************\n", 287 | "iteration: 5992 :::: [[ 0.02075323 -0.02226377]]\n", 288 | "###output######## [[0.97924677 0.02226377]]\n", 289 | "**********************\n", 290 | "iteration: 5993 :::: [[ 0.02075159 -0.02226175]]\n", 291 | "###output######## [[0.97924841 0.02226175]]\n", 292 | "**********************\n", 293 | "iteration: 5994 :::: [[ 0.02074996 -0.02225973]]\n", 294 | "###output######## [[0.97925004 0.02225973]]\n", 295 | "**********************\n", 296 | "iteration: 5995 :::: [[ 0.02074833 -0.02225772]]\n", 297 | 
"###output######## [[0.97925167 0.02225772]]\n", 298 | "**********************\n", 299 | "iteration: 5996 :::: [[ 0.02074669 -0.0222557 ]]\n", 300 | "###output######## [[0.97925331 0.0222557 ]]\n", 301 | "**********************\n", 302 | "iteration: 5997 :::: [[ 0.02074506 -0.02225368]]\n", 303 | "###output######## [[0.97925494 0.02225368]]\n", 304 | "**********************\n", 305 | "iteration: 5998 :::: [[ 0.02074343 -0.02225167]]\n", 306 | "###output######## [[0.97925657 0.02225167]]\n", 307 | "**********************\n", 308 | "iteration: 5999 :::: [[ 0.0207418 -0.02224965]]\n", 309 | "###output######## [[0.9792582 0.02224965]]\n" 310 | ] 311 | } 312 | ], 313 | "source": [ 314 | "import numpy as np \n", 315 | "\n", 316 | "inputNeurons=2 \n", 317 | "hiddenlayerNeurons=4 \n", 318 | "outputNeurons=2 \n", 319 | "iteration=6000\n", 320 | "\n", 321 | "input = np.random.randint(1,5,inputNeurons) \n", 322 | "output = np.array([1.0,0.0]) \n", 323 | "hidden_layer=np.random.rand(1,hiddenlayerNeurons)\n", 324 | "\n", 325 | "hidden_biass=np.random.rand(1,hiddenlayerNeurons) \n", 326 | "output_bias=np.random.rand(1,outputNeurons) \n", 327 | "hidden_weights=np.random.rand(inputNeurons,hiddenlayerNeurons) \n", 328 | "output_weights=np.random.rand(hiddenlayerNeurons,outputNeurons)\n", 329 | "\n", 330 | "def sigmoid (layer):\n", 331 | " return 1/(1 + np.exp(-layer))\n", 332 | "\n", 333 | "\n", 334 | "def gradient(layer): \n", 335 | " return layer*(1-layer)\n", 336 | "\n", 337 | "for i in range(iteration):\n", 338 | "\n", 339 | " hidden_layer=np.dot(input,hidden_weights) \n", 340 | " hidden_layer=sigmoid(hidden_layer+hidden_biass)\n", 341 | "\n", 342 | " output_layer=np.dot(hidden_layer,output_weights) \n", 343 | " output_layer=sigmoid(output_layer+output_bias)\n", 344 | "\n", 345 | " error = (output-output_layer) \n", 346 | " gradient_outputLayer=gradient(output_layer)\n", 347 | " error_terms_output=gradient_outputLayer * error \n", 348 | " error_terms_hidden=gradient(hidden_layer)*np.dot(error_terms_output,output_weights.T)\n", 349 | "\n", 350 | " gradient_hidden_weights = np.dot(input.reshape(inputNeurons,1),error_terms_hidden.reshape(1,hiddenlayerNeurons))\n", 351 | " gradient_ouput_weights = np.dot(hidden_layer.reshape(hiddenlayerNeurons,1),error_terms_output.reshape(1,outputNeurons))\n", 352 | "\n", 353 | " hidden_weights = hidden_weights + 0.05*gradient_hidden_weights \n", 354 | " output_weights = output_weights + 0.05*gradient_ouput_weights \n", 355 | " if i<50 or i>iteration-50:\n", 356 | " print(\"**********************\") \n", 357 | " print(\"iteration:\",i,\"::::\",error) \n", 358 | " print(\"###output########\",output_layer)" 359 | ] 360 | }, 361 | { 362 | "cell_type": "code", 363 | "execution_count": null, 364 | "id": "eaf6b602", 365 | "metadata": {}, 366 | "outputs": [], 367 | "source": [] 368 | } 369 | ], 370 | "metadata": { 371 | "kernelspec": { 372 | "display_name": "Python 3 (ipykernel)", 373 | "language": "python", 374 | "name": "python3" 375 | }, 376 | "language_info": { 377 | "codemirror_mode": { 378 | "name": "ipython", 379 | "version": 3 380 | }, 381 | "file_extension": ".py", 382 | "mimetype": "text/x-python", 383 | "name": "python", 384 | "nbconvert_exporter": "python", 385 | "pygments_lexer": "ipython3", 386 | "version": "3.9.7" 387 | } 388 | }, 389 | "nbformat": 4, 390 | "nbformat_minor": 5 391 | } 392 | -------------------------------------------------------------------------------- /9. 
S-LinearRegression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "id": "a09e26ca", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "data": { 11 | "image/png": "[base64 PNG data elided: matplotlib scatter plot of the noisy sample points with the fitted locally weighted regression curve drawn in red]\n", 12 | "text/plain": [ 13 | "
" 14 | ] 15 | }, 16 | "metadata": { 17 | "needs_background": "light" 18 | }, 19 | "output_type": "display_data" 20 | } 21 | ], 22 | "source": [ 23 | "import numpy as np\n", 24 | "import matplotlib.pyplot as plt\n", 25 | "\n", 26 | "x = np.linspace(-5, 5, 1000)\n", 27 | "y = np.log(np.abs((x ** 2) - 1) + 0.5)\n", 28 | "x = x + np.random.normal(scale=0.05, size=1000) \n", 29 | "plt.scatter(x, y, alpha=0.3)\n", 30 | "def local_regression(x0, x, y, tau): \n", 31 | " x0 = np.r_[1, x0]\n", 32 | " x = np.c_[np.ones(len(x)), x]\n", 33 | " xw =x.T * radial_kernel(x0, x, tau) \n", 34 | " beta = np.linalg.pinv(xw @ x) @ xw @ y \n", 35 | " return x0 @ beta\n", 36 | "\n", 37 | "\n", 38 | "def radial_kernel(x0, x, tau):\n", 39 | " return np.exp(np.sum((x - x0) ** 2, axis=1) / (-2 * tau ** 2))\n", 40 | "\n", 41 | "\n", 42 | "def plot_lr(tau):\n", 43 | " domain = np.linspace(-5, 5, num=500)\n", 44 | " pred = [local_regression(x0, x, y, tau) for x0 in domain] \n", 45 | " plt.scatter(x, y, alpha=0.3)\n", 46 | " plt.plot(domain, pred, color=\"red\") \n", 47 | " return plt\n", 48 | "\n", 49 | "\n", 50 | "plot_lr(1).show()\n" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "id": "a1224b0d", 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [] 60 | } 61 | ], 62 | "metadata": { 63 | "kernelspec": { 64 | "display_name": "Python 3 (ipykernel)", 65 | "language": "python", 66 | "name": "python3" 67 | }, 68 | "language_info": { 69 | "codemirror_mode": { 70 | "name": "ipython", 71 | "version": 3 72 | }, 73 | "file_extension": ".py", 74 | "mimetype": "text/x-python", 75 | "name": "python", 76 | "nbconvert_exporter": "python", 77 | "pygments_lexer": "ipython3", 78 | "version": "3.9.7" 79 | } 80 | }, 81 | "nbformat": 4, 82 | "nbformat_minor": 5 83 | } 84 | --------------------------------------------------------------------------------