├── .gitattributes ├── .ipynb_checkpoints ├── Chapter 1-checkpoint.ipynb ├── Chapter 2-checkpoint.ipynb ├── Chapter 3-checkpoint.ipynb ├── Chapter 4-checkpoint.ipynb ├── Chapter 5-checkpoint.ipynb ├── Chapter 6-checkpoint.ipynb └── Chapter 7-checkpoint.ipynb ├── 9781484253601.jpg ├── Chapter 1.ipynb ├── Chapter 2.ipynb ├── Chapter 3.ipynb ├── Chapter 4.ipynb ├── Chapter 5.ipynb ├── Chapter 6.ipynb ├── Chapter 7.ipynb ├── Contributing.md ├── LICENSE.txt ├── README.md ├── errata.md ├── hp_train.csv ├── titanic_test.csv └── titanic_train.csv /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Chapter 1-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Basic Set Operation" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stdout", 17 | "output_type": "stream", 18 | "text": [ 19 | "Union : {0, 1, 2, 3, 4, 5, 6, 8}\n", 20 | "Intersection : {2, 4}\n", 21 | "Difference : {0, 8, 6}\n" 22 | ] 23 | } 24 | ], 25 | "source": [ 26 | "# Example Sets \n", 27 | "A = {0, 2, 4, 6, 8}\n", 28 | "B = {1, 2, 3, 4, 5} \n", 29 | " \n", 30 | "# union of above sets \n", 31 | "print(\"Union :\", A | B) \n", 32 | " \n", 33 | "# intersection of above sets\n", 34 | "print(\"Intersection :\", A & B) \n", 35 | " \n", 36 | "# difference between above sets\n", 37 | "print(\"Difference :\", A - B)" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "# Trapezoidal Membership Function" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "import numpy as np\n", 54 | "import skfuzzy as sk\n", 55 | "\n", 56 | "#Defining the Numpy array for Tip Quality\n", 57 | "x_qual = np.arange(0, 11, 1)\n", 58 | "\n", 59 | "#Defining the Numpy array for Trapezoidal membership functions\n", 60 | "qual_lo = sk.trapmf(x_qual, [0, 0, 5,5])" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "# Gaussian Membership Function" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 3, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "import numpy as np\n", 77 | "import skfuzzy as sk\n", 78 | "\n", 79 | "#Defining the Numpy array for Tip Quality\n", 80 | "x_qual = np.arange(0, 11, 1)\n", 81 | "\n", 82 | "#Defining the Numpy array for Gaussian membership functions\n", 83 | "qual_lo = sk.gaussmf(x_qual, np.mean(x_qual), np.std(x_qual))\n" 84 | ] 85 | }, 86 | { 87 | "cell_type": "markdown", 88 | "metadata": {}, 89 | "source": [ 90 | "# Generalized Bell membership function" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 4, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "import numpy as np\n", 100 | "import skfuzzy as sk\n", 101 | "\n", 102 | "#Defining the Numpy array for Tip Quality\n", 103 | "x_qual = np.arange(0, 11, 1)\n", 104 | "\n", 105 | "#Defining the Numpy array for Generalized Bell membership functions\n", 106 | "qual_lo = sk.gbellmf(x_qual, 0.5, 0.5, 0.5)\n" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | 
"# Sigmoid Membership Function" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 5, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "import numpy as np\n", 123 | "import skfuzzy as sk\n", 124 | "\n", 125 | "#Defining the Numpy array for Tip Quality\n", 126 | "x_qual = np.arange(0, 11, 1)\n", 127 | "\n", 128 | "#Defining the Numpy array for Sigmoid membership functions\n", 129 | "qual_lo = sk.sigmf(x_qual, 0.5,0.5)\n" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "# Fuzzy OR Operation" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": 7, 142 | "metadata": {}, 143 | "outputs": [ 144 | { 145 | "data": { 146 | "text/plain": [ 147 | "(array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),\n", 148 | " array([1. , 0.8, 0.6, 0.6, 0.8, 1. , 0.8, 0.6, 0.4, 0.2, 0. ]))" 149 | ] 150 | }, 151 | "execution_count": 7, 152 | "metadata": {}, 153 | "output_type": "execute_result" 154 | } 155 | ], 156 | "source": [ 157 | "import skfuzzy as sk\n", 158 | "import numpy as np\n", 159 | "\n", 160 | "#Defining the Numpy array for Tip Quality\n", 161 | "x_qual = np.arange(0, 11, 1)\n", 162 | "\n", 163 | "#Defining the Numpy array for two membership functions (Triangular)\n", 164 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 165 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 166 | "\n", 167 | "#Finding the Maximum (Fuzzy Or)\n", 168 | "sk.fuzzy_or(x_qual,qual_lo,x_qual,qual_md)\n" 169 | ] 170 | }, 171 | { 172 | "cell_type": "markdown", 173 | "metadata": {}, 174 | "source": [ 175 | "# Fuzzy AND Operation" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": 8, 181 | "metadata": {}, 182 | "outputs": [ 183 | { 184 | "data": { 185 | "text/plain": [ 186 | "(array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),\n", 187 | " array([0. , 0.2, 0.4, 0.4, 0.2, 0. , 0. , 0. , 0. , 0. , 0. ]))" 188 | ] 189 | }, 190 | "execution_count": 8, 191 | "metadata": {}, 192 | "output_type": "execute_result" 193 | } 194 | ], 195 | "source": [ 196 | "import skfuzzy as sk\n", 197 | "import numpy as np\n", 198 | "\n", 199 | "#Defining the Numpy array for Tip Quality\n", 200 | "x_qual = np.arange(0, 11, 1)\n", 201 | "\n", 202 | "#Defining the Numpy array for two membership functions (Triangular)\n", 203 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 204 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 205 | "\n", 206 | "#Finding the Minimum (Fuzzy AND)\n", 207 | "sk.fuzzy_and(x_qual,qual_lo,x_qual,qual_md)\n" 208 | ] 209 | }, 210 | { 211 | "cell_type": "markdown", 212 | "metadata": {}, 213 | "source": [ 214 | "# Fuzzy NOT Operation" 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": 9, 220 | "metadata": {}, 221 | "outputs": [ 222 | { 223 | "data": { 224 | "text/plain": [ 225 | "array([0. , 0.2, 0.4, 0.6, 0.8, 1. , 1. , 1. , 1. , 1. , 1. 
])" 226 | ] 227 | }, 228 | "execution_count": 9, 229 | "metadata": {}, 230 | "output_type": "execute_result" 231 | } 232 | ], 233 | "source": [ 234 | "import skfuzzy as sk\n", 235 | "import numpy as np\n", 236 | "\n", 237 | "#Defining the Numpy array for Tip Quality\n", 238 | "x_qual = np.arange(0, 11, 1)\n", 239 | "\n", 240 | "#Defining the Numpy array for two membership functions (Triangular)\n", 241 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 242 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 243 | "\n", 244 | "#Finding the Complement (Fuzzy NOT)\n", 245 | "sk.fuzzy_not(qual_lo)\n" 246 | ] 247 | }, 248 | { 249 | "cell_type": "markdown", 250 | "metadata": {}, 251 | "source": [ 252 | "# Fuzzy Cartesian Operation" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": 11, 258 | "metadata": {}, 259 | "outputs": [ 260 | { 261 | "data": { 262 | "text/plain": [ 263 | "array([[0. , 0.2, 0.4, 0.6, 0.8, 1. , 0.8, 0.6, 0.4, 0.2, 0. ],\n", 264 | " [0. , 0.2, 0.4, 0.6, 0.8, 0.8, 0.8, 0.6, 0.4, 0.2, 0. ],\n", 265 | " [0. , 0.2, 0.4, 0.6, 0.6, 0.6, 0.6, 0.6, 0.4, 0.2, 0. ],\n", 266 | " [0. , 0.2, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.2, 0. ],\n", 267 | " [0. , 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0. ],\n", 268 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 269 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 270 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 271 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 272 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 273 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ]])" 274 | ] 275 | }, 276 | "execution_count": 11, 277 | "metadata": {}, 278 | "output_type": "execute_result" 279 | } 280 | ], 281 | "source": [ 282 | "import skfuzzy as sk\n", 283 | "import numpy as np\n", 284 | "\n", 285 | "#Defining the Numpy array for Tip Quality\n", 286 | "x_qual = np.arange(0, 11, 1)\n", 287 | "\n", 288 | "#Defining the Numpy array for two membership functions (Triangular)\n", 289 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 290 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 291 | "\n", 292 | "#Finding the Product (Fuzzy Cartesian)\n", 293 | "sk.cartprod(qual_lo, qual_md)\n" 294 | ] 295 | }, 296 | { 297 | "cell_type": "markdown", 298 | "metadata": {}, 299 | "source": [ 300 | "# Fuzzy Subtract Operation" 301 | ] 302 | }, 303 | { 304 | "cell_type": "code", 305 | "execution_count": 13, 306 | "metadata": {}, 307 | "outputs": [ 308 | { 309 | "data": { 310 | "text/plain": [ 311 | "(array([-10., -9., -8., -7., -6., -5., -4., -3., -2., -1., 0.,\n", 312 | " 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.]),\n", 313 | " array([0. , 0.2, 0.4, 0.6, 0.8, 1. , 0.8, 0.8, 0.6, 0.6, 0.4, 0.4, 0.2,\n", 314 | " 0.2, 0. , 0. , 0. , 0. , 0. , 0. , 0. 
]))" 315 | ] 316 | }, 317 | "execution_count": 13, 318 | "metadata": {}, 319 | "output_type": "execute_result" 320 | } 321 | ], 322 | "source": [ 323 | "import skfuzzy as sk\n", 324 | "import numpy as np\n", 325 | "\n", 326 | "#Defining the Numpy array for Tip Quality\n", 327 | "x_qual = np.arange(0, 11, 1)\n", 328 | "\n", 329 | "#Defining the Numpy array for two membership functions (Triangular)\n", 330 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 331 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 332 | "\n", 333 | "#Finding the Difference (Fuzzy Subtract)\n", 334 | "sk.fuzzy_sub(x_qual,qual_lo,x_qual,qual_md)\n" 335 | ] 336 | } 337 | ], 338 | "metadata": { 339 | "kernelspec": { 340 | "display_name": "ADP", 341 | "language": "python", 342 | "name": "adp" 343 | }, 344 | "language_info": { 345 | "codemirror_mode": { 346 | "name": "ipython", 347 | "version": 3 348 | }, 349 | "file_extension": ".py", 350 | "mimetype": "text/x-python", 351 | "name": "python", 352 | "nbconvert_exporter": "python", 353 | "pygments_lexer": "ipython3", 354 | "version": "3.7.4" 355 | } 356 | }, 357 | "nbformat": 4, 358 | "nbformat_minor": 2 359 | } 360 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Chapter 3-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Mamdani Fuzzy Inference System" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": { 14 | "scrolled": true 15 | }, 16 | "outputs": [], 17 | "source": [ 18 | "import fuzzylite as fl\n", 19 | "#Declaring and Initializing the Fuzzy Engine\n", 20 | "engine = fl.Engine(\n", 21 | "\tname=\"SimpleDimmer\",\n", 22 | "\tdescription=\"Simple Dimmer Fuzzy System which dims light based upon Light Conditions\"\n", 23 | ")\n", 24 | "#Defining the Input Variables (Fuzzification)\n", 25 | "engine.input_variables = [\n", 26 | "\tfl.InputVariable(\n", 27 | " \tname=\"Ambient\",\n", 28 | " \tdescription=\"\",\n", 29 | " \tenabled=True,\n", 30 | " \tminimum=0.000,\n", 31 | " \tmaximum=1.000,\n", 32 | " \tlock_range=False,\n", 33 | " \tterms=[\n", 34 | " \tfl.Triangle(\"DARK\", 0.000, 0.250, 0.500), #Triangular Membership Function defining “Dark”\n", 35 | " \tfl.Triangle(\"MEDIUM\", 0.250, 0.500, 0.750), #Triangular Membership Function defining “Medium”\n", 36 | " \tfl.Triangle(\"BRIGHT\", 0.500, 0.750, 1.000) #Triangular Membership Function defining “Bright”\n", 37 | " \t]\n", 38 | "\t)\n", 39 | "]\n", 40 | "#Defining the Output Variables (Defuzzification)\n", 41 | "engine.output_variables = [\n", 42 | "\tfl.OutputVariable(\n", 43 | " \tname=\"Power\",\n", 44 | " \tdescription=\"\",\n", 45 | " \tenabled=True,\n", 46 | " \tminimum=0.000,\n", 47 | " \tmaximum=1.000,\n", 48 | " \tlock_range=False,\n", 49 | " \taggregation=fl.Maximum(),\n", 50 | " \tdefuzzifier=fl.Centroid(200),\n", 51 | " \tlock_previous=False,\n", 52 | " \tterms=[\n", 53 | " \tfl.Triangle(\"LOW\", 0.000, 0.250, 0.500), #Triangular Membership Function defining “LOW Light”\n", 54 | " \tfl.Triangle(\"MEDIUM\", 0.250, 0.500, 0.750), #Triangular Membership Function defining “MEDIUM light”\n", 55 | " \tfl.Triangle(\"HIGH\", 0.500, 0.750, 1.000) #Triangular Membership Function defining “HIGH Light”\n", 56 | " \t]\n", 57 | "\t)\n", 58 | "]\n", 59 | "#Creation of Fuzzy Rule Base\n", 60 | "engine.rule_blocks = [\n", 61 | "\tfl.RuleBlock(\n", 62 | " \tname=\"\",\n", 63 | " 
\tdescription=\"\",\n", 64 | " \tenabled=True,\n", 65 | " \tconjunction=None,\n", 66 | " \tdisjunction=None,\n", 67 | " \timplication=fl.Minimum(),\n", 68 | " \tactivation=fl.General(),\n", 69 | " \trules=[\n", 70 | " \tfl.Rule.create(\"if Ambient is DARK then Power is HIGH\", engine),\n", 71 | " \tfl.Rule.create(\"if Ambient is MEDIUM then Power is MEDIUM\", engine),\n", 72 | " \tfl.Rule.create(\"if Ambient is BRIGHT then Power is LOW\", engine)\n", 73 | " \t]\n", 74 | "\t)\n", 75 | "]\n" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "# Takagi Sugeno Kang Fuzzy Inference System" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "import fuzzylite as fl\n", 92 | "#Declaring and Initializing the Fuzzy Engine\n", 93 | "engine = fl.Engine(\n", 94 | "\tname=\"SimpleDimmer\",\n", 95 | "\tdescription=\"Simple Dimmer Fuzzy System which dims light based upon Light Conditions\"\n", 96 | ")\n", 97 | "#Defining the Input Variables (Fuzzification)\n", 98 | "engine.input_variables = [\n", 99 | "\tfl.InputVariable(\n", 100 | " \tname=\"Ambient\",\n", 101 | " \tdescription=\"\",\n", 102 | " \tenabled=True,\n", 103 | " \tminimum=0.000,\n", 104 | " \tmaximum=1.000,\n", 105 | " \tlock_range=False,\n", 106 | " \tterms=[\n", 107 | " \tfl.Triangle(\"DARK\", 0.000, 0.250, 0.500), #Triangular Membership Function defining “Dark”\n", 108 | " \tfl.Triangle(\"MEDIUM\", 0.250, 0.500, 0.750), #Triangular Membership Function defining “Medium”\n", 109 | " \tfl.Triangle(\"BRIGHT\", 0.500, 0.750, 1.000) #Triangular Membership Function defining “Bright”\n", 110 | " \t]\n", 111 | "\t)\n", 112 | "]\n", 113 | "#Defining the Output Variables (Defuzzification)\n", 114 | "engine.output_variables = [\n", 115 | "\tfl.OutputVariable(\n", 116 | " \tname=\"Power\",\n", 117 | " \tdescription=\"\",\n", 118 | " \tenabled=True,\n", 119 | " \tminimum=0.000,\n", 120 | " \tmaximum=1.000,\n", 121 | " \tlock_range=False,\n", 122 | " \taggregation=None,\n", 123 | " \tdefuzzifier=fl.WeightedAverage(\"TakagiSugeno\"),\n", 124 | " \tlock_previous=False,\n", 125 | " \tterms=[\n", 126 | " \tfl.Constant(\"LOW\", 0.250), #Constant Membership Function defining “LOW”\n", 127 | " \tfl.Constant(\"MEDIUM\", 0.500), #Constant Membership Function defining “MEDIUM”\n", 128 | " \tfl.Constant(\"HIGH\", 0.750) #Constant Membership Function defining “HIGH”\n", 129 | " \t]\n", 130 | "\t)\n", 131 | "]\n", 132 | "#Creation of Fuzzy Rule Base\n", 133 | "engine.rule_blocks = [\n", 134 | "\tfl.RuleBlock(\n", 135 | " \tname=\"\",\n", 136 | " \tdescription=\"\",\n", 137 | " \tenabled=True,\n", 138 | " \tconjunction=None,\n", 139 | " \tdisjunction=None,\n", 140 | " \timplication=None,\n", 141 | " \tactivation=fl.General(),\n", 142 | " \trules=[\n", 143 | " \tfl.Rule.create(\"if Ambient is DARK then Power is HIGH\", engine),\n", 144 | " \tfl.Rule.create(\"if Ambient is MEDIUM then Power is MEDIUM\", engine),\n", 145 | " \tfl.Rule.create(\"if Ambient is BRIGHT then Power is LOW\", engine)\n", 146 | " \t]\n", 147 | "\t)\n", 148 | "]" 149 | ] 150 | }, 151 | { 152 | "cell_type": "markdown", 153 | "metadata": {}, 154 | "source": [ 155 | "# Tsukamoto Fuzzy Inference System" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": null, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "import fuzzylite as fl\n", 165 | "#Declaring and Initializing the Fuzzy Engine\n", 166 | "engine = fl.Engine(\n", 167 | 
"\tname=\"SimpleDimmer\",\n", 168 | "\tdescription=\"Simple Dimmer Fuzzy System which dims light based upon Light Conditions\"\n", 169 | ")\n", 170 | "#Defining the Input Variables (Fuzzification)\n", 171 | "engine.input_variables = [\n", 172 | "\tfl.InputVariable(\n", 173 | " \tname=\"Ambient\",\n", 174 | " \tdescription=\"\",\n", 175 | " \tenabled=True,\n", 176 | " \tminimum=0.000,\n", 177 | " \tmaximum=1.000,\n", 178 | " \tlock_range=False,\n", 179 | " \tterms=[\n", 180 | " \tfl.Bell(\"Dark\", -10.000, 5.000, 3.000), #Generalized Bell Membership Function defining “Dark”\n", 181 | " \tfl.Bell(\"medium\", 0.000, 5.000, 3.000), #Generalized Bell Membership Function defining “Medium”\n", 182 | " \tfl.Bell(\"Bright\", 10.000, 5.000, 3.000) #Generalized Bell Membership Function defining “Bright”\n", 183 | " \t]\n", 184 | "\t)\n", 185 | "]\n", 186 | "#Defining the Output Variables (Defuzzification)\n", 187 | "engine.output_variables = [\n", 188 | "\tfl.OutputVariable(\n", 189 | " \tname=\"Power\",\n", 190 | " \tdescription=\"\",\n", 191 | " \tenabled=True,\n", 192 | " \tminimum=0.000,\n", 193 | " \tmaximum=1.000,\n", 194 | " \tlock_range=False,\n", 195 | " \taggregation=fl.Maximum(),\n", 196 | " \tdefuzzifier=fl.Centroid(200),\n", 197 | " \tlock_previous=False,\n", 198 | " \tterms=[\n", 199 | "fl.Sigmoid(\"LOW\", 0.500, -30.000), #Triangular Membership Function defining “LOW Light”\n", 200 | " \tfl.Sigmoid(\"MEDIUM\", 0.130, 30.000), #Triangular Membership Function defining “MEDIUM light”\n", 201 | " \tfl.Sigmoid(\"HIGH\", 0.830, 30.000) #Triangular Membership Function defining “HIGH Light” \tfl.Triangle(\"HIGH\", 0.500, 0.750, 1.000) \t\n", 202 | "]\n", 203 | "\t)\n", 204 | "]\n", 205 | "#Creation of Fuzzy Rule Base\n", 206 | "engine.rule_blocks = [\n", 207 | "\tfl.RuleBlock(\n", 208 | " \tname=\"\",\n", 209 | " \tdescription=\"\",\n", 210 | " \tenabled=True,\n", 211 | " \tconjunction=None,\n", 212 | " \tdisjunction=None,\n", 213 | " \timplication=None,\n", 214 | " \tactivation=fl.General(),\n", 215 | " \trules=[\n", 216 | " \tfl.Rule.create(\"if Ambient is DARK then Power is HIGH\", engine),\n", 217 | " \tfl.Rule.create(\"if Ambient is MEDIUM then Power is MEDIUM\", engine),\n", 218 | " \tfl.Rule.create(\"if Ambient is BRIGHT then Power is LOW\", engine)\n", 219 | " \t]\n", 220 | "\t)\n", 221 | "]" 222 | ] 223 | } 224 | ], 225 | "metadata": { 226 | "kernelspec": { 227 | "display_name": "ADP", 228 | "language": "python", 229 | "name": "adp" 230 | }, 231 | "language_info": { 232 | "codemirror_mode": { 233 | "name": "ipython", 234 | "version": 3 235 | }, 236 | "file_extension": ".py", 237 | "mimetype": "text/x-python", 238 | "name": "python", 239 | "nbconvert_exporter": "python", 240 | "pygments_lexer": "ipython3", 241 | "version": "3.7.4" 242 | } 243 | }, 244 | "nbformat": 4, 245 | "nbformat_minor": 2 246 | } 247 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Chapter 4-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Classification Algorithm - Logistic Regression" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 8, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stdout", 17 | "output_type": "stream", 18 | "text": [ 19 | "Confusion Matrix: [[94 24]\n", 20 | " [10 51]]\n", 21 | "Accuracy: 0.8100558659217877\n", 22 | "Recall: 0.8360655737704918\n", 23 | 
"Precision: 0.68\n", 24 | "F1 Score: 0.7500000000000001\n", 25 | "AUC: 0.841\n" 26 | ] 27 | }, 28 | { 29 | "name": "stderr", 30 | "output_type": "stream", 31 | "text": [ 32 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/pandas/core/frame.py:4102: SettingWithCopyWarning: \n", 33 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 34 | "\n", 35 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 36 | " errors=errors,\n", 37 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n", 38 | " FutureWarning)\n", 39 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/sklearn/utils/validation.py:724: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().\n", 40 | " y = column_or_1d(y, warn=True)\n" 41 | ] 42 | }, 43 | { 44 | "data": { 45 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXjU1b3H8fc3YQ9hTcKaEPYdFWNwKYiCCqKg1lrcqq33Wtt6vbeLgmtdunj1Kt2sii0Vba214oKC1WoVooIhLmWJgCyBBFD2sGadc//4TWAICZnATGb7vJ6HJ7OcTM5P4OPhnPP7HnPOISIisS8p0h0QEZHQUKCLiMQJBbqISJxQoIuIxAkFuohInGgWqR+clpbmsrOzI/XjRURi0scff7zdOZde13sRC/Ts7GwKCgoi9eNFRGKSmW2o7z1NuYiIxAkFuohInFCgi4jECQW6iEicUKCLiMSJBgPdzGaZ2VYzW17P+2ZmvzGzNWa21MxGhr6bIiLSkGC2LT4N/A54pp73JwL9/b9GAY/7v4qICEBxPhTlQfZo73nN48zckP6YBgPdObfQzLKP0WQK8Izz6vAuNrMOZtbNObclRH0UEYldxfnwpwvBV4Uzb1LEcJDcEq6bG9JQD8Uceg+gOOB5if+1o5jZjWZWYGYF27ZtC8GPFhGJcusWgK8ScOCq/b98UF3hjdRDKBSBbnW8VuepGc65mc65HOdcTnp6nXeuiohEp+J8yHvE+9qINvu6eTPQPgdVJOOSWoAlQ3KLw1MwIRKKW/9LgMyA5z2BzSH4XBGR6HBo2qQSMGibAc1aHtmmqhz2bcUbz3ptXLOWlO/eS1vADJolJ2MTH4aDOyIzhx6EucDNZvY83mJoqebPRSSuFOX5w9yvbRfoMvTINl+tgH1fAf4pirZdsC5Daca/cbt3eVMZvmovzEf/OCzdbDDQzeyvwFggzcxKgJ8CzQGcc08A84ELgTXAAeDbYempiEhTW58Hy+dAy1S82WXnTZVMeuTo0XVxPm72ZFxVBeUkk9f7Vs6/YDLti/Nh9mRvzjwM0yyBgtnlcmUD7zvgByHrkYhINCjOh2cvAV9VrTfqXCJkc+pwZqU9SIviD9mZkcsNJ5/jvZGZ6+1mCdNWxUARK58rIhLVivK8KRLg0OgcvNeK8o4I5lc/28SdLy+n2teVWyfezo/PzCY5KWC/SGZuWIO8hgJdRKQu2aPBkrxthsnNAfNG63VMm7Rv3ZyTMzvwy8uGk9mpTWT6iwJdRKRumbkwaBJ88U9vygQOTZtUdc/hjwvWUlnt4+Zz+zN2YAZnD0jHrK5d3E1HgS4iiSPwFvzGToH4p00KN+9h2u8/ZNmmUiaN6IZzDjOLeJiDAl1EEkVxPsy+yNsvDt7UiR3j3sqauzkBZk+m4pqX+e3qTjz+3lo6tGnO768eycRhXaMiyGso0EUkMRTlQVXNXnKDnjnQ87T625csgQ2LAAfVFZQWvscTH5zE5JO7c/ekIXRMadEUvW4UBbqIJIbs0ZDc7PB+8PH3HXvaJWBfeVJyC9KHj+OdUcPI6hy5Rc+G6IALEUkMmbkw7qfe4wsfbnAOPa+sNzcl3cP/VV1OyeTnITM3qsMcNEIXkVh3IguddSg9UMnP5xfyQkEJfdKGcMOVU+nZu1MIOhp+CnQRiV3rFsCfL/Vu9rEkyBwFrTvW3fbgLihe7D2efytkDD7qfwDVPsfXn/iQ9dv38/2xfbllXH9aNU8O80WEjgJdRGLT/h3wyk2H7+Z0Pti5zquEWJd9W8HV3O1ZdcTdnjv3V9ChdXOSk4xbLxhIjw6tGdajfRNcRGgp0EUSVdEHsP49yDrDe75xkfe4x6mH22z6uO7X6xJM28Z83rF88U9441Yo2+MtcPqqva/f/HP90y51FMlyzvHSJ5u4//VCpk0YxFWjsrhgaNfj71eEKdBF4klNTe79W2HfNq+ca83jwK97NkPFvkj39sQlt4SJDwVXX7xWkayStsO4409LWLh6G6f26khujMyTH4sCXSQaHGthr+JArVDeCvu3HRncNV/LS+v+/JbtICXdm45IHwQt28OmAo6uHGjQ/zzoMxbWveeNhGsObKh5vS7BtG3M5x1L4Of4qhpXX9x/t+fLn5Zw11MLccB9k4dy7em9SEqKnhuEjpcCXSQSnIOy3VC6Cda+C+/c54WTJUHW6d4UQk141zeSbtXBC+iUDOg63PvaNt3/NePI581bHfm9gdMPSckcUXhqzK1e8PU8zasHXjNFUfN6XYJp25jPO5ban3Mc9cU7pbTk1OxO/OLSYfTsGN1bERvDnKu7tm+45eTkuIKCgoj8bJGwK9/rhfWeEv9X/6/SgK+V++v+3pR0bxRdO5Tb
ZhweZaekH30EWmMF/qsA6v4XQmO2BAbTNlRbDBv5OZXVPp7KW0dVteOWcf0BDtVgiTVm9rFzLqfO9xToEhMiEQT1BV6XYXUEdEnA8811TH2Yd2xZ+x7Qrge07wntunuPy/bAP6ZBdaU34rxubpPUzk4UyzeVMm3OUlZs3sPFJ3XnN1NPjskgr3GsQNeUi0S/4nx4epL3T2wz6DwAWrZt/OeU74Mdq73pjoY+J7AtcMQBB3VJSfcCulMf6D06ILR7eK+ndoNmx6j90WVIk5xok0jKKqv5zTtf8OTCdXRs04InrhnJhGHdIt2tsFKgS/Qryjtc9c75F8Lqu3nkWA7uPhzQDX1OYFvvG/xfzVvIO2mqP7R7QGr3o+eoG6uJTrRJJBt2HOCpvHVcdkoP7po0hPZtmke6S2GnQBevotzK16H7qdBtxOHXtyz1dkL0yDny9foE076uNg19X4fe/pNjfN42tUufOL7wq70P+Vifc6xFw3PuUPhGqf3lVby54ksuG9mTgV1T+dePx0b0BKGmpkBPdDU1oo86CDdancCaT2MO663dFjQlEuUWrN7GHS8tY3PpQUb0bE+/jNSECnNQoCeumgW/0pKAMDcYeikMvBBWzYcVL+MFaBIMvcR7vT7BtK+rDTTu++o4oLdRGjO1Ubutgjwq7dpfwQPzCnnpk030TU/h7989g34ZqZHuVkQo0BNRcT7Mvth/ckvgar+D3mfDiG9Ax16w6o3D0xOnf+/YgRZM+7raQOO/7zj2HUt8qimmtWHHAW4+px83n9svpopphZoCPREV5fnD3HHkFEaSd9cdNG56Itj29bU53u+ThLVjXzkd27QgOcmYPmEQPTq2Zmj32CumFWrah56IivPhTxMPL/IFLvhpD7REMeccf/+4hJ+9Xsi0iYO4elSvSHepyWkfuhwpMxdGXAGf/RWun+e9ptGvRLninQe44+Vl5H2xndzsTpzRp3OkuxR1FOiJql0P7+aamgBXkEsUe+mTEu56ZTkGPHDJMK7OzYqLYlqhpkBPJBsWwbIXIGMobPrUu3GmOF9hLlEvrW1Lcnt34ueXDqdHh9aR7k7UUqAnipqdLb7KI1+fPVnz5hJ1Kqt9PLlgLdU++O/x/RkzIJ0xA9Ij3a2op0CPJzV7y1t3Prrgf1Fe3TcPVVec2L5ukRBbvqmUW19cyudb9jDl5O4xWxUxEhTo8WJ9Hjx7yZGhbcneIQIp6bB9NUdsUUxq7r+VXvu6JTqUVVbzq7e/4Km8dXRKacGT154a08fBRUJQgW5mE4BfA8nAH5xzD9Z6PwuYDXTwt5nunJsf4r7Ksax46egRuKv2zo1smerV5z4kCUZeA+0ztbNFosbGnQf44/vruHxkT+64cHBCFNMKtQYD3cySgceA84ASYImZzXXOFQY0uwt4wTn3uJkNAeYD2WHob+JqqI5315OPfG5JXiGra1/y2tcuTHXSVQpyibi9ZZX8Y/mXfCMnkwFdUnn3J2Pj6gShphbMCD0XWOOcWwdgZs8DU4DAQHdAO//j9sDmUHYy4RXnwx/P59CUSVIzL7AD+ar9D8yrDjjyW3DSlUduS9TdlhJF3l25lTtfXsaXe8o4JasD/TJSFeYnKJhA7wEUBzwvAUbVanMv8JaZ/ReQAoyv64PM7EbgRoCsrKzG9jVxFeVxRD3uzFFHB3JxPmz40GvnnHe4Qu02qrktUWDn/goeeL2Qlz/dRP+Mtrz4vTMTtphWqAUT6HUtL9euF3Al8LRz7hEzOwN41syGOed8R3yTczOBmeDd+n88HU5IrQPviHMw/ArIuf7INrWnVLTQKVGo2ue4/PEP2bjzALeM688PzulLy2aJW0wr1IIJ9BIgM+B5T46eUrkBmADgnFtkZq2ANGBrKDqZ8LZ9HvAkoIBWIE2pSBTbtreczileMa07LhxMj46tGdytXcPfKI2S1HATlgD9zay3mbUApgJza7XZCIwDMLPBQCtgWyg7mtBK/EXMLNk76b2+0XdmLoz+scJcooZzjr8t2ci5j7zHc/kbARg/pIvCPEwaHKE756rM7GbgTbwtibOccyvM7H6gwDk3F/gx8JSZ/RBvOuZ6F6kyjvFmzTve8Wy5N0JqV42+JWZs3HGA6S8t5cO1OxjVuxNf65cW6S7FPZXPjWa+anhyDJTvgZsLvNG5SAx48eMS7n5lOclJxu0XDuLK01RMK1RUPjdWvftz+Gq5dyixwlxiSJd2LTmzb2d+dukwurVXMa2mokCPVsX5kPeo9zhvBvQ5R1MtErUqqnw8/t5afM7xw/MGMLp/OqP7q5hWUwtmUVQiIXDveU0BLZEo9O/i3Vz82/eZ8fZqinceQMtnkaMRerQ6tJPFtK9cotLBimoe/ecq/vj+ejJSW/GHb+UwfkiXSHcroSnQo1VmLrRIhYxBcMEvNN0iUad41wFmf7iBqblZTJ84iHatVEwr0hTo4dZQUS2RGLLHX0zrCn8xrfduHUt3nSAUNRTo4VScD09P8ubAMUjrDy1Sgvveiv1Qsde7qUinCkkU+NfKr7jjpeVs3VvGyKyO9MtoqzCPMgr0cCrKg+qaI9+cd6BESpAr/4fqlzudKiQRtWNfOfe/Xsirn21mYJdUnrj2VPpltI10t6QOCvRwyh7tlbL1VUGz1nDJ48GHsoptSRSo9jm+8cQiincd4IfjB/C9sX1p0Uyb46KVAj2cMnNh8GRYOa/xUyYqtiURtHVvGWkpLUlOMu6cNJieHdswsKtK3EY7BfqJCPeCp+qXSxPz+Rx/XbKRX85fybSJg7j29F6MG6ytiLFCgX68ivNh9sVQVe5Nqwy7HNp1P7LNns2w4mXAaWFTol7R9v1Mf2kpi9ft5My+nTlbd3rGHAX68SrK88Ic582RL3vBK28byFVz1N2eCnSJQi8UFHP3K8tpkZzEg5cN55unZWKmYlqxRoEerNrTK4ELnsnN4fr5dR8Lp4VNiQE9OrRmzIB0HpgyjK7tW0W6O3KcFOjB+OJt+MvleKNtgzb+I+F8Vcf+Pi1sSpQqr6rm9++uxTnHj84fyFn90jhL9cpjngL9WGpG5Zs/4/Axqv4DmAEObPe++nz1T6doYVOizKcbdzFtzlJWf7WPr4/siXNO0ytxQoFen+J8mH0RVFUe/d6p34YuQzSdIjHlQEUVj7y1mlkfrKdru1bMuj6HcwdpB0s8UaDXZ/Wb/kXP2vyHNGs6RWLMpl0HeXbxBq4elcW0CYNIVTGtuKNAr0+3k/0PzFv8xLxb9wNH45pOkShXerCSN5ZtYWpuFv27pLLg1rE6QSiOKdDr090f6IMvgjNv8R5rNC4x5K0VX3LXK8vZsb+CnOxO9MtoqzCPcwr0hvS/4HCAK8glBmzfV869c1fw+tItDOqayh+uy1ExrQShQBeJI9U+x+WPf8jm3WX85PwBfPfsvjRPVjGtRKFAF4kDX+0pI72tV0zrpxcPpWfH1vTvomJaiUb/6xaJYT6f49nFGxj3yAL+8tEGAM4ZlKEwT1AaoYvEqHXb9jH9pWXkr9/J1/qlMXZgRqS7JBGmQBeJQX9bspF
7Xl1By2ZJPHT5CL5xak/d7SkK9AZ98SakD9QOF4kqPTu2YexAr5hWRjsV0xKPAr0+mz/zvn7+ulecS7XMJYLKq6r57TtrAPjJBSqmJXXTomh9Spb4HwQc0iwSAR9v2MmFv87jd++uYeveMpxzDX+TJCSN0OvT8zT/gyQV35KI2F9excNvrmL2oiK6t2/N7O/kcvYAnSIk9QtqhG5mE8xslZmtMbPp9bS5wswKzWyFmT0X2m5GwKFb/ydpukUiYvPugzyXv5Fvnd6LN384RmEuDWpwhG5mycBjwHlACbDEzOY65woD2vQHbgfOcs7tMjPtnxI5DqUHKpm3bAtXjfKKaeXddg5dtOgpQQpmhJ4LrHHOrXPOVQDPA1NqtflP4DHn3C4A59zW0HYzAgIXRWdP9uqji4TRP5Z/yfgZC7j71eWs3bYPQGEujRJMoPcAigOel/hfCzQAGGBmH5jZYjObUNcHmdmNZlZgZgXbtm07vh43FS2KShPZureM7//lY27688ekt23Jqz84i77pKqYljRfMomhddyvUXmZvBvQHxgI9gTwzG+ac233ENzk3E5gJkJOTE9ml+tqHPtd2aFEUSGqmRVEJi2qf44onFrG5tIxbLxjIjWP6qJiWHLdgAr0EyAx43hPYXEebxc65SmC9ma3CC/glRKPifPjTRP8hzwad+0KLlCPblJUGPNE2MQmtLaUH6ZLayiumNXkomR3bqMStnLBghgJLgP5m1tvMWgBTgbm12rwCnANgZml4UzDrQtnRkCrK84c5gAMzSO125K/kgOO5fNWacpGQ8PkcT3+wnnGPLODPNcW0BmYozCUkGhyhO+eqzOxm4E0gGZjlnFthZvcDBc65uf73zjezQqAauNU5tyOcHQ/a+jxYOQ96nApdh3uvtc86/H5yS5jy+6OnXYrzdQi0hNSarfuYPmcpBRt2MWZAOucO0mYwCS2L1F1nOTk5rqCgILw/pDgfZl3gnQVan+QWcP28uufRG5pnFwnS8/kbuWfuClo3T+aei4Zw2cgeKqYlx8XMPnbO5dT1XnzfKVqUFxDmSTDsUhh0Eax8HZa/BLjD0yl1BbYOgZYQyerchvGDM7hv8jDSU1tGujsSp+I70LNHgyV5od6sJYy6yQvo9j1h5XxNp0jYlFVW85t3vgDgtgmDOLNvGmf2VTEtCa/4DvTMXOgyzNux8vU/HHnY83VzNZ0iYVFQtJPb5ixl3bb9TD0tE+ecplekScR3oAO0bOf9qh3amk6RENtXXsXD/1jJM4s30KNDa575Ti5jVH9FmlD8B7pIE/my9CDPLynmujOyufWCgaS01F8vaVr6EydyAnbtr+D1ZVu49vRe9MvwimnpBCGJFAW6yHFwzvHG8i+559Xl7D5QyZl9O9M3va3CXCJKgS7SSFv3lHH3q8t5c8VXDO/Rnme+M0rFtCQqxH+gl+/xdrkU52sRVE5Ytc/xjScX8WVpGbdPHMQNX+tNMxXTkigR34FenA9fLff2oc+erJOH5Lht3n2Qru28Ylr3TxlGZsfW9NGoXKJMfA8tAu8UVU1zOQ7VPsefahXTOntAusJcolJ8j9AD7xTVHaHSSGu27uW2F5fyycbdjB2YzrjBXSLdJZFjiu9Ar+9OUZEGPPfRRu6du4KUlsnM+OZJXHKyimlJ9IvvQIf67xQVOYbstDacP7QL904eSlpbFdOS2BD/gS4ShLLKama8vRrDmD5RxbQkNsX3oqhIED5at4OJv87jyQXr2FtWSaTOCBA5UfE/Qtc+dKnH3rJK/vcfK/nz4o1kdWrDc/8xijP7aVQusSu+R+g1+9B3b/D2oRfnR7pHEkW+2lPOix+X8B9f680//me0wlxiXnwHuvahSy0791fw7KIiAPpltCXvtnO566IhtGkR//9YlfgX33+KtQ9d/JxzvL50C/fOXcGeskrO6pdGn/S2Og5O4kp8B7r2oQvw1Z4y7nx5OW9//hUjerbnL5eP0p2eEpfiM9CL8w8fLycJrdrnuMJfTOvOCwfz7bOyVUxL4lb8BXpxPsy+CKoq/NMt1d7rKs6VUEp2HaBb+9YkJxkPTBlGVqc2ZKelRLpbImEVf0OVojyoKgfc4TAHLYomiGqf4w956xj/6AL+vNgrpjVmQLrCXBJC/IzQa6ZZWnc+vBCa1Mx77KvWomgCWPXlXm6bs5R/F+9m3KAMzh+qYlqSWOIj0Ivz4elJ3ig8kCXBxIfh4A4vzDXdErf+vHgD9722gtRWzfn11JOZfFJ3FdOShBMfgV6Ud3SYgzcyP7gDRv+46fskTcI5h5nRL6MtFw7vxj0XDaGzimlJgortQN+wCApfhZapAdMszf3TLFWaZoljByuqefSfq0hKMm6fOJjT+3Tm9D6dI90tkYiK3UCv2c3iqzrydTOY+JCmWeLYorU7mP7SUjbsOMC1p/c6NEoXSXSxG+hFed6UCgAG+CvkaZolbu0pq+SX81fy1/yN9Orchuf+c5RK3IoEiN1Azx4NScn+qZXmgGmaJc5t3VPOK59u4sYxffjh+AG0bpEc6S6JRJWgAt3MJgC/BpKBPzjnHqyn3eXA34HTnHMFIetlXTJzYehlsHwOXD/Pe63m7lBNs8SNHfvKee3fm7n+rN70y2jL+9PO0aKnSD0aDHQzSwYeA84DSoAlZjbXOVdYq10qcAvwUTg62qDMXAV5HHHOMfffm7l37gr2lVcxZkA6fdLbKsxFjiGYO0VzgTXOuXXOuQrgeWBKHe0eAB4CykLYv/oV58OKOd7doKp1Hlc27z7IDbML+O/nP6NX5xTm3TJaxbREghBMoPcAigOel/hfO8TMTgEynXOvH+uDzOxGMysws4Jt27Y1urNH+OKtw4uiuq0/blRV+5g6czGL1u7g7ouGMOd7ZzKgS2qkuyUSE4KZQ69rP9ihQxfNLAmYAVzf0Ac552YCMwFycnJO7ODGHWv9HUjSQmgcKN55gO4dWtMsOYlfXDqcrE5tyOrcJtLdEokpwQR6CZAZ8LwnsDngeSowDHjPvxe4KzDXzCaHbWF0x1r4/DUYMBEyT9NCaAyrqvYx64P1PPLWam6fOIjrz+rN1/prK6LI8Qgm0JcA/c2sN7AJmApcVfOmc64UOPQ30MzeA34StjAvzofXbgFLhot/Baldw/JjJPw+37KHaXOWsrSklPOGdGHi8G6R7pJITGsw0J1zVWZ2M/Am3rbFWc65FWZ2P1DgnJsb7k4eUpwPT18E1eVeJcXdGxXoMerZRUXc91oh7Vs353dXncKk4d10t6fICQpqH7pzbj4wv9Zr99TTduyJd6segUW4nPOea6olptTcpj+gSyoXn9Sduy8aQqeUFpHulkhciK07RWvfHaqF0JhxoKKK/3tzNc2SjTsuHMyoPp0ZpWJaIiEVWycWZebCqdd7j696QaPzGPHBmu1c8KuFzPpgPRVVPpw7sQ1OIlK32BqhA3TI8r72zIlsP6RBpQcr+cW8z/lbQTG901J44btnkNu7U6S7JRK3Yi/QJWZs31fOa0s3c9PZffmf8f1p1VzFtETCSYEuIbVtr1dM6z
tf603f9La8P+1cLXqKNBEFuoSEc45XPtvEfa8VcqC8mnMGZdA7LUVhLtKEFOhywjbtPsidLy/jvVXbGJnVgYcuH0HvtJRId0sk4SjQ5YR4xbQWsWNfBfdePIRrz8gmOUk3CIlEggJdjsvGHQfo0dErpvXgZSPI6tSGzE4qpiUSSbG1D10irqrax+PvrWX8jAU8s6gIgLP6pSnMRaKARugStBWbS5k2ZynLN+3hgqFdmKRiWiJRRYEuQZn9YREPvF5IhzYtePzqkaqMKBKFFOhyTDXFtAZ1TWXKyT24+6LBdGijrYgi0UiBLnXaX17Fw2+uonmyceekISqmJRIDtCgqR1m4ehvnz1jI7EVFVFY7FdMSiREaocshpQcqeWBeIS9+XEKfdK+Y1mnZKqYlEisU6HLI9v3lvLFsC98f25dbxqmYlkisUaAnuK17y5j72Wb+Y3SfQ8W0Oqr+ikhMUqAnKOcccz7ZxAOvF3Kwsppxg7vQOy1FYS4SwxToCah45wHueHkZeV9sJ6dXRx78uoppicQDBXqCqar2ceVTi9m1v4IHpgzl6lG9SFIxLZG4oEBPEEXb95PZqQ3NkpN46HKvmFbPjqq/IhJPtA89zlVW+3js3TWcP2PhoWJaZ/ZNU5iLxCGN0OPY8k2l3PbiUgq37GHS8G5cNKJ7pLskImGkQI9Tf/pgPT+b9zmdUlrwxDWnMmFY10h3SUTCTIEeZ2qKaQ3t3p7LTunBXZOG0L5N80h3S0SagAI9Tuwrr+Khf6ykRXISd100hNzencjtrdv2RRKJFkXjwHurtnLBjIU8u3gDDlRMSyRBaYQew3btr+CBeYW89Mkm+mW05cWbzuTUXh0j3S0RiRAFegzbdaCCt1Z8xS3n9uMH5/ajZTMV0xJJZEFNuZjZBDNbZWZrzGx6He//yMwKzWypmb1jZr1C31UB2LqnjJkL1+Kco096Wz6Ydi4/On+gwlxEGg50M0sGHgMmAkOAK81sSK1mnwI5zrkRwIvAQ6HuaKJzzvHCkmLGPbqAR95aTdGOAwDawSIihwQz5ZILrHHOrQMws+eBKUBhTQPn3LsB7RcD14Syk4mueOcBbn9pGe+v2U5u7048eNlwFdMSkaMEE+g9gOKA5yXAqGO0vwF4o643zOxG4EaArKysILuY2GqKae0+UMnPLhnGVblZKqYlInUKJtDrSo8698WZ2TVADnB2Xe8752YCMwFycnK0t+4Y1m/fT5a/mNbDl59Er85t6N6hdaS7JSJRLJhF0RIgM+B5T2Bz7UZmNh64E5jsnCsPTfcST2W1j9++8wUXzFjI7A+LADijb2eFuYg0KJgR+hKgv5n1BjYBU4GrAhuY2SnAk8AE59zWkPcyQSwt2c1tLy5l5Zd7ufik7kw+WcW0RCR4DQa6c67KzG4G3gSSgVnOuRVmdj9Q4JybCzwMtAX+bmYAG51zk8PY77gz6/31/GxeIempLXnqWzmcN6RLpLskIjEmqBuLnHPzgfm1Xrsn4PH4EPcrYdQU0xrRsz3fPC2T6RMH0761tiKKSOPpTtEI2VtWyYNvrKRls2TuuXgIOdmdyMlWMS0ROX4qzhUB767cyvkzFvLX/I00SzYV0xKRkP4uX30AAAiKSURBVNAIvQnt3F/B/a+t4JXPNjOgS1t+f/WZnJKlYloiEhoK9CZUerCSdz7fyn+P688PzulHi2b6B5KIhI4CPcy+LC3jlc828d0xfeidlsL708/VoqeIhIUCPUycczy/pJhfzPucSp+PCUO7kp2WojAXkbBRoIfBhh37mT5nGYvW7eD0Pp148LIRZKuYloiEmQI9xKqqfVz11EeUHqzkF5cOZ+ppmSqmJSJNQoEeImu37aOXv5jWI1d4xbS6tVf9FRFpOtpmcYIqqnz86u3VTPjVQp5ZtAGA0/t0VpiLSJPTCP0EfFa8m2kvLmXVV3uZcnJ3LjmlR6S7JCIJTIF+nP74/np+Pq+QjNRW/PG6HMYNVjEtEYksBXoj1RTTOjmzPVNzs5g+cRDtWmkroohEngI9SHvKKvnl/JW0ap7ETy8eyqm9OnFqLxXTEpHooUXRILxd+BXnPbqAvy3ZSItmSSqmJSJRSSP0Y9ixr5z7Xitk7r83M6hrKjOvzeGkzA6R7paISJ0U6Mewt6yKd1dt5YfjB/C9sX1VTEtEopoCvZbNuw/y8qeb+P7YvmSnpfDB9HO16CkiMUGB7ufzOZ7L38iDb6yk2ueYNLwb2WkpCnMRiRkKdGD99v1Mn7OUj9bv5Kx+nfnlpSPI6twm0t0SEWmUhA/0qmof1/zhI/aUVfLQ10fwjZyemKmYlojEnoQN9DVb95LdOYVmyUnM+ObJ9Orchi7tWkW6WyIixy3htm2UV1Xz6D9XM+FXecz2F9PK7d1JYS4iMS+hRuifbNzFtBeX8sXWfVx2Sg8uUzEtEYkjCRPoTy1cxy/e+Jxu7Vrxp2+fxjkDMyLdJRGRkIr7QPf5HElJxsheHbh6VBbTJgwiVVsRRSQOxW2glx6s5OfzCmndPJn7pgxTMS0RiXtxuSj65oovOe/RBcz5ZBMpLZupmJaIJIS4GqFv31fOT19dwbxlWxjSrR2zrj+NYT3aR7pbIiJNIvYCffdG72tJAfQ5+4i39pVVkffFNm69YCA3julD8+S4/AeIiEidYivxivPh46e9x89dAcX5bNp9kN/96wucc2SnpfDh7eP4wTn9FOYiknCCSj0zm2Bmq8xsjZlNr+P9lmb2N//7H5lZdqg7CkBRHviqAXDVlXy68DXOf3QBj727lg07DgDQtmXs/aNDRCQUGgx0M0sGHgMmAkOAK81sSK1mNwC7nHP9gBnA/4a6owBkjwbzulzlknhgeSdG9urIWz8cQ3ZaSlh+pIhIrAhmhJ4LrHHOrXPOVQDPA1NqtZkCzPY/fhEYZ2GqcFWzX8WH4wfn9uOZ7+SS2UmVEUVEggn0HkBxwPMS/2t1tnHOVQGlQOfaH2RmN5pZgZkVbNu2rfG9LcrD/FsQW5hjXKvVqowoIuIXTKDXlZi1N3YH0wbn3EznXI5zLic9PT2Y/h0pezQ0awmWjCW38J6LiAgQ3LbFEiAz4HlPYHM9bUrMrBnQHtgZkh4GysyF6+Z6i6PZo73nIiICBBfoS4D+ZtYb2ARMBa6q1WYucB2wCLgc+JcL1+2ZmbkKchGROjQY6M65KjO7GXgTSAZmOedWmNn9QIFzbi7wR+BZM1uDNzKfGs5Oi4jI0YLatO2cmw/Mr/XaPQGPy4BvhLZrIiLSGLqdUkQkTijQRUTihAJdRCROKNBFROKERerwBzPbBmw4zm9PA7aHsDuxQNecGHTNieFErrmXc67OOzMjFugnwswKnHM5ke5HU9I1JwZdc2II1zVrykVEJE4o0EVE4kSsBvrMSHcgAnTNiUHXnBjCcs0xOYcuIiJHi9URuoiI1KJAFxGJE1Ed6FFzOHUTCuKaf2RmhWa21MzeMbNekehnKDV0zQHtLjczZ2Yxv8UtmGs2syv8v9crzOy5pu5jqAXxZzvLzN41s0/9f74vjEQ/Q8XMZpnZV
jNbXs/7Zma/8f/3WGpmI0/4hzrnovIXXqnetUAfoAXwb2BIrTbfB57wP54K/C3S/W6Caz4HaON//L1EuGZ/u1RgIbAYyIl0v5vg97k/8CnQ0f88I9L9boJrngl8z/94CFAU6X6f4DWPAUYCy+t5/0LgDbwT304HPjrRnxnNI/SoOpy6iTR4zc65d51zB/xPF+OdIBXLgvl9BngAeAgoa8rOhUkw1/yfwGPOuV0AzrmtTdzHUAvmmh3Qzv+4PUefjBZTnHMLOfbJbVOAZ5xnMdDBzLqdyM+M5kAP2eHUMSSYaw50A97/4WNZg9dsZqcAmc6515uyY2EUzO/zAGCAmX1gZovNbEKT9S48grnme4FrzKwE7/yF/2qarkVMY/++NyioAy4iJGSHU8eQoK/HzK4BcoCzw9qj8DvmNZtZEjADuL6pOtQEgvl9boY37TIW719heWY2zDm3O8x9C5dgrvlK4Gnn3CNmdgbeKWjDnHO+8HcvIkKeX9E8Qm/M4dSE9XDqphPMNWNm44E7gcnOufIm6lu4NHTNqcAw4D0zK8Kba5wb4wujwf7ZftU5V+mcWw+swgv4WBXMNd8AvADgnFsEtMIrYhWvgvr73hjRHOiHDqc2sxZ4i55za7WpOZwawn04ddNo8Jr90w9P4oV5rM+rQgPX7Jwrdc6lOeeynXPZeOsGk51zBZHpbkgE82f7FbwFcMwsDW8KZl2T9jK0grnmjcA4ADMbjBfo25q0l01rLvAt/26X04FS59yWE/rESK8EN7BKfCGwGm91/E7/a/fj/YUG7zf878AaIB/oE+k+N8E1vw18BXzm/zU30n0O9zXXavseMb7LJcjfZwMeBQqBZcDUSPe5Ca55CPAB3g6Yz4DzI93nE7zevwJbgEq80fgNwE3ATQG/x4/5/3ssC8Wfa936LyISJ6J5ykVERBpBgS4iEicU6CIicUKBLiISJxToIiJxQoEuIhInFOgiInHi/wH2iaqv0Zf8OAAAAABJRU5ErkJggg==\n", 46 | "text/plain": [ 47 | "
" 48 | ] 49 | }, 50 | "metadata": { 51 | "needs_background": "light" 52 | }, 53 | "output_type": "display_data" 54 | } 55 | ], 56 | "source": [ 57 | "#Reading data\n", 58 | "import pandas as pd\n", 59 | "data = pd.read_csv(\"titanic_train.csv\")\n", 60 | "\n", 61 | "#Splitting Data into Categorical and Numerical Dataframes\n", 62 | "import numpy as np\n", 63 | "data_cat = data.select_dtypes(include=[object])\n", 64 | "data_num = data.select_dtypes(include=np.number)\n", 65 | "\n", 66 | "#Checking the number of null values\n", 67 | "data_cat.isnull().sum()\n", 68 | "data_num.isnull().sum()\n", 69 | "\n", 70 | "#Dropping the Columns having null values and columns which are not important\n", 71 | "data_cat.drop([\"Cabin\",\"Embarked\",\"Name\",\"Ticket\"], axis=1, inplace=True)\n", 72 | "data_num.drop([\"Age\",\"PassengerId\"], axis=1, inplace=True)\n", 73 | "\n", 74 | "#Checking the null values again\n", 75 | "data_cat.isnull().sum()\n", 76 | "data_num.isnull().sum()\n", 77 | "\n", 78 | "#Converting categorical variables into numbers\n", 79 | "from sklearn.preprocessing import LabelEncoder\n", 80 | "le = LabelEncoder()\n", 81 | "data_cat = data_cat.apply(le.fit_transform)\n", 82 | "\n", 83 | "#Combining both dataframes\n", 84 | "data = pd.concat([data_cat,data_num], axis=1)\n", 85 | "\n", 86 | "#Defining dependent and independent variables\n", 87 | "X = data.drop([\"Survived\"], axis=1)\n", 88 | "Y = pd.DataFrame(data[[\"Survived\"]])\n", 89 | "\n", 90 | "#Defining data into train and test set\n", 91 | "from sklearn.model_selection import train_test_split\n", 92 | "X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.20)\n", 93 | "\n", 94 | "#Applying Logistic Regression\n", 95 | "from sklearn.linear_model import LogisticRegression\n", 96 | "lr = LogisticRegression()\n", 97 | "lr.fit(X_train,y_train)\n", 98 | "\n", 99 | "#Predicting Values\n", 100 | "pred = lr.predict(X_test)\n", 101 | "\n", 102 | "#Finding different classification measures\n", 103 | "from sklearn.metrics import confusion_matrix, accuracy_score, recall_score, precision_score, f1_score\n", 104 | "print(\"Confusion Matrix: \", confusion_matrix(pred,y_test))\n", 105 | "print(\"Accuracy: \", accuracy_score(pred,y_test))\n", 106 | "print(\"Recall: \", recall_score(pred,y_test))\n", 107 | "print(\"Precision: \", precision_score(pred,y_test))\n", 108 | "print(\"F1 Score: \", f1_score(pred,y_test))\n", 109 | "\n", 110 | "from sklearn.metrics import roc_auc_score, roc_curve\n", 111 | "from matplotlib import pyplot\n", 112 | "# predict probabilities\n", 113 | "probs = lr.predict_proba(X_test)\n", 114 | "# keep probabilities for the positive outcome only\n", 115 | "probs = probs[:, 1]\n", 116 | "# calculate AUC\n", 117 | "auc = roc_auc_score(y_test, probs)\n", 118 | "print('AUC: %.3f' % auc)\n", 119 | "# calculate roc curve\n", 120 | "fpr, tpr, thresholds = roc_curve(y_test, probs)\n", 121 | "# plot no skill\n", 122 | "pyplot.plot([0, 1], [0, 1], linestyle='--')\n", 123 | "# plot the roc curve for the model\n", 124 | "pyplot.plot(fpr, tpr, marker='.')\n", 125 | "# show the plot\n", 126 | "pyplot.show()" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "# Regression Algorithm - Linear Regression" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 11, 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stderr", 143 | "output_type": "stream", 144 | "text": [ 145 | 
"/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/pandas/core/frame.py:4102: SettingWithCopyWarning: \n", 146 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 147 | "\n", 148 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 149 | " errors=errors,\n", 150 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/pandas/core/generic.py:6287: SettingWithCopyWarning: \n", 151 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 152 | "\n", 153 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 154 | " self._update_inplace(new_data)\n" 155 | ] 156 | }, 157 | { 158 | "data": { 159 | "text/html": [ 160 | "\n", 161 | "\n", 162 | "\n", 163 | " \n", 164 | "\n", 165 | "\n", 166 | " \n", 167 | "\n", 168 | "\n", 169 | " \n", 170 | "\n", 171 | "\n", 172 | " \n", 173 | "\n", 174 | "\n", 175 | " \n", 176 | "\n", 177 | "\n", 178 | " \n", 179 | "\n", 180 | "\n", 181 | " \n", 182 | "\n", 183 | "\n", 184 | " \n", 185 | "\n", 186 | "\n", 187 | " \n", 188 | "\n", 189 | "
OLS Regression Results [stripped HTML rendering of the statsmodels regression summary omitted; the complete summary is reproduced in the text/plain output below]
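The Cond. No. and smallest-eigenvalue warnings in this output point to strong multicollinearity among the encoded predictors. A minimal diagnostic sketch, assuming the X_train DataFrame built in the accompanying source cell is still in scope, uses statsmodels' variance_inflation_factor; columns whose VIF sits far above the usual rule-of-thumb cutoff of about 10 are close to linear combinations of the others:

import pandas as pd
from statsmodels.stats.outliers_influence import variance_inflation_factor

# Compute a VIF for every column of the design matrix (X_train is assumed to be
# the encoded predictor DataFrame from the linear regression example below).
vif = pd.Series(
    [variance_inflation_factor(X_train.values, i) for i in range(X_train.shape[1])],
    index=X_train.columns,
)

# The columns with the largest VIFs are the main multicollinearity suspects.
print(vif.sort_values(ascending=False).head(10))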


Warnings:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
[2] The smallest eigenvalue is 1.47e-21. This might indicate that there are
strong multicollinearity problems or that the design matrix is singular." 437 | ], 438 | "text/plain": [ 439 | "\n", 440 | "\"\"\"\n", 441 | " OLS Regression Results \n", 442 | "=======================================================================================\n", 443 | "Dep. Variable: SalePrice R-squared (uncentered): 0.981\n", 444 | "Model: OLS Adj. R-squared (uncentered): 0.979\n", 445 | "Method: Least Squares F-statistic: 744.4\n", 446 | "Date: Sun, 22 Sep 2019 Prob (F-statistic): 0.00\n", 447 | "Time: 21:56:15 Log-Likelihood: -13598.\n", 448 | "No. Observations: 1168 AIC: 2.734e+04\n", 449 | "Df Residuals: 1094 BIC: 2.772e+04\n", 450 | "Df Model: 74 \n", 451 | "Covariance Type: nonrobust \n", 452 | "=================================================================================\n", 453 | " coef std err t P>|t| [0.025 0.975]\n", 454 | "---------------------------------------------------------------------------------\n", 455 | "MSZoning -1165.7401 1539.464 -0.757 0.449 -4186.376 1854.896\n", 456 | "Street 3.222e+04 1.27e+04 2.546 0.011 7390.874 5.71e+04\n", 457 | "LotShape -718.1397 664.207 -1.081 0.280 -2021.403 585.123\n", 458 | "LandContour 1235.6655 1353.945 0.913 0.362 -1420.957 3892.288\n", 459 | "Utilities -3.379e+04 3.02e+04 -1.121 0.263 -9.3e+04 2.54e+04\n", 460 | "LotConfig -298.4770 542.717 -0.550 0.582 -1363.360 766.406\n", 461 | "LandSlope 4103.7910 3864.835 1.062 0.289 -3479.536 1.17e+04\n", 462 | "Neighborhood 233.4921 156.226 1.495 0.135 -73.044 540.029\n", 463 | "Condition1 -1042.3035 976.607 -1.067 0.286 -2958.538 873.930\n", 464 | "Condition2 -1.533e+04 3562.421 -4.304 0.000 -2.23e+04 -8343.255\n", 465 | "BldgType -1276.6258 1536.222 -0.831 0.406 -4290.900 1737.648\n", 466 | "HouseStyle -392.3779 676.011 -0.580 0.562 -1718.802 934.047\n", 467 | "RoofStyle 1427.9843 1147.983 1.244 0.214 -824.514 3680.483\n", 468 | "RoofMatl 3634.7378 1684.056 2.158 0.031 330.392 6939.083\n", 469 | "Exterior1st -1097.5467 531.712 -2.064 0.039 -2140.838 -54.256\n", 470 | "Exterior2nd 499.9442 482.670 1.036 0.301 -447.119 1447.008\n", 471 | "MasVnrType 4639.5944 1605.118 2.891 0.004 1490.137 7789.051\n", 472 | "ExterQual -8914.7271 2017.235 -4.419 0.000 -1.29e+04 -4956.640\n", 473 | "ExterCond 511.9805 1262.722 0.405 0.685 -1965.650 2989.611\n", 474 | "Foundation 610.7116 1629.947 0.375 0.708 -2587.465 3808.888\n", 475 | "BsmtQual -7872.7731 1476.928 -5.331 0.000 -1.08e+04 -4974.842\n", 476 | "BsmtCond 1991.4110 1368.135 1.456 0.146 -693.055 4675.877\n", 477 | "BsmtExposure -2462.4311 891.401 -2.762 0.006 -4211.479 -713.383\n", 478 | "BsmtFinType1 603.6284 660.350 0.914 0.361 -692.067 1899.324\n", 479 | "BsmtFinType2 1306.0052 1405.594 0.929 0.353 -1451.959 4063.969\n", 480 | "Heating 889.9318 3149.188 0.283 0.778 -5289.199 7069.062\n", 481 | "HeatingQC -1037.8213 619.969 -1.674 0.094 -2254.285 178.642\n", 482 | "CentralAir -3109.1404 4472.098 -0.695 0.487 -1.19e+04 5665.718\n", 483 | "Electrical -189.0676 960.337 -0.197 0.844 -2073.378 1695.243\n", 484 | "KitchenQual -6792.8368 1494.550 -4.545 0.000 -9725.345 -3860.329\n", 485 | "Functional 4001.9568 946.697 4.227 0.000 2144.411 5859.503\n", 486 | "FireplaceQu -2479.2410 1103.125 -2.247 0.025 -4643.721 -314.761\n", 487 | "GarageType 512.7497 613.323 0.836 0.403 -690.673 1716.173\n", 488 | "GarageFinish 91.1008 1490.750 0.061 0.951 -2833.951 3016.152\n", 489 | "GarageQual 726.6309 1843.585 0.394 0.694 -2890.730 4343.992\n", 490 | "GarageCond 2259.8080 2097.899 1.077 0.282 -1856.552 6376.168\n", 491 | "PavedDrive -572.7771 2069.922 
-0.277 0.782 -4634.243 3488.689\n", 492 | "SaleType -525.3894 580.704 -0.905 0.366 -1664.808 614.030\n", 493 | "SaleCondition 2600.2391 838.942 3.099 0.002 954.123 4246.356\n", 494 | "Id -1.6710 2.040 -0.819 0.413 -5.673 2.331\n", 495 | "MSSubClass -95.9793 47.297 -2.029 0.043 -188.782 -3.176\n", 496 | "LotFrontage 20.7353 49.791 0.416 0.677 -76.960 118.431\n", 497 | "LotArea 0.3920 0.101 3.879 0.000 0.194 0.590\n", 498 | "OverallQual 8141.9894 1196.646 6.804 0.000 5794.009 1.05e+04\n", 499 | "OverallCond 5795.2869 1048.905 5.525 0.000 3737.195 7853.379\n", 500 | "YearBuilt 301.9530 79.092 3.818 0.000 146.764 457.142\n", 501 | "YearRemodAdd 24.1377 68.483 0.352 0.725 -110.236 158.511\n", 502 | "MasVnrArea 32.0383 6.144 5.214 0.000 19.983 44.094\n", 503 | "BsmtFinSF1 16.9105 3.055 5.535 0.000 10.916 22.905\n", 504 | "BsmtFinSF2 5.7592 6.351 0.907 0.365 -6.702 18.220\n", 505 | "BsmtUnfSF -3.1604 2.907 -1.087 0.277 -8.863 2.543\n", 506 | "TotalBsmtSF 19.5093 3.669 5.318 0.000 12.311 26.708\n", 507 | "1stFlrSF 29.5382 6.310 4.681 0.000 17.157 41.919\n", 508 | "2ndFlrSF 32.9418 5.739 5.740 0.000 21.682 44.202\n", 509 | "LowQualFinSF -35.6469 15.214 -2.343 0.019 -65.499 -5.794\n", 510 | "GrLivArea 26.8331 5.767 4.653 0.000 15.517 38.149\n", 511 | "BsmtFullBath 2099.4732 2531.418 0.829 0.407 -2867.510 7066.457\n", 512 | "BsmtHalfBath -810.6716 3890.494 -0.208 0.835 -8444.345 6823.002\n", 513 | "FullBath -79.3889 2783.571 -0.029 0.977 -5541.130 5382.352\n", 514 | "HalfBath -943.1513 2589.318 -0.364 0.716 -6023.742 4137.440\n", 515 | "BedroomAbvGr -6071.5075 1708.284 -3.554 0.000 -9423.390 -2719.625\n", 516 | "KitchenAbvGr -1.685e+04 4956.178 -3.399 0.001 -2.66e+04 -7123.626\n", 517 | "TotRmsAbvGrd 2607.9617 1169.442 2.230 0.026 313.359 4902.564\n", 518 | "Fireplaces 6222.3936 1783.577 3.489 0.001 2722.775 9722.012\n", 519 | "GarageYrBlt 15.7996 70.252 0.225 0.822 -122.044 153.644\n", 520 | "GarageCars 3252.8253 2917.934 1.115 0.265 -2472.555 8978.206\n", 521 | "GarageArea 10.7453 10.031 1.071 0.284 -8.937 30.428\n", 522 | "WoodDeckSF 13.4806 7.601 1.774 0.076 -1.433 28.394\n", 523 | "OpenPorchSF -14.0978 14.094 -1.000 0.317 -41.751 13.556\n", 524 | "EnclosedPorch -2.7550 15.521 -0.178 0.859 -33.209 27.699\n", 525 | "3SsnPorch 12.8193 26.434 0.485 0.628 -39.049 64.687\n", 526 | "ScreenPorch 18.6703 16.901 1.105 0.270 -14.491 51.832\n", 527 | "PoolArea 73.2044 20.649 3.545 0.000 32.688 113.721\n", 528 | "MiscVal -0.1852 1.582 -0.117 0.907 -3.290 2.920\n", 529 | "MoSold -153.4176 316.720 -0.484 0.628 -774.864 468.029\n", 530 | "YrSold -337.9522 89.400 -3.780 0.000 -513.366 -162.538\n", 531 | "==============================================================================\n", 532 | "Omnibus: 539.471 Durbin-Watson: 1.982\n", 533 | "Prob(Omnibus): 0.000 Jarque-Bera (JB): 79865.186\n", 534 | "Skew: -1.057 Prob(JB): 0.00\n", 535 | "Kurtosis: 43.455 Cond. No. 1.37e+16\n", 536 | "==============================================================================\n", 537 | "\n", 538 | "Warnings:\n", 539 | "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n", 540 | "[2] The smallest eigenvalue is 1.47e-21. 
This might indicate that there are\n", 541 | "strong multicollinearity problems or that the design matrix is singular.\n", 542 | "\"\"\"" 543 | ] 544 | }, 545 | "execution_count": 11, 546 | "metadata": {}, 547 | "output_type": "execute_result" 548 | } 549 | ], 550 | "source": [ 551 | "#Reading Data\n", 552 | "import pandas as pd\n", 553 | "house_price = pd.read_csv(\"hp_train.csv\")\n", 554 | "\n", 555 | "#Partition into Categorical and Numerical Variables\n", 556 | "import numpy as np\n", 557 | "cat = house_price.select_dtypes(include=[object])\n", 558 | "num = house_price.select_dtypes(include=[np.number])\n", 559 | "\n", 560 | "#Checking Null Values\n", 561 | "cat.isnull().sum()\n", 562 | "num.isnull().sum()\n", 563 | "\n", 564 | "#Removing unnecessary columns\n", 565 | "cat.drop([\"Alley\", \"PoolQC\", \"Fence\", \"MiscFeature\"], axis=1, inplace=True)\n", 566 | "\n", 567 | "#Removing Categorical Null Values with Mode\n", 568 | "cat.BsmtCond.fillna(cat.BsmtCond.value_counts().idxmax(),inplace=True)\n", 569 | "cat.BsmtQual.fillna(cat.BsmtQual.value_counts().idxmax(),inplace=True)\n", 570 | "cat.BsmtExposure.fillna(cat.BsmtExposure.value_counts().idxmax(),inplace=True)\n", 571 | "cat.BsmtFinType1.fillna(cat.BsmtFinType1.value_counts().idxmax(),inplace=True)\n", 572 | "cat.BsmtFinType2.fillna(cat.BsmtFinType2.value_counts().idxmax(),inplace=True)\n", 573 | "cat.FireplaceQu.fillna(cat.FireplaceQu.value_counts().idxmax(),inplace=True)\n", 574 | "cat.GarageCond.fillna(cat.GarageCond.value_counts().idxmax(),inplace=True)\n", 575 | "cat.GarageFinish.fillna(cat.GarageFinish.value_counts().idxmax(),inplace=True)\n", 576 | "cat.GarageQual.fillna(cat.GarageQual.value_counts().idxmax(),inplace=True)\n", 577 | "cat.GarageType.fillna(cat.GarageType.value_counts().idxmax(),inplace=True)\n", 578 | "cat.Electrical.fillna(cat.Electrical.value_counts().idxmax(),inplace=True)\n", 579 | "cat.MasVnrType.fillna(cat.MasVnrType.value_counts().idxmax(),inplace=True)\n", 580 | "\n", 581 | "#Removing Numerical Null Values with Mean\n", 582 | "num.LotFrontage.fillna(num.LotFrontage.mean(),inplace=True)\n", 583 | "num.GarageYrBlt.fillna(num.GarageYrBlt.mean(),inplace=True)\n", 584 | "num.MasVnrArea.fillna(num.MasVnrArea.mean(),inplace=True)\n", 585 | "\n", 586 | "#Converting words to Integers\n", 587 | "from sklearn.preprocessing import LabelEncoder\n", 588 | "le = LabelEncoder()\n", 589 | "cat1 = cat.apply(le.fit_transform)\n", 590 | "\n", 591 | "#Combining two dataframes\n", 592 | "house_price2 = pd.concat([cat1, num], axis=1)\n", 593 | "\n", 594 | "#Getting Dependent and Independent Variables\n", 595 | "X = house_price2.drop([\"SalePrice\"], axis=1)\n", 596 | "Y = pd.DataFrame(house_price2[\"SalePrice\"])\n", 597 | "\n", 598 | "#Getting Train and Test Set\n", 599 | "from sklearn.model_selection import train_test_split\n", 600 | "X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.20)\n", 601 | "\n", 602 | "#Applying Linear Regression\n", 603 | "import statsmodels.api as sm\n", 604 | "est = sm.OLS(Y_train, X_train)\n", 605 | "est2 = est.fit()\n", 606 | "est2.summary()\n" 607 | ] 608 | } 609 | ], 610 | "metadata": { 611 | "kernelspec": { 612 | "display_name": "ADP", 613 | "language": "python", 614 | "name": "adp" 615 | }, 616 | "language_info": { 617 | "codemirror_mode": { 618 | "name": "ipython", 619 | "version": 3 620 | }, 621 | "file_extension": ".py", 622 | "mimetype": "text/x-python", 623 | "name": "python", 624 | "nbconvert_exporter": "python", 625 | "pygments_lexer": "ipython3", 626 | 
"version": "3.7.4" 627 | } 628 | }, 629 | "nbformat": 4, 630 | "nbformat_minor": 2 631 | } 632 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Chapter 5-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Activation Functions" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import numpy as np\n", 17 | "def sigmoid(x):\n", 18 | " return 1 / (1 + np.exp(-x))\n", 19 | "\n", 20 | "def tanh(x):\n", 21 | " return np.tanh(x)\n", 22 | "\n", 23 | "def softmax(x):\n", 24 | " exps = np.exp(x)\n", 25 | " return exps / (np.sum(exps).reshape(-1,1))\n", 26 | "\n", 27 | "def relu(x):\n", 28 | " return 1.0*(x>0)\n", 29 | "\n", 30 | "def leaky_relu(x, leaky_slope):\n", 31 | " d=np.zeros_like(x)\n", 32 | " d[x<=0]= leaky_slope\n", 33 | " d[x>0]=1\n", 34 | " return d" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "import numpy as np\n", 44 | "\n", 45 | "#Defining dummy values of x \n", 46 | "x = np.linspace(-np.pi, np.pi, 12)\n", 47 | "\n", 48 | "#Finding the Activation Function Outputs\n", 49 | "sigmoid_output = sigmoid(x)\n", 50 | "tanh_output = tanh(x)\n", 51 | "softmax_output = softmax(x)\n", 52 | "relu_output = relu(x)\n", 53 | "leaky_relu_output = leaky_relu(x,1)\n", 54 | "\n", 55 | "#Printing the Outputs\n", 56 | "print(sigmoid_output)\n", 57 | "print(tanh_output)\n", 58 | "print(softmax_output)\n", 59 | "print(relu_output)\n", 60 | "print(leaky_relu_output)" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "# Convolutional Neural Networks" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "from keras.datasets import mnist\n", 77 | "import matplotlib.pyplot as plt\n", 78 | "from keras.models import Sequential\n", 79 | "from keras.layers import Dense, Conv2D, Flatten\n", 80 | "from keras.utils import to_categorical\n", 81 | "\n", 82 | "#download mnist data and split into train and test sets\n", 83 | "(X_train, y_train), (X_test, y_test) = mnist.load_data()\n", 84 | "\n", 85 | "f1 = plt.figure(1)\n", 86 | "plt.imshow(X_train[0])\n", 87 | "f2 = plt.figure(2)\n", 88 | "plt.imshow(X_train[1])\n", 89 | "plt.show()\n", 90 | "\n", 91 | "#check image shape and data count\n", 92 | "print(X_train[0].shape, len(X_train))\n", 93 | "print(X_train[0].shape, len(X_test))\n", 94 | "\n", 95 | "#reshape data to fit model\n", 96 | "X_train = X_train.reshape(len(X_train),28,28,1)\n", 97 | "X_test = X_test.reshape(len(X_test),28,28,1)\n", 98 | "\n", 99 | "#One-hot encode target column\n", 100 | "y_train = to_categorical(y_train)\n", 101 | "y_test = to_categorical(y_test)\n", 102 | "y_train[0]\n", 103 | "\n", 104 | "#Create model\n", 105 | "model = Sequential()\n", 106 | "\n", 107 | "#Add Input CNN Layer\n", 108 | "model.add(Conv2D(64, kernel_size=3, activation='relu', input_shape=(28,28,1)))\n", 109 | "\n", 110 | "#Add second CNN Layer\n", 111 | "model.add(Conv2D(32, kernel_size=3, activation='relu'))\n", 112 | "\n", 113 | "#Add the fully connected layer\n", 114 | "model.add(Flatten())\n", 115 | "model.add(Dense(10, activation='softmax'))\n", 116 | "\n", 117 | "#Compile model using accuracy to measure model 
performance\n", 118 | "model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n", 119 | "\n", 120 | "#Train the model\n", 121 | "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3)\n", 122 | "\n", 123 | "#predict first 6 images in the test set\n", 124 | "model.predict(X_test[:6])\n", 125 | "\n", 126 | "#actual results for first 6 images in the test set\n", 127 | "y_test[:6]\n" 128 | ] 129 | }, 130 | { 131 | "cell_type": "markdown", 132 | "metadata": {}, 133 | "source": [ 134 | "# Recurrent Neural Networks" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "import pandas as pd\n", 144 | "import numpy as np\n", 145 | "import matplotlib.pyplot as plt\n", 146 | "from keras.models import Sequential\n", 147 | "from keras.layers import Dense, SimpleRNN\n", 148 | "\n", 149 | "#Generating Random Data \n", 150 | "t=np.arange(0,1000)\n", 151 | "x=np.sin(0.02*t)+2*np.random.rand(1000)\n", 152 | "df = pd.DataFrame(x)\n", 153 | "df.head()\n", 154 | "\n", 155 | "#Splitting into Train and Test set\n", 156 | "values=df.values\n", 157 | "train, test = values[0:800,:], values[800:1000,:]\n", 158 | "\n", 159 | "# convert dataset into matrix\n", 160 | "def convertToMatrix(data, step=4):\n", 161 | " X, Y =[], []\n", 162 | " for i in range(len(data)-step):\n", 163 | " d=i+step \n", 164 | " X.append(data[i:d,])\n", 165 | " Y.append(data[d,])\n", 166 | " return np.array(X), np.array(Y)\n", 167 | "\n", 168 | "trainX,trainY =convertToMatrix(train,6)\n", 169 | "testX,testY =convertToMatrix(test,6)\n", 170 | "trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n", 171 | "testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n", 172 | "\n", 173 | "#Making the RNN Structure\n", 174 | "model = Sequential()\n", 175 | "model.add(SimpleRNN(units=32, input_shape=(1,6), activation=\"relu\"))\n", 176 | "model.add(Dense(8, activation=\"relu\")) \n", 177 | "model.add(Dense(1))\n", 178 | "\n", 179 | "#Compiling the Code\n", 180 | "model.compile(loss='mean_squared_error', optimizer='rmsprop')\n", 181 | "model.summary()\n", 182 | "\n", 183 | "#Training the Model\n", 184 | "model.fit(trainX,trainY, epochs=1, batch_size=500, verbose=2)\n", 185 | "\n", 186 | "#Predicting with the Model\n", 187 | "trainPredict = model.predict(trainX)\n", 188 | "testPredict= model.predict(testX)\n", 189 | "predicted=np.concatenate((trainPredict,testPredict),axis=0)\n" 190 | ] 191 | }, 192 | { 193 | "cell_type": "markdown", 194 | "metadata": {}, 195 | "source": [ 196 | "# Long Short Term Memory" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 204 | "source": [ 205 | "import pandas as pd\n", 206 | "import numpy as np\n", 207 | "import matplotlib.pyplot as plt\n", 208 | "from keras.models import Sequential\n", 209 | "from keras.layers import Dense, LSTM\n", 210 | "\n", 211 | "#Generating Random Data \n", 212 | "t=np.arange(0,1000)\n", 213 | "x=np.sin(0.02*t)+2*np.random.rand(1000)\n", 214 | "df = pd.DataFrame(x)\n", 215 | "df.head()\n", 216 | "\n", 217 | "#Splitting into Train and Test set\n", 218 | "values=df.values\n", 219 | "train, test = values[0:800,:], values[800:1000,:]\n", 220 | "\n", 221 | "# convert dataset into matrix\n", 222 | "def convertToMatrix(data, step=4):\n", 223 | " X, Y =[], []\n", 224 | " for i in range(len(data)-step):\n", 225 | " d=i+step \n", 226 | " X.append(data[i:d,])\n", 227 | " 
Y.append(data[d,])\n", 228 | " return np.array(X), np.array(Y)\n", 229 | "\n", 230 | "trainX,trainY =convertToMatrix(train,6)\n", 231 | "testX,testY =convertToMatrix(test,6)\n", 232 | "trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n", 233 | "testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n", 234 | "\n", 235 | "#Making the LSTM Structure\n", 236 | "model = Sequential()\n", 237 | "model.add(LSTM(units=4, input_shape=(1,6), activation=\"relu\"))\n", 238 | "model.add(Dense(8, activation=\"relu\")) \n", 239 | "model.add(Dense(1))\n", 240 | "\n", 241 | "#Compiling the Code\n", 242 | "model.compile(loss='mean_squared_error', optimizer='rmsprop')\n", 243 | "model.summary()\n", 244 | "\n", 245 | "#Training the Model\n", 246 | "model.fit(trainX,trainY, epochs=1, batch_size=500, verbose=2)\n", 247 | "\n", 248 | "#Predicting with the Model\n", 249 | "trainPredict = model.predict(trainX)\n", 250 | "testPredict= model.predict(testX)\n", 251 | "predicted=np.concatenate((trainPredict,testPredict),axis=0)" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "metadata": {}, 257 | "source": [ 258 | "# Gated REcurrent Units" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [ 267 | "import pandas as pd\n", 268 | "import numpy as np\n", 269 | "import matplotlib.pyplot as plt\n", 270 | "from keras.models import Sequential\n", 271 | "from keras.layers import Dense, GRU\n", 272 | " \n", 273 | "#Generating Random Data \n", 274 | "t=np.arange(0,1000)\n", 275 | "x=np.sin(0.02*t)+2*np.random.rand(1000)\n", 276 | "df = pd.DataFrame(x)\n", 277 | "df.head()\n", 278 | " \n", 279 | "#Splitting into Train and Test set\n", 280 | "values=df.values\n", 281 | "train, test = values[0:800,:], values[800:1000,:]\n", 282 | " \n", 283 | "# convert dataset into matrix\n", 284 | "def convertToMatrix(data, step=4):\n", 285 | "\tX, Y =[], []\n", 286 | "\tfor i in range(len(data)-step):\n", 287 | " \td=i+step\n", 288 | " \tX.append(data[i:d,])\n", 289 | " \tY.append(data[d,])\n", 290 | "\treturn np.array(X), np.array(Y)\n", 291 | " \n", 292 | "trainX,trainY =convertToMatrix(train,6)\n", 293 | "testX,testY =convertToMatrix(test,6)\n", 294 | "trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n", 295 | "testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n", 296 | " \n", 297 | "#Making the GRU Structure\n", 298 | "model = Sequential()\n", 299 | "model.add(GRU(units=4, input_shape=(1,6), activation=\"relu\"))\n", 300 | "model.add(Dense(8, activation=\"relu\"))\n", 301 | "model.add(Dense(1))\n", 302 | " \n", 303 | "#Compiling the Code\n", 304 | "model.compile(loss='mean_squared_error', optimizer='rmsprop')\n", 305 | "model.summary()\n", 306 | " \n", 307 | "#Training the Model\n", 308 | "model.fit(trainX,trainY, epochs=10, batch_size=500, verbose=1)\n", 309 | " \n", 310 | "#Predicting with the Model\n", 311 | "trainPredict = model.predict(trainX)\n", 312 | "testPredict= model.predict(testX)\n", 313 | "predicted=np.concatenate((trainPredict,testPredict),axis=0)" 314 | ] 315 | }, 316 | { 317 | "cell_type": "markdown", 318 | "metadata": {}, 319 | "source": [ 320 | "# Use Case" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": null, 326 | "metadata": {}, 327 | "outputs": [], 328 | "source": [ 329 | "import numpy\n", 330 | "import pandas as pd\n", 331 | "from keras.models import Sequential\n", 332 | "from keras.layers import Dense, LSTM, GRU\n", 333 | "from 
sklearn.preprocessing import StandardScaler\n", 334 | "from sklearn.metrics import mean_squared_error\n", 335 | "import math\n", 336 | " \n", 337 | "# convert an array of values into a dataset matrix\n", 338 | "def create_dataset(dataset, step=1):\n", 339 | "\tdataX, dataY = [], []\n", 340 | "\tfor i in range(len(dataset)-step-1):\n", 341 | " \ta = dataset[i:(i+step), 0]\n", 342 | " \tdataX.append(a)\n", 343 | " \tdataY.append(dataset[i + step, 0])\n", 344 | "\treturn numpy.array(dataX), numpy.array(dataY)\n", 345 | " \n", 346 | "# load the dataset\n", 347 | "dataframe = pd.read_csv('carriage.csv', usecols=[1])\n", 348 | "dataset = dataframe.values\n", 349 | "dataset = dataset.astype('float32')\n", 350 | " \n", 351 | "# standardize the dataset\n", 352 | "scaler = StandardScaler()\n", 353 | "dataset = scaler.fit_transform(dataset)\n", 354 | " \n", 355 | "# split into train and test sets\n", 356 | "train_size = int(len(dataset) * 0.90)\n", 357 | "test_size = len(dataset) - train_size\n", 358 | "train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]\n", 359 | " \n", 360 | "# Reshaping Data for the model\n", 361 | "step = 1\n", 362 | "train_X, train_Y = create_dataset(train, step)\n", 363 | "test_X, test_Y = create_dataset(test, step)\n", 364 | " \n", 365 | "train_X = numpy.reshape(train_X, (train_X.shape[0], 1, train_X.shape[1]))\n", 366 | "test_X = numpy.reshape(test_X, (test_X.shape[0], 1, test_X.shape[1]))\n", 367 | " \n", 368 | "# create and fit the LSTM network\n", 369 | "model = Sequential()\n", 370 | "model.add(LSTM(10, input_shape=(1, step)))\n", 371 | "model.add(Dense(1))\n", 372 | "model.compile(loss='mean_squared_error', optimizer='adam')\n", 373 | "model.summary()\n", 374 | "model.fit(train_X, train_Y, epochs=10, batch_size=50, verbose=1)\n", 375 | "\n", 376 | "# create and fit the GRU network\n", 377 | "model1 = Sequential()\n", 378 | "model1.add(GRU(10, input_shape=(1, step)))\n", 379 | "model1.add(Dense(1))\n", 380 | "model1.compile(loss='mean_squared_error', optimizer='adam')\n", 381 | "model1.summary()\n", 382 | "model1.fit(train_X, train_Y, epochs=10, batch_size=50, verbose=1)\n", 383 | " \n", 384 | "# make predictions from LSTM\n", 385 | "trainPredict = model.predict(train_X)\n", 386 | "testPredict = model.predict(test_X)\n", 387 | "\n", 388 | "# make predictions from GRU\n", 389 | "trainPredict1 = model1.predict(train_X)\n", 390 | "testPredict1 = model1.predict(test_X)\n", 391 | " \n", 392 | "# invert predictions from LSTM\n", 393 | "trainPredict = scaler.inverse_transform(trainPredict)\n", 394 | "train_Y = scaler.inverse_transform([train_Y])\n", 395 | "testPredict = scaler.inverse_transform(testPredict)\n", 396 | "test_Y = scaler.inverse_transform([test_Y])\n", 397 | "\n", 398 | "# invert predictions from GRU\n", 399 | "trainPredict1 = scaler.inverse_transform(trainPredict1)\n", 400 | "testPredict1 = scaler.inverse_transform(testPredict1)\n", 401 | " \n", 402 | "# calculate root mean squared error for LSTM\n", 403 | "print(\"*****Results for LSTMs*****\")\n", 404 | "trainScore = math.sqrt(mean_squared_error(train_Y[0], trainPredict[:,0]))\n", 405 | "print('Error in Training data is: %.2f RMSE' % (trainScore))\n", 406 | "testScore = math.sqrt(mean_squared_error(test_Y[0], testPredict[:,0]))\n", 407 | "print('Error in Testing data is: %.2f RMSE' % (testScore))\n", 408 | "\n", 409 | "# calculate root mean squared error for GRU\n", 410 | "print(\"*****Results for GRUs*****\")\n", 411 | "trainScore1 = math.sqrt(mean_squared_error(train_Y[0], 
trainPredict1[:,0]))\n", 412 | "print('Error in Training data is: %.2f RMSE' % (trainScore1))\n", 413 | "testScore1 = math.sqrt(mean_squared_error(test_Y[0], testPredict1[:,0]))\n", 414 | "print('Error in Testing data is: %.2f RMSE' % (testScore1))" 415 | ] 416 | }, 417 | { 418 | "cell_type": "code", 419 | "execution_count": null, 420 | "metadata": {}, 421 | "outputs": [], 422 | "source": [] 423 | } 424 | ], 425 | "metadata": { 426 | "kernelspec": { 427 | "display_name": "ADP", 428 | "language": "python", 429 | "name": "adp" 430 | }, 431 | "language_info": { 432 | "codemirror_mode": { 433 | "name": "ipython", 434 | "version": 3 435 | }, 436 | "file_extension": ".py", 437 | "mimetype": "text/x-python", 438 | "name": "python", 439 | "nbconvert_exporter": "python", 440 | "pygments_lexer": "ipython3", 441 | "version": "3.7.4" 442 | } 443 | }, 444 | "nbformat": 4, 445 | "nbformat_minor": 2 446 | } 447 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Chapter 6-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Adaptive Neuro Fuzzy Inference System" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "# Importing necessary libraries\n", 17 | "import anfis\n", 18 | "from anfis.membership import membershipfunction, mfDerivs\n", 19 | "import numpy\n", 20 | "training_data = numpy.loadtxt(\"training.txt\", usecols=[1,2,3])\n", 21 | "X = training_data [:,0:2]\n", 22 | "Y = training_data [:,2]\n", 23 | "# Defining the Membership Functions\n", 24 | "mf = [[['gaussmf',{'mean':0.,'sigma':1.}],['gaussmf',{'mean':-1.,'sigma':2.}],['gaussmf',{'mean':-4.,'sigma':10.}],['gaussmf',{'mean':-7.,'sigma':7.}]], [['gaussmf',{'mean':1.,'sigma':2.}],['gaussmf',{'mean':2.,'sigma':3.}],['gaussmf',{'mean':-2.,'sigma':10.}],['gaussmf',{'mean':-10.5,'sigma':5.}]]]\n", 25 | "# Updating the model with Membership Functions\n", 26 | "mfc = membershipfunction.MemFuncs(mf)\n", 27 | "# Creating the ANFIS Model Object\n", 28 | "anf = anfis.ANFIS(X, Y, mfc)\n", 29 | "# Fitting the ANFIS Model\n", 30 | "anf.trainHybridJangOffLine(epochs=20)\n", 31 | "# Printing Output\n", 32 | "print(round(anf.consequents[-1][0],6))\n", 33 | "print(round(anf.consequents[-2][0],6))\n", 34 | "print(round(anf.fittedValues[9][0],6))\n", 35 | "# Plotting Model Performance\n", 36 | "anf.plotErrors()\n", 37 | "anf.plotResults()" 38 | ] 39 | } 40 | ], 41 | "metadata": { 42 | "kernelspec": { 43 | "display_name": "ADP", 44 | "language": "python", 45 | "name": "adp" 46 | }, 47 | "language_info": { 48 | "codemirror_mode": { 49 | "name": "ipython", 50 | "version": 3 51 | }, 52 | "file_extension": ".py", 53 | "mimetype": "text/x-python", 54 | "name": "python", 55 | "nbconvert_exporter": "python", 56 | "pygments_lexer": "ipython3", 57 | "version": "3.7.4" 58 | } 59 | }, 60 | "nbformat": 4, 61 | "nbformat_minor": 2 62 | } 63 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Chapter 7-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fuzzy Clustering" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | 
"import pandas as pd\n", 17 | "import numpy as np\n", 18 | "import numpy as np\n", 19 | "import logging\n", 20 | "from fuzzy_clustering import FCM\n", 21 | "from visualization import draw_model_2d\n", 22 | "from sklearn import preprocessing\n", 23 | "\n", 24 | "\n", 25 | "dataset = pd.read_csv(\"AirlinesCluster.csv\") #Importing the airlines data\n", 26 | "\n", 27 | "dataset1 = dataset.copy() #Making a copy so that original data remains unaffected\n", 28 | "\n", 29 | "dataset1 = dataset1[[\"Balance\", \"BonusMiles\"]][:500] #Selecting only first 500 rows for faster computation\n", 30 | "\n", 31 | "\n", 32 | "dataset1_standardized = preprocessing.scale(dataset1) #Standardizing the data to scale it between the upper and lower limit of 1 and 0\n", 33 | "\n", 34 | "dataset1_standardized = pd.DataFrame(dataset1_standardized)\n", 35 | "\n", 36 | "fcm.set_logger(tostdout=False) #Telling the package class to stop the unnecessary output\n", 37 | "\n", 38 | "fcm = FCM(n_clusters=5) #Defining k=5\n", 39 | "\n", 40 | "fcm.fit(dataset1_standardized) #Training on data\n", 41 | "\n", 42 | "predicted_membership = fcm.predict(np.array(dataset1_standardized)) #Testing on same data\n", 43 | "\n", 44 | "draw_model_2d(fcm, data=np.array(dataset1_standardized), membership=predicted_membership) #Visualizing the data" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "# Fuzzy Adaptive Resonance Theory" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "from functools import partial\n", 61 | "import numpy as np\n", 62 | "import FuzzyART as f\n", 63 | "import sklearn.datasets as ds\n", 64 | "\n", 65 | "l1_norm = partial(np.linalg.norm, ord=1, axis=-1)#Used for regularization so that we can penalize the parameters that are not important\n", 66 | "\n", 67 | "if __name__ == '__main__': \n", 68 | "\n", 69 | " iris = ds.load_iris()#load the dataset in the python environment\n", 70 | "\n", 71 | "data = iris['data'] / np.max(iris['data'], axis=0)#standardize the dataset\n", 72 | "\n", 73 | "net = f.FuzzyART(alpha=0.5, rho=0.5) #Initialize the FuzzyART Hyperparameters\n", 74 | "\n", 75 | " net.train(data, epochs=100) #Train on the data\n", 76 | "\n", 77 | " print(net.test(data).astype(int)) #Print the Cluster Results\n", 78 | "\n", 79 | " print(iris['target']) #Match the cluster results" 80 | ] 81 | } 82 | ], 83 | "metadata": { 84 | "kernelspec": { 85 | "display_name": "ADP", 86 | "language": "python", 87 | "name": "adp" 88 | }, 89 | "language_info": { 90 | "codemirror_mode": { 91 | "name": "ipython", 92 | "version": 3 93 | }, 94 | "file_extension": ".py", 95 | "mimetype": "text/x-python", 96 | "name": "python", 97 | "nbconvert_exporter": "python", 98 | "pygments_lexer": "ipython3", 99 | "version": "3.7.4" 100 | } 101 | }, 102 | "nbformat": 4, 103 | "nbformat_minor": 2 104 | } 105 | -------------------------------------------------------------------------------- /9781484253601.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Apress/deep-neuro-fuzzy-systems-w-python/16cf61c9902b3f6ac8225774e662c836774efa2d/9781484253601.jpg -------------------------------------------------------------------------------- /Chapter 1.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Basic Set Operation" 8 
| ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stdout", 17 | "output_type": "stream", 18 | "text": [ 19 | "Union : {0, 1, 2, 3, 4, 5, 6, 8}\n", 20 | "Intersection : {2, 4}\n", 21 | "Difference : {0, 8, 6}\n" 22 | ] 23 | } 24 | ], 25 | "source": [ 26 | "# Example Sets \n", 27 | "A = {0, 2, 4, 6, 8}\n", 28 | "B = {1, 2, 3, 4, 5} \n", 29 | " \n", 30 | "# union of above sets \n", 31 | "print(\"Union :\", A | B) \n", 32 | " \n", 33 | "# intersection of above sets\n", 34 | "print(\"Intersection :\", A & B) \n", 35 | " \n", 36 | "# difference between above sets\n", 37 | "print(\"Difference :\", A - B)" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "# Trapezoidal Membership Function" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "import numpy as np\n", 54 | "import skfuzzy as sk\n", 55 | "\n", 56 | "#Defining the Numpy array for Tip Quality\n", 57 | "x_qual = np.arange(0, 11, 1)\n", 58 | "\n", 59 | "#Defining the Numpy array for Trapezoidal membership functions\n", 60 | "qual_lo = sk.trapmf(x_qual, [0, 0, 5,5])" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "# Gaussian Membership Function" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 3, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "import numpy as np\n", 77 | "import skfuzzy as sk\n", 78 | "\n", 79 | "#Defining the Numpy array for Tip Quality\n", 80 | "x_qual = np.arange(0, 11, 1)\n", 81 | "\n", 82 | "#Defining the Numpy array for Gaussian membership functions\n", 83 | "qual_lo = sk.gaussmf(x_qual, np.mean(x_qual), np.std(x_qual))\n" 84 | ] 85 | }, 86 | { 87 | "cell_type": "markdown", 88 | "metadata": {}, 89 | "source": [ 90 | "# Generalized Bell membership function" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 4, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "import numpy as np\n", 100 | "import skfuzzy as sk\n", 101 | "\n", 102 | "#Defining the Numpy array for Tip Quality\n", 103 | "x_qual = np.arange(0, 11, 1)\n", 104 | "\n", 105 | "#Defining the Numpy array for Generalized Bell membership functions\n", 106 | "qual_lo = sk.gbellmf(x_qual, 0.5, 0.5, 0.5)\n" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "# Sigmoid Membership Function" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 5, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "import numpy as np\n", 123 | "import skfuzzy as sk\n", 124 | "\n", 125 | "#Defining the Numpy array for Tip Quality\n", 126 | "x_qual = np.arange(0, 11, 1)\n", 127 | "\n", 128 | "#Defining the Numpy array for Sigmoid membership functions\n", 129 | "qual_lo = sk.sigmf(x_qual, 0.5,0.5)\n" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "# Fuzzy OR Operation" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": 7, 142 | "metadata": {}, 143 | "outputs": [ 144 | { 145 | "data": { 146 | "text/plain": [ 147 | "(array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),\n", 148 | " array([1. , 0.8, 0.6, 0.6, 0.8, 1. , 0.8, 0.6, 0.4, 0.2, 0. 
]))" 149 | ] 150 | }, 151 | "execution_count": 7, 152 | "metadata": {}, 153 | "output_type": "execute_result" 154 | } 155 | ], 156 | "source": [ 157 | "import skfuzzy as sk\n", 158 | "import numpy as np\n", 159 | "\n", 160 | "#Defining the Numpy array for Tip Quality\n", 161 | "x_qual = np.arange(0, 11, 1)\n", 162 | "\n", 163 | "#Defining the Numpy array for two membership functions (Triangular)\n", 164 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 165 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 166 | "\n", 167 | "#Finding the Maximum (Fuzzy Or)\n", 168 | "sk.fuzzy_or(x_qual,qual_lo,x_qual,qual_md)\n" 169 | ] 170 | }, 171 | { 172 | "cell_type": "markdown", 173 | "metadata": {}, 174 | "source": [ 175 | "# Fuzzy AND Operation" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": 8, 181 | "metadata": {}, 182 | "outputs": [ 183 | { 184 | "data": { 185 | "text/plain": [ 186 | "(array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]),\n", 187 | " array([0. , 0.2, 0.4, 0.4, 0.2, 0. , 0. , 0. , 0. , 0. , 0. ]))" 188 | ] 189 | }, 190 | "execution_count": 8, 191 | "metadata": {}, 192 | "output_type": "execute_result" 193 | } 194 | ], 195 | "source": [ 196 | "import skfuzzy as sk\n", 197 | "import numpy as np\n", 198 | "\n", 199 | "#Defining the Numpy array for Tip Quality\n", 200 | "x_qual = np.arange(0, 11, 1)\n", 201 | "\n", 202 | "#Defining the Numpy array for two membership functions (Triangular)\n", 203 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 204 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 205 | "\n", 206 | "#Finding the Minimum (Fuzzy AND)\n", 207 | "sk.fuzzy_and(x_qual,qual_lo,x_qual,qual_md)\n" 208 | ] 209 | }, 210 | { 211 | "cell_type": "markdown", 212 | "metadata": {}, 213 | "source": [ 214 | "# Fuzzy NOT Operation" 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": 9, 220 | "metadata": {}, 221 | "outputs": [ 222 | { 223 | "data": { 224 | "text/plain": [ 225 | "array([0. , 0.2, 0.4, 0.6, 0.8, 1. , 1. , 1. , 1. , 1. , 1. ])" 226 | ] 227 | }, 228 | "execution_count": 9, 229 | "metadata": {}, 230 | "output_type": "execute_result" 231 | } 232 | ], 233 | "source": [ 234 | "import skfuzzy as sk\n", 235 | "import numpy as np\n", 236 | "\n", 237 | "#Defining the Numpy array for Tip Quality\n", 238 | "x_qual = np.arange(0, 11, 1)\n", 239 | "\n", 240 | "#Defining the Numpy array for two membership functions (Triangular)\n", 241 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 242 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 243 | "\n", 244 | "#Finding the Complement (Fuzzy NOT)\n", 245 | "sk.fuzzy_not(qual_lo)\n" 246 | ] 247 | }, 248 | { 249 | "cell_type": "markdown", 250 | "metadata": {}, 251 | "source": [ 252 | "# Fuzzy Cartesian Operation" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": 11, 258 | "metadata": {}, 259 | "outputs": [ 260 | { 261 | "data": { 262 | "text/plain": [ 263 | "array([[0. , 0.2, 0.4, 0.6, 0.8, 1. , 0.8, 0.6, 0.4, 0.2, 0. ],\n", 264 | " [0. , 0.2, 0.4, 0.6, 0.8, 0.8, 0.8, 0.6, 0.4, 0.2, 0. ],\n", 265 | " [0. , 0.2, 0.4, 0.6, 0.6, 0.6, 0.6, 0.6, 0.4, 0.2, 0. ],\n", 266 | " [0. , 0.2, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.2, 0. ],\n", 267 | " [0. , 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0. ],\n", 268 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 269 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 270 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 271 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ],\n", 272 | " [0. , 0. , 0. , 0. , 0. , 0. 
, 0. , 0. , 0. , 0. , 0. ],\n", 273 | " [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ]])" 274 | ] 275 | }, 276 | "execution_count": 11, 277 | "metadata": {}, 278 | "output_type": "execute_result" 279 | } 280 | ], 281 | "source": [ 282 | "import skfuzzy as sk\n", 283 | "import numpy as np\n", 284 | "\n", 285 | "#Defining the Numpy array for Tip Quality\n", 286 | "x_qual = np.arange(0, 11, 1)\n", 287 | "\n", 288 | "#Defining the Numpy array for two membership functions (Triangular)\n", 289 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 290 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 291 | "\n", 292 | "#Finding the Product (Fuzzy Cartesian)\n", 293 | "sk.cartprod(qual_lo, qual_md)\n" 294 | ] 295 | }, 296 | { 297 | "cell_type": "markdown", 298 | "metadata": {}, 299 | "source": [ 300 | "# Fuzzy Subtract Operation" 301 | ] 302 | }, 303 | { 304 | "cell_type": "code", 305 | "execution_count": 13, 306 | "metadata": {}, 307 | "outputs": [ 308 | { 309 | "data": { 310 | "text/plain": [ 311 | "(array([-10., -9., -8., -7., -6., -5., -4., -3., -2., -1., 0.,\n", 312 | " 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.]),\n", 313 | " array([0. , 0.2, 0.4, 0.6, 0.8, 1. , 0.8, 0.8, 0.6, 0.6, 0.4, 0.4, 0.2,\n", 314 | " 0.2, 0. , 0. , 0. , 0. , 0. , 0. , 0. ]))" 315 | ] 316 | }, 317 | "execution_count": 13, 318 | "metadata": {}, 319 | "output_type": "execute_result" 320 | } 321 | ], 322 | "source": [ 323 | "import skfuzzy as sk\n", 324 | "import numpy as np\n", 325 | "\n", 326 | "#Defining the Numpy array for Tip Quality\n", 327 | "x_qual = np.arange(0, 11, 1)\n", 328 | "\n", 329 | "#Defining the Numpy array for two membership functions (Triangular)\n", 330 | "qual_lo = sk.trimf(x_qual, [0, 0, 5])\n", 331 | "qual_md = sk.trimf(x_qual, [0, 5, 10])\n", 332 | "\n", 333 | "#Finding the Difference (Fuzzy Subtract)\n", 334 | "sk.fuzzy_sub(x_qual,qual_lo,x_qual,qual_md)\n" 335 | ] 336 | } 337 | ], 338 | "metadata": { 339 | "kernelspec": { 340 | "display_name": "ADP", 341 | "language": "python", 342 | "name": "adp" 343 | }, 344 | "language_info": { 345 | "codemirror_mode": { 346 | "name": "ipython", 347 | "version": 3 348 | }, 349 | "file_extension": ".py", 350 | "mimetype": "text/x-python", 351 | "name": "python", 352 | "nbconvert_exporter": "python", 353 | "pygments_lexer": "ipython3", 354 | "version": "3.7.4" 355 | } 356 | }, 357 | "nbformat": 4, 358 | "nbformat_minor": 2 359 | } 360 | -------------------------------------------------------------------------------- /Chapter 3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Mamdani Fuzzy Inference System" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": { 14 | "scrolled": true 15 | }, 16 | "outputs": [], 17 | "source": [ 18 | "import fuzzylite as fl\n", 19 | "#Declaring and Initializing the Fuzzy Engine\n", 20 | "engine = fl.Engine(\n", 21 | "\tname=\"SimpleDimmer\",\n", 22 | "\tdescription=\"Simple Dimmer Fuzzy System which dims light based upon Light Conditions\"\n", 23 | ")\n", 24 | "#Defining the Input Variables (Fuzzification)\n", 25 | "engine.input_variables = [\n", 26 | "\tfl.InputVariable(\n", 27 | " \tname=\"Ambient\",\n", 28 | " \tdescription=\"\",\n", 29 | " \tenabled=True,\n", 30 | " \tminimum=0.000,\n", 31 | " \tmaximum=1.000,\n", 32 | " \tlock_range=False,\n", 33 | " \tterms=[\n", 34 | " \tfl.Triangle(\"DARK\", 0.000, 0.250, 0.500), #Triangular Membership Function defining 
“Dark”\n", 35 | " \tfl.Triangle(\"MEDIUM\", 0.250, 0.500, 0.750), #Triangular Membership Function defining “Medium”\n", 36 | " \tfl.Triangle(\"BRIGHT\", 0.500, 0.750, 1.000) #Triangular Membership Function defining “Bright”\n", 37 | " \t]\n", 38 | "\t)\n", 39 | "]\n", 40 | "#Defining the Output Variables (Defuzzification)\n", 41 | "engine.output_variables = [\n", 42 | "\tfl.OutputVariable(\n", 43 | " \tname=\"Power\",\n", 44 | " \tdescription=\"\",\n", 45 | " \tenabled=True,\n", 46 | " \tminimum=0.000,\n", 47 | " \tmaximum=1.000,\n", 48 | " \tlock_range=False,\n", 49 | " \taggregation=fl.Maximum(),\n", 50 | " \tdefuzzifier=fl.Centroid(200),\n", 51 | " \tlock_previous=False,\n", 52 | " \tterms=[\n", 53 | " \tfl.Triangle(\"LOW\", 0.000, 0.250, 0.500), #Triangular Membership Function defining “LOW Light”\n", 54 | " \tfl.Triangle(\"MEDIUM\", 0.250, 0.500, 0.750), #Triangular Membership Function defining “MEDIUM light”\n", 55 | " \tfl.Triangle(\"HIGH\", 0.500, 0.750, 1.000) #Triangular Membership Function defining “HIGH Light”\n", 56 | " \t]\n", 57 | "\t)\n", 58 | "]\n", 59 | "#Creation of Fuzzy Rule Base\n", 60 | "engine.rule_blocks = [\n", 61 | "\tfl.RuleBlock(\n", 62 | " \tname=\"\",\n", 63 | " \tdescription=\"\",\n", 64 | " \tenabled=True,\n", 65 | " \tconjunction=None,\n", 66 | " \tdisjunction=None,\n", 67 | " \timplication=fl.Minimum(),\n", 68 | " \tactivation=fl.General(),\n", 69 | " \trules=[\n", 70 | " \tfl.Rule.create(\"if Ambient is DARK then Power is HIGH\", engine),\n", 71 | " \tfl.Rule.create(\"if Ambient is MEDIUM then Power is MEDIUM\", engine),\n", 72 | " \tfl.Rule.create(\"if Ambient is BRIGHT then Power is LOW\", engine)\n", 73 | " \t]\n", 74 | "\t)\n", 75 | "]\n" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "# Takagi Sugeno Kang Fuzzy Inference System" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "import fuzzylite as fl\n", 92 | "#Declaring and Initializing the Fuzzy Engine\n", 93 | "engine = fl.Engine(\n", 94 | "\tname=\"SimpleDimmer\",\n", 95 | "\tdescription=\"Simple Dimmer Fuzzy System which dims light based upon Light Conditions\"\n", 96 | ")\n", 97 | "#Defining the Input Variables (Fuzzification)\n", 98 | "engine.input_variables = [\n", 99 | "\tfl.InputVariable(\n", 100 | " \tname=\"Ambient\",\n", 101 | " \tdescription=\"\",\n", 102 | " \tenabled=True,\n", 103 | " \tminimum=0.000,\n", 104 | " \tmaximum=1.000,\n", 105 | " \tlock_range=False,\n", 106 | " \tterms=[\n", 107 | " \tfl.Triangle(\"DARK\", 0.000, 0.250, 0.500), #Triangular Membership Function defining “Dark”\n", 108 | " \tfl.Triangle(\"MEDIUM\", 0.250, 0.500, 0.750), #Triangular Membership Function defining “Medium”\n", 109 | " \tfl.Triangle(\"BRIGHT\", 0.500, 0.750, 1.000) #Triangular Membership Function defining “Bright”\n", 110 | " \t]\n", 111 | "\t)\n", 112 | "]\n", 113 | "#Defining the Output Variables (Defuzzification)\n", 114 | "engine.output_variables = [\n", 115 | "\tfl.OutputVariable(\n", 116 | " \tname=\"Power\",\n", 117 | " \tdescription=\"\",\n", 118 | " \tenabled=True,\n", 119 | " \tminimum=0.000,\n", 120 | " \tmaximum=1.000,\n", 121 | " \tlock_range=False,\n", 122 | " \taggregation=None,\n", 123 | " \tdefuzzifier=fl.WeightedAverage(\"TakagiSugeno\"),\n", 124 | " \tlock_previous=False,\n", 125 | " \tterms=[\n", 126 | " \tfl.Constant(\"LOW\", 0.250), #Constant Membership Function defining “LOW”\n", 127 | " \tfl.Constant(\"MEDIUM\", 0.500), #Constant 
Membership Function defining “MEDIUM”\n", 128 | " \tfl.Constant(\"HIGH\", 0.750) #Constant Membership Function defining “HIGH”\n", 129 | " \t]\n", 130 | "\t)\n", 131 | "]\n", 132 | "#Creation of Fuzzy Rule Base\n", 133 | "engine.rule_blocks = [\n", 134 | "\tfl.RuleBlock(\n", 135 | " \tname=\"\",\n", 136 | " \tdescription=\"\",\n", 137 | " \tenabled=True,\n", 138 | " \tconjunction=None,\n", 139 | " \tdisjunction=None,\n", 140 | " \timplication=None,\n", 141 | " \tactivation=fl.General(),\n", 142 | " \trules=[\n", 143 | " \tfl.Rule.create(\"if Ambient is DARK then Power is HIGH\", engine),\n", 144 | " \tfl.Rule.create(\"if Ambient is MEDIUM then Power is MEDIUM\", engine),\n", 145 | " \tfl.Rule.create(\"if Ambient is BRIGHT then Power is LOW\", engine)\n", 146 | " \t]\n", 147 | "\t)\n", 148 | "]" 149 | ] 150 | }, 151 | { 152 | "cell_type": "markdown", 153 | "metadata": {}, 154 | "source": [ 155 | "# Tsukamoto Fuzzy Inference System" 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": null, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "import fuzzylite as fl\n", 165 | "#Declaring and Initializing the Fuzzy Engine\n", 166 | "engine = fl.Engine(\n", 167 | "\tname=\"SimpleDimmer\",\n", 168 | "\tdescription=\"Simple Dimmer Fuzzy System which dims light based upon Light Conditions\"\n", 169 | ")\n", 170 | "#Defining the Input Variables (Fuzzification)\n", 171 | "engine.input_variables = [\n", 172 | "\tfl.InputVariable(\n", 173 | " \tname=\"Ambient\",\n", 174 | " \tdescription=\"\",\n", 175 | " \tenabled=True,\n", 176 | " \tminimum=0.000,\n", 177 | " \tmaximum=1.000,\n", 178 | " \tlock_range=False,\n", 179 | " \tterms=[\n", 180 | " \tfl.Bell(\"Dark\", -10.000, 5.000, 3.000), #Generalized Bell Membership Function defining “Dark”\n", 181 | " \tfl.Bell(\"medium\", 0.000, 5.000, 3.000), #Generalized Bell Membership Function defining “Medium”\n", 182 | " \tfl.Bell(\"Bright\", 10.000, 5.000, 3.000) #Generalized Bell Membership Function defining “Bright”\n", 183 | " \t]\n", 184 | "\t)\n", 185 | "]\n", 186 | "#Defining the Output Variables (Defuzzification)\n", 187 | "engine.output_variables = [\n", 188 | "\tfl.OutputVariable(\n", 189 | " \tname=\"Power\",\n", 190 | " \tdescription=\"\",\n", 191 | " \tenabled=True,\n", 192 | " \tminimum=0.000,\n", 193 | " \tmaximum=1.000,\n", 194 | " \tlock_range=False,\n", 195 | " \taggregation=fl.Maximum(),\n", 196 | " \tdefuzzifier=fl.Centroid(200),\n", 197 | " \tlock_previous=False,\n", 198 | " \tterms=[\n", 199 | "fl.Sigmoid(\"LOW\", 0.500, -30.000), #Triangular Membership Function defining “LOW Light”\n", 200 | " \tfl.Sigmoid(\"MEDIUM\", 0.130, 30.000), #Triangular Membership Function defining “MEDIUM light”\n", 201 | " \tfl.Sigmoid(\"HIGH\", 0.830, 30.000) #Triangular Membership Function defining “HIGH Light” \tfl.Triangle(\"HIGH\", 0.500, 0.750, 1.000) \t\n", 202 | "]\n", 203 | "\t)\n", 204 | "]\n", 205 | "#Creation of Fuzzy Rule Base\n", 206 | "engine.rule_blocks = [\n", 207 | "\tfl.RuleBlock(\n", 208 | " \tname=\"\",\n", 209 | " \tdescription=\"\",\n", 210 | " \tenabled=True,\n", 211 | " \tconjunction=None,\n", 212 | " \tdisjunction=None,\n", 213 | " \timplication=None,\n", 214 | " \tactivation=fl.General(),\n", 215 | " \trules=[\n", 216 | " \tfl.Rule.create(\"if Ambient is DARK then Power is HIGH\", engine),\n", 217 | " \tfl.Rule.create(\"if Ambient is MEDIUM then Power is MEDIUM\", engine),\n", 218 | " \tfl.Rule.create(\"if Ambient is BRIGHT then Power is LOW\", engine)\n", 219 | " \t]\n", 220 | "\t)\n", 221 | "]" 
222 | ] 223 | } 224 | ], 225 | "metadata": { 226 | "kernelspec": { 227 | "display_name": "ADP", 228 | "language": "python", 229 | "name": "adp" 230 | }, 231 | "language_info": { 232 | "codemirror_mode": { 233 | "name": "ipython", 234 | "version": 3 235 | }, 236 | "file_extension": ".py", 237 | "mimetype": "text/x-python", 238 | "name": "python", 239 | "nbconvert_exporter": "python", 240 | "pygments_lexer": "ipython3", 241 | "version": "3.7.4" 242 | } 243 | }, 244 | "nbformat": 4, 245 | "nbformat_minor": 2 246 | } 247 | -------------------------------------------------------------------------------- /Chapter 4.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Classification Algorithm - Logistic Regression" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 8, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stdout", 17 | "output_type": "stream", 18 | "text": [ 19 | "Confusion Matrix: [[94 24]\n", 20 | " [10 51]]\n", 21 | "Accuracy: 0.8100558659217877\n", 22 | "Recall: 0.8360655737704918\n", 23 | "Precision: 0.68\n", 24 | "F1 Score: 0.7500000000000001\n", 25 | "AUC: 0.841\n" 26 | ] 27 | }, 28 | { 29 | "name": "stderr", 30 | "output_type": "stream", 31 | "text": [ 32 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/pandas/core/frame.py:4102: SettingWithCopyWarning: \n", 33 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 34 | "\n", 35 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 36 | " errors=errors,\n", 37 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/sklearn/linear_model/logistic.py:432: FutureWarning: Default solver will be changed to 'lbfgs' in 0.22. Specify a solver to silence this warning.\n", 38 | " FutureWarning)\n", 39 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/sklearn/utils/validation.py:724: DataConversionWarning: A column-vector y was passed when a 1d array was expected. 
Please change the shape of y to (n_samples, ), for example using ravel().\n", 40 |  " y = column_or_1d(y, warn=True)\n" 41 |  ] 42 |  }, 43 |  { 44 |  "data": { 45 |  "image/png": "<base64-encoded PNG data omitted: ROC curve plot drawn by pyplot.plot(fpr, tpr) in the cell source below>\n", 46 |  "text/plain": [ 47 |  "
" 48 | ] 49 | }, 50 | "metadata": { 51 | "needs_background": "light" 52 | }, 53 | "output_type": "display_data" 54 | } 55 | ], 56 | "source": [ 57 | "#Reading data\n", 58 | "import pandas as pd\n", 59 | "data = pd.read_csv(\"titanic_train.csv\")\n", 60 | "\n", 61 | "#Splitting Data into Categorical and Numerical Dataframes\n", 62 | "import numpy as np\n", 63 | "data_cat = data.select_dtypes(include=[object])\n", 64 | "data_num = data.select_dtypes(include=np.number)\n", 65 | "\n", 66 | "#Checking the number of null values\n", 67 | "data_cat.isnull().sum()\n", 68 | "data_num.isnull().sum()\n", 69 | "\n", 70 | "#Dropping the Columns having null values and columns which are not important\n", 71 | "data_cat.drop([\"Cabin\",\"Embarked\",\"Name\",\"Ticket\"], axis=1, inplace=True)\n", 72 | "data_num.drop([\"Age\",\"PassengerId\"], axis=1, inplace=True)\n", 73 | "\n", 74 | "#Checking the null values again\n", 75 | "data_cat.isnull().sum()\n", 76 | "data_num.isnull().sum()\n", 77 | "\n", 78 | "#Converting categorical variables into numbers\n", 79 | "from sklearn.preprocessing import LabelEncoder\n", 80 | "le = LabelEncoder()\n", 81 | "data_cat = data_cat.apply(le.fit_transform)\n", 82 | "\n", 83 | "#Combining both dataframes\n", 84 | "data = pd.concat([data_cat,data_num], axis=1)\n", 85 | "\n", 86 | "#Defining dependent and independent variables\n", 87 | "X = data.drop([\"Survived\"], axis=1)\n", 88 | "Y = pd.DataFrame(data[[\"Survived\"]])\n", 89 | "\n", 90 | "#Defining data into train and test set\n", 91 | "from sklearn.model_selection import train_test_split\n", 92 | "X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.20)\n", 93 | "\n", 94 | "#Applying Logistic Regression\n", 95 | "from sklearn.linear_model import LogisticRegression\n", 96 | "lr = LogisticRegression()\n", 97 | "lr.fit(X_train,y_train)\n", 98 | "\n", 99 | "#Predicting Values\n", 100 | "pred = lr.predict(X_test)\n", 101 | "\n", 102 | "#Finding different classification measures\n", 103 | "from sklearn.metrics import confusion_matrix, accuracy_score, recall_score, precision_score, f1_score\n", 104 | "print(\"Confusion Matrix: \", confusion_matrix(pred,y_test))\n", 105 | "print(\"Accuracy: \", accuracy_score(pred,y_test))\n", 106 | "print(\"Recall: \", recall_score(pred,y_test))\n", 107 | "print(\"Precision: \", precision_score(pred,y_test))\n", 108 | "print(\"F1 Score: \", f1_score(pred,y_test))\n", 109 | "\n", 110 | "from sklearn.metrics import roc_auc_score, roc_curve\n", 111 | "from matplotlib import pyplot\n", 112 | "# predict probabilities\n", 113 | "probs = lr.predict_proba(X_test)\n", 114 | "# keep probabilities for the positive outcome only\n", 115 | "probs = probs[:, 1]\n", 116 | "# calculate AUC\n", 117 | "auc = roc_auc_score(y_test, probs)\n", 118 | "print('AUC: %.3f' % auc)\n", 119 | "# calculate roc curve\n", 120 | "fpr, tpr, thresholds = roc_curve(y_test, probs)\n", 121 | "# plot no skill\n", 122 | "pyplot.plot([0, 1], [0, 1], linestyle='--')\n", 123 | "# plot the roc curve for the model\n", 124 | "pyplot.plot(fpr, tpr, marker='.')\n", 125 | "# show the plot\n", 126 | "pyplot.show()" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "# Regression Algorithm - Linear Regression" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 11, 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stderr", 143 | "output_type": "stream", 144 | "text": [ 145 | 
"/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/pandas/core/frame.py:4102: SettingWithCopyWarning: \n", 146 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 147 | "\n", 148 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 149 | " errors=errors,\n", 150 | "/Users/singhhim1/anaconda3/envs/ADP/lib/python3.7/site-packages/pandas/core/generic.py:6287: SettingWithCopyWarning: \n", 151 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 152 | "\n", 153 | "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 154 | " self._update_inplace(new_data)\n" 155 | ] 156 | }, 157 | { 158 | "data": { 159 | "text/html": [ 160 | "\n", 161 | "\n", 162 | "\n", 163 | " \n", 164 | "\n", 165 | "\n", 166 | " \n", 167 | "\n", 168 | "\n", 169 | " \n", 170 | "\n", 171 | "\n", 172 | " \n", 173 | "\n", 174 | "\n", 175 | " \n", 176 | "\n", 177 | "\n", 178 | " \n", 179 | "\n", 180 | "\n", 181 | " \n", 182 | "\n", 183 | "\n", 184 | " \n", 185 | "\n", 186 | "\n", 187 | " \n", 188 | "\n", 189 | "
[duplicate tag-stripped text/html rendering of the OLS Regression Results summary omitted -- the same figures appear in the text/plain summary below; its closing warning reads: "[2] The smallest eigenvalue is 1.47e-21. This might indicate that there are
strong multicollinearity problems or that the design matrix is singular." 437 | ], 438 | "text/plain": [ 439 | "\n", 440 | "\"\"\"\n", 441 | " OLS Regression Results \n", 442 | "=======================================================================================\n", 443 | "Dep. Variable: SalePrice R-squared (uncentered): 0.981\n", 444 | "Model: OLS Adj. R-squared (uncentered): 0.979\n", 445 | "Method: Least Squares F-statistic: 744.4\n", 446 | "Date: Sun, 22 Sep 2019 Prob (F-statistic): 0.00\n", 447 | "Time: 21:56:15 Log-Likelihood: -13598.\n", 448 | "No. Observations: 1168 AIC: 2.734e+04\n", 449 | "Df Residuals: 1094 BIC: 2.772e+04\n", 450 | "Df Model: 74 \n", 451 | "Covariance Type: nonrobust \n", 452 | "=================================================================================\n", 453 | " coef std err t P>|t| [0.025 0.975]\n", 454 | "---------------------------------------------------------------------------------\n", 455 | "MSZoning -1165.7401 1539.464 -0.757 0.449 -4186.376 1854.896\n", 456 | "Street 3.222e+04 1.27e+04 2.546 0.011 7390.874 5.71e+04\n", 457 | "LotShape -718.1397 664.207 -1.081 0.280 -2021.403 585.123\n", 458 | "LandContour 1235.6655 1353.945 0.913 0.362 -1420.957 3892.288\n", 459 | "Utilities -3.379e+04 3.02e+04 -1.121 0.263 -9.3e+04 2.54e+04\n", 460 | "LotConfig -298.4770 542.717 -0.550 0.582 -1363.360 766.406\n", 461 | "LandSlope 4103.7910 3864.835 1.062 0.289 -3479.536 1.17e+04\n", 462 | "Neighborhood 233.4921 156.226 1.495 0.135 -73.044 540.029\n", 463 | "Condition1 -1042.3035 976.607 -1.067 0.286 -2958.538 873.930\n", 464 | "Condition2 -1.533e+04 3562.421 -4.304 0.000 -2.23e+04 -8343.255\n", 465 | "BldgType -1276.6258 1536.222 -0.831 0.406 -4290.900 1737.648\n", 466 | "HouseStyle -392.3779 676.011 -0.580 0.562 -1718.802 934.047\n", 467 | "RoofStyle 1427.9843 1147.983 1.244 0.214 -824.514 3680.483\n", 468 | "RoofMatl 3634.7378 1684.056 2.158 0.031 330.392 6939.083\n", 469 | "Exterior1st -1097.5467 531.712 -2.064 0.039 -2140.838 -54.256\n", 470 | "Exterior2nd 499.9442 482.670 1.036 0.301 -447.119 1447.008\n", 471 | "MasVnrType 4639.5944 1605.118 2.891 0.004 1490.137 7789.051\n", 472 | "ExterQual -8914.7271 2017.235 -4.419 0.000 -1.29e+04 -4956.640\n", 473 | "ExterCond 511.9805 1262.722 0.405 0.685 -1965.650 2989.611\n", 474 | "Foundation 610.7116 1629.947 0.375 0.708 -2587.465 3808.888\n", 475 | "BsmtQual -7872.7731 1476.928 -5.331 0.000 -1.08e+04 -4974.842\n", 476 | "BsmtCond 1991.4110 1368.135 1.456 0.146 -693.055 4675.877\n", 477 | "BsmtExposure -2462.4311 891.401 -2.762 0.006 -4211.479 -713.383\n", 478 | "BsmtFinType1 603.6284 660.350 0.914 0.361 -692.067 1899.324\n", 479 | "BsmtFinType2 1306.0052 1405.594 0.929 0.353 -1451.959 4063.969\n", 480 | "Heating 889.9318 3149.188 0.283 0.778 -5289.199 7069.062\n", 481 | "HeatingQC -1037.8213 619.969 -1.674 0.094 -2254.285 178.642\n", 482 | "CentralAir -3109.1404 4472.098 -0.695 0.487 -1.19e+04 5665.718\n", 483 | "Electrical -189.0676 960.337 -0.197 0.844 -2073.378 1695.243\n", 484 | "KitchenQual -6792.8368 1494.550 -4.545 0.000 -9725.345 -3860.329\n", 485 | "Functional 4001.9568 946.697 4.227 0.000 2144.411 5859.503\n", 486 | "FireplaceQu -2479.2410 1103.125 -2.247 0.025 -4643.721 -314.761\n", 487 | "GarageType 512.7497 613.323 0.836 0.403 -690.673 1716.173\n", 488 | "GarageFinish 91.1008 1490.750 0.061 0.951 -2833.951 3016.152\n", 489 | "GarageQual 726.6309 1843.585 0.394 0.694 -2890.730 4343.992\n", 490 | "GarageCond 2259.8080 2097.899 1.077 0.282 -1856.552 6376.168\n", 491 | "PavedDrive -572.7771 2069.922 
-0.277 0.782 -4634.243 3488.689\n", 492 | "SaleType -525.3894 580.704 -0.905 0.366 -1664.808 614.030\n", 493 | "SaleCondition 2600.2391 838.942 3.099 0.002 954.123 4246.356\n", 494 | "Id -1.6710 2.040 -0.819 0.413 -5.673 2.331\n", 495 | "MSSubClass -95.9793 47.297 -2.029 0.043 -188.782 -3.176\n", 496 | "LotFrontage 20.7353 49.791 0.416 0.677 -76.960 118.431\n", 497 | "LotArea 0.3920 0.101 3.879 0.000 0.194 0.590\n", 498 | "OverallQual 8141.9894 1196.646 6.804 0.000 5794.009 1.05e+04\n", 499 | "OverallCond 5795.2869 1048.905 5.525 0.000 3737.195 7853.379\n", 500 | "YearBuilt 301.9530 79.092 3.818 0.000 146.764 457.142\n", 501 | "YearRemodAdd 24.1377 68.483 0.352 0.725 -110.236 158.511\n", 502 | "MasVnrArea 32.0383 6.144 5.214 0.000 19.983 44.094\n", 503 | "BsmtFinSF1 16.9105 3.055 5.535 0.000 10.916 22.905\n", 504 | "BsmtFinSF2 5.7592 6.351 0.907 0.365 -6.702 18.220\n", 505 | "BsmtUnfSF -3.1604 2.907 -1.087 0.277 -8.863 2.543\n", 506 | "TotalBsmtSF 19.5093 3.669 5.318 0.000 12.311 26.708\n", 507 | "1stFlrSF 29.5382 6.310 4.681 0.000 17.157 41.919\n", 508 | "2ndFlrSF 32.9418 5.739 5.740 0.000 21.682 44.202\n", 509 | "LowQualFinSF -35.6469 15.214 -2.343 0.019 -65.499 -5.794\n", 510 | "GrLivArea 26.8331 5.767 4.653 0.000 15.517 38.149\n", 511 | "BsmtFullBath 2099.4732 2531.418 0.829 0.407 -2867.510 7066.457\n", 512 | "BsmtHalfBath -810.6716 3890.494 -0.208 0.835 -8444.345 6823.002\n", 513 | "FullBath -79.3889 2783.571 -0.029 0.977 -5541.130 5382.352\n", 514 | "HalfBath -943.1513 2589.318 -0.364 0.716 -6023.742 4137.440\n", 515 | "BedroomAbvGr -6071.5075 1708.284 -3.554 0.000 -9423.390 -2719.625\n", 516 | "KitchenAbvGr -1.685e+04 4956.178 -3.399 0.001 -2.66e+04 -7123.626\n", 517 | "TotRmsAbvGrd 2607.9617 1169.442 2.230 0.026 313.359 4902.564\n", 518 | "Fireplaces 6222.3936 1783.577 3.489 0.001 2722.775 9722.012\n", 519 | "GarageYrBlt 15.7996 70.252 0.225 0.822 -122.044 153.644\n", 520 | "GarageCars 3252.8253 2917.934 1.115 0.265 -2472.555 8978.206\n", 521 | "GarageArea 10.7453 10.031 1.071 0.284 -8.937 30.428\n", 522 | "WoodDeckSF 13.4806 7.601 1.774 0.076 -1.433 28.394\n", 523 | "OpenPorchSF -14.0978 14.094 -1.000 0.317 -41.751 13.556\n", 524 | "EnclosedPorch -2.7550 15.521 -0.178 0.859 -33.209 27.699\n", 525 | "3SsnPorch 12.8193 26.434 0.485 0.628 -39.049 64.687\n", 526 | "ScreenPorch 18.6703 16.901 1.105 0.270 -14.491 51.832\n", 527 | "PoolArea 73.2044 20.649 3.545 0.000 32.688 113.721\n", 528 | "MiscVal -0.1852 1.582 -0.117 0.907 -3.290 2.920\n", 529 | "MoSold -153.4176 316.720 -0.484 0.628 -774.864 468.029\n", 530 | "YrSold -337.9522 89.400 -3.780 0.000 -513.366 -162.538\n", 531 | "==============================================================================\n", 532 | "Omnibus: 539.471 Durbin-Watson: 1.982\n", 533 | "Prob(Omnibus): 0.000 Jarque-Bera (JB): 79865.186\n", 534 | "Skew: -1.057 Prob(JB): 0.00\n", 535 | "Kurtosis: 43.455 Cond. No. 1.37e+16\n", 536 | "==============================================================================\n", 537 | "\n", 538 | "Warnings:\n", 539 | "[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.\n", 540 | "[2] The smallest eigenvalue is 1.47e-21. 
This might indicate that there are\n", 541 | "strong multicollinearity problems or that the design matrix is singular.\n", 542 | "\"\"\"" 543 | ] 544 | }, 545 | "execution_count": 11, 546 | "metadata": {}, 547 | "output_type": "execute_result" 548 | } 549 | ], 550 | "source": [ 551 | "#Reading Data\n", 552 | "import pandas as pd\n", 553 | "house_price = pd.read_csv(\"hp_train.csv\")\n", 554 | "\n", 555 | "#Partition into Categorical and Numerical Variables\n", 556 | "import numpy as np\n", 557 | "cat = house_price.select_dtypes(include=[object])\n", 558 | "num = house_price.select_dtypes(include=[np.number])\n", 559 | "\n", 560 | "#Checking Null Values\n", 561 | "cat.isnull().sum()\n", 562 | "num.isnull().sum()\n", 563 | "\n", 564 | "#Removing unnecessary columns\n", 565 | "cat.drop([\"Alley\", \"PoolQC\", \"Fence\", \"MiscFeature\"], axis=1, inplace=True)\n", 566 | "\n", 567 | "#Removing Categorical Null Values with Mode\n", 568 | "cat.BsmtCond.fillna(cat.BsmtCond.value_counts().idxmax(),inplace=True)\n", 569 | "cat.BsmtQual.fillna(cat.BsmtQual.value_counts().idxmax(),inplace=True)\n", 570 | "cat.BsmtExposure.fillna(cat.BsmtExposure.value_counts().idxmax(),inplace=True)\n", 571 | "cat.BsmtFinType1.fillna(cat.BsmtFinType1.value_counts().idxmax(),inplace=True)\n", 572 | "cat.BsmtFinType2.fillna(cat.BsmtFinType2.value_counts().idxmax(),inplace=True)\n", 573 | "cat.FireplaceQu.fillna(cat.FireplaceQu.value_counts().idxmax(),inplace=True)\n", 574 | "cat.GarageCond.fillna(cat.GarageCond.value_counts().idxmax(),inplace=True)\n", 575 | "cat.GarageFinish.fillna(cat.GarageFinish.value_counts().idxmax(),inplace=True)\n", 576 | "cat.GarageQual.fillna(cat.GarageQual.value_counts().idxmax(),inplace=True)\n", 577 | "cat.GarageType.fillna(cat.GarageType.value_counts().idxmax(),inplace=True)\n", 578 | "cat.Electrical.fillna(cat.Electrical.value_counts().idxmax(),inplace=True)\n", 579 | "cat.MasVnrType.fillna(cat.MasVnrType.value_counts().idxmax(),inplace=True)\n", 580 | "\n", 581 | "#Removing Numerical Null Values with Mean\n", 582 | "num.LotFrontage.fillna(num.LotFrontage.mean(),inplace=True)\n", 583 | "num.GarageYrBlt.fillna(num.GarageYrBlt.mean(),inplace=True)\n", 584 | "num.MasVnrArea.fillna(num.MasVnrArea.mean(),inplace=True)\n", 585 | "\n", 586 | "#Converting words to Integers\n", 587 | "from sklearn.preprocessing import LabelEncoder\n", 588 | "le = LabelEncoder()\n", 589 | "cat1 = cat.apply(le.fit_transform)\n", 590 | "\n", 591 | "#Combining two dataframes\n", 592 | "house_price2 = pd.concat([cat1, num], axis=1)\n", 593 | "\n", 594 | "#Getting Dependent and Independent Variables\n", 595 | "X = house_price2.drop([\"SalePrice\"], axis=1)\n", 596 | "Y = pd.DataFrame(house_price2[\"SalePrice\"])\n", 597 | "\n", 598 | "#Getting Train and Test Set\n", 599 | "from sklearn.model_selection import train_test_split\n", 600 | "X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.20)\n", 601 | "\n", 602 | "#Applying Linear Regression\n", 603 | "import statsmodels.api as sm\n", 604 | "est = sm.OLS(Y_train, X_train)\n", 605 | "est2 = est.fit()\n", 606 | "est2.summary()\n" 607 | ] 608 | } 609 | ], 610 | "metadata": { 611 | "kernelspec": { 612 | "display_name": "ADP", 613 | "language": "python", 614 | "name": "adp" 615 | }, 616 | "language_info": { 617 | "codemirror_mode": { 618 | "name": "ipython", 619 | "version": 3 620 | }, 621 | "file_extension": ".py", 622 | "mimetype": "text/x-python", 623 | "name": "python", 624 | "nbconvert_exporter": "python", 625 | "pygments_lexer": "ipython3", 626 | 
"version": "3.7.4" 627 | } 628 | }, 629 | "nbformat": 4, 630 | "nbformat_minor": 2 631 | } 632 | -------------------------------------------------------------------------------- /Chapter 5.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Activation Functions" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import numpy as np\n", 17 | "def sigmoid(x):\n", 18 | " return 1 / (1 + np.exp(-x))\n", 19 | "\n", 20 | "def tanh(x):\n", 21 | " return np.tanh(x)\n", 22 | "\n", 23 | "def softmax(x):\n", 24 | " exps = np.exp(x)\n", 25 | " return exps / (np.sum(exps).reshape(-1,1))\n", 26 | "\n", 27 | "def relu(x):\n", 28 | " return 1.0*(x>0)\n", 29 | "\n", 30 | "def leaky_relu(x, leaky_slope):\n", 31 | " d=np.zeros_like(x)\n", 32 | " d[x<=0]= leaky_slope\n", 33 | " d[x>0]=1\n", 34 | " return d" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "import numpy as np\n", 44 | "\n", 45 | "#Defining dummy values of x \n", 46 | "x = np.linspace(-np.pi, np.pi, 12)\n", 47 | "\n", 48 | "#Finding the Activation Function Outputs\n", 49 | "sigmoid_output = sigmoid(x)\n", 50 | "tanh_output = tanh(x)\n", 51 | "softmax_output = softmax(x)\n", 52 | "relu_output = relu(x)\n", 53 | "leaky_relu_output = leaky_relu(x,1)\n", 54 | "\n", 55 | "#Printing the Outputs\n", 56 | "print(sigmoid_output)\n", 57 | "print(tanh_output)\n", 58 | "print(softmax_output)\n", 59 | "print(relu_output)\n", 60 | "print(leaky_relu_output)" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "# Convolutional Neural Networks" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "from keras.datasets import mnist\n", 77 | "import matplotlib.pyplot as plt\n", 78 | "from keras.models import Sequential\n", 79 | "from keras.layers import Dense, Conv2D, Flatten\n", 80 | "from keras.utils import to_categorical\n", 81 | "\n", 82 | "#download mnist data and split into train and test sets\n", 83 | "(X_train, y_train), (X_test, y_test) = mnist.load_data()\n", 84 | "\n", 85 | "f1 = plt.figure(1)\n", 86 | "plt.imshow(X_train[0])\n", 87 | "f2 = plt.figure(2)\n", 88 | "plt.imshow(X_train[1])\n", 89 | "plt.show()\n", 90 | "\n", 91 | "#check image shape and data count\n", 92 | "print(X_train[0].shape, len(X_train))\n", 93 | "print(X_train[0].shape, len(X_test))\n", 94 | "\n", 95 | "#reshape data to fit model\n", 96 | "X_train = X_train.reshape(len(X_train),28,28,1)\n", 97 | "X_test = X_test.reshape(len(X_test),28,28,1)\n", 98 | "\n", 99 | "#One-hot encode target column\n", 100 | "y_train = to_categorical(y_train)\n", 101 | "y_test = to_categorical(y_test)\n", 102 | "y_train[0]\n", 103 | "\n", 104 | "#Create model\n", 105 | "model = Sequential()\n", 106 | "\n", 107 | "#Add Input CNN Layer\n", 108 | "model.add(Conv2D(64, kernel_size=3, activation='relu', input_shape=(28,28,1)))\n", 109 | "\n", 110 | "#Add second CNN Layer\n", 111 | "model.add(Conv2D(32, kernel_size=3, activation='relu'))\n", 112 | "\n", 113 | "#Add the fully connected layer\n", 114 | "model.add(Flatten())\n", 115 | "model.add(Dense(10, activation='softmax'))\n", 116 | "\n", 117 | "#Compile model using accuracy to measure model performance\n", 118 | 
"model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n", 119 | "\n", 120 | "#Train the model\n", 121 | "model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=3)\n", 122 | "\n", 123 | "#predict first 6 images in the test set\n", 124 | "model.predict(X_test[:6])\n", 125 | "\n", 126 | "#actual results for first 6 images in the test set\n", 127 | "y_test[:6]\n" 128 | ] 129 | }, 130 | { 131 | "cell_type": "markdown", 132 | "metadata": {}, 133 | "source": [ 134 | "# Recurrent Neural Networks" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "import pandas as pd\n", 144 | "import numpy as np\n", 145 | "import matplotlib.pyplot as plt\n", 146 | "from keras.models import Sequential\n", 147 | "from keras.layers import Dense, SimpleRNN\n", 148 | "\n", 149 | "#Generating Random Data \n", 150 | "t=np.arange(0,1000)\n", 151 | "x=np.sin(0.02*t)+2*np.random.rand(1000)\n", 152 | "df = pd.DataFrame(x)\n", 153 | "df.head()\n", 154 | "\n", 155 | "#Splitting into Train and Test set\n", 156 | "values=df.values\n", 157 | "train, test = values[0:800,:], values[800:1000,:]\n", 158 | "\n", 159 | "# convert dataset into matrix\n", 160 | "def convertToMatrix(data, step=4):\n", 161 | " X, Y =[], []\n", 162 | " for i in range(len(data)-step):\n", 163 | " d=i+step \n", 164 | " X.append(data[i:d,])\n", 165 | " Y.append(data[d,])\n", 166 | " return np.array(X), np.array(Y)\n", 167 | "\n", 168 | "trainX,trainY =convertToMatrix(train,6)\n", 169 | "testX,testY =convertToMatrix(test,6)\n", 170 | "trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n", 171 | "testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n", 172 | "\n", 173 | "#Making the RNN Structure\n", 174 | "model = Sequential()\n", 175 | "model.add(SimpleRNN(units=32, input_shape=(1,6), activation=\"relu\"))\n", 176 | "model.add(Dense(8, activation=\"relu\")) \n", 177 | "model.add(Dense(1))\n", 178 | "\n", 179 | "#Compiling the Code\n", 180 | "model.compile(loss='mean_squared_error', optimizer='rmsprop')\n", 181 | "model.summary()\n", 182 | "\n", 183 | "#Training the Model\n", 184 | "model.fit(trainX,trainY, epochs=1, batch_size=500, verbose=2)\n", 185 | "\n", 186 | "#Predicting with the Model\n", 187 | "trainPredict = model.predict(trainX)\n", 188 | "testPredict= model.predict(testX)\n", 189 | "predicted=np.concatenate((trainPredict,testPredict),axis=0)\n" 190 | ] 191 | }, 192 | { 193 | "cell_type": "markdown", 194 | "metadata": {}, 195 | "source": [ 196 | "# Long Short Term Memory" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 204 | "source": [ 205 | "import pandas as pd\n", 206 | "import numpy as np\n", 207 | "import matplotlib.pyplot as plt\n", 208 | "from keras.models import Sequential\n", 209 | "from keras.layers import Dense, LSTM\n", 210 | "\n", 211 | "#Generating Random Data \n", 212 | "t=np.arange(0,1000)\n", 213 | "x=np.sin(0.02*t)+2*np.random.rand(1000)\n", 214 | "df = pd.DataFrame(x)\n", 215 | "df.head()\n", 216 | "\n", 217 | "#Splitting into Train and Test set\n", 218 | "values=df.values\n", 219 | "train, test = values[0:800,:], values[800:1000,:]\n", 220 | "\n", 221 | "# convert dataset into matrix\n", 222 | "def convertToMatrix(data, step=4):\n", 223 | " X, Y =[], []\n", 224 | " for i in range(len(data)-step):\n", 225 | " d=i+step \n", 226 | " X.append(data[i:d,])\n", 227 | " 
Y.append(data[d,])\n", 228 | " return np.array(X), np.array(Y)\n", 229 | "\n", 230 | "trainX,trainY =convertToMatrix(train,6)\n", 231 | "testX,testY =convertToMatrix(test,6)\n", 232 | "trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n", 233 | "testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n", 234 | "\n", 235 | "#Making the LSTM Structure\n", 236 | "model = Sequential()\n", 237 | "model.add(LSTM(units=4, input_shape=(1,6), activation=\"relu\"))\n", 238 | "model.add(Dense(8, activation=\"relu\")) \n", 239 | "model.add(Dense(1))\n", 240 | "\n", 241 | "#Compiling the Code\n", 242 | "model.compile(loss='mean_squared_error', optimizer='rmsprop')\n", 243 | "model.summary()\n", 244 | "\n", 245 | "#Training the Model\n", 246 | "model.fit(trainX,trainY, epochs=1, batch_size=500, verbose=2)\n", 247 | "\n", 248 | "#Predicting with the Model\n", 249 | "trainPredict = model.predict(trainX)\n", 250 | "testPredict= model.predict(testX)\n", 251 | "predicted=np.concatenate((trainPredict,testPredict),axis=0)" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "metadata": {}, 257 | "source": [ 258 | "# Gated REcurrent Units" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [ 267 | "import pandas as pd\n", 268 | "import numpy as np\n", 269 | "import matplotlib.pyplot as plt\n", 270 | "from keras.models import Sequential\n", 271 | "from keras.layers import Dense, GRU\n", 272 | " \n", 273 | "#Generating Random Data \n", 274 | "t=np.arange(0,1000)\n", 275 | "x=np.sin(0.02*t)+2*np.random.rand(1000)\n", 276 | "df = pd.DataFrame(x)\n", 277 | "df.head()\n", 278 | " \n", 279 | "#Splitting into Train and Test set\n", 280 | "values=df.values\n", 281 | "train, test = values[0:800,:], values[800:1000,:]\n", 282 | " \n", 283 | "# convert dataset into matrix\n", 284 | "def convertToMatrix(data, step=4):\n", 285 | "\tX, Y =[], []\n", 286 | "\tfor i in range(len(data)-step):\n", 287 | " \td=i+step\n", 288 | " \tX.append(data[i:d,])\n", 289 | " \tY.append(data[d,])\n", 290 | "\treturn np.array(X), np.array(Y)\n", 291 | " \n", 292 | "trainX,trainY =convertToMatrix(train,6)\n", 293 | "testX,testY =convertToMatrix(test,6)\n", 294 | "trainX = np.reshape(trainX, (trainX.shape[0], 1, trainX.shape[1]))\n", 295 | "testX = np.reshape(testX, (testX.shape[0], 1, testX.shape[1]))\n", 296 | " \n", 297 | "#Making the GRU Structure\n", 298 | "model = Sequential()\n", 299 | "model.add(GRU(units=4, input_shape=(1,6), activation=\"relu\"))\n", 300 | "model.add(Dense(8, activation=\"relu\"))\n", 301 | "model.add(Dense(1))\n", 302 | " \n", 303 | "#Compiling the Code\n", 304 | "model.compile(loss='mean_squared_error', optimizer='rmsprop')\n", 305 | "model.summary()\n", 306 | " \n", 307 | "#Training the Model\n", 308 | "model.fit(trainX,trainY, epochs=10, batch_size=500, verbose=1)\n", 309 | " \n", 310 | "#Predicting with the Model\n", 311 | "trainPredict = model.predict(trainX)\n", 312 | "testPredict= model.predict(testX)\n", 313 | "predicted=np.concatenate((trainPredict,testPredict),axis=0)" 314 | ] 315 | }, 316 | { 317 | "cell_type": "markdown", 318 | "metadata": {}, 319 | "source": [ 320 | "# Use Case" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": null, 326 | "metadata": {}, 327 | "outputs": [], 328 | "source": [ 329 | "import numpy\n", 330 | "import pandas as pd\n", 331 | "from keras.models import Sequential\n", 332 | "from keras.layers import Dense, LSTM, GRU\n", 333 | "from 
sklearn.preprocessing import StandardScaler\n", 334 | "from sklearn.metrics import mean_squared_error\n", 335 | "import math\n", 336 | " \n", 337 | "# convert an array of values into a dataset matrix\n", 338 | "def create_dataset(dataset, step=1):\n", 339 | "\tdataX, dataY = [], []\n", 340 | "\tfor i in range(len(dataset)-step-1):\n", 341 | " \ta = dataset[i:(i+step), 0]\n", 342 | " \tdataX.append(a)\n", 343 | " \tdataY.append(dataset[i + step, 0])\n", 344 | "\treturn numpy.array(dataX), numpy.array(dataY)\n", 345 | " \n", 346 | "# load the dataset\n", 347 | "dataframe = pd.read_csv('carriage.csv', usecols=[1])\n", 348 | "dataset = dataframe.values\n", 349 | "dataset = dataset.astype('float32')\n", 350 | " \n", 351 | "# standardize the dataset\n", 352 | "scaler = StandardScaler()\n", 353 | "dataset = scaler.fit_transform(dataset)\n", 354 | " \n", 355 | "# split into train and test sets\n", 356 | "train_size = int(len(dataset) * 0.90)\n", 357 | "test_size = len(dataset) - train_size\n", 358 | "train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]\n", 359 | " \n", 360 | "# Reshaping Data for the model\n", 361 | "step = 1\n", 362 | "train_X, train_Y = create_dataset(train, step)\n", 363 | "test_X, test_Y = create_dataset(test, step)\n", 364 | " \n", 365 | "train_X = numpy.reshape(train_X, (train_X.shape[0], 1, train_X.shape[1]))\n", 366 | "test_X = numpy.reshape(test_X, (test_X.shape[0], 1, test_X.shape[1]))\n", 367 | " \n", 368 | "# create and fit the LSTM network\n", 369 | "model = Sequential()\n", 370 | "model.add(LSTM(10, input_shape=(1, step)))\n", 371 | "model.add(Dense(1))\n", 372 | "model.compile(loss='mean_squared_error', optimizer='adam')\n", 373 | "model.summary()\n", 374 | "model.fit(train_X, train_Y, epochs=10, batch_size=50, verbose=1)\n", 375 | "\n", 376 | "# create and fit the GRU network\n", 377 | "model1 = Sequential()\n", 378 | "model1.add(GRU(10, input_shape=(1, step)))\n", 379 | "model1.add(Dense(1))\n", 380 | "model1.compile(loss='mean_squared_error', optimizer='adam')\n", 381 | "model1.summary()\n", 382 | "model1.fit(train_X, train_Y, epochs=10, batch_size=50, verbose=1)\n", 383 | " \n", 384 | "# make predictions from LSTM\n", 385 | "trainPredict = model.predict(train_X)\n", 386 | "testPredict = model.predict(test_X)\n", 387 | "\n", 388 | "# make predictions from GRU\n", 389 | "trainPredict1 = model1.predict(train_X)\n", 390 | "testPredict1 = model1.predict(test_X)\n", 391 | " \n", 392 | "# invert predictions from LSTM\n", 393 | "trainPredict = scaler.inverse_transform(trainPredict)\n", 394 | "train_Y = scaler.inverse_transform([train_Y])\n", 395 | "testPredict = scaler.inverse_transform(testPredict)\n", 396 | "test_Y = scaler.inverse_transform([test_Y])\n", 397 | "\n", 398 | "# invert predictions from GRU\n", 399 | "trainPredict1 = scaler.inverse_transform(trainPredict1)\n", 400 | "testPredict1 = scaler.inverse_transform(testPredict1)\n", 401 | " \n", 402 | "# calculate root mean squared error for LSTM\n", 403 | "print(\"*****Results for LSTMs*****\")\n", 404 | "trainScore = math.sqrt(mean_squared_error(train_Y[0], trainPredict[:,0]))\n", 405 | "print('Error in Training data is: %.2f RMSE' % (trainScore))\n", 406 | "testScore = math.sqrt(mean_squared_error(test_Y[0], testPredict[:,0]))\n", 407 | "print('Error in Testing data is: %.2f RMSE' % (testScore))\n", 408 | "\n", 409 | "# calculate root mean squared error for GRU\n", 410 | "print(\"*****Results for GRUs*****\")\n", 411 | "trainScore1 = math.sqrt(mean_squared_error(train_Y[0], 
trainPredict1[:,0]))\n", 412 | "print('Error in Training data is: %.2f RMSE' % (trainScore1))\n", 413 | "testScore1 = math.sqrt(mean_squared_error(test_Y[0], testPredict1[:,0]))\n", 414 | "print('Error in Testing data is: %.2f RMSE' % (testScore1))" 415 | ] 416 | }, 417 | { 418 | "cell_type": "code", 419 | "execution_count": null, 420 | "metadata": {}, 421 | "outputs": [], 422 | "source": [] 423 | } 424 | ], 425 | "metadata": { 426 | "kernelspec": { 427 | "display_name": "ADP", 428 | "language": "python", 429 | "name": "adp" 430 | }, 431 | "language_info": { 432 | "codemirror_mode": { 433 | "name": "ipython", 434 | "version": 3 435 | }, 436 | "file_extension": ".py", 437 | "mimetype": "text/x-python", 438 | "name": "python", 439 | "nbconvert_exporter": "python", 440 | "pygments_lexer": "ipython3", 441 | "version": "3.7.4" 442 | } 443 | }, 444 | "nbformat": 4, 445 | "nbformat_minor": 2 446 | } 447 | -------------------------------------------------------------------------------- /Chapter 6.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Adaptive Neuro Fuzzy Inference System" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "# Importing necessary libraries\n", 17 | "import anfis\n", 18 | "from anfis.membership import membershipfunction, mfDerivs\n", 19 | "import numpy\n", 20 | "training_data = numpy.loadtxt(\"training.txt\", usecols=[1,2,3])\n", 21 | "X = training_data [:,0:2]\n", 22 | "Y = training_data [:,2]\n", 23 | "# Defining the Membership Functions\n", 24 | "mf = [[['gaussmf',{'mean':0.,'sigma':1.}],['gaussmf',{'mean':-1.,'sigma':2.}],['gaussmf',{'mean':-4.,'sigma':10.}],['gaussmf',{'mean':-7.,'sigma':7.}]], [['gaussmf',{'mean':1.,'sigma':2.}],['gaussmf',{'mean':2.,'sigma':3.}],['gaussmf',{'mean':-2.,'sigma':10.}],['gaussmf',{'mean':-10.5,'sigma':5.}]]]\n", 25 | "# Updating the model with Membership Functions\n", 26 | "mfc = membershipfunction.MemFuncs(mf)\n", 27 | "# Creating the ANFIS Model Object\n", 28 | "anf = anfis.ANFIS(X, Y, mfc)\n", 29 | "# Fitting the ANFIS Model\n", 30 | "anf.trainHybridJangOffLine(epochs=20)\n", 31 | "# Printing Output\n", 32 | "print(round(anf.consequents[-1][0],6))\n", 33 | "print(round(anf.consequents[-2][0],6))\n", 34 | "print(round(anf.fittedValues[9][0],6))\n", 35 | "# Plotting Model Performance\n", 36 | "anf.plotErrors()\n", 37 | "anf.plotResults()" 38 | ] 39 | } 40 | ], 41 | "metadata": { 42 | "kernelspec": { 43 | "display_name": "ADP", 44 | "language": "python", 45 | "name": "adp" 46 | }, 47 | "language_info": { 48 | "codemirror_mode": { 49 | "name": "ipython", 50 | "version": 3 51 | }, 52 | "file_extension": ".py", 53 | "mimetype": "text/x-python", 54 | "name": "python", 55 | "nbconvert_exporter": "python", 56 | "pygments_lexer": "ipython3", 57 | "version": "3.7.4" 58 | } 59 | }, 60 | "nbformat": 4, 61 | "nbformat_minor": 2 62 | } 63 | -------------------------------------------------------------------------------- /Chapter 7.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fuzzy Clustering" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import pandas as pd\n", 17 | "import numpy as np\n", 18 | 
"import numpy as np\n", 19 | "import logging\n", 20 | "from fuzzy_clustering import FCM\n", 21 | "from visualization import draw_model_2d\n", 22 | "from sklearn import preprocessing\n", 23 | "\n", 24 | "\n", 25 | "dataset = pd.read_csv(\"AirlinesCluster.csv\") #Importing the airlines data\n", 26 | "\n", 27 | "dataset1 = dataset.copy() #Making a copy so that original data remains unaffected\n", 28 | "\n", 29 | "dataset1 = dataset1[[\"Balance\", \"BonusMiles\"]][:500] #Selecting only first 500 rows for faster computation\n", 30 | "\n", 31 | "\n", 32 | "dataset1_standardized = preprocessing.scale(dataset1) #Standardizing the data to scale it between the upper and lower limit of 1 and 0\n", 33 | "\n", 34 | "dataset1_standardized = pd.DataFrame(dataset1_standardized)\n", 35 | "\n", 36 | "fcm.set_logger(tostdout=False) #Telling the package class to stop the unnecessary output\n", 37 | "\n", 38 | "fcm = FCM(n_clusters=5) #Defining k=5\n", 39 | "\n", 40 | "fcm.fit(dataset1_standardized) #Training on data\n", 41 | "\n", 42 | "predicted_membership = fcm.predict(np.array(dataset1_standardized)) #Testing on same data\n", 43 | "\n", 44 | "draw_model_2d(fcm, data=np.array(dataset1_standardized), membership=predicted_membership) #Visualizing the data" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "# Fuzzy Adaptive Resonance Theory" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "from functools import partial\n", 61 | "import numpy as np\n", 62 | "import FuzzyART as f\n", 63 | "import sklearn.datasets as ds\n", 64 | "\n", 65 | "l1_norm = partial(np.linalg.norm, ord=1, axis=-1)#Used for regularization so that we can penalize the parameters that are not important\n", 66 | "\n", 67 | "if __name__ == '__main__': \n", 68 | "\n", 69 | " iris = ds.load_iris()#load the dataset in the python environment\n", 70 | "\n", 71 | "data = iris['data'] / np.max(iris['data'], axis=0)#standardize the dataset\n", 72 | "\n", 73 | "net = f.FuzzyART(alpha=0.5, rho=0.5) #Initialize the FuzzyART Hyperparameters\n", 74 | "\n", 75 | " net.train(data, epochs=100) #Train on the data\n", 76 | "\n", 77 | " print(net.test(data).astype(int)) #Print the Cluster Results\n", 78 | "\n", 79 | " print(iris['target']) #Match the cluster results" 80 | ] 81 | } 82 | ], 83 | "metadata": { 84 | "kernelspec": { 85 | "display_name": "ADP", 86 | "language": "python", 87 | "name": "adp" 88 | }, 89 | "language_info": { 90 | "codemirror_mode": { 91 | "name": "ipython", 92 | "version": 3 93 | }, 94 | "file_extension": ".py", 95 | "mimetype": "text/x-python", 96 | "name": "python", 97 | "nbconvert_exporter": "python", 98 | "pygments_lexer": "ipython3", 99 | "version": "3.7.4" 100 | } 101 | }, 102 | "nbformat": 4, 103 | "nbformat_minor": 2 104 | } 105 | -------------------------------------------------------------------------------- /Contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing to Apress Source Code 2 | 3 | Copyright for Apress source code belongs to the author(s). However, under fair use you are encouraged to fork and contribute minor corrections and updates for the benefit of the author(s) and other readers. 4 | 5 | ## How to Contribute 6 | 7 | 1. Make sure you have a GitHub account. 8 | 2. Fork the repository for the relevant book. 9 | 3. Create a new branch on which to make your change, e.g. 10 | `git checkout -b my_code_contribution` 11 | 4. 
Commit your change. Include a commit message describing the correction. Please note that if your commit message is not clear, the correction will not be accepted. 12 | 5. Submit a pull request. 13 | 14 | Thank you for your contribution! -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Freeware License, some rights reserved 2 | 3 | Copyright (c) 2020 Himanshu Singh and Yunis Ahmad Lone 4 | 5 | Permission is hereby granted, free of charge, to anyone obtaining a copy 6 | of this software and associated documentation files (the "Software"), 7 | to work with the Software within the limits of freeware distribution and fair use. 8 | This includes the rights to use, copy, and modify the Software for personal use. 9 | Users are also allowed and encouraged to submit corrections and modifications 10 | to the Software for the benefit of other users. 11 | 12 | It is not allowed to reuse, modify, or redistribute the Software for 13 | commercial use in any way, or for a user’s educational materials such as books 14 | or blog articles without prior permission from the copyright holder. 15 | 16 | The above copyright notice and this permission notice need to be included 17 | in all copies or substantial portions of the software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 22 | AUTHORS OR COPYRIGHT HOLDERS OR APRESS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 25 | SOFTWARE. 26 | 27 | 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Apress Source Code 2 | 3 | This repository accompanies [*Deep Neuro-Fuzzy Systems with Python*](https://www.apress.com/9781484253601) by Himanshu Singh and Yunis Ahmad Lone (Apress, 2020). 4 | 5 | [comment]: #cover 6 | ![Cover image](9781484253601.jpg) 7 | 8 | Download the files as a zip using the green button, or clone the repository to your machine using Git. 9 | 10 | ## Releases 11 | 12 | Release v1.0 corresponds to the code in the published book, without corrections or updates. 13 | 14 | ## Contributions 15 | 16 | See the file Contributing.md for more information on how you can contribute to this repository. -------------------------------------------------------------------------------- /errata.md: -------------------------------------------------------------------------------- 1 | # Errata for *Book Title* 2 | 3 | On **page xx** [Summary of error]: 4 | 5 | Details of error here. Highlight key pieces in **bold**. 6 | 7 | *** 8 | 9 | On **page xx** [Summary of error]: 10 | 11 | Details of error here. Highlight key pieces in **bold**. 12 | 13 | *** -------------------------------------------------------------------------------- /titanic_test.csv: -------------------------------------------------------------------------------- 1 | PassengerId,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked 2 | 892,3,"Kelly, Mr. James",male,34.5,0,0,330911,7.8292,,Q 3 | 893,3,"Wilkes, Mrs. James (Ellen Needs)",female,47,1,0,363272,7,,S 4 | 894,2,"Myles, Mr. 
Thomas Francis",male,62,0,0,240276,9.6875,,Q 5 | 895,3,"Wirz, Mr. Albert",male,27,0,0,315154,8.6625,,S 6 | 896,3,"Hirvonen, Mrs. Alexander (Helga E Lindqvist)",female,22,1,1,3101298,12.2875,,S 7 | 897,3,"Svensson, Mr. Johan Cervin",male,14,0,0,7538,9.225,,S 8 | 898,3,"Connolly, Miss. Kate",female,30,0,0,330972,7.6292,,Q 9 | 899,2,"Caldwell, Mr. Albert Francis",male,26,1,1,248738,29,,S 10 | 900,3,"Abrahim, Mrs. Joseph (Sophie Halaut Easu)",female,18,0,0,2657,7.2292,,C 11 | 901,3,"Davies, Mr. John Samuel",male,21,2,0,A/4 48871,24.15,,S 12 | 902,3,"Ilieff, Mr. Ylio",male,,0,0,349220,7.8958,,S 13 | 903,1,"Jones, Mr. Charles Cresson",male,46,0,0,694,26,,S 14 | 904,1,"Snyder, Mrs. John Pillsbury (Nelle Stevenson)",female,23,1,0,21228,82.2667,B45,S 15 | 905,2,"Howard, Mr. Benjamin",male,63,1,0,24065,26,,S 16 | 906,1,"Chaffee, Mrs. Herbert Fuller (Carrie Constance Toogood)",female,47,1,0,W.E.P. 5734,61.175,E31,S 17 | 907,2,"del Carlo, Mrs. Sebastiano (Argenia Genovesi)",female,24,1,0,SC/PARIS 2167,27.7208,,C 18 | 908,2,"Keane, Mr. Daniel",male,35,0,0,233734,12.35,,Q 19 | 909,3,"Assaf, Mr. Gerios",male,21,0,0,2692,7.225,,C 20 | 910,3,"Ilmakangas, Miss. Ida Livija",female,27,1,0,STON/O2. 3101270,7.925,,S 21 | 911,3,"Assaf Khalil, Mrs. Mariana (Miriam"")""",female,45,0,0,2696,7.225,,C 22 | 912,1,"Rothschild, Mr. Martin",male,55,1,0,PC 17603,59.4,,C 23 | 913,3,"Olsen, Master. Artur Karl",male,9,0,1,C 17368,3.1708,,S 24 | 914,1,"Flegenheim, Mrs. Alfred (Antoinette)",female,,0,0,PC 17598,31.6833,,S 25 | 915,1,"Williams, Mr. Richard Norris II",male,21,0,1,PC 17597,61.3792,,C 26 | 916,1,"Ryerson, Mrs. Arthur Larned (Emily Maria Borie)",female,48,1,3,PC 17608,262.375,B57 B59 B63 B66,C 27 | 917,3,"Robins, Mr. Alexander A",male,50,1,0,A/5. 3337,14.5,,S 28 | 918,1,"Ostby, Miss. Helene Ragnhild",female,22,0,1,113509,61.9792,B36,C 29 | 919,3,"Daher, Mr. Shedid",male,22.5,0,0,2698,7.225,,C 30 | 920,1,"Brady, Mr. John Bertram",male,41,0,0,113054,30.5,A21,S 31 | 921,3,"Samaan, Mr. Elias",male,,2,0,2662,21.6792,,C 32 | 922,2,"Louch, Mr. Charles Alexander",male,50,1,0,SC/AH 3085,26,,S 33 | 923,2,"Jefferys, Mr. Clifford Thomas",male,24,2,0,C.A. 31029,31.5,,S 34 | 924,3,"Dean, Mrs. Bertram (Eva Georgetta Light)",female,33,1,2,C.A. 2315,20.575,,S 35 | 925,3,"Johnston, Mrs. Andrew G (Elizabeth Lily"" Watson)""",female,,1,2,W./C. 6607,23.45,,S 36 | 926,1,"Mock, Mr. Philipp Edmund",male,30,1,0,13236,57.75,C78,C 37 | 927,3,"Katavelas, Mr. Vassilios (Catavelas Vassilios"")""",male,18.5,0,0,2682,7.2292,,C 38 | 928,3,"Roth, Miss. Sarah A",female,,0,0,342712,8.05,,S 39 | 929,3,"Cacic, Miss. Manda",female,21,0,0,315087,8.6625,,S 40 | 930,3,"Sap, Mr. Julius",male,25,0,0,345768,9.5,,S 41 | 931,3,"Hee, Mr. Ling",male,,0,0,1601,56.4958,,S 42 | 932,3,"Karun, Mr. Franz",male,39,0,1,349256,13.4167,,C 43 | 933,1,"Franklin, Mr. Thomas Parham",male,,0,0,113778,26.55,D34,S 44 | 934,3,"Goldsmith, Mr. Nathan",male,41,0,0,SOTON/O.Q. 3101263,7.85,,S 45 | 935,2,"Corbett, Mrs. Walter H (Irene Colvin)",female,30,0,0,237249,13,,S 46 | 936,1,"Kimball, Mrs. Edwin Nelson Jr (Gertrude Parsons)",female,45,1,0,11753,52.5542,D19,S 47 | 937,3,"Peltomaki, Mr. Nikolai Johannes",male,25,0,0,STON/O 2. 3101291,7.925,,S 48 | 938,1,"Chevre, Mr. Paul Romaine",male,45,0,0,PC 17594,29.7,A9,C 49 | 939,3,"Shaughnessy, Mr. Patrick",male,,0,0,370374,7.75,,Q 50 | 940,1,"Bucknell, Mrs. William Robert (Emma Eliza Ward)",female,60,0,0,11813,76.2917,D15,C 51 | 941,3,"Coutts, Mrs. William (Winnie Minnie"" Treanor)""",female,36,0,2,C.A. 37671,15.9,,S 52 | 942,1,"Smith, Mr. 
Lucien Philip",male,24,1,0,13695,60,C31,S 53 | 943,2,"Pulbaum, Mr. Franz",male,27,0,0,SC/PARIS 2168,15.0333,,C 54 | 944,2,"Hocking, Miss. Ellen Nellie""""",female,20,2,1,29105,23,,S 55 | 945,1,"Fortune, Miss. Ethel Flora",female,28,3,2,19950,263,C23 C25 C27,S 56 | 946,2,"Mangiavacchi, Mr. Serafino Emilio",male,,0,0,SC/A.3 2861,15.5792,,C 57 | 947,3,"Rice, Master. Albert",male,10,4,1,382652,29.125,,Q 58 | 948,3,"Cor, Mr. Bartol",male,35,0,0,349230,7.8958,,S 59 | 949,3,"Abelseth, Mr. Olaus Jorgensen",male,25,0,0,348122,7.65,F G63,S 60 | 950,3,"Davison, Mr. Thomas Henry",male,,1,0,386525,16.1,,S 61 | 951,1,"Chaudanson, Miss. Victorine",female,36,0,0,PC 17608,262.375,B61,C 62 | 952,3,"Dika, Mr. Mirko",male,17,0,0,349232,7.8958,,S 63 | 953,2,"McCrae, Mr. Arthur Gordon",male,32,0,0,237216,13.5,,S 64 | 954,3,"Bjorklund, Mr. Ernst Herbert",male,18,0,0,347090,7.75,,S 65 | 955,3,"Bradley, Miss. Bridget Delia",female,22,0,0,334914,7.725,,Q 66 | 956,1,"Ryerson, Master. John Borie",male,13,2,2,PC 17608,262.375,B57 B59 B63 B66,C 67 | 957,2,"Corey, Mrs. Percy C (Mary Phyllis Elizabeth Miller)",female,,0,0,F.C.C. 13534,21,,S 68 | 958,3,"Burns, Miss. Mary Delia",female,18,0,0,330963,7.8792,,Q 69 | 959,1,"Moore, Mr. Clarence Bloomfield",male,47,0,0,113796,42.4,,S 70 | 960,1,"Tucker, Mr. Gilbert Milligan Jr",male,31,0,0,2543,28.5375,C53,C 71 | 961,1,"Fortune, Mrs. Mark (Mary McDougald)",female,60,1,4,19950,263,C23 C25 C27,S 72 | 962,3,"Mulvihill, Miss. Bertha E",female,24,0,0,382653,7.75,,Q 73 | 963,3,"Minkoff, Mr. Lazar",male,21,0,0,349211,7.8958,,S 74 | 964,3,"Nieminen, Miss. Manta Josefina",female,29,0,0,3101297,7.925,,S 75 | 965,1,"Ovies y Rodriguez, Mr. Servando",male,28.5,0,0,PC 17562,27.7208,D43,C 76 | 966,1,"Geiger, Miss. Amalie",female,35,0,0,113503,211.5,C130,C 77 | 967,1,"Keeping, Mr. Edwin",male,32.5,0,0,113503,211.5,C132,C 78 | 968,3,"Miles, Mr. Frank",male,,0,0,359306,8.05,,S 79 | 969,1,"Cornell, Mrs. Robert Clifford (Malvina Helen Lamson)",female,55,2,0,11770,25.7,C101,S 80 | 970,2,"Aldworth, Mr. Charles Augustus",male,30,0,0,248744,13,,S 81 | 971,3,"Doyle, Miss. Elizabeth",female,24,0,0,368702,7.75,,Q 82 | 972,3,"Boulos, Master. Akar",male,6,1,1,2678,15.2458,,C 83 | 973,1,"Straus, Mr. Isidor",male,67,1,0,PC 17483,221.7792,C55 C57,S 84 | 974,1,"Case, Mr. Howard Brown",male,49,0,0,19924,26,,S 85 | 975,3,"Demetri, Mr. Marinko",male,,0,0,349238,7.8958,,S 86 | 976,2,"Lamb, Mr. John Joseph",male,,0,0,240261,10.7083,,Q 87 | 977,3,"Khalil, Mr. Betros",male,,1,0,2660,14.4542,,C 88 | 978,3,"Barry, Miss. Julia",female,27,0,0,330844,7.8792,,Q 89 | 979,3,"Badman, Miss. Emily Louisa",female,18,0,0,A/4 31416,8.05,,S 90 | 980,3,"O'Donoghue, Ms. Bridget",female,,0,0,364856,7.75,,Q 91 | 981,2,"Wells, Master. Ralph Lester",male,2,1,1,29103,23,,S 92 | 982,3,"Dyker, Mrs. Adolf Fredrik (Anna Elisabeth Judith Andersson)",female,22,1,0,347072,13.9,,S 93 | 983,3,"Pedersen, Mr. Olaf",male,,0,0,345498,7.775,,S 94 | 984,1,"Davidson, Mrs. Thornton (Orian Hays)",female,27,1,2,F.C. 12750,52,B71,S 95 | 985,3,"Guest, Mr. Robert",male,,0,0,376563,8.05,,S 96 | 986,1,"Birnbaum, Mr. Jakob",male,25,0,0,13905,26,,C 97 | 987,3,"Tenglin, Mr. Gunnar Isidor",male,25,0,0,350033,7.7958,,S 98 | 988,1,"Cavendish, Mrs. Tyrell William (Julia Florence Siegel)",female,76,1,0,19877,78.85,C46,S 99 | 989,3,"Makinen, Mr. Kalle Edvard",male,29,0,0,STON/O 2. 3101268,7.925,,S 100 | 990,3,"Braf, Miss. Elin Ester Maria",female,20,0,0,347471,7.8542,,S 101 | 991,3,"Nancarrow, Mr. William Henry",male,33,0,0,A./5. 3338,8.05,,S 102 | 992,1,"Stengel, Mrs. 
Charles Emil Henry (Annie May Morris)",female,43,1,0,11778,55.4417,C116,C 103 | 993,2,"Weisz, Mr. Leopold",male,27,1,0,228414,26,,S 104 | 994,3,"Foley, Mr. William",male,,0,0,365235,7.75,,Q 105 | 995,3,"Johansson Palmquist, Mr. Oskar Leander",male,26,0,0,347070,7.775,,S 106 | 996,3,"Thomas, Mrs. Alexander (Thamine Thelma"")""",female,16,1,1,2625,8.5167,,C 107 | 997,3,"Holthen, Mr. Johan Martin",male,28,0,0,C 4001,22.525,,S 108 | 998,3,"Buckley, Mr. Daniel",male,21,0,0,330920,7.8208,,Q 109 | 999,3,"Ryan, Mr. Edward",male,,0,0,383162,7.75,,Q 110 | 1000,3,"Willer, Mr. Aaron (Abi Weller"")""",male,,0,0,3410,8.7125,,S 111 | 1001,2,"Swane, Mr. George",male,18.5,0,0,248734,13,F,S 112 | 1002,2,"Stanton, Mr. Samuel Ward",male,41,0,0,237734,15.0458,,C 113 | 1003,3,"Shine, Miss. Ellen Natalia",female,,0,0,330968,7.7792,,Q 114 | 1004,1,"Evans, Miss. Edith Corse",female,36,0,0,PC 17531,31.6792,A29,C 115 | 1005,3,"Buckley, Miss. Katherine",female,18.5,0,0,329944,7.2833,,Q 116 | 1006,1,"Straus, Mrs. Isidor (Rosalie Ida Blun)",female,63,1,0,PC 17483,221.7792,C55 C57,S 117 | 1007,3,"Chronopoulos, Mr. Demetrios",male,18,1,0,2680,14.4542,,C 118 | 1008,3,"Thomas, Mr. John",male,,0,0,2681,6.4375,,C 119 | 1009,3,"Sandstrom, Miss. Beatrice Irene",female,1,1,1,PP 9549,16.7,G6,S 120 | 1010,1,"Beattie, Mr. Thomson",male,36,0,0,13050,75.2417,C6,C 121 | 1011,2,"Chapman, Mrs. John Henry (Sara Elizabeth Lawry)",female,29,1,0,SC/AH 29037,26,,S 122 | 1012,2,"Watt, Miss. Bertha J",female,12,0,0,C.A. 33595,15.75,,S 123 | 1013,3,"Kiernan, Mr. John",male,,1,0,367227,7.75,,Q 124 | 1014,1,"Schabert, Mrs. Paul (Emma Mock)",female,35,1,0,13236,57.75,C28,C 125 | 1015,3,"Carver, Mr. Alfred John",male,28,0,0,392095,7.25,,S 126 | 1016,3,"Kennedy, Mr. John",male,,0,0,368783,7.75,,Q 127 | 1017,3,"Cribb, Miss. Laura Alice",female,17,0,1,371362,16.1,,S 128 | 1018,3,"Brobeck, Mr. Karl Rudolf",male,22,0,0,350045,7.7958,,S 129 | 1019,3,"McCoy, Miss. Alicia",female,,2,0,367226,23.25,,Q 130 | 1020,2,"Bowenur, Mr. Solomon",male,42,0,0,211535,13,,S 131 | 1021,3,"Petersen, Mr. Marius",male,24,0,0,342441,8.05,,S 132 | 1022,3,"Spinner, Mr. Henry John",male,32,0,0,STON/OQ. 369943,8.05,,S 133 | 1023,1,"Gracie, Col. Archibald IV",male,53,0,0,113780,28.5,C51,C 134 | 1024,3,"Lefebre, Mrs. Frank (Frances)",female,,0,4,4133,25.4667,,S 135 | 1025,3,"Thomas, Mr. Charles P",male,,1,0,2621,6.4375,,C 136 | 1026,3,"Dintcheff, Mr. Valtcho",male,43,0,0,349226,7.8958,,S 137 | 1027,3,"Carlsson, Mr. Carl Robert",male,24,0,0,350409,7.8542,,S 138 | 1028,3,"Zakarian, Mr. Mapriededer",male,26.5,0,0,2656,7.225,,C 139 | 1029,2,"Schmidt, Mr. August",male,26,0,0,248659,13,,S 140 | 1030,3,"Drapkin, Miss. Jennie",female,23,0,0,SOTON/OQ 392083,8.05,,S 141 | 1031,3,"Goodwin, Mr. Charles Frederick",male,40,1,6,CA 2144,46.9,,S 142 | 1032,3,"Goodwin, Miss. Jessie Allis",female,10,5,2,CA 2144,46.9,,S 143 | 1033,1,"Daniels, Miss. Sarah",female,33,0,0,113781,151.55,,S 144 | 1034,1,"Ryerson, Mr. Arthur Larned",male,61,1,3,PC 17608,262.375,B57 B59 B63 B66,C 145 | 1035,2,"Beauchamp, Mr. Henry James",male,28,0,0,244358,26,,S 146 | 1036,1,"Lindeberg-Lind, Mr. Erik Gustaf (Mr Edward Lingrey"")""",male,42,0,0,17475,26.55,,S 147 | 1037,3,"Vander Planke, Mr. Julius",male,31,3,0,345763,18,,S 148 | 1038,1,"Hilliard, Mr. Herbert Henry",male,,0,0,17463,51.8625,E46,S 149 | 1039,3,"Davies, Mr. Evan",male,22,0,0,SC/A4 23568,8.05,,S 150 | 1040,1,"Crafton, Mr. John Bertram",male,,0,0,113791,26.55,,S 151 | 1041,2,"Lahtinen, Rev. William",male,30,1,1,250651,26,,S 152 | 1042,1,"Earnshaw, Mrs. 
Boulton (Olive Potter)",female,23,0,1,11767,83.1583,C54,C 153 | 1043,3,"Matinoff, Mr. Nicola",male,,0,0,349255,7.8958,,C 154 | 1044,3,"Storey, Mr. Thomas",male,60.5,0,0,3701,,,S 155 | 1045,3,"Klasen, Mrs. (Hulda Kristina Eugenia Lofqvist)",female,36,0,2,350405,12.1833,,S 156 | 1046,3,"Asplund, Master. Filip Oscar",male,13,4,2,347077,31.3875,,S 157 | 1047,3,"Duquemin, Mr. Joseph",male,24,0,0,S.O./P.P. 752,7.55,,S 158 | 1048,1,"Bird, Miss. Ellen",female,29,0,0,PC 17483,221.7792,C97,S 159 | 1049,3,"Lundin, Miss. Olga Elida",female,23,0,0,347469,7.8542,,S 160 | 1050,1,"Borebank, Mr. John James",male,42,0,0,110489,26.55,D22,S 161 | 1051,3,"Peacock, Mrs. Benjamin (Edith Nile)",female,26,0,2,SOTON/O.Q. 3101315,13.775,,S 162 | 1052,3,"Smyth, Miss. Julia",female,,0,0,335432,7.7333,,Q 163 | 1053,3,"Touma, Master. Georges Youssef",male,7,1,1,2650,15.2458,,C 164 | 1054,2,"Wright, Miss. Marion",female,26,0,0,220844,13.5,,S 165 | 1055,3,"Pearce, Mr. Ernest",male,,0,0,343271,7,,S 166 | 1056,2,"Peruschitz, Rev. Joseph Maria",male,41,0,0,237393,13,,S 167 | 1057,3,"Kink-Heilmann, Mrs. Anton (Luise Heilmann)",female,26,1,1,315153,22.025,,S 168 | 1058,1,"Brandeis, Mr. Emil",male,48,0,0,PC 17591,50.4958,B10,C 169 | 1059,3,"Ford, Mr. Edward Watson",male,18,2,2,W./C. 6608,34.375,,S 170 | 1060,1,"Cassebeer, Mrs. Henry Arthur Jr (Eleanor Genevieve Fosdick)",female,,0,0,17770,27.7208,,C 171 | 1061,3,"Hellstrom, Miss. Hilda Maria",female,22,0,0,7548,8.9625,,S 172 | 1062,3,"Lithman, Mr. Simon",male,,0,0,S.O./P.P. 251,7.55,,S 173 | 1063,3,"Zakarian, Mr. Ortin",male,27,0,0,2670,7.225,,C 174 | 1064,3,"Dyker, Mr. Adolf Fredrik",male,23,1,0,347072,13.9,,S 175 | 1065,3,"Torfa, Mr. Assad",male,,0,0,2673,7.2292,,C 176 | 1066,3,"Asplund, Mr. Carl Oscar Vilhelm Gustafsson",male,40,1,5,347077,31.3875,,S 177 | 1067,2,"Brown, Miss. Edith Eileen",female,15,0,2,29750,39,,S 178 | 1068,2,"Sincock, Miss. Maude",female,20,0,0,C.A. 33112,36.75,,S 179 | 1069,1,"Stengel, Mr. Charles Emil Henry",male,54,1,0,11778,55.4417,C116,C 180 | 1070,2,"Becker, Mrs. Allen Oliver (Nellie E Baumgardner)",female,36,0,3,230136,39,F4,S 181 | 1071,1,"Compton, Mrs. Alexander Taylor (Mary Eliza Ingersoll)",female,64,0,2,PC 17756,83.1583,E45,C 182 | 1072,2,"McCrie, Mr. James Matthew",male,30,0,0,233478,13,,S 183 | 1073,1,"Compton, Mr. Alexander Taylor Jr",male,37,1,1,PC 17756,83.1583,E52,C 184 | 1074,1,"Marvin, Mrs. Daniel Warner (Mary Graham Carmichael Farquarson)",female,18,1,0,113773,53.1,D30,S 185 | 1075,3,"Lane, Mr. Patrick",male,,0,0,7935,7.75,,Q 186 | 1076,1,"Douglas, Mrs. Frederick Charles (Mary Helene Baxter)",female,27,1,1,PC 17558,247.5208,B58 B60,C 187 | 1077,2,"Maybery, Mr. Frank Hubert",male,40,0,0,239059,16,,S 188 | 1078,2,"Phillips, Miss. Alice Frances Louisa",female,21,0,1,S.O./P.P. 2,21,,S 189 | 1079,3,"Davies, Mr. Joseph",male,17,2,0,A/4 48873,8.05,,S 190 | 1080,3,"Sage, Miss. Ada",female,,8,2,CA. 2343,69.55,,S 191 | 1081,2,"Veal, Mr. James",male,40,0,0,28221,13,,S 192 | 1082,2,"Angle, Mr. William A",male,34,1,0,226875,26,,S 193 | 1083,1,"Salomon, Mr. Abraham L",male,,0,0,111163,26,,S 194 | 1084,3,"van Billiard, Master. Walter John",male,11.5,1,1,A/5. 851,14.5,,S 195 | 1085,2,"Lingane, Mr. John",male,61,0,0,235509,12.35,,Q 196 | 1086,2,"Drew, Master. Marshall Brines",male,8,0,2,28220,32.5,,S 197 | 1087,3,"Karlsson, Mr. Julius Konrad Eugen",male,33,0,0,347465,7.8542,,S 198 | 1088,1,"Spedden, Master. Robert Douglas",male,6,0,2,16966,134.5,E34,C 199 | 1089,3,"Nilsson, Miss. 
Berta Olivia",female,18,0,0,347066,7.775,,S 200 | 1090,2,"Baimbrigge, Mr. Charles Robert",male,23,0,0,C.A. 31030,10.5,,S 201 | 1091,3,"Rasmussen, Mrs. (Lena Jacobsen Solvang)",female,,0,0,65305,8.1125,,S 202 | 1092,3,"Murphy, Miss. Nora",female,,0,0,36568,15.5,,Q 203 | 1093,3,"Danbom, Master. Gilbert Sigvard Emanuel",male,0.33,0,2,347080,14.4,,S 204 | 1094,1,"Astor, Col. John Jacob",male,47,1,0,PC 17757,227.525,C62 C64,C 205 | 1095,2,"Quick, Miss. Winifred Vera",female,8,1,1,26360,26,,S 206 | 1096,2,"Andrew, Mr. Frank Thomas",male,25,0,0,C.A. 34050,10.5,,S 207 | 1097,1,"Omont, Mr. Alfred Fernand",male,,0,0,F.C. 12998,25.7417,,C 208 | 1098,3,"McGowan, Miss. Katherine",female,35,0,0,9232,7.75,,Q 209 | 1099,2,"Collett, Mr. Sidney C Stuart",male,24,0,0,28034,10.5,,S 210 | 1100,1,"Rosenbaum, Miss. Edith Louise",female,33,0,0,PC 17613,27.7208,A11,C 211 | 1101,3,"Delalic, Mr. Redjo",male,25,0,0,349250,7.8958,,S 212 | 1102,3,"Andersen, Mr. Albert Karvin",male,32,0,0,C 4001,22.525,,S 213 | 1103,3,"Finoli, Mr. Luigi",male,,0,0,SOTON/O.Q. 3101308,7.05,,S 214 | 1104,2,"Deacon, Mr. Percy William",male,17,0,0,S.O.C. 14879,73.5,,S 215 | 1105,2,"Howard, Mrs. Benjamin (Ellen Truelove Arman)",female,60,1,0,24065,26,,S 216 | 1106,3,"Andersson, Miss. Ida Augusta Margareta",female,38,4,2,347091,7.775,,S 217 | 1107,1,"Head, Mr. Christopher",male,42,0,0,113038,42.5,B11,S 218 | 1108,3,"Mahon, Miss. Bridget Delia",female,,0,0,330924,7.8792,,Q 219 | 1109,1,"Wick, Mr. George Dennick",male,57,1,1,36928,164.8667,,S 220 | 1110,1,"Widener, Mrs. George Dunton (Eleanor Elkins)",female,50,1,1,113503,211.5,C80,C 221 | 1111,3,"Thomson, Mr. Alexander Morrison",male,,0,0,32302,8.05,,S 222 | 1112,2,"Duran y More, Miss. Florentina",female,30,1,0,SC/PARIS 2148,13.8583,,C 223 | 1113,3,"Reynolds, Mr. Harold J",male,21,0,0,342684,8.05,,S 224 | 1114,2,"Cook, Mrs. (Selena Rogers)",female,22,0,0,W./C. 14266,10.5,F33,S 225 | 1115,3,"Karlsson, Mr. Einar Gervasius",male,21,0,0,350053,7.7958,,S 226 | 1116,1,"Candee, Mrs. Edward (Helen Churchill Hungerford)",female,53,0,0,PC 17606,27.4458,,C 227 | 1117,3,"Moubarek, Mrs. George (Omine Amenia"" Alexander)""",female,,0,2,2661,15.2458,,C 228 | 1118,3,"Asplund, Mr. Johan Charles",male,23,0,0,350054,7.7958,,S 229 | 1119,3,"McNeill, Miss. Bridget",female,,0,0,370368,7.75,,Q 230 | 1120,3,"Everett, Mr. Thomas James",male,40.5,0,0,C.A. 6212,15.1,,S 231 | 1121,2,"Hocking, Mr. Samuel James Metcalfe",male,36,0,0,242963,13,,S 232 | 1122,2,"Sweet, Mr. George Frederick",male,14,0,0,220845,65,,S 233 | 1123,1,"Willard, Miss. Constance",female,21,0,0,113795,26.55,,S 234 | 1124,3,"Wiklund, Mr. Karl Johan",male,21,1,0,3101266,6.4958,,S 235 | 1125,3,"Linehan, Mr. Michael",male,,0,0,330971,7.8792,,Q 236 | 1126,1,"Cumings, Mr. John Bradley",male,39,1,0,PC 17599,71.2833,C85,C 237 | 1127,3,"Vendel, Mr. Olof Edvin",male,20,0,0,350416,7.8542,,S 238 | 1128,1,"Warren, Mr. Frank Manley",male,64,1,0,110813,75.25,D37,C 239 | 1129,3,"Baccos, Mr. Raffull",male,20,0,0,2679,7.225,,C 240 | 1130,2,"Hiltunen, Miss. Marta",female,18,1,1,250650,13,,S 241 | 1131,1,"Douglas, Mrs. Walter Donald (Mahala Dutton)",female,48,1,0,PC 17761,106.425,C86,C 242 | 1132,1,"Lindstrom, Mrs. Carl Johan (Sigrid Posse)",female,55,0,0,112377,27.7208,,C 243 | 1133,2,"Christy, Mrs. (Alice Frances)",female,45,0,2,237789,30,,S 244 | 1134,1,"Spedden, Mr. Frederic Oakley",male,45,1,1,16966,134.5,E34,C 245 | 1135,3,"Hyman, Mr. Abraham",male,,0,0,3470,7.8875,,S 246 | 1136,3,"Johnston, Master. William Arthur Willie""""",male,,1,2,W./C. 
6607,23.45,,S 247 | 1137,1,"Kenyon, Mr. Frederick R",male,41,1,0,17464,51.8625,D21,S 248 | 1138,2,"Karnes, Mrs. J Frank (Claire Bennett)",female,22,0,0,F.C.C. 13534,21,,S 249 | 1139,2,"Drew, Mr. James Vivian",male,42,1,1,28220,32.5,,S 250 | 1140,2,"Hold, Mrs. Stephen (Annie Margaret Hill)",female,29,1,0,26707,26,,S 251 | 1141,3,"Khalil, Mrs. Betros (Zahie Maria"" Elias)""",female,,1,0,2660,14.4542,,C 252 | 1142,2,"West, Miss. Barbara J",female,0.92,1,2,C.A. 34651,27.75,,S 253 | 1143,3,"Abrahamsson, Mr. Abraham August Johannes",male,20,0,0,SOTON/O2 3101284,7.925,,S 254 | 1144,1,"Clark, Mr. Walter Miller",male,27,1,0,13508,136.7792,C89,C 255 | 1145,3,"Salander, Mr. Karl Johan",male,24,0,0,7266,9.325,,S 256 | 1146,3,"Wenzel, Mr. Linhart",male,32.5,0,0,345775,9.5,,S 257 | 1147,3,"MacKay, Mr. George William",male,,0,0,C.A. 42795,7.55,,S 258 | 1148,3,"Mahon, Mr. John",male,,0,0,AQ/4 3130,7.75,,Q 259 | 1149,3,"Niklasson, Mr. Samuel",male,28,0,0,363611,8.05,,S 260 | 1150,2,"Bentham, Miss. Lilian W",female,19,0,0,28404,13,,S 261 | 1151,3,"Midtsjo, Mr. Karl Albert",male,21,0,0,345501,7.775,,S 262 | 1152,3,"de Messemaeker, Mr. Guillaume Joseph",male,36.5,1,0,345572,17.4,,S 263 | 1153,3,"Nilsson, Mr. August Ferdinand",male,21,0,0,350410,7.8542,,S 264 | 1154,2,"Wells, Mrs. Arthur Henry (Addie"" Dart Trevaskis)""",female,29,0,2,29103,23,,S 265 | 1155,3,"Klasen, Miss. Gertrud Emilia",female,1,1,1,350405,12.1833,,S 266 | 1156,2,"Portaluppi, Mr. Emilio Ilario Giuseppe",male,30,0,0,C.A. 34644,12.7375,,C 267 | 1157,3,"Lyntakoff, Mr. Stanko",male,,0,0,349235,7.8958,,S 268 | 1158,1,"Chisholm, Mr. Roderick Robert Crispin",male,,0,0,112051,0,,S 269 | 1159,3,"Warren, Mr. Charles William",male,,0,0,C.A. 49867,7.55,,S 270 | 1160,3,"Howard, Miss. May Elizabeth",female,,0,0,A. 2. 39186,8.05,,S 271 | 1161,3,"Pokrnic, Mr. Mate",male,17,0,0,315095,8.6625,,S 272 | 1162,1,"McCaffry, Mr. Thomas Francis",male,46,0,0,13050,75.2417,C6,C 273 | 1163,3,"Fox, Mr. Patrick",male,,0,0,368573,7.75,,Q 274 | 1164,1,"Clark, Mrs. Walter Miller (Virginia McDowell)",female,26,1,0,13508,136.7792,C89,C 275 | 1165,3,"Lennon, Miss. Mary",female,,1,0,370371,15.5,,Q 276 | 1166,3,"Saade, Mr. Jean Nassr",male,,0,0,2676,7.225,,C 277 | 1167,2,"Bryhl, Miss. Dagmar Jenny Ingeborg ",female,20,1,0,236853,26,,S 278 | 1168,2,"Parker, Mr. Clifford Richard",male,28,0,0,SC 14888,10.5,,S 279 | 1169,2,"Faunthorpe, Mr. Harry",male,40,1,0,2926,26,,S 280 | 1170,2,"Ware, Mr. John James",male,30,1,0,CA 31352,21,,S 281 | 1171,2,"Oxenham, Mr. Percy Thomas",male,22,0,0,W./C. 14260,10.5,,S 282 | 1172,3,"Oreskovic, Miss. Jelka",female,23,0,0,315085,8.6625,,S 283 | 1173,3,"Peacock, Master. Alfred Edward",male,0.75,1,1,SOTON/O.Q. 3101315,13.775,,S 284 | 1174,3,"Fleming, Miss. Honora",female,,0,0,364859,7.75,,Q 285 | 1175,3,"Touma, Miss. Maria Youssef",female,9,1,1,2650,15.2458,,C 286 | 1176,3,"Rosblom, Miss. Salli Helena",female,2,1,1,370129,20.2125,,S 287 | 1177,3,"Dennis, Mr. William",male,36,0,0,A/5 21175,7.25,,S 288 | 1178,3,"Franklin, Mr. Charles (Charles Fardon)",male,,0,0,SOTON/O.Q. 3101314,7.25,,S 289 | 1179,1,"Snyder, Mr. John Pillsbury",male,24,1,0,21228,82.2667,B45,S 290 | 1180,3,"Mardirosian, Mr. Sarkis",male,,0,0,2655,7.2292,F E46,C 291 | 1181,3,"Ford, Mr. Arthur",male,,0,0,A/5 1478,8.05,,S 292 | 1182,1,"Rheims, Mr. George Alexander Lucien",male,,0,0,PC 17607,39.6,,S 293 | 1183,3,"Daly, Miss. Margaret Marcella Maggie""""",female,30,0,0,382650,6.95,,Q 294 | 1184,3,"Nasr, Mr. Mustafa",male,,0,0,2652,7.2292,,C 295 | 1185,1,"Dodge, Dr. 
Washington",male,53,1,1,33638,81.8583,A34,S 296 | 1186,3,"Wittevrongel, Mr. Camille",male,36,0,0,345771,9.5,,S 297 | 1187,3,"Angheloff, Mr. Minko",male,26,0,0,349202,7.8958,,S 298 | 1188,2,"Laroche, Miss. Louise",female,1,1,2,SC/Paris 2123,41.5792,,C 299 | 1189,3,"Samaan, Mr. Hanna",male,,2,0,2662,21.6792,,C 300 | 1190,1,"Loring, Mr. Joseph Holland",male,30,0,0,113801,45.5,,S 301 | 1191,3,"Johansson, Mr. Nils",male,29,0,0,347467,7.8542,,S 302 | 1192,3,"Olsson, Mr. Oscar Wilhelm",male,32,0,0,347079,7.775,,S 303 | 1193,2,"Malachard, Mr. Noel",male,,0,0,237735,15.0458,D,C 304 | 1194,2,"Phillips, Mr. Escott Robert",male,43,0,1,S.O./P.P. 2,21,,S 305 | 1195,3,"Pokrnic, Mr. Tome",male,24,0,0,315092,8.6625,,S 306 | 1196,3,"McCarthy, Miss. Catherine Katie""""",female,,0,0,383123,7.75,,Q 307 | 1197,1,"Crosby, Mrs. Edward Gifford (Catherine Elizabeth Halstead)",female,64,1,1,112901,26.55,B26,S 308 | 1198,1,"Allison, Mr. Hudson Joshua Creighton",male,30,1,2,113781,151.55,C22 C26,S 309 | 1199,3,"Aks, Master. Philip Frank",male,0.83,0,1,392091,9.35,,S 310 | 1200,1,"Hays, Mr. Charles Melville",male,55,1,1,12749,93.5,B69,S 311 | 1201,3,"Hansen, Mrs. Claus Peter (Jennie L Howard)",female,45,1,0,350026,14.1083,,S 312 | 1202,3,"Cacic, Mr. Jego Grga",male,18,0,0,315091,8.6625,,S 313 | 1203,3,"Vartanian, Mr. David",male,22,0,0,2658,7.225,,C 314 | 1204,3,"Sadowitz, Mr. Harry",male,,0,0,LP 1588,7.575,,S 315 | 1205,3,"Carr, Miss. Jeannie",female,37,0,0,368364,7.75,,Q 316 | 1206,1,"White, Mrs. John Stuart (Ella Holmes)",female,55,0,0,PC 17760,135.6333,C32,C 317 | 1207,3,"Hagardon, Miss. Kate",female,17,0,0,AQ/3. 30631,7.7333,,Q 318 | 1208,1,"Spencer, Mr. William Augustus",male,57,1,0,PC 17569,146.5208,B78,C 319 | 1209,2,"Rogers, Mr. Reginald Harry",male,19,0,0,28004,10.5,,S 320 | 1210,3,"Jonsson, Mr. Nils Hilding",male,27,0,0,350408,7.8542,,S 321 | 1211,2,"Jefferys, Mr. Ernest Wilfred",male,22,2,0,C.A. 31029,31.5,,S 322 | 1212,3,"Andersson, Mr. Johan Samuel",male,26,0,0,347075,7.775,,S 323 | 1213,3,"Krekorian, Mr. Neshan",male,25,0,0,2654,7.2292,F E57,C 324 | 1214,2,"Nesson, Mr. Israel",male,26,0,0,244368,13,F2,S 325 | 1215,1,"Rowe, Mr. Alfred G",male,33,0,0,113790,26.55,,S 326 | 1216,1,"Kreuchen, Miss. Emilie",female,39,0,0,24160,211.3375,,S 327 | 1217,3,"Assam, Mr. Ali",male,23,0,0,SOTON/O.Q. 3101309,7.05,,S 328 | 1218,2,"Becker, Miss. Ruth Elizabeth",female,12,2,1,230136,39,F4,S 329 | 1219,1,"Rosenshine, Mr. George (Mr George Thorne"")""",male,46,0,0,PC 17585,79.2,,C 330 | 1220,2,"Clarke, Mr. Charles Valentine",male,29,1,0,2003,26,,S 331 | 1221,2,"Enander, Mr. Ingvar",male,21,0,0,236854,13,,S 332 | 1222,2,"Davies, Mrs. John Morgan (Elizabeth Agnes Mary White) ",female,48,0,2,C.A. 33112,36.75,,S 333 | 1223,1,"Dulles, Mr. William Crothers",male,39,0,0,PC 17580,29.7,A18,C 334 | 1224,3,"Thomas, Mr. Tannous",male,,0,0,2684,7.225,,C 335 | 1225,3,"Nakid, Mrs. Said (Waika Mary"" Mowad)""",female,19,1,1,2653,15.7417,,C 336 | 1226,3,"Cor, Mr. Ivan",male,27,0,0,349229,7.8958,,S 337 | 1227,1,"Maguire, Mr. John Edward",male,30,0,0,110469,26,C106,S 338 | 1228,2,"de Brito, Mr. Jose Joaquim",male,32,0,0,244360,13,,S 339 | 1229,3,"Elias, Mr. Joseph",male,39,0,2,2675,7.2292,,C 340 | 1230,2,"Denbury, Mr. Herbert",male,25,0,0,C.A. 31029,31.5,,S 341 | 1231,3,"Betros, Master. Seman",male,,0,0,2622,7.2292,,C 342 | 1232,2,"Fillbrook, Mr. Joseph Charles",male,18,0,0,C.A. 15185,10.5,,S 343 | 1233,3,"Lundstrom, Mr. Thure Edvin",male,32,0,0,350403,7.5792,,S 344 | 1234,3,"Sage, Mr. John George",male,,1,9,CA. 
2343,69.55,,S 345 | 1235,1,"Cardeza, Mrs. James Warburton Martinez (Charlotte Wardle Drake)",female,58,0,1,PC 17755,512.3292,B51 B53 B55,C 346 | 1236,3,"van Billiard, Master. James William",male,,1,1,A/5. 851,14.5,,S 347 | 1237,3,"Abelseth, Miss. Karen Marie",female,16,0,0,348125,7.65,,S 348 | 1238,2,"Botsford, Mr. William Hull",male,26,0,0,237670,13,,S 349 | 1239,3,"Whabee, Mrs. George Joseph (Shawneene Abi-Saab)",female,38,0,0,2688,7.2292,,C 350 | 1240,2,"Giles, Mr. Ralph",male,24,0,0,248726,13.5,,S 351 | 1241,2,"Walcroft, Miss. Nellie",female,31,0,0,F.C.C. 13528,21,,S 352 | 1242,1,"Greenfield, Mrs. Leo David (Blanche Strouse)",female,45,0,1,PC 17759,63.3583,D10 D12,C 353 | 1243,2,"Stokes, Mr. Philip Joseph",male,25,0,0,F.C.C. 13540,10.5,,S 354 | 1244,2,"Dibden, Mr. William",male,18,0,0,S.O.C. 14879,73.5,,S 355 | 1245,2,"Herman, Mr. Samuel",male,49,1,2,220845,65,,S 356 | 1246,3,"Dean, Miss. Elizabeth Gladys Millvina""""",female,0.17,1,2,C.A. 2315,20.575,,S 357 | 1247,1,"Julian, Mr. Henry Forbes",male,50,0,0,113044,26,E60,S 358 | 1248,1,"Brown, Mrs. John Murray (Caroline Lane Lamson)",female,59,2,0,11769,51.4792,C101,S 359 | 1249,3,"Lockyer, Mr. Edward",male,,0,0,1222,7.8792,,S 360 | 1250,3,"O'Keefe, Mr. Patrick",male,,0,0,368402,7.75,,Q 361 | 1251,3,"Lindell, Mrs. Edvard Bengtsson (Elin Gerda Persson)",female,30,1,0,349910,15.55,,S 362 | 1252,3,"Sage, Master. William Henry",male,14.5,8,2,CA. 2343,69.55,,S 363 | 1253,2,"Mallet, Mrs. Albert (Antoinette Magnin)",female,24,1,1,S.C./PARIS 2079,37.0042,,C 364 | 1254,2,"Ware, Mrs. John James (Florence Louise Long)",female,31,0,0,CA 31352,21,,S 365 | 1255,3,"Strilic, Mr. Ivan",male,27,0,0,315083,8.6625,,S 366 | 1256,1,"Harder, Mrs. George Achilles (Dorothy Annan)",female,25,1,0,11765,55.4417,E50,C 367 | 1257,3,"Sage, Mrs. John (Annie Bullen)",female,,1,9,CA. 2343,69.55,,S 368 | 1258,3,"Caram, Mr. Joseph",male,,1,0,2689,14.4583,,C 369 | 1259,3,"Riihivouri, Miss. Susanna Juhantytar Sanni""""",female,22,0,0,3101295,39.6875,,S 370 | 1260,1,"Gibson, Mrs. Leonard (Pauline C Boeson)",female,45,0,1,112378,59.4,,C 371 | 1261,2,"Pallas y Castello, Mr. Emilio",male,29,0,0,SC/PARIS 2147,13.8583,,C 372 | 1262,2,"Giles, Mr. Edgar",male,21,1,0,28133,11.5,,S 373 | 1263,1,"Wilson, Miss. Helen Alice",female,31,0,0,16966,134.5,E39 E41,C 374 | 1264,1,"Ismay, Mr. Joseph Bruce",male,49,0,0,112058,0,B52 B54 B56,S 375 | 1265,2,"Harbeck, Mr. William H",male,44,0,0,248746,13,,S 376 | 1266,1,"Dodge, Mrs. Washington (Ruth Vidaver)",female,54,1,1,33638,81.8583,A34,S 377 | 1267,1,"Bowen, Miss. Grace Scott",female,45,0,0,PC 17608,262.375,,C 378 | 1268,3,"Kink, Miss. Maria",female,22,2,0,315152,8.6625,,S 379 | 1269,2,"Cotterill, Mr. Henry Harry""""",male,21,0,0,29107,11.5,,S 380 | 1270,1,"Hipkins, Mr. William Edward",male,55,0,0,680,50,C39,S 381 | 1271,3,"Asplund, Master. Carl Edgar",male,5,4,2,347077,31.3875,,S 382 | 1272,3,"O'Connor, Mr. Patrick",male,,0,0,366713,7.75,,Q 383 | 1273,3,"Foley, Mr. Joseph",male,26,0,0,330910,7.8792,,Q 384 | 1274,3,"Risien, Mrs. Samuel (Emma)",female,,0,0,364498,14.5,,S 385 | 1275,3,"McNamee, Mrs. Neal (Eileen O'Leary)",female,19,1,0,376566,16.1,,S 386 | 1276,2,"Wheeler, Mr. Edwin Frederick""""",male,,0,0,SC/PARIS 2159,12.875,,S 387 | 1277,2,"Herman, Miss. Kate",female,24,1,2,220845,65,,S 388 | 1278,3,"Aronsson, Mr. Ernst Axel Algot",male,24,0,0,349911,7.775,,S 389 | 1279,2,"Ashby, Mr. John",male,57,0,0,244346,13,,S 390 | 1280,3,"Canavan, Mr. Patrick",male,21,0,0,364858,7.75,,Q 391 | 1281,3,"Palsson, Master. 
Paul Folke",male,6,3,1,349909,21.075,,S 392 | 1282,1,"Payne, Mr. Vivian Ponsonby",male,23,0,0,12749,93.5,B24,S 393 | 1283,1,"Lines, Mrs. Ernest H (Elizabeth Lindsey James)",female,51,0,1,PC 17592,39.4,D28,S 394 | 1284,3,"Abbott, Master. Eugene Joseph",male,13,0,2,C.A. 2673,20.25,,S 395 | 1285,2,"Gilbert, Mr. William",male,47,0,0,C.A. 30769,10.5,,S 396 | 1286,3,"Kink-Heilmann, Mr. Anton",male,29,3,1,315153,22.025,,S 397 | 1287,1,"Smith, Mrs. Lucien Philip (Mary Eloise Hughes)",female,18,1,0,13695,60,C31,S 398 | 1288,3,"Colbert, Mr. Patrick",male,24,0,0,371109,7.25,,Q 399 | 1289,1,"Frolicher-Stehli, Mrs. Maxmillian (Margaretha Emerentia Stehli)",female,48,1,1,13567,79.2,B41,C 400 | 1290,3,"Larsson-Rondberg, Mr. Edvard A",male,22,0,0,347065,7.775,,S 401 | 1291,3,"Conlon, Mr. Thomas Henry",male,31,0,0,21332,7.7333,,Q 402 | 1292,1,"Bonnell, Miss. Caroline",female,30,0,0,36928,164.8667,C7,S 403 | 1293,2,"Gale, Mr. Harry",male,38,1,0,28664,21,,S 404 | 1294,1,"Gibson, Miss. Dorothy Winifred",female,22,0,1,112378,59.4,,C 405 | 1295,1,"Carrau, Mr. Jose Pedro",male,17,0,0,113059,47.1,,S 406 | 1296,1,"Frauenthal, Mr. Isaac Gerald",male,43,1,0,17765,27.7208,D40,C 407 | 1297,2,"Nourney, Mr. Alfred (Baron von Drachstedt"")""",male,20,0,0,SC/PARIS 2166,13.8625,D38,C 408 | 1298,2,"Ware, Mr. William Jeffery",male,23,1,0,28666,10.5,,S 409 | 1299,1,"Widener, Mr. George Dunton",male,50,1,1,113503,211.5,C80,C 410 | 1300,3,"Riordan, Miss. Johanna Hannah""""",female,,0,0,334915,7.7208,,Q 411 | 1301,3,"Peacock, Miss. Treasteall",female,3,1,1,SOTON/O.Q. 3101315,13.775,,S 412 | 1302,3,"Naughton, Miss. Hannah",female,,0,0,365237,7.75,,Q 413 | 1303,1,"Minahan, Mrs. William Edward (Lillian E Thorpe)",female,37,1,0,19928,90,C78,Q 414 | 1304,3,"Henriksson, Miss. Jenny Lovisa",female,28,0,0,347086,7.775,,S 415 | 1305,3,"Spector, Mr. Woolf",male,,0,0,A.5. 3236,8.05,,S 416 | 1306,1,"Oliva y Ocana, Dona. Fermina",female,39,0,0,PC 17758,108.9,C105,C 417 | 1307,3,"Saether, Mr. Simon Sivertsen",male,38.5,0,0,SOTON/O.Q. 3101262,7.25,,S 418 | 1308,3,"Ware, Mr. Frederick",male,,0,0,359309,8.05,,S 419 | 1309,3,"Peter, Master. Michael J",male,,1,1,2668,22.3583,,C 420 | --------------------------------------------------------------------------------