├── .gitattributes
├── LICENSE
├── README.md
├── data
│   ├── MNIST
│   │   ├── processed
│   │   │   ├── test.pt
│   │   │   └── training.pt
│   │   └── raw
│   │       ├── t10k-images-idx3-ubyte
│   │       ├── t10k-images-idx3-ubyte.gz
│   │       ├── t10k-labels-idx1-ubyte
│   │       ├── t10k-labels-idx1-ubyte.gz
│   │       ├── train-images-idx3-ubyte
│   │       ├── train-images-idx3-ubyte.gz
│   │       ├── train-labels-idx1-ubyte
│   │       └── train-labels-idx1-ubyte.gz
│   ├── diabetes.csv
│   ├── iris.csv
│   ├── otto_train.csv
│   └── winequality-white.csv
├── lecture_01.ipynb
├── lecture_02.ipynb
├── lecture_03.ipynb
├── lecture_04.ipynb
├── lecture_05.ipynb
├── lecture_06.ipynb
├── lecture_07.ipynb
├── lecture_08.ipynb
├── lecture_08_a.ipynb
├── lecture_08_b.ipynb
├── lecture_09.ipynb
├── lecture_10.ipynb
├── lecture_11.ipynb
└── lecture_12.ipynb
/.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof.
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PyTorch Tutorials 2 | 3 | This repository is a set of notebooks that are useful for learning and getting hands-on intuition for the PyTorch Python package. 4 | 5 | The datasets are also included in the repo
(`/data`). 6 | 7 | This tutorial builds on the amazing work done by [Sun Kim](https://github.com/hunkim). 8 | 9 | The accompanying video lectures and slides are available at the following links: 10 | 11 | 1. [Video](https://www.youtube.com/playlist?list=PLlMkM4tgfjnJ3I-dbhO9JTw7gNty6o_2m&disable_polymer=true) 12 | 2. [Slides](https://drive.google.com/drive/folders/0B41Zbb4c8HVyUndGdGdJSXd5d3M) -------------------------------------------------------------------------------- /data/MNIST/processed/test.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/processed/test.pt -------------------------------------------------------------------------------- /data/MNIST/processed/training.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/processed/training.pt -------------------------------------------------------------------------------- /data/MNIST/raw/t10k-images-idx3-ubyte: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/t10k-images-idx3-ubyte -------------------------------------------------------------------------------- /data/MNIST/raw/t10k-images-idx3-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/t10k-images-idx3-ubyte.gz -------------------------------------------------------------------------------- /data/MNIST/raw/t10k-labels-idx1-ubyte: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/t10k-labels-idx1-ubyte -------------------------------------------------------------------------------- /data/MNIST/raw/t10k-labels-idx1-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/t10k-labels-idx1-ubyte.gz -------------------------------------------------------------------------------- /data/MNIST/raw/train-images-idx3-ubyte: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/train-images-idx3-ubyte -------------------------------------------------------------------------------- /data/MNIST/raw/train-images-idx3-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/train-images-idx3-ubyte.gz -------------------------------------------------------------------------------- /data/MNIST/raw/train-labels-idx1-ubyte: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/train-labels-idx1-ubyte -------------------------------------------------------------------------------- /data/MNIST/raw/train-labels-idx1-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abhimishra91/pytorch-tutorials/f4ed50e833dab4f175a0d941863537479315e9ea/data/MNIST/raw/train-labels-idx1-ubyte.gz -------------------------------------------------------------------------------- /data/diabetes.csv: -------------------------------------------------------------------------------- 1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome 2 | 6,148,72,35,0,33.6,0.627,50,1 3 | 1,85,66,29,0,26.6,0.351,31,0 4 | 8,183,64,0,0,23.3,0.672,32,1 5 | 1,89,66,23,94,28.1,0.167,21,0 6 | 0,137,40,35,168,43.1,2.288,33,1 7 | 5,116,74,0,0,25.6,0.201,30,0 8 | 3,78,50,32,88,31,0.248,26,1 9 | 10,115,0,0,0,35.3,0.134,29,0 10 | 2,197,70,45,543,30.5,0.158,53,1 11 | 8,125,96,0,0,0,0.232,54,1 12 | 4,110,92,0,0,37.6,0.191,30,0 13 | 10,168,74,0,0,38,0.537,34,1 14 | 10,139,80,0,0,27.1,1.441,57,0 15 | 1,189,60,23,846,30.1,0.398,59,1 16 | 5,166,72,19,175,25.8,0.587,51,1 17 | 7,100,0,0,0,30,0.484,32,1 18 | 0,118,84,47,230,45.8,0.551,31,1 19 | 7,107,74,0,0,29.6,0.254,31,1 20 | 1,103,30,38,83,43.3,0.183,33,0 21 | 1,115,70,30,96,34.6,0.529,32,1 22 | 3,126,88,41,235,39.3,0.704,27,0 23 | 8,99,84,0,0,35.4,0.388,50,0 24 | 7,196,90,0,0,39.8,0.451,41,1 25 | 9,119,80,35,0,29,0.263,29,1 26 | 11,143,94,33,146,36.6,0.254,51,1 27 | 10,125,70,26,115,31.1,0.205,41,1 28 | 7,147,76,0,0,39.4,0.257,43,1 29 | 1,97,66,15,140,23.2,0.487,22,0 30 | 13,145,82,19,110,22.2,0.245,57,0 31 | 5,117,92,0,0,34.1,0.337,38,0 32 | 5,109,75,26,0,36,0.546,60,0 33 | 3,158,76,36,245,31.6,0.851,28,1 34 | 3,88,58,11,54,24.8,0.267,22,0 35 | 6,92,92,0,0,19.9,0.188,28,0 36 | 10,122,78,31,0,27.6,0.512,45,0 37 | 4,103,60,33,192,24,0.966,33,0 38 | 11,138,76,0,0,33.2,0.42,35,0 39 | 9,102,76,37,0,32.9,0.665,46,1 40 | 2,90,68,42,0,38.2,0.503,27,1 41 | 4,111,72,47,207,37.1,1.39,56,1 42 | 3,180,64,25,70,34,0.271,26,0 43 | 7,133,84,0,0,40.2,0.696,37,0 44 | 7,106,92,18,0,22.7,0.235,48,0 45 | 9,171,110,24,240,45.4,0.721,54,1 46 | 7,159,64,0,0,27.4,0.294,40,0 47 | 0,180,66,39,0,42,1.893,25,1 48 | 1,146,56,0,0,29.7,0.564,29,0 49 | 2,71,70,27,0,28,0.586,22,0 50 | 7,103,66,32,0,39.1,0.344,31,1 51 | 7,105,0,0,0,0,0.305,24,0 52 | 1,103,80,11,82,19.4,0.491,22,0 53 | 1,101,50,15,36,24.2,0.526,26,0 54 | 5,88,66,21,23,24.4,0.342,30,0 55 | 8,176,90,34,300,33.7,0.467,58,1 56 | 7,150,66,42,342,34.7,0.718,42,0 57 | 1,73,50,10,0,23,0.248,21,0 58 | 7,187,68,39,304,37.7,0.254,41,1 59 | 0,100,88,60,110,46.8,0.962,31,0 60 | 0,146,82,0,0,40.5,1.781,44,0 61 | 
0,105,64,41,142,41.5,0.173,22,0 62 | 2,84,0,0,0,0,0.304,21,0 63 | 8,133,72,0,0,32.9,0.27,39,1 64 | 5,44,62,0,0,25,0.587,36,0 65 | 2,141,58,34,128,25.4,0.699,24,0 66 | 7,114,66,0,0,32.8,0.258,42,1 67 | 5,99,74,27,0,29,0.203,32,0 68 | 0,109,88,30,0,32.5,0.855,38,1 69 | 2,109,92,0,0,42.7,0.845,54,0 70 | 1,95,66,13,38,19.6,0.334,25,0 71 | 4,146,85,27,100,28.9,0.189,27,0 72 | 2,100,66,20,90,32.9,0.867,28,1 73 | 5,139,64,35,140,28.6,0.411,26,0 74 | 13,126,90,0,0,43.4,0.583,42,1 75 | 4,129,86,20,270,35.1,0.231,23,0 76 | 1,79,75,30,0,32,0.396,22,0 77 | 1,0,48,20,0,24.7,0.14,22,0 78 | 7,62,78,0,0,32.6,0.391,41,0 79 | 5,95,72,33,0,37.7,0.37,27,0 80 | 0,131,0,0,0,43.2,0.27,26,1 81 | 2,112,66,22,0,25,0.307,24,0 82 | 3,113,44,13,0,22.4,0.14,22,0 83 | 2,74,0,0,0,0,0.102,22,0 84 | 7,83,78,26,71,29.3,0.767,36,0 85 | 0,101,65,28,0,24.6,0.237,22,0 86 | 5,137,108,0,0,48.8,0.227,37,1 87 | 2,110,74,29,125,32.4,0.698,27,0 88 | 13,106,72,54,0,36.6,0.178,45,0 89 | 2,100,68,25,71,38.5,0.324,26,0 90 | 15,136,70,32,110,37.1,0.153,43,1 91 | 1,107,68,19,0,26.5,0.165,24,0 92 | 1,80,55,0,0,19.1,0.258,21,0 93 | 4,123,80,15,176,32,0.443,34,0 94 | 7,81,78,40,48,46.7,0.261,42,0 95 | 4,134,72,0,0,23.8,0.277,60,1 96 | 2,142,82,18,64,24.7,0.761,21,0 97 | 6,144,72,27,228,33.9,0.255,40,0 98 | 2,92,62,28,0,31.6,0.13,24,0 99 | 1,71,48,18,76,20.4,0.323,22,0 100 | 6,93,50,30,64,28.7,0.356,23,0 101 | 1,122,90,51,220,49.7,0.325,31,1 102 | 1,163,72,0,0,39,1.222,33,1 103 | 1,151,60,0,0,26.1,0.179,22,0 104 | 0,125,96,0,0,22.5,0.262,21,0 105 | 1,81,72,18,40,26.6,0.283,24,0 106 | 2,85,65,0,0,39.6,0.93,27,0 107 | 1,126,56,29,152,28.7,0.801,21,0 108 | 1,96,122,0,0,22.4,0.207,27,0 109 | 4,144,58,28,140,29.5,0.287,37,0 110 | 3,83,58,31,18,34.3,0.336,25,0 111 | 0,95,85,25,36,37.4,0.247,24,1 112 | 3,171,72,33,135,33.3,0.199,24,1 113 | 8,155,62,26,495,34,0.543,46,1 114 | 1,89,76,34,37,31.2,0.192,23,0 115 | 4,76,62,0,0,34,0.391,25,0 116 | 7,160,54,32,175,30.5,0.588,39,1 117 | 4,146,92,0,0,31.2,0.539,61,1 118 | 5,124,74,0,0,34,0.22,38,1 119 | 5,78,48,0,0,33.7,0.654,25,0 120 | 4,97,60,23,0,28.2,0.443,22,0 121 | 4,99,76,15,51,23.2,0.223,21,0 122 | 0,162,76,56,100,53.2,0.759,25,1 123 | 6,111,64,39,0,34.2,0.26,24,0 124 | 2,107,74,30,100,33.6,0.404,23,0 125 | 5,132,80,0,0,26.8,0.186,69,0 126 | 0,113,76,0,0,33.3,0.278,23,1 127 | 1,88,30,42,99,55,0.496,26,1 128 | 3,120,70,30,135,42.9,0.452,30,0 129 | 1,118,58,36,94,33.3,0.261,23,0 130 | 1,117,88,24,145,34.5,0.403,40,1 131 | 0,105,84,0,0,27.9,0.741,62,1 132 | 4,173,70,14,168,29.7,0.361,33,1 133 | 9,122,56,0,0,33.3,1.114,33,1 134 | 3,170,64,37,225,34.5,0.356,30,1 135 | 8,84,74,31,0,38.3,0.457,39,0 136 | 2,96,68,13,49,21.1,0.647,26,0 137 | 2,125,60,20,140,33.8,0.088,31,0 138 | 0,100,70,26,50,30.8,0.597,21,0 139 | 0,93,60,25,92,28.7,0.532,22,0 140 | 0,129,80,0,0,31.2,0.703,29,0 141 | 5,105,72,29,325,36.9,0.159,28,0 142 | 3,128,78,0,0,21.1,0.268,55,0 143 | 5,106,82,30,0,39.5,0.286,38,0 144 | 2,108,52,26,63,32.5,0.318,22,0 145 | 10,108,66,0,0,32.4,0.272,42,1 146 | 4,154,62,31,284,32.8,0.237,23,0 147 | 0,102,75,23,0,0,0.572,21,0 148 | 9,57,80,37,0,32.8,0.096,41,0 149 | 2,106,64,35,119,30.5,1.4,34,0 150 | 5,147,78,0,0,33.7,0.218,65,0 151 | 2,90,70,17,0,27.3,0.085,22,0 152 | 1,136,74,50,204,37.4,0.399,24,0 153 | 4,114,65,0,0,21.9,0.432,37,0 154 | 9,156,86,28,155,34.3,1.189,42,1 155 | 1,153,82,42,485,40.6,0.687,23,0 156 | 8,188,78,0,0,47.9,0.137,43,1 157 | 7,152,88,44,0,50,0.337,36,1 158 | 2,99,52,15,94,24.6,0.637,21,0 159 | 1,109,56,21,135,25.2,0.833,23,0 160 | 2,88,74,19,53,29,0.229,22,0 161 | 
17,163,72,41,114,40.9,0.817,47,1 162 | 4,151,90,38,0,29.7,0.294,36,0 163 | 7,102,74,40,105,37.2,0.204,45,0 164 | 0,114,80,34,285,44.2,0.167,27,0 165 | 2,100,64,23,0,29.7,0.368,21,0 166 | 0,131,88,0,0,31.6,0.743,32,1 167 | 6,104,74,18,156,29.9,0.722,41,1 168 | 3,148,66,25,0,32.5,0.256,22,0 169 | 4,120,68,0,0,29.6,0.709,34,0 170 | 4,110,66,0,0,31.9,0.471,29,0 171 | 3,111,90,12,78,28.4,0.495,29,0 172 | 6,102,82,0,0,30.8,0.18,36,1 173 | 6,134,70,23,130,35.4,0.542,29,1 174 | 2,87,0,23,0,28.9,0.773,25,0 175 | 1,79,60,42,48,43.5,0.678,23,0 176 | 2,75,64,24,55,29.7,0.37,33,0 177 | 8,179,72,42,130,32.7,0.719,36,1 178 | 6,85,78,0,0,31.2,0.382,42,0 179 | 0,129,110,46,130,67.1,0.319,26,1 180 | 5,143,78,0,0,45,0.19,47,0 181 | 5,130,82,0,0,39.1,0.956,37,1 182 | 6,87,80,0,0,23.2,0.084,32,0 183 | 0,119,64,18,92,34.9,0.725,23,0 184 | 1,0,74,20,23,27.7,0.299,21,0 185 | 5,73,60,0,0,26.8,0.268,27,0 186 | 4,141,74,0,0,27.6,0.244,40,0 187 | 7,194,68,28,0,35.9,0.745,41,1 188 | 8,181,68,36,495,30.1,0.615,60,1 189 | 1,128,98,41,58,32,1.321,33,1 190 | 8,109,76,39,114,27.9,0.64,31,1 191 | 5,139,80,35,160,31.6,0.361,25,1 192 | 3,111,62,0,0,22.6,0.142,21,0 193 | 9,123,70,44,94,33.1,0.374,40,0 194 | 7,159,66,0,0,30.4,0.383,36,1 195 | 11,135,0,0,0,52.3,0.578,40,1 196 | 8,85,55,20,0,24.4,0.136,42,0 197 | 5,158,84,41,210,39.4,0.395,29,1 198 | 1,105,58,0,0,24.3,0.187,21,0 199 | 3,107,62,13,48,22.9,0.678,23,1 200 | 4,109,64,44,99,34.8,0.905,26,1 201 | 4,148,60,27,318,30.9,0.15,29,1 202 | 0,113,80,16,0,31,0.874,21,0 203 | 1,138,82,0,0,40.1,0.236,28,0 204 | 0,108,68,20,0,27.3,0.787,32,0 205 | 2,99,70,16,44,20.4,0.235,27,0 206 | 6,103,72,32,190,37.7,0.324,55,0 207 | 5,111,72,28,0,23.9,0.407,27,0 208 | 8,196,76,29,280,37.5,0.605,57,1 209 | 5,162,104,0,0,37.7,0.151,52,1 210 | 1,96,64,27,87,33.2,0.289,21,0 211 | 7,184,84,33,0,35.5,0.355,41,1 212 | 2,81,60,22,0,27.7,0.29,25,0 213 | 0,147,85,54,0,42.8,0.375,24,0 214 | 7,179,95,31,0,34.2,0.164,60,0 215 | 0,140,65,26,130,42.6,0.431,24,1 216 | 9,112,82,32,175,34.2,0.26,36,1 217 | 12,151,70,40,271,41.8,0.742,38,1 218 | 5,109,62,41,129,35.8,0.514,25,1 219 | 6,125,68,30,120,30,0.464,32,0 220 | 5,85,74,22,0,29,1.224,32,1 221 | 5,112,66,0,0,37.8,0.261,41,1 222 | 0,177,60,29,478,34.6,1.072,21,1 223 | 2,158,90,0,0,31.6,0.805,66,1 224 | 7,119,0,0,0,25.2,0.209,37,0 225 | 7,142,60,33,190,28.8,0.687,61,0 226 | 1,100,66,15,56,23.6,0.666,26,0 227 | 1,87,78,27,32,34.6,0.101,22,0 228 | 0,101,76,0,0,35.7,0.198,26,0 229 | 3,162,52,38,0,37.2,0.652,24,1 230 | 4,197,70,39,744,36.7,2.329,31,0 231 | 0,117,80,31,53,45.2,0.089,24,0 232 | 4,142,86,0,0,44,0.645,22,1 233 | 6,134,80,37,370,46.2,0.238,46,1 234 | 1,79,80,25,37,25.4,0.583,22,0 235 | 4,122,68,0,0,35,0.394,29,0 236 | 3,74,68,28,45,29.7,0.293,23,0 237 | 4,171,72,0,0,43.6,0.479,26,1 238 | 7,181,84,21,192,35.9,0.586,51,1 239 | 0,179,90,27,0,44.1,0.686,23,1 240 | 9,164,84,21,0,30.8,0.831,32,1 241 | 0,104,76,0,0,18.4,0.582,27,0 242 | 1,91,64,24,0,29.2,0.192,21,0 243 | 4,91,70,32,88,33.1,0.446,22,0 244 | 3,139,54,0,0,25.6,0.402,22,1 245 | 6,119,50,22,176,27.1,1.318,33,1 246 | 2,146,76,35,194,38.2,0.329,29,0 247 | 9,184,85,15,0,30,1.213,49,1 248 | 10,122,68,0,0,31.2,0.258,41,0 249 | 0,165,90,33,680,52.3,0.427,23,0 250 | 9,124,70,33,402,35.4,0.282,34,0 251 | 1,111,86,19,0,30.1,0.143,23,0 252 | 9,106,52,0,0,31.2,0.38,42,0 253 | 2,129,84,0,0,28,0.284,27,0 254 | 2,90,80,14,55,24.4,0.249,24,0 255 | 0,86,68,32,0,35.8,0.238,25,0 256 | 12,92,62,7,258,27.6,0.926,44,1 257 | 1,113,64,35,0,33.6,0.543,21,1 258 | 3,111,56,39,0,30.1,0.557,30,0 259 | 
2,114,68,22,0,28.7,0.092,25,0 260 | 1,193,50,16,375,25.9,0.655,24,0 261 | 11,155,76,28,150,33.3,1.353,51,1 262 | 3,191,68,15,130,30.9,0.299,34,0 263 | 3,141,0,0,0,30,0.761,27,1 264 | 4,95,70,32,0,32.1,0.612,24,0 265 | 3,142,80,15,0,32.4,0.2,63,0 266 | 4,123,62,0,0,32,0.226,35,1 267 | 5,96,74,18,67,33.6,0.997,43,0 268 | 0,138,0,0,0,36.3,0.933,25,1 269 | 2,128,64,42,0,40,1.101,24,0 270 | 0,102,52,0,0,25.1,0.078,21,0 271 | 2,146,0,0,0,27.5,0.24,28,1 272 | 10,101,86,37,0,45.6,1.136,38,1 273 | 2,108,62,32,56,25.2,0.128,21,0 274 | 3,122,78,0,0,23,0.254,40,0 275 | 1,71,78,50,45,33.2,0.422,21,0 276 | 13,106,70,0,0,34.2,0.251,52,0 277 | 2,100,70,52,57,40.5,0.677,25,0 278 | 7,106,60,24,0,26.5,0.296,29,1 279 | 0,104,64,23,116,27.8,0.454,23,0 280 | 5,114,74,0,0,24.9,0.744,57,0 281 | 2,108,62,10,278,25.3,0.881,22,0 282 | 0,146,70,0,0,37.9,0.334,28,1 283 | 10,129,76,28,122,35.9,0.28,39,0 284 | 7,133,88,15,155,32.4,0.262,37,0 285 | 7,161,86,0,0,30.4,0.165,47,1 286 | 2,108,80,0,0,27,0.259,52,1 287 | 7,136,74,26,135,26,0.647,51,0 288 | 5,155,84,44,545,38.7,0.619,34,0 289 | 1,119,86,39,220,45.6,0.808,29,1 290 | 4,96,56,17,49,20.8,0.34,26,0 291 | 5,108,72,43,75,36.1,0.263,33,0 292 | 0,78,88,29,40,36.9,0.434,21,0 293 | 0,107,62,30,74,36.6,0.757,25,1 294 | 2,128,78,37,182,43.3,1.224,31,1 295 | 1,128,48,45,194,40.5,0.613,24,1 296 | 0,161,50,0,0,21.9,0.254,65,0 297 | 6,151,62,31,120,35.5,0.692,28,0 298 | 2,146,70,38,360,28,0.337,29,1 299 | 0,126,84,29,215,30.7,0.52,24,0 300 | 14,100,78,25,184,36.6,0.412,46,1 301 | 8,112,72,0,0,23.6,0.84,58,0 302 | 0,167,0,0,0,32.3,0.839,30,1 303 | 2,144,58,33,135,31.6,0.422,25,1 304 | 5,77,82,41,42,35.8,0.156,35,0 305 | 5,115,98,0,0,52.9,0.209,28,1 306 | 3,150,76,0,0,21,0.207,37,0 307 | 2,120,76,37,105,39.7,0.215,29,0 308 | 10,161,68,23,132,25.5,0.326,47,1 309 | 0,137,68,14,148,24.8,0.143,21,0 310 | 0,128,68,19,180,30.5,1.391,25,1 311 | 2,124,68,28,205,32.9,0.875,30,1 312 | 6,80,66,30,0,26.2,0.313,41,0 313 | 0,106,70,37,148,39.4,0.605,22,0 314 | 2,155,74,17,96,26.6,0.433,27,1 315 | 3,113,50,10,85,29.5,0.626,25,0 316 | 7,109,80,31,0,35.9,1.127,43,1 317 | 2,112,68,22,94,34.1,0.315,26,0 318 | 3,99,80,11,64,19.3,0.284,30,0 319 | 3,182,74,0,0,30.5,0.345,29,1 320 | 3,115,66,39,140,38.1,0.15,28,0 321 | 6,194,78,0,0,23.5,0.129,59,1 322 | 4,129,60,12,231,27.5,0.527,31,0 323 | 3,112,74,30,0,31.6,0.197,25,1 324 | 0,124,70,20,0,27.4,0.254,36,1 325 | 13,152,90,33,29,26.8,0.731,43,1 326 | 2,112,75,32,0,35.7,0.148,21,0 327 | 1,157,72,21,168,25.6,0.123,24,0 328 | 1,122,64,32,156,35.1,0.692,30,1 329 | 10,179,70,0,0,35.1,0.2,37,0 330 | 2,102,86,36,120,45.5,0.127,23,1 331 | 6,105,70,32,68,30.8,0.122,37,0 332 | 8,118,72,19,0,23.1,1.476,46,0 333 | 2,87,58,16,52,32.7,0.166,25,0 334 | 1,180,0,0,0,43.3,0.282,41,1 335 | 12,106,80,0,0,23.6,0.137,44,0 336 | 1,95,60,18,58,23.9,0.26,22,0 337 | 0,165,76,43,255,47.9,0.259,26,0 338 | 0,117,0,0,0,33.8,0.932,44,0 339 | 5,115,76,0,0,31.2,0.343,44,1 340 | 9,152,78,34,171,34.2,0.893,33,1 341 | 7,178,84,0,0,39.9,0.331,41,1 342 | 1,130,70,13,105,25.9,0.472,22,0 343 | 1,95,74,21,73,25.9,0.673,36,0 344 | 1,0,68,35,0,32,0.389,22,0 345 | 5,122,86,0,0,34.7,0.29,33,0 346 | 8,95,72,0,0,36.8,0.485,57,0 347 | 8,126,88,36,108,38.5,0.349,49,0 348 | 1,139,46,19,83,28.7,0.654,22,0 349 | 3,116,0,0,0,23.5,0.187,23,0 350 | 3,99,62,19,74,21.8,0.279,26,0 351 | 5,0,80,32,0,41,0.346,37,1 352 | 4,92,80,0,0,42.2,0.237,29,0 353 | 4,137,84,0,0,31.2,0.252,30,0 354 | 3,61,82,28,0,34.4,0.243,46,0 355 | 1,90,62,12,43,27.2,0.58,24,0 356 | 3,90,78,0,0,42.7,0.559,21,0 357 | 
9,165,88,0,0,30.4,0.302,49,1 358 | 1,125,50,40,167,33.3,0.962,28,1 359 | 13,129,0,30,0,39.9,0.569,44,1 360 | 12,88,74,40,54,35.3,0.378,48,0 361 | 1,196,76,36,249,36.5,0.875,29,1 362 | 5,189,64,33,325,31.2,0.583,29,1 363 | 5,158,70,0,0,29.8,0.207,63,0 364 | 5,103,108,37,0,39.2,0.305,65,0 365 | 4,146,78,0,0,38.5,0.52,67,1 366 | 4,147,74,25,293,34.9,0.385,30,0 367 | 5,99,54,28,83,34,0.499,30,0 368 | 6,124,72,0,0,27.6,0.368,29,1 369 | 0,101,64,17,0,21,0.252,21,0 370 | 3,81,86,16,66,27.5,0.306,22,0 371 | 1,133,102,28,140,32.8,0.234,45,1 372 | 3,173,82,48,465,38.4,2.137,25,1 373 | 0,118,64,23,89,0,1.731,21,0 374 | 0,84,64,22,66,35.8,0.545,21,0 375 | 2,105,58,40,94,34.9,0.225,25,0 376 | 2,122,52,43,158,36.2,0.816,28,0 377 | 12,140,82,43,325,39.2,0.528,58,1 378 | 0,98,82,15,84,25.2,0.299,22,0 379 | 1,87,60,37,75,37.2,0.509,22,0 380 | 4,156,75,0,0,48.3,0.238,32,1 381 | 0,93,100,39,72,43.4,1.021,35,0 382 | 1,107,72,30,82,30.8,0.821,24,0 383 | 0,105,68,22,0,20,0.236,22,0 384 | 1,109,60,8,182,25.4,0.947,21,0 385 | 1,90,62,18,59,25.1,1.268,25,0 386 | 1,125,70,24,110,24.3,0.221,25,0 387 | 1,119,54,13,50,22.3,0.205,24,0 388 | 5,116,74,29,0,32.3,0.66,35,1 389 | 8,105,100,36,0,43.3,0.239,45,1 390 | 5,144,82,26,285,32,0.452,58,1 391 | 3,100,68,23,81,31.6,0.949,28,0 392 | 1,100,66,29,196,32,0.444,42,0 393 | 5,166,76,0,0,45.7,0.34,27,1 394 | 1,131,64,14,415,23.7,0.389,21,0 395 | 4,116,72,12,87,22.1,0.463,37,0 396 | 4,158,78,0,0,32.9,0.803,31,1 397 | 2,127,58,24,275,27.7,1.6,25,0 398 | 3,96,56,34,115,24.7,0.944,39,0 399 | 0,131,66,40,0,34.3,0.196,22,1 400 | 3,82,70,0,0,21.1,0.389,25,0 401 | 3,193,70,31,0,34.9,0.241,25,1 402 | 4,95,64,0,0,32,0.161,31,1 403 | 6,137,61,0,0,24.2,0.151,55,0 404 | 5,136,84,41,88,35,0.286,35,1 405 | 9,72,78,25,0,31.6,0.28,38,0 406 | 5,168,64,0,0,32.9,0.135,41,1 407 | 2,123,48,32,165,42.1,0.52,26,0 408 | 4,115,72,0,0,28.9,0.376,46,1 409 | 0,101,62,0,0,21.9,0.336,25,0 410 | 8,197,74,0,0,25.9,1.191,39,1 411 | 1,172,68,49,579,42.4,0.702,28,1 412 | 6,102,90,39,0,35.7,0.674,28,0 413 | 1,112,72,30,176,34.4,0.528,25,0 414 | 1,143,84,23,310,42.4,1.076,22,0 415 | 1,143,74,22,61,26.2,0.256,21,0 416 | 0,138,60,35,167,34.6,0.534,21,1 417 | 3,173,84,33,474,35.7,0.258,22,1 418 | 1,97,68,21,0,27.2,1.095,22,0 419 | 4,144,82,32,0,38.5,0.554,37,1 420 | 1,83,68,0,0,18.2,0.624,27,0 421 | 3,129,64,29,115,26.4,0.219,28,1 422 | 1,119,88,41,170,45.3,0.507,26,0 423 | 2,94,68,18,76,26,0.561,21,0 424 | 0,102,64,46,78,40.6,0.496,21,0 425 | 2,115,64,22,0,30.8,0.421,21,0 426 | 8,151,78,32,210,42.9,0.516,36,1 427 | 4,184,78,39,277,37,0.264,31,1 428 | 0,94,0,0,0,0,0.256,25,0 429 | 1,181,64,30,180,34.1,0.328,38,1 430 | 0,135,94,46,145,40.6,0.284,26,0 431 | 1,95,82,25,180,35,0.233,43,1 432 | 2,99,0,0,0,22.2,0.108,23,0 433 | 3,89,74,16,85,30.4,0.551,38,0 434 | 1,80,74,11,60,30,0.527,22,0 435 | 2,139,75,0,0,25.6,0.167,29,0 436 | 1,90,68,8,0,24.5,1.138,36,0 437 | 0,141,0,0,0,42.4,0.205,29,1 438 | 12,140,85,33,0,37.4,0.244,41,0 439 | 5,147,75,0,0,29.9,0.434,28,0 440 | 1,97,70,15,0,18.2,0.147,21,0 441 | 6,107,88,0,0,36.8,0.727,31,0 442 | 0,189,104,25,0,34.3,0.435,41,1 443 | 2,83,66,23,50,32.2,0.497,22,0 444 | 4,117,64,27,120,33.2,0.23,24,0 445 | 8,108,70,0,0,30.5,0.955,33,1 446 | 4,117,62,12,0,29.7,0.38,30,1 447 | 0,180,78,63,14,59.4,2.42,25,1 448 | 1,100,72,12,70,25.3,0.658,28,0 449 | 0,95,80,45,92,36.5,0.33,26,0 450 | 0,104,64,37,64,33.6,0.51,22,1 451 | 0,120,74,18,63,30.5,0.285,26,0 452 | 1,82,64,13,95,21.2,0.415,23,0 453 | 2,134,70,0,0,28.9,0.542,23,1 454 | 0,91,68,32,210,39.9,0.381,25,0 455 | 
2,119,0,0,0,19.6,0.832,72,0 456 | 2,100,54,28,105,37.8,0.498,24,0 457 | 14,175,62,30,0,33.6,0.212,38,1 458 | 1,135,54,0,0,26.7,0.687,62,0 459 | 5,86,68,28,71,30.2,0.364,24,0 460 | 10,148,84,48,237,37.6,1.001,51,1 461 | 9,134,74,33,60,25.9,0.46,81,0 462 | 9,120,72,22,56,20.8,0.733,48,0 463 | 1,71,62,0,0,21.8,0.416,26,0 464 | 8,74,70,40,49,35.3,0.705,39,0 465 | 5,88,78,30,0,27.6,0.258,37,0 466 | 10,115,98,0,0,24,1.022,34,0 467 | 0,124,56,13,105,21.8,0.452,21,0 468 | 0,74,52,10,36,27.8,0.269,22,0 469 | 0,97,64,36,100,36.8,0.6,25,0 470 | 8,120,0,0,0,30,0.183,38,1 471 | 6,154,78,41,140,46.1,0.571,27,0 472 | 1,144,82,40,0,41.3,0.607,28,0 473 | 0,137,70,38,0,33.2,0.17,22,0 474 | 0,119,66,27,0,38.8,0.259,22,0 475 | 7,136,90,0,0,29.9,0.21,50,0 476 | 4,114,64,0,0,28.9,0.126,24,0 477 | 0,137,84,27,0,27.3,0.231,59,0 478 | 2,105,80,45,191,33.7,0.711,29,1 479 | 7,114,76,17,110,23.8,0.466,31,0 480 | 8,126,74,38,75,25.9,0.162,39,0 481 | 4,132,86,31,0,28,0.419,63,0 482 | 3,158,70,30,328,35.5,0.344,35,1 483 | 0,123,88,37,0,35.2,0.197,29,0 484 | 4,85,58,22,49,27.8,0.306,28,0 485 | 0,84,82,31,125,38.2,0.233,23,0 486 | 0,145,0,0,0,44.2,0.63,31,1 487 | 0,135,68,42,250,42.3,0.365,24,1 488 | 1,139,62,41,480,40.7,0.536,21,0 489 | 0,173,78,32,265,46.5,1.159,58,0 490 | 4,99,72,17,0,25.6,0.294,28,0 491 | 8,194,80,0,0,26.1,0.551,67,0 492 | 2,83,65,28,66,36.8,0.629,24,0 493 | 2,89,90,30,0,33.5,0.292,42,0 494 | 4,99,68,38,0,32.8,0.145,33,0 495 | 4,125,70,18,122,28.9,1.144,45,1 496 | 3,80,0,0,0,0,0.174,22,0 497 | 6,166,74,0,0,26.6,0.304,66,0 498 | 5,110,68,0,0,26,0.292,30,0 499 | 2,81,72,15,76,30.1,0.547,25,0 500 | 7,195,70,33,145,25.1,0.163,55,1 501 | 6,154,74,32,193,29.3,0.839,39,0 502 | 2,117,90,19,71,25.2,0.313,21,0 503 | 3,84,72,32,0,37.2,0.267,28,0 504 | 6,0,68,41,0,39,0.727,41,1 505 | 7,94,64,25,79,33.3,0.738,41,0 506 | 3,96,78,39,0,37.3,0.238,40,0 507 | 10,75,82,0,0,33.3,0.263,38,0 508 | 0,180,90,26,90,36.5,0.314,35,1 509 | 1,130,60,23,170,28.6,0.692,21,0 510 | 2,84,50,23,76,30.4,0.968,21,0 511 | 8,120,78,0,0,25,0.409,64,0 512 | 12,84,72,31,0,29.7,0.297,46,1 513 | 0,139,62,17,210,22.1,0.207,21,0 514 | 9,91,68,0,0,24.2,0.2,58,0 515 | 2,91,62,0,0,27.3,0.525,22,0 516 | 3,99,54,19,86,25.6,0.154,24,0 517 | 3,163,70,18,105,31.6,0.268,28,1 518 | 9,145,88,34,165,30.3,0.771,53,1 519 | 7,125,86,0,0,37.6,0.304,51,0 520 | 13,76,60,0,0,32.8,0.18,41,0 521 | 6,129,90,7,326,19.6,0.582,60,0 522 | 2,68,70,32,66,25,0.187,25,0 523 | 3,124,80,33,130,33.2,0.305,26,0 524 | 6,114,0,0,0,0,0.189,26,0 525 | 9,130,70,0,0,34.2,0.652,45,1 526 | 3,125,58,0,0,31.6,0.151,24,0 527 | 3,87,60,18,0,21.8,0.444,21,0 528 | 1,97,64,19,82,18.2,0.299,21,0 529 | 3,116,74,15,105,26.3,0.107,24,0 530 | 0,117,66,31,188,30.8,0.493,22,0 531 | 0,111,65,0,0,24.6,0.66,31,0 532 | 2,122,60,18,106,29.8,0.717,22,0 533 | 0,107,76,0,0,45.3,0.686,24,0 534 | 1,86,66,52,65,41.3,0.917,29,0 535 | 6,91,0,0,0,29.8,0.501,31,0 536 | 1,77,56,30,56,33.3,1.251,24,0 537 | 4,132,0,0,0,32.9,0.302,23,1 538 | 0,105,90,0,0,29.6,0.197,46,0 539 | 0,57,60,0,0,21.7,0.735,67,0 540 | 0,127,80,37,210,36.3,0.804,23,0 541 | 3,129,92,49,155,36.4,0.968,32,1 542 | 8,100,74,40,215,39.4,0.661,43,1 543 | 3,128,72,25,190,32.4,0.549,27,1 544 | 10,90,85,32,0,34.9,0.825,56,1 545 | 4,84,90,23,56,39.5,0.159,25,0 546 | 1,88,78,29,76,32,0.365,29,0 547 | 8,186,90,35,225,34.5,0.423,37,1 548 | 5,187,76,27,207,43.6,1.034,53,1 549 | 4,131,68,21,166,33.1,0.16,28,0 550 | 1,164,82,43,67,32.8,0.341,50,0 551 | 4,189,110,31,0,28.5,0.68,37,0 552 | 1,116,70,28,0,27.4,0.204,21,0 553 | 3,84,68,30,106,31.9,0.591,25,0 554 | 
6,114,88,0,0,27.8,0.247,66,0 555 | 1,88,62,24,44,29.9,0.422,23,0 556 | 1,84,64,23,115,36.9,0.471,28,0 557 | 7,124,70,33,215,25.5,0.161,37,0 558 | 1,97,70,40,0,38.1,0.218,30,0 559 | 8,110,76,0,0,27.8,0.237,58,0 560 | 11,103,68,40,0,46.2,0.126,42,0 561 | 11,85,74,0,0,30.1,0.3,35,0 562 | 6,125,76,0,0,33.8,0.121,54,1 563 | 0,198,66,32,274,41.3,0.502,28,1 564 | 1,87,68,34,77,37.6,0.401,24,0 565 | 6,99,60,19,54,26.9,0.497,32,0 566 | 0,91,80,0,0,32.4,0.601,27,0 567 | 2,95,54,14,88,26.1,0.748,22,0 568 | 1,99,72,30,18,38.6,0.412,21,0 569 | 6,92,62,32,126,32,0.085,46,0 570 | 4,154,72,29,126,31.3,0.338,37,0 571 | 0,121,66,30,165,34.3,0.203,33,1 572 | 3,78,70,0,0,32.5,0.27,39,0 573 | 2,130,96,0,0,22.6,0.268,21,0 574 | 3,111,58,31,44,29.5,0.43,22,0 575 | 2,98,60,17,120,34.7,0.198,22,0 576 | 1,143,86,30,330,30.1,0.892,23,0 577 | 1,119,44,47,63,35.5,0.28,25,0 578 | 6,108,44,20,130,24,0.813,35,0 579 | 2,118,80,0,0,42.9,0.693,21,1 580 | 10,133,68,0,0,27,0.245,36,0 581 | 2,197,70,99,0,34.7,0.575,62,1 582 | 0,151,90,46,0,42.1,0.371,21,1 583 | 6,109,60,27,0,25,0.206,27,0 584 | 12,121,78,17,0,26.5,0.259,62,0 585 | 8,100,76,0,0,38.7,0.19,42,0 586 | 8,124,76,24,600,28.7,0.687,52,1 587 | 1,93,56,11,0,22.5,0.417,22,0 588 | 8,143,66,0,0,34.9,0.129,41,1 589 | 6,103,66,0,0,24.3,0.249,29,0 590 | 3,176,86,27,156,33.3,1.154,52,1 591 | 0,73,0,0,0,21.1,0.342,25,0 592 | 11,111,84,40,0,46.8,0.925,45,1 593 | 2,112,78,50,140,39.4,0.175,24,0 594 | 3,132,80,0,0,34.4,0.402,44,1 595 | 2,82,52,22,115,28.5,1.699,25,0 596 | 6,123,72,45,230,33.6,0.733,34,0 597 | 0,188,82,14,185,32,0.682,22,1 598 | 0,67,76,0,0,45.3,0.194,46,0 599 | 1,89,24,19,25,27.8,0.559,21,0 600 | 1,173,74,0,0,36.8,0.088,38,1 601 | 1,109,38,18,120,23.1,0.407,26,0 602 | 1,108,88,19,0,27.1,0.4,24,0 603 | 6,96,0,0,0,23.7,0.19,28,0 604 | 1,124,74,36,0,27.8,0.1,30,0 605 | 7,150,78,29,126,35.2,0.692,54,1 606 | 4,183,0,0,0,28.4,0.212,36,1 607 | 1,124,60,32,0,35.8,0.514,21,0 608 | 1,181,78,42,293,40,1.258,22,1 609 | 1,92,62,25,41,19.5,0.482,25,0 610 | 0,152,82,39,272,41.5,0.27,27,0 611 | 1,111,62,13,182,24,0.138,23,0 612 | 3,106,54,21,158,30.9,0.292,24,0 613 | 3,174,58,22,194,32.9,0.593,36,1 614 | 7,168,88,42,321,38.2,0.787,40,1 615 | 6,105,80,28,0,32.5,0.878,26,0 616 | 11,138,74,26,144,36.1,0.557,50,1 617 | 3,106,72,0,0,25.8,0.207,27,0 618 | 6,117,96,0,0,28.7,0.157,30,0 619 | 2,68,62,13,15,20.1,0.257,23,0 620 | 9,112,82,24,0,28.2,1.282,50,1 621 | 0,119,0,0,0,32.4,0.141,24,1 622 | 2,112,86,42,160,38.4,0.246,28,0 623 | 2,92,76,20,0,24.2,1.698,28,0 624 | 6,183,94,0,0,40.8,1.461,45,0 625 | 0,94,70,27,115,43.5,0.347,21,0 626 | 2,108,64,0,0,30.8,0.158,21,0 627 | 4,90,88,47,54,37.7,0.362,29,0 628 | 0,125,68,0,0,24.7,0.206,21,0 629 | 0,132,78,0,0,32.4,0.393,21,0 630 | 5,128,80,0,0,34.6,0.144,45,0 631 | 4,94,65,22,0,24.7,0.148,21,0 632 | 7,114,64,0,0,27.4,0.732,34,1 633 | 0,102,78,40,90,34.5,0.238,24,0 634 | 2,111,60,0,0,26.2,0.343,23,0 635 | 1,128,82,17,183,27.5,0.115,22,0 636 | 10,92,62,0,0,25.9,0.167,31,0 637 | 13,104,72,0,0,31.2,0.465,38,1 638 | 5,104,74,0,0,28.8,0.153,48,0 639 | 2,94,76,18,66,31.6,0.649,23,0 640 | 7,97,76,32,91,40.9,0.871,32,1 641 | 1,100,74,12,46,19.5,0.149,28,0 642 | 0,102,86,17,105,29.3,0.695,27,0 643 | 4,128,70,0,0,34.3,0.303,24,0 644 | 6,147,80,0,0,29.5,0.178,50,1 645 | 4,90,0,0,0,28,0.61,31,0 646 | 3,103,72,30,152,27.6,0.73,27,0 647 | 2,157,74,35,440,39.4,0.134,30,0 648 | 1,167,74,17,144,23.4,0.447,33,1 649 | 0,179,50,36,159,37.8,0.455,22,1 650 | 11,136,84,35,130,28.3,0.26,42,1 651 | 0,107,60,25,0,26.4,0.133,23,0 652 | 1,91,54,25,100,25.2,0.234,23,0 653 
| 1,117,60,23,106,33.8,0.466,27,0 654 | 5,123,74,40,77,34.1,0.269,28,0 655 | 2,120,54,0,0,26.8,0.455,27,0 656 | 1,106,70,28,135,34.2,0.142,22,0 657 | 2,155,52,27,540,38.7,0.24,25,1 658 | 2,101,58,35,90,21.8,0.155,22,0 659 | 1,120,80,48,200,38.9,1.162,41,0 660 | 11,127,106,0,0,39,0.19,51,0 661 | 3,80,82,31,70,34.2,1.292,27,1 662 | 10,162,84,0,0,27.7,0.182,54,0 663 | 1,199,76,43,0,42.9,1.394,22,1 664 | 8,167,106,46,231,37.6,0.165,43,1 665 | 9,145,80,46,130,37.9,0.637,40,1 666 | 6,115,60,39,0,33.7,0.245,40,1 667 | 1,112,80,45,132,34.8,0.217,24,0 668 | 4,145,82,18,0,32.5,0.235,70,1 669 | 10,111,70,27,0,27.5,0.141,40,1 670 | 6,98,58,33,190,34,0.43,43,0 671 | 9,154,78,30,100,30.9,0.164,45,0 672 | 6,165,68,26,168,33.6,0.631,49,0 673 | 1,99,58,10,0,25.4,0.551,21,0 674 | 10,68,106,23,49,35.5,0.285,47,0 675 | 3,123,100,35,240,57.3,0.88,22,0 676 | 8,91,82,0,0,35.6,0.587,68,0 677 | 6,195,70,0,0,30.9,0.328,31,1 678 | 9,156,86,0,0,24.8,0.23,53,1 679 | 0,93,60,0,0,35.3,0.263,25,0 680 | 3,121,52,0,0,36,0.127,25,1 681 | 2,101,58,17,265,24.2,0.614,23,0 682 | 2,56,56,28,45,24.2,0.332,22,0 683 | 0,162,76,36,0,49.6,0.364,26,1 684 | 0,95,64,39,105,44.6,0.366,22,0 685 | 4,125,80,0,0,32.3,0.536,27,1 686 | 5,136,82,0,0,0,0.64,69,0 687 | 2,129,74,26,205,33.2,0.591,25,0 688 | 3,130,64,0,0,23.1,0.314,22,0 689 | 1,107,50,19,0,28.3,0.181,29,0 690 | 1,140,74,26,180,24.1,0.828,23,0 691 | 1,144,82,46,180,46.1,0.335,46,1 692 | 8,107,80,0,0,24.6,0.856,34,0 693 | 13,158,114,0,0,42.3,0.257,44,1 694 | 2,121,70,32,95,39.1,0.886,23,0 695 | 7,129,68,49,125,38.5,0.439,43,1 696 | 2,90,60,0,0,23.5,0.191,25,0 697 | 7,142,90,24,480,30.4,0.128,43,1 698 | 3,169,74,19,125,29.9,0.268,31,1 699 | 0,99,0,0,0,25,0.253,22,0 700 | 4,127,88,11,155,34.5,0.598,28,0 701 | 4,118,70,0,0,44.5,0.904,26,0 702 | 2,122,76,27,200,35.9,0.483,26,0 703 | 6,125,78,31,0,27.6,0.565,49,1 704 | 1,168,88,29,0,35,0.905,52,1 705 | 2,129,0,0,0,38.5,0.304,41,0 706 | 4,110,76,20,100,28.4,0.118,27,0 707 | 6,80,80,36,0,39.8,0.177,28,0 708 | 10,115,0,0,0,0,0.261,30,1 709 | 2,127,46,21,335,34.4,0.176,22,0 710 | 9,164,78,0,0,32.8,0.148,45,1 711 | 2,93,64,32,160,38,0.674,23,1 712 | 3,158,64,13,387,31.2,0.295,24,0 713 | 5,126,78,27,22,29.6,0.439,40,0 714 | 10,129,62,36,0,41.2,0.441,38,1 715 | 0,134,58,20,291,26.4,0.352,21,0 716 | 3,102,74,0,0,29.5,0.121,32,0 717 | 7,187,50,33,392,33.9,0.826,34,1 718 | 3,173,78,39,185,33.8,0.97,31,1 719 | 10,94,72,18,0,23.1,0.595,56,0 720 | 1,108,60,46,178,35.5,0.415,24,0 721 | 5,97,76,27,0,35.6,0.378,52,1 722 | 4,83,86,19,0,29.3,0.317,34,0 723 | 1,114,66,36,200,38.1,0.289,21,0 724 | 1,149,68,29,127,29.3,0.349,42,1 725 | 5,117,86,30,105,39.1,0.251,42,0 726 | 1,111,94,0,0,32.8,0.265,45,0 727 | 4,112,78,40,0,39.4,0.236,38,0 728 | 1,116,78,29,180,36.1,0.496,25,0 729 | 0,141,84,26,0,32.4,0.433,22,0 730 | 2,175,88,0,0,22.9,0.326,22,0 731 | 2,92,52,0,0,30.1,0.141,22,0 732 | 3,130,78,23,79,28.4,0.323,34,1 733 | 8,120,86,0,0,28.4,0.259,22,1 734 | 2,174,88,37,120,44.5,0.646,24,1 735 | 2,106,56,27,165,29,0.426,22,0 736 | 2,105,75,0,0,23.3,0.56,53,0 737 | 4,95,60,32,0,35.4,0.284,28,0 738 | 0,126,86,27,120,27.4,0.515,21,0 739 | 8,65,72,23,0,32,0.6,42,0 740 | 2,99,60,17,160,36.6,0.453,21,0 741 | 1,102,74,0,0,39.5,0.293,42,1 742 | 11,120,80,37,150,42.3,0.785,48,1 743 | 3,102,44,20,94,30.8,0.4,26,0 744 | 1,109,58,18,116,28.5,0.219,22,0 745 | 9,140,94,0,0,32.7,0.734,45,1 746 | 13,153,88,37,140,40.6,1.174,39,0 747 | 12,100,84,33,105,30,0.488,46,0 748 | 1,147,94,41,0,49.3,0.358,27,1 749 | 1,81,74,41,57,46.3,1.096,32,0 750 | 3,187,70,22,200,36.4,0.408,36,1 751 | 
6,162,62,0,0,24.3,0.178,50,1 752 | 4,136,70,0,0,31.2,1.182,22,1 753 | 1,121,78,39,74,39,0.261,28,0 754 | 3,108,62,24,0,26,0.223,25,0 755 | 0,181,88,44,510,43.3,0.222,26,1 756 | 8,154,78,32,0,32.4,0.443,45,1 757 | 1,128,88,39,110,36.5,1.057,37,1 758 | 7,137,90,41,0,32,0.391,39,0 759 | 0,123,72,0,0,36.3,0.258,52,1 760 | 1,106,76,0,0,37.5,0.197,26,0 761 | 6,190,92,0,0,35.5,0.278,66,1 762 | 2,88,58,26,16,28.4,0.766,22,0 763 | 9,170,74,31,0,44,0.403,43,1 764 | 9,89,62,0,0,22.5,0.142,33,0 765 | 10,101,76,48,180,32.9,0.171,63,0 766 | 2,122,70,27,0,36.8,0.34,27,0 767 | 5,121,72,23,112,26.2,0.245,30,0 768 | 1,126,60,0,0,30.1,0.349,47,1 769 | 1,93,70,31,0,30.4,0.315,23,0 -------------------------------------------------------------------------------- /data/iris.csv: -------------------------------------------------------------------------------- 1 | sepal_length,sepal_width,petal_length,petal_width,species 2 | 5.1,3.5,1.4,0.2,setosa 3 | 4.9,3.0,1.4,0.2,setosa 4 | 4.7,3.2,1.3,0.2,setosa 5 | 4.6,3.1,1.5,0.2,setosa 6 | 5.0,3.6,1.4,0.2,setosa 7 | 5.4,3.9,1.7,0.4,setosa 8 | 4.6,3.4,1.4,0.3,setosa 9 | 5.0,3.4,1.5,0.2,setosa 10 | 4.4,2.9,1.4,0.2,setosa 11 | 4.9,3.1,1.5,0.1,setosa 12 | 5.4,3.7,1.5,0.2,setosa 13 | 4.8,3.4,1.6,0.2,setosa 14 | 4.8,3.0,1.4,0.1,setosa 15 | 4.3,3.0,1.1,0.1,setosa 16 | 5.8,4.0,1.2,0.2,setosa 17 | 5.7,4.4,1.5,0.4,setosa 18 | 5.4,3.9,1.3,0.4,setosa 19 | 5.1,3.5,1.4,0.3,setosa 20 | 5.7,3.8,1.7,0.3,setosa 21 | 5.1,3.8,1.5,0.3,setosa 22 | 5.4,3.4,1.7,0.2,setosa 23 | 5.1,3.7,1.5,0.4,setosa 24 | 4.6,3.6,1.0,0.2,setosa 25 | 5.1,3.3,1.7,0.5,setosa 26 | 4.8,3.4,1.9,0.2,setosa 27 | 5.0,3.0,1.6,0.2,setosa 28 | 5.0,3.4,1.6,0.4,setosa 29 | 5.2,3.5,1.5,0.2,setosa 30 | 5.2,3.4,1.4,0.2,setosa 31 | 4.7,3.2,1.6,0.2,setosa 32 | 4.8,3.1,1.6,0.2,setosa 33 | 5.4,3.4,1.5,0.4,setosa 34 | 5.2,4.1,1.5,0.1,setosa 35 | 5.5,4.2,1.4,0.2,setosa 36 | 4.9,3.1,1.5,0.1,setosa 37 | 5.0,3.2,1.2,0.2,setosa 38 | 5.5,3.5,1.3,0.2,setosa 39 | 4.9,3.1,1.5,0.1,setosa 40 | 4.4,3.0,1.3,0.2,setosa 41 | 5.1,3.4,1.5,0.2,setosa 42 | 5.0,3.5,1.3,0.3,setosa 43 | 4.5,2.3,1.3,0.3,setosa 44 | 4.4,3.2,1.3,0.2,setosa 45 | 5.0,3.5,1.6,0.6,setosa 46 | 5.1,3.8,1.9,0.4,setosa 47 | 4.8,3.0,1.4,0.3,setosa 48 | 5.1,3.8,1.6,0.2,setosa 49 | 4.6,3.2,1.4,0.2,setosa 50 | 5.3,3.7,1.5,0.2,setosa 51 | 5.0,3.3,1.4,0.2,setosa 52 | 7.0,3.2,4.7,1.4,versicolor 53 | 6.4,3.2,4.5,1.5,versicolor 54 | 6.9,3.1,4.9,1.5,versicolor 55 | 5.5,2.3,4.0,1.3,versicolor 56 | 6.5,2.8,4.6,1.5,versicolor 57 | 5.7,2.8,4.5,1.3,versicolor 58 | 6.3,3.3,4.7,1.6,versicolor 59 | 4.9,2.4,3.3,1.0,versicolor 60 | 6.6,2.9,4.6,1.3,versicolor 61 | 5.2,2.7,3.9,1.4,versicolor 62 | 5.0,2.0,3.5,1.0,versicolor 63 | 5.9,3.0,4.2,1.5,versicolor 64 | 6.0,2.2,4.0,1.0,versicolor 65 | 6.1,2.9,4.7,1.4,versicolor 66 | 5.6,2.9,3.6,1.3,versicolor 67 | 6.7,3.1,4.4,1.4,versicolor 68 | 5.6,3.0,4.5,1.5,versicolor 69 | 5.8,2.7,4.1,1.0,versicolor 70 | 6.2,2.2,4.5,1.5,versicolor 71 | 5.6,2.5,3.9,1.1,versicolor 72 | 5.9,3.2,4.8,1.8,versicolor 73 | 6.1,2.8,4.0,1.3,versicolor 74 | 6.3,2.5,4.9,1.5,versicolor 75 | 6.1,2.8,4.7,1.2,versicolor 76 | 6.4,2.9,4.3,1.3,versicolor 77 | 6.6,3.0,4.4,1.4,versicolor 78 | 6.8,2.8,4.8,1.4,versicolor 79 | 6.7,3.0,5.0,1.7,versicolor 80 | 6.0,2.9,4.5,1.5,versicolor 81 | 5.7,2.6,3.5,1.0,versicolor 82 | 5.5,2.4,3.8,1.1,versicolor 83 | 5.5,2.4,3.7,1.0,versicolor 84 | 5.8,2.7,3.9,1.2,versicolor 85 | 6.0,2.7,5.1,1.6,versicolor 86 | 5.4,3.0,4.5,1.5,versicolor 87 | 6.0,3.4,4.5,1.6,versicolor 88 | 6.7,3.1,4.7,1.5,versicolor 89 | 6.3,2.3,4.4,1.3,versicolor 90 | 5.6,3.0,4.1,1.3,versicolor 91 | 
5.5,2.5,4.0,1.3,versicolor 92 | 5.5,2.6,4.4,1.2,versicolor 93 | 6.1,3.0,4.6,1.4,versicolor 94 | 5.8,2.6,4.0,1.2,versicolor 95 | 5.0,2.3,3.3,1.0,versicolor 96 | 5.6,2.7,4.2,1.3,versicolor 97 | 5.7,3.0,4.2,1.2,versicolor 98 | 5.7,2.9,4.2,1.3,versicolor 99 | 6.2,2.9,4.3,1.3,versicolor 100 | 5.1,2.5,3.0,1.1,versicolor 101 | 5.7,2.8,4.1,1.3,versicolor 102 | 6.3,3.3,6.0,2.5,virginica 103 | 5.8,2.7,5.1,1.9,virginica 104 | 7.1,3.0,5.9,2.1,virginica 105 | 6.3,2.9,5.6,1.8,virginica 106 | 6.5,3.0,5.8,2.2,virginica 107 | 7.6,3.0,6.6,2.1,virginica 108 | 4.9,2.5,4.5,1.7,virginica 109 | 7.3,2.9,6.3,1.8,virginica 110 | 6.7,2.5,5.8,1.8,virginica 111 | 7.2,3.6,6.1,2.5,virginica 112 | 6.5,3.2,5.1,2.0,virginica 113 | 6.4,2.7,5.3,1.9,virginica 114 | 6.8,3.0,5.5,2.1,virginica 115 | 5.7,2.5,5.0,2.0,virginica 116 | 5.8,2.8,5.1,2.4,virginica 117 | 6.4,3.2,5.3,2.3,virginica 118 | 6.5,3.0,5.5,1.8,virginica 119 | 7.7,3.8,6.7,2.2,virginica 120 | 7.7,2.6,6.9,2.3,virginica 121 | 6.0,2.2,5.0,1.5,virginica 122 | 6.9,3.2,5.7,2.3,virginica 123 | 5.6,2.8,4.9,2.0,virginica 124 | 7.7,2.8,6.7,2.0,virginica 125 | 6.3,2.7,4.9,1.8,virginica 126 | 6.7,3.3,5.7,2.1,virginica 127 | 7.2,3.2,6.0,1.8,virginica 128 | 6.2,2.8,4.8,1.8,virginica 129 | 6.1,3.0,4.9,1.8,virginica 130 | 6.4,2.8,5.6,2.1,virginica 131 | 7.2,3.0,5.8,1.6,virginica 132 | 7.4,2.8,6.1,1.9,virginica 133 | 7.9,3.8,6.4,2.0,virginica 134 | 6.4,2.8,5.6,2.2,virginica 135 | 6.3,2.8,5.1,1.5,virginica 136 | 6.1,2.6,5.6,1.4,virginica 137 | 7.7,3.0,6.1,2.3,virginica 138 | 6.3,3.4,5.6,2.4,virginica 139 | 6.4,3.1,5.5,1.8,virginica 140 | 6.0,3.0,4.8,1.8,virginica 141 | 6.9,3.1,5.4,2.1,virginica 142 | 6.7,3.1,5.6,2.4,virginica 143 | 6.9,3.1,5.1,2.3,virginica 144 | 5.8,2.7,5.1,1.9,virginica 145 | 6.8,3.2,5.9,2.3,virginica 146 | 6.7,3.3,5.7,2.5,virginica 147 | 6.7,3.0,5.2,2.3,virginica 148 | 6.3,2.5,5.0,1.9,virginica 149 | 6.5,3.0,5.2,2.0,virginica 150 | 6.2,3.4,5.4,2.3,virginica 151 | 5.9,3.0,5.1,1.8,virginica 152 | -------------------------------------------------------------------------------- /lecture_01.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "1.4.0\n" 13 | ] 14 | } 15 | ], 16 | "source": [ 17 | "import torch\n", 18 | "print(torch.__version__)" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 2, 24 | "metadata": {}, 25 | "outputs": [ 26 | { 27 | "data": { 28 | "text/plain": [ 29 | "1" 30 | ] 31 | }, 32 | "execution_count": 2, 33 | "metadata": {}, 34 | "output_type": "execute_result" 35 | } 36 | ], 37 | "source": [ 38 | "torch.cuda.device_count()" 39 | ] 40 | } 41 | ], 42 | "metadata": { 43 | "kernelspec": { 44 | "display_name": "Python 3", 45 | "language": "python", 46 | "name": "python3" 47 | }, 48 | "language_info": { 49 | "codemirror_mode": { 50 | "name": "ipython", 51 | "version": 3 52 | }, 53 | "file_extension": ".py", 54 | "mimetype": "text/x-python", 55 | "name": "python", 56 | "nbconvert_exporter": "python", 57 | "pygments_lexer": "ipython3", 58 | "version": "3.7.6" 59 | } 60 | }, 61 | "nbformat": 4, 62 | "nbformat_minor": 4 63 | } 64 | -------------------------------------------------------------------------------- /lecture_02.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## 
Lecture: 02 \n", 8 | "\n", 9 | "Linear model: $y=w*x$\n", 10 | "\n", 11 | "* In this lecture we create a linear model for the given `x_data` and `y_data`. \n", 12 | "* We then plot each value of `w` against the mean squared error (MSE) between the predicted and actual values. " 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 12, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "import torch\n", 22 | "import numpy as np" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 23, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "w_list=[]\n", 32 | "mse_list=[]" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 13, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "x_data = [1.0, 2.0, 3.0]\n", 42 | "y_data = [2.0, 4.0, 6.0]" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 19, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "w = 1 # Initial value for the weight (overwritten by the sweep below)\n", 52 | "def forward(x):\n", 53 | " return x*w" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 20, 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "def loss(x, y):\n", 63 | " y_pred = forward(x)\n", 64 | " return (y_pred-y)*(y_pred-y)" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 27, 70 | "metadata": { 71 | "collapsed": true 72 | }, 73 | "outputs": [ 74 | { 75 | "name": "stdout", 76 | "output_type": "stream", 77 | "text": [ 78 | "w= 0.0\n", 79 | "\t 1.0 2.0 0.0 4.0\n", 80 | "\t 2.0 4.0 0.0 16.0\n", 81 | "\t 3.0 6.0 0.0 36.0\n", 82 | "MSE= 18.666666666666668\n", 83 | "w= 0.1\n", 84 | "\t 1.0 2.0 0.1 3.61\n", 85 | "\t 2.0 4.0 0.2 14.44\n", 86 | "\t 3.0 6.0 0.30000000000000004 32.49\n", 87 | "MSE= 16.846666666666668\n", 88 | "w= 0.2\n", 89 | "\t 1.0 2.0 0.2 3.24\n", 90 | "\t 2.0 4.0 0.4 12.96\n", 91 | "\t 3.0 6.0 0.6000000000000001 29.160000000000004\n", 92 | "MSE= 15.120000000000003\n", 93 | "w= 0.30000000000000004\n", 94 | "\t 1.0 2.0 0.30000000000000004 2.8899999999999997\n", 95 | "\t 2.0 4.0 0.6000000000000001 11.559999999999999\n", 96 | "\t 3.0 6.0 0.9000000000000001 26.009999999999998\n", 97 | "MSE= 13.486666666666665\n", 98 | "w= 0.4\n", 99 | "\t 1.0 2.0 0.4 2.5600000000000005\n", 100 | "\t 2.0 4.0 0.8 10.240000000000002\n", 101 | "\t 3.0 6.0 1.2000000000000002 23.04\n", 102 | "MSE= 11.946666666666667\n", 103 | "w= 0.5\n", 104 | "\t 1.0 2.0 0.5 2.25\n", 105 | "\t 2.0 4.0 1.0 9.0\n", 106 | "\t 3.0 6.0 1.5 20.25\n", 107 | "MSE= 10.5\n", 108 | "w= 0.6000000000000001\n", 109 | "\t 1.0 2.0 0.6000000000000001 1.9599999999999997\n", 110 | "\t 2.0 4.0 1.2000000000000002 7.839999999999999\n", 111 | "\t 3.0 6.0 1.8000000000000003 17.639999999999993\n", 112 | "MSE= 9.146666666666663\n", 113 | "w= 0.7000000000000001\n", 114 | "\t 1.0 2.0 0.7000000000000001 1.6899999999999995\n", 115 | "\t 2.0 4.0 1.4000000000000001 6.759999999999998\n", 116 | "\t 3.0 6.0 2.1 15.209999999999999\n", 117 | "MSE= 7.886666666666666\n", 118 | "w= 0.8\n", 119 | "\t 1.0 2.0 0.8 1.44\n", 120 | "\t 2.0 4.0 1.6 5.76\n", 121 | "\t 3.0 6.0 2.4000000000000004 12.959999999999997\n", 122 | "MSE= 6.719999999999999\n", 123 | "w= 0.9\n", 124 | "\t 1.0 2.0 0.9 1.2100000000000002\n", 125 | "\t 2.0 4.0 1.8 4.840000000000001\n", 126 | "\t 3.0 6.0 2.7 10.889999999999999\n", 127 | "MSE= 5.646666666666666\n", 128 | "w= 1.0\n", 129 | "\t 1.0 2.0 1.0 1.0\n", 130 | "\t 2.0 4.0 2.0 4.0\n", 131 | "\t 3.0 6.0 3.0 9.0\n", 132 | "MSE= 4.666666666666667\n", 133 | "w= 1.1\n", 134 | "\t 1.0 2.0 1.1
0.8099999999999998\n", 135 | "\t 2.0 4.0 2.2 3.2399999999999993\n", 136 | "\t 3.0 6.0 3.3000000000000003 7.289999999999998\n", 137 | "MSE= 3.779999999999999\n", 138 | "w= 1.2000000000000002\n", 139 | "\t 1.0 2.0 1.2000000000000002 0.6399999999999997\n", 140 | "\t 2.0 4.0 2.4000000000000004 2.5599999999999987\n", 141 | "\t 3.0 6.0 3.6000000000000005 5.759999999999997\n", 142 | "MSE= 2.986666666666665\n", 143 | "w= 1.3\n", 144 | "\t 1.0 2.0 1.3 0.48999999999999994\n", 145 | "\t 2.0 4.0 2.6 1.9599999999999997\n", 146 | "\t 3.0 6.0 3.9000000000000004 4.409999999999998\n", 147 | "MSE= 2.2866666666666657\n", 148 | "w= 1.4000000000000001\n", 149 | "\t 1.0 2.0 1.4000000000000001 0.3599999999999998\n", 150 | "\t 2.0 4.0 2.8000000000000003 1.4399999999999993\n", 151 | "\t 3.0 6.0 4.2 3.2399999999999993\n", 152 | "MSE= 1.6799999999999995\n", 153 | "w= 1.5\n", 154 | "\t 1.0 2.0 1.5 0.25\n", 155 | "\t 2.0 4.0 3.0 1.0\n", 156 | "\t 3.0 6.0 4.5 2.25\n", 157 | "MSE= 1.1666666666666667\n", 158 | "w= 1.6\n", 159 | "\t 1.0 2.0 1.6 0.15999999999999992\n", 160 | "\t 2.0 4.0 3.2 0.6399999999999997\n", 161 | "\t 3.0 6.0 4.800000000000001 1.4399999999999984\n", 162 | "MSE= 0.746666666666666\n", 163 | "w= 1.7000000000000002\n", 164 | "\t 1.0 2.0 1.7000000000000002 0.0899999999999999\n", 165 | "\t 2.0 4.0 3.4000000000000004 0.3599999999999996\n", 166 | "\t 3.0 6.0 5.1000000000000005 0.809999999999999\n", 167 | "MSE= 0.4199999999999995\n", 168 | "w= 1.8\n", 169 | "\t 1.0 2.0 1.8 0.03999999999999998\n", 170 | "\t 2.0 4.0 3.6 0.15999999999999992\n", 171 | "\t 3.0 6.0 5.4 0.3599999999999996\n", 172 | "MSE= 0.1866666666666665\n", 173 | "w= 1.9000000000000001\n", 174 | "\t 1.0 2.0 1.9000000000000001 0.009999999999999974\n", 175 | "\t 2.0 4.0 3.8000000000000003 0.0399999999999999\n", 176 | "\t 3.0 6.0 5.7 0.0899999999999999\n", 177 | "MSE= 0.046666666666666586\n", 178 | "w= 2.0\n", 179 | "\t 1.0 2.0 2.0 0.0\n", 180 | "\t 2.0 4.0 4.0 0.0\n", 181 | "\t 3.0 6.0 6.0 0.0\n", 182 | "MSE= 0.0\n", 183 | "w= 2.1\n", 184 | "\t 1.0 2.0 2.1 0.010000000000000018\n", 185 | "\t 2.0 4.0 4.2 0.04000000000000007\n", 186 | "\t 3.0 6.0 6.300000000000001 0.09000000000000043\n", 187 | "MSE= 0.046666666666666835\n", 188 | "w= 2.2\n", 189 | "\t 1.0 2.0 2.2 0.04000000000000007\n", 190 | "\t 2.0 4.0 4.4 0.16000000000000028\n", 191 | "\t 3.0 6.0 6.6000000000000005 0.36000000000000065\n", 192 | "MSE= 0.18666666666666698\n", 193 | "w= 2.3000000000000003\n", 194 | "\t 1.0 2.0 2.3000000000000003 0.09000000000000016\n", 195 | "\t 2.0 4.0 4.6000000000000005 0.36000000000000065\n", 196 | "\t 3.0 6.0 6.9 0.8100000000000006\n", 197 | "MSE= 0.42000000000000054\n", 198 | "w= 2.4000000000000004\n", 199 | "\t 1.0 2.0 2.4000000000000004 0.16000000000000028\n", 200 | "\t 2.0 4.0 4.800000000000001 0.6400000000000011\n", 201 | "\t 3.0 6.0 7.200000000000001 1.4400000000000026\n", 202 | "MSE= 0.7466666666666679\n", 203 | "w= 2.5\n", 204 | "\t 1.0 2.0 2.5 0.25\n", 205 | "\t 2.0 4.0 5.0 1.0\n", 206 | "\t 3.0 6.0 7.5 2.25\n", 207 | "MSE= 1.1666666666666667\n", 208 | "w= 2.6\n", 209 | "\t 1.0 2.0 2.6 0.3600000000000001\n", 210 | "\t 2.0 4.0 5.2 1.4400000000000004\n", 211 | "\t 3.0 6.0 7.800000000000001 3.2400000000000024\n", 212 | "MSE= 1.6800000000000008\n", 213 | "w= 2.7\n", 214 | "\t 1.0 2.0 2.7 0.49000000000000027\n", 215 | "\t 2.0 4.0 5.4 1.960000000000001\n", 216 | "\t 3.0 6.0 8.100000000000001 4.410000000000006\n", 217 | "MSE= 2.2866666666666693\n", 218 | "w= 2.8000000000000003\n", 219 | "\t 1.0 2.0 2.8000000000000003 0.6400000000000005\n", 220 | "\t 2.0 4.0 
5.6000000000000005 2.560000000000002\n", 221 | "\t 3.0 6.0 8.4 5.760000000000002\n", 222 | "MSE= 2.986666666666668\n", 223 | "w= 2.9000000000000004\n", 224 | "\t 1.0 2.0 2.9000000000000004 0.8100000000000006\n", 225 | "\t 2.0 4.0 5.800000000000001 3.2400000000000024\n", 226 | "\t 3.0 6.0 8.700000000000001 7.290000000000005\n", 227 | "MSE= 3.780000000000003\n", 228 | "w= 3.0\n", 229 | "\t 1.0 2.0 3.0 1.0\n", 230 | "\t 2.0 4.0 6.0 4.0\n", 231 | "\t 3.0 6.0 9.0 9.0\n", 232 | "MSE= 4.666666666666667\n", 233 | "w= 3.1\n", 234 | "\t 1.0 2.0 3.1 1.2100000000000002\n", 235 | "\t 2.0 4.0 6.2 4.840000000000001\n", 236 | "\t 3.0 6.0 9.3 10.890000000000004\n", 237 | "MSE= 5.646666666666668\n", 238 | "w= 3.2\n", 239 | "\t 1.0 2.0 3.2 1.4400000000000004\n", 240 | "\t 2.0 4.0 6.4 5.760000000000002\n", 241 | "\t 3.0 6.0 9.600000000000001 12.96000000000001\n", 242 | "MSE= 6.720000000000003\n", 243 | "w= 3.3000000000000003\n", 244 | "\t 1.0 2.0 3.3000000000000003 1.6900000000000006\n", 245 | "\t 2.0 4.0 6.6000000000000005 6.7600000000000025\n", 246 | "\t 3.0 6.0 9.9 15.210000000000003\n", 247 | "MSE= 7.886666666666668\n", 248 | "w= 3.4000000000000004\n", 249 | "\t 1.0 2.0 3.4000000000000004 1.960000000000001\n", 250 | "\t 2.0 4.0 6.800000000000001 7.840000000000004\n", 251 | "\t 3.0 6.0 10.200000000000001 17.640000000000008\n", 252 | "MSE= 9.14666666666667\n", 253 | "w= 3.5\n", 254 | "\t 1.0 2.0 3.5 2.25\n", 255 | "\t 2.0 4.0 7.0 9.0\n", 256 | "\t 3.0 6.0 10.5 20.25\n", 257 | "MSE= 10.5\n", 258 | "w= 3.6\n", 259 | "\t 1.0 2.0 3.6 2.5600000000000005\n", 260 | "\t 2.0 4.0 7.2 10.240000000000002\n", 261 | "\t 3.0 6.0 10.8 23.040000000000006\n", 262 | "MSE= 11.94666666666667\n", 263 | "w= 3.7\n", 264 | "\t 1.0 2.0 3.7 2.8900000000000006\n", 265 | "\t 2.0 4.0 7.4 11.560000000000002\n", 266 | "\t 3.0 6.0 11.100000000000001 26.010000000000016\n", 267 | "MSE= 13.486666666666673\n", 268 | "w= 3.8000000000000003\n", 269 | "\t 1.0 2.0 3.8000000000000003 3.240000000000001\n", 270 | "\t 2.0 4.0 7.6000000000000005 12.960000000000004\n", 271 | "\t 3.0 6.0 11.4 29.160000000000004\n", 272 | "MSE= 15.120000000000005\n", 273 | "w= 3.9000000000000004\n", 274 | "\t 1.0 2.0 3.9000000000000004 3.610000000000001\n", 275 | "\t 2.0 4.0 7.800000000000001 14.440000000000005\n", 276 | "\t 3.0 6.0 11.700000000000001 32.49000000000001\n", 277 | "MSE= 16.84666666666667\n", 278 | "w= 4.0\n", 279 | "\t 1.0 2.0 4.0 4.0\n", 280 | "\t 2.0 4.0 8.0 16.0\n", 281 | "\t 3.0 6.0 12.0 36.0\n", 282 | "MSE= 18.666666666666668\n" 283 | ] 284 | } 285 | ], 286 | "source": [ 287 | "for w in np.arange(0.0,4.1,0.1):\n", 288 | " print(\"w=\", w)\n", 289 | " l_sum=0\n", 290 | " for x_val, y_val in zip (x_data, y_data):\n", 291 | " y_pred_val = forward(x_val)\n", 292 | " l = loss(x_val, y_val)\n", 293 | " l_sum+=l\n", 294 | " print(\"\\t\", x_val, y_val, y_pred_val, l)\n", 295 | " \n", 296 | " print(\"MSE=\", l_sum/3)\n", 297 | " w_list.append(w)\n", 298 | " mse_list.append(l_sum/3)" 299 | ] 300 | }, 301 | { 302 | "cell_type": "code", 303 | "execution_count": 28, 304 | "metadata": {}, 305 | "outputs": [ 306 | { 307 | "data": { 308 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEGCAYAAAB/+QKOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3deXhU5dnH8e+dPYFACAQIISFsssoaNkHFBQEX0LoAbuBS6tJaX1tta9+3Wq3WLra1WqVUUFGLuxUVFSrKokgIyBL2kAQSAkkgkIQlZJn7/SOjTeMEAmTmzGTuz3XNlck5Z+b8OGRy55znOc8jqooxxhhTX4jTAYwxxvgnKxDGGGM8sgJhjDHGIysQxhhjPLICYYwxxqMwpwM0pXbt2mlqaqrTMYwxJmCsWbNmv6omeFrXrApEamoqGRkZTscwxpiAISK7Glpnl5iMMcZ4ZAXCGGOMR1YgjDHGeGQFwhhjjEdWIIwxxnhkBcIYY4xHViCMMcZ4FPQFoqKqhtnLdvLlzv1ORzHGmFP22dYi5q7IobLa1eTvHfQFIixEeH55DnOW5zgdxRhjTtlzS3cyb2Uu4aHS5O9tBSI0hGvTOvPZtiL2lh5zOo4xxjTazuLDpOeUMGVYCiJWILxiSloKLoU3M/KdjmKMMY32+uo8wkKEa4Z29sr7W4EAUtrGMKZHO15fnUeNy6ZgNcb4v+PVNby1Jp+L+3QgITbSK/uwAuE2dXgyew4dY/mOYqejGGPMSS3eXEjJkUqmDk/22j6sQLiN69uB+BYRvJae53QUY4w5qdfS80iKi+bcnh5H6m4SViDcIsNCuXpIEv/eUkhx+XGn4xhjTIN2HzjKiqz9XJeWTGhI0zdOf8MKRB1ThqVQ7VLeWmON1cYY//V6xm5CBK4b5p3G6W9YgaijR/uWDE+N5/XVu1G1xmpjjP+prnHxZkY+Y3u1J7F1tFf35bUCISJzRaRIRDLrLHtdRNa5H7kisq6B1+aKyEb3dj6dIm7q8GRyDxxlZfYBX+7WGGMaZcnWIorKjzN1mPcap7/hzTOIF4EJdReo6hRVHaSqg4C3gXdO8PoL3NumeTHjd1x6diKtosKssdoY45deW51H+9hILuzd3uv78lqBUNVlQImndVJ7y991wHxv7f90RYWHctXgJD7O3MfBI5VOxzHGmG/tLT3G59uKuDatM2Gh3m8hcKoN4lygUFV3NLBegUUiskZEZvowFwDTRqRQWePina/3+HrXxhjToDdW5+NSmDosxSf7c6pATOPEZw+jVXUIMBG4W0TOa2hDEZkpIhkiklFc3DQ3ufXu2IpByXG8lm6N1cYY/1DjUt7IyOPcnu1Ijo/xyT59XiBEJAz4HvB6Q9uoaoH7axHwLjD8BNvOVtU0VU1LSGi6G0amDU9mR9Fh1u4+2GTvaYwxp2v5jmL2HDrms7MHcOYM4mJgq6p6vNlARFqISOw3z4FLgExP23rT5QM60SIilPnWWG2M8QOvpefRtkUE4/p28Nk+vdnNdT6wEuglIvkicpt71VTqXV4SkU4istD9bQdghYisB9KBD1X1Y2/lbEiLyDAmDUrigw0FlB6r8vXujTHmW0XlFfx7SyFXD+1MRJjv/q4P89Ybq+q0BpbP8LCsALjU/TwbGOitXKdi2vBk5qfvZsG6Pdw0KtXpOMaYIPXWmnyqXcoUH9z7UJfdSX0CZye1pm9iK+an51ljtTHGES6X8vrqPIZ3jad7Qkuf7tsKxAmICNOGJ7N5bxnr80udjmOMCUIrsw+w68BRpnlxWO+GWIE4iSsHJ9EiIpSXV+5yOooxJgjNW5lLfIsIJvZP9Pm+rUCcRGxUOFcNSeL9DQV2Z7Uxxqf2lh5j8eZCrktLJio81Of7twLRCDeNTKWy2sUbGdbl1RjjO/9ctRsFbhjhu3sf6rIC0Qi9OsYyvGs8r6zahcvmrDbG+EBltYv56Xlc2Ku9z+6crs8KRCPdNLILeSXHWLrd5qw2xnjfx5v2sf/wcW4c1cWxDFYgGml8v44kxEby8lfWWG2M8b5XVu4iJT6G87045/TJWIFopIiwEKYNS+azbUXklRx1Oo4xphnbuq+M9NwSbhyZQogX55w+GSsQp2DaiBRCRHhllZ1FGGO85+WVu4gMC+Haob6/96EuKxCnILF1NOP6dOCN1XlUVNU4HccY0wyVV1Tx7td7uGJgJ9q0iHA0ixWIU3TzqC4cPFrFhxv2Oh3FGNMMvbN2D0cra7jZwcbpb1iBOEWjurele0ILa6w2xjQ5VeXlr3YxsHNrBnSOczqOFYhTJSLcNLIL6/IOsdHGZzLGNKGV2QfIKjrsN6NHW4E4Dd8b2pmYiFBe/irX6SjGmGbkla92ERcTzuUDfD/ukidWIE5Dq6hwrhycxHvrCjh01MZnMsacuX2lFXyyqZApDo275IkViNN008guHK928dYajzOnGmPMKZmfvhuXKjeMcL5x+htWIE5Tn8RWDEttw8tf2fhMxpgzU1XjYn76bsaelUBKW2fGXfLEm3NSzxWRIhHJrLPsYRHZIyLr3I9LG3jtBBHZJiJZIvJzb2U8UzeO7MKuA0dZnrXf6SjGmAC2aFMhReXHuckPurbW5c0ziBeBCR6W/1lVB7kfC+uvFJFQ4G/ARKAvME1E+nox52mb2D+Rdi0jmPdlrtNRjDEB7KWVuSTHR3P+We2djvJfvFYgVHUZUHIaLx0OZKlqtqpWAq8Bk5s0XBOJCAvh+hFdWLKtiJz9R5yOY4wJQJl7SknPKeGmkV0IdXDcJU+caIP4oYhscF+CauNhfRJQd2aefPcyj0RkpohkiEhGcbHvh+K+cWQK4SEhvPhFjs/3bYwJfHO/yCEmIpQpw5yZFOhEfF0gngO6A4OAvcCTHrbxVEIbbAVW1dmqmqaqaQkJvh8Wt31sFFcM7MSba/IpPVbl8/0bYwJXUVkF768v4Lq0ZFpHhzsd5zt8WiBUtVBVa1TVBfyD2stJ9eUDdYcw7AwU+CLf6bp1TCpHK2t4LX2301GMMQHk5a92Ue1SZpyT6nQUj3xaIESk7u2BVwGZHjZbDfQUka4iEgFMBRb4It/p6tepNSO7xfPSl7lU17icjmOMCQAVVTW8umo3F/XuQGq7Fk7H8cib3VznAyuBXiKSLyK3Ab8XkY0isgG4APgf97adRGQhgKpWAz8EPgG2AG+o6iZv5Wwqt43pRkFpBR9v2ud0FGNMAPjX13soOVLJbWO6Oh2lQWHeemNVneZh8ZwGti0ALq3z/ULgO11g/dmFvdvTpW0Mc1bkcPmATk7HMcb4MVVl7hc59Elsxchu8U7HaZDdSd1EQkOEW85J5evdh1i7+6DTcYwxfmxF1n62Fx7mtjFdEfGvrq11WYFoQtemJRMbFcbcFdbl1RjTsDkrcmjXMpIrBvrHqK0NsQLRhFpEhjF1WDIfZe5jz6FjTscxxvihrKJyPt9WzE0juxAZ5h+jtjbECkQTm35OKqrKvJW5TkcxxvihF77IJSIshBtG+t+NcfVZgWhindvEMLF/IvNX7e
bI8Wqn4xhj/MjBI5W8vTafqwYl0a5lpNNxTsoKhBfcOiaVsopq3l5rc0UYY/7jn+m7qahyccuYVKejNIoVCC8YktKGgclxvPBFrs0VYYwBaud8mLcylzE92tG7Yyun4zSKFQgvEBFuG9OVnP1H+GxbkdNxjDF+YOHGvRSWHffrG+PqswLhJRP7dySxdRRzrMurMUFPVZmzIoduCS04/yzfDyp6uqxAeEl4aAg3j0rly50H2FxQ5nQcY4yDMnYdZEN+KbeM7kqIn835cCJWILzo+uEpxESE8o/l2U5HMcY46O9Ls4mLCefqIQ1ObeOXrEB4UeuYcKYNT2HB+gLyDx51Oo4xxgE7Csv595ZCpo9KJSbCa8PfeYUVCC+7bUxXBHh+ubVFGBOMZi3NJio8hOl+OufDiViB8LJOcdFcOTiJ11bvpuRIpdNxjDE+VHDoGO+t28PUYSnEt4hwOs4pswLhA3ec342KKhcvfZnrdBRjjA/NWZGDArefGzhdW+uyAuEDPdrHMq5vB15amcvRSht+w5hgcOhoJfPTdzN5YCc6t4lxOs5psQLhI3ec351DR6t4LT3P6SjGGB+Yt3IXRytr+MH53Z2Octq8OeXoXBEpEpHMOsv+ICJbRWSDiLwrInENvDbXPTXpOhHJ8FZGXxrapQ3Du8bz/PJsqmzeamOatWOVNbz4ZS4X9W5Pr46xTsc5bd48g3gRmFBv2WKgv6oOALYDvzjB6y9Q1UGqmualfD535/ndKSitYMG6AqejGGO86I2MPEqOVHLH2MA9ewAvFghVXQaU1Fu2SFW/uQj/FdDZW/v3R2N7JdC7Yyyzlu60QfyMaaaqalzMXpZNWpc2DEv13/mmG8PJNohbgY8aWKfAIhFZIyIzfZjJq0SEO87vzo6iwyzZaoP4GdMcfbhhL3sOHeOOAG57+IYjBUJEfglUA682sMloVR0CTATuFpHzTvBeM0UkQ0QyiouLvZC2aV0+IJGkuGieW7rT6SjGmCamqsxaupOe7VtyYe/2Tsc5Yz4vECIyHbgcuEFVPV5nUdUC99ci4F1geEPvp6qzVTVNVdMSEvx/lMSw0BBmnteNNbsOsjq35OQvMMYEjM+3FbN1Xzl3nN89oAbla4hPC4SITAB+BkxSVY+DE4lICxGJ/eY5cAmQ6WnbQHVdWjLxLSKY9bmdRRjTnDy3dCedWkcxaVAnp6M0CW92c50PrAR6iUi+iNwGPAPEAovdXVhnubftJCIL3S/tAKwQkfVAOvChqn7srZxOiI4IZcY5qXy6tYht+8qdjmOMaQJrdh0kPaeE28/tRnho87jFzGtDC6rqNA+L5zSwbQFwqft5NjDQW7n8xc2jujBr6U7+vnQnf5oyyOk4xpgzNGvpTuJiwpk6PNnpKE2meZS5ABQXE8G04Sm8t76A3QdsKHBjAtnWfWUs3lzIzQE4pPeJWIFw0MzzuhEaIvztsyynoxhjzsDTn2bRMjKMW0enOh2lSVmBcFCHVlFcPzyFt9fmk1diZxHGBKLtheUszNzLjHNSiYsJvCG9T8QKhMPuOL87ISI8+7mdRRgTiP766Q5iwkO5bUxgDul9IlYgHNaxdRRThyfzZoadRRgTaHYUlvPhxr1MPyeVNgE4IdDJWIHwA3eO/eYswu6LMCaQ/HVJFjHhodx+bjeno3iFFQg/kNg6minDknlrTR57Dh1zOo4xphGyisr5YEMBN5+TGpDTiTaGFQg/cad7WOBnrUeTMQHh6SVZRIeH8v1mevYAViD8Rqe4aK5LS+aNjDwK7CzCGL+2s/gw768v4KZRXZrt2QNYgfArd13QA4DnrC3CGL/2zJIsIsNCmdmMzx7ACoRfSYqL5pqhyby+Oo+9pXYWYYw/yi4+zHvr9nDTqC60bRnpdByvsgLhZ+4a2x2Xqo30aoyfeuazLCLCQpp128M3rED4meT4GK5N68z81XnsK61wOo4xpo7c/Ud4b10BN43sQkJs8z57ACsQfumusT1wuWpnpjLG+I+nl2QRHirMPC/wpxNtDCsQfig5Poarh3Tmn+m7KSyzswhj/MGuA0f417o93DAiOM4ewAqE37r7gh7UuNR6NBnjJ55ekkVYiPCD85t/28M3rED4qZS2MVyX1pl/rtptYzQZ47AdheW8szafm0d1oX1slNNxfMYKhB+756KeiMBf/r3D6SjGBLU/LtpGi4gw7hrbw+koPuXVAiEic0WkSEQy6yyLF5HFIrLD/bVNA6+d7t5mh4hM92ZOf5XYOpoZ56Tyztf5Nne1MQ75evdBPtlUyMzzujXLEVtPxNtnEC8CE+ot+znwqar2BD51f/9fRCQeeAgYAQwHHmqokDR3d47tTsvIMP64aJvTUYwJOqrK7z7eSruWEdzaDOd7OJlGFQgR6S4ike7nY0XkHhGJO9nrVHUZUFJv8WTgJffzl4ArPbx0PLBYVUtU9SCwmO8WmqAQFxPBHed3Z/HmQtbsOuh0HGOCyvId+/kqu4QfXdiTFpHNZ67pxmrsGcTbQI2I9ADmAF2Bf57mPjuo6l4A99f2HrZJAvLqfJ/vXvYdIjJTRDJEJKO4uPg0I/m3W0an0q5lJL/7eCuq6nQcY4KCy6X8/pOtdG4TzbThKU7HcURjC4RLVauBq4C/qOr/AInei4V4WObxN6OqzlbVNFVNS0hI8GIk58REhPHji3qQnlPC0u3Nswga428WZu4lc08ZP7nkLCLCgrM/T2P/1VUiMg2YDnzgXhZ+mvssFJFEAPfXIg/b5APJdb7vDBSc5v6ahSnDUkiOj+b3H2/D5bKzCGO8qarGxZOLttOrQyyTBnq8eBEUGlsgbgFGAY+pao6IdAVeOc19LqC20OD++p6HbT4BLhGRNu7G6Uvcy4JWRFgIPxnXi817y/hg416n4xjTrL2ZkU/O/iPcP74XoSGeLmgEh0YVCFXdrKr3qOp89y/sWFV94mSvE5H5wEqgl4jki8htwBPAOBHZAYxzf4+IpInI8+79lQCPAqvdj0fcy4LapIGd6N0xlicXbaOqxuV0HGOapWOVNTz16XaGdmnDRX08NZEGj8b2YvpcRFq5u5+uB14QkT+d7HWqOk1VE1U1XFU7q+ocVT2gqhepak/31xL3thmqenud185V1R7uxwun+w9sTkJChAcm9GLXgaO8vjrv5C8wxpyyl1bmUlh2nJ9N6I1I8J49QOMvMbVW1TLge8ALqjoUuNh7sUxDLujVnmGpbXjq0x0cq6xxOo4xzUrp0Sqe/SyLC3olMLxrvNNxHNfYAhHmblC+jv80UhsHiAgPTOhNcflxXvgyx+k4xjQrf1+2k7KKau4f39vpKH6hsQXiEWobiXeq6moR6QbYAEEOGZYaz0W92/Pc5zs5dLTS6TjGNAtFZRXM/SKHyYM60bdTK6fj+IXGNlK/qaoDVPVO9/fZqnq1d6OZE7l/Qi+OHK/mqU+tThvTFP7wyTZqXMp9485yOorfaGwjdWcRedc98F6hiLwtIp29Hc40rHfHVkwZlsLLK3eRVXTY6TjGBLSN+aW8tTafW0Z3pUvbFk7H8RuNvcT0A
rX3L3SidsiL993LjIN+cslZRIWH8vjCLU5HMSZgqSqPfrCZ+JgIfnhhcA3nfTKNLRAJqvqCqla7Hy8CzXNciwDSrmUkP7qwB0u2FtkQHMacpo8y95GeW8J9l5xFq6jTHSCieWpsgdgvIjeKSKj7cSNwwJvBTOPMGJ1KSnwMv/lgM9V285wxp6SiqobHF26hd8dYpqQln/wFQaaxBeJWaru47gP2AtdQO/yGcVhkWCgPXtqHHUWHmZ++2+k4xgSUuV/kkH/wGP93eV/CQoNzQL4TaWwvpt2qOklVE1S1vapeSe1Nc8YPjO/XgZHd4vnT4u2UHq1yOo4xAaGovIK/Lcni4j4dGN2jndNx/NKZlMz7miyFOSMiwv9d3pdDx6r46xLr9mpMYzz5yXYqa1z88rI+TkfxW2dSIIJ7kBI/069Ta6akJfPSl7lkF1u3V2NOJHNPKW+syWP6qFS6trNurQ05kwJhkxL4mZ9c0su6vRpzEt90a42LDudHF/V0Oo5fO2GBEJFyESnz8Cin9p4I40cSYiO5+4Ie/HtLEct3WLdXYzz5ZNM+VuWUcN8lvWgdbd1aT+SEBUJVY1W1lYdHrKoG3wzeAeCW0akkx0fzmw+2WLdXY+o5Xl3DYwu3cFaHlkwbZt1aT8b6dTUzUeGhPDixD9sKy3nN5oww5r+88EUueSXWrbWx7Ag1QxP6d2RE13j+uGgbJUdstFdjAPaWHuPpT3dwUe/2nNvTBoJoDJ8XCBHpJSLr6jzKROTeetuMFZHSOtv8ytc5A5mI8Mjk/hyuqOa31mBtDACPvL+Zapfy0BX9nI4SMHzejqCq24BBACISCuwB3vWw6XJVvdyX2ZqTXh1jue3crvx9aTbXpiXb7FgmqH22tYiPMvfx00vOIqVtjNNxAobTl5guonYSol0O52iWfnxRT5Liovnff22kstoarE1wOlZZw68WZNI9oQXfP6+b03ECitMFYiowv4F1o0RkvYh8JCINnhOKyEwRyRCRjOJi69pZV0xEGL+e1I/thYeZs8KmJzXB6ZnPdpBXcozfXHk2kWGhTscJKI4VCBGJACYBb3pYvRbooqoDgaeBfzX0Pqo6W1XTVDUtIcEanuq7uG8HxvXtwFOfbiev5KjTcYzxqayicmYvy+Z7g5MY1b2t03ECjpNnEBOBtapaWH+Fqpap6mH384VAuIjYaFqn6eFJ/RCEhxdsQtVugDfBQVX55buZRIeH8qCNt3RanCwQ02jg8pKIdBQRcT8fTm1Om3/iNCXFRfM/43ry6dYiFm3+Tj02pll6Z+0eVuWU8POJfWjXMtLpOAHJkQIhIjHAOOCdOsvuEJE73N9eA2SKyHrgr8BUtT99z8gto7vSq0Msv16wiSPHq52OY4xXHTpayeMLtzA4JY6pdsf0aXOkQKjqUVVtq6qldZbNUtVZ7ufPqGo/VR2oqiNV9UsncjYn4aEhPHZVfwpKK3jqUxsS3DRvv/t4G4eOVfHYlWcTEmIDT58up3sxGR9KS41n6rBk5qzIYeu+MqfjGOMVa3YdZH76bm45J5W+nVo5HSegWYEIMj+b0JvW0eH88t1MXC67ameal+oaF798dyOJraO4d9xZTscJeFYggkybFhH8YmLv2r+yVtsc1qZ5qT07LuehK/rSMtIGnD5TViCC0DVDO3NO97Y8/uEW8g/avRGmecgqOsyTi7czrm8Hxvfr6HScZsEKRBASEX539QAAfvb2Brs3wgS8Gpfy0zfXExMRymNX9cfdS96cISsQQSo5PoYHL+vDF1kHeHWVXWoyge0fy7NZl3eIX0/qR/vYKKfjNBtWIILY9cNTGNOjHY8v3GLDcJiAlVVUzp8Wb2dCv45MGmgzITclKxBBTET43TUDCBHhgbc2WK8mE3Cqa1z85M0NtIgI5dEr7dJSU7MCEeSS4qL55WV9WJl9gFdX2ajrJrDMXp7N+rxDPDK5PwmxNpxGU7MCYZg6LJlze7bj8YVb2X3ALjWZwLC9sJy/LN7BpWd35PIBiU7HaZasQJhvezWFhQj3v7XeLjUZv1dd4+Knb66nZVQYj0y2S0veYgXCANApLpr/vbwPq3JKePkru9Rk/Nvfl2WzIb+URyf3t5FavcgKhPnWdWnJnH9WAk98tJVdB444HccYj7btK+cv/97OZQMSucwuLXmVFQjzLRHhiavPJixUuP/NDdTYpSbjZ6rcl5ZaRYXzyKQGZyI2TcQKhPkvia2jeeiKfqTnlvDc51lOxzHmvzy5aDsb95Ty2FX9aWuXlrzOCoT5jquHJDFpYCf+/O8drM4tcTqOMQAs217MrKU7uX5EChP626UlX7ACYb5DRHjsqv4kxUXz4/lfc+hopdORTJArKq/gvjfW0atDLL+6vK/TcYKGFQjjUWxUOM9cP5jiw8dtQD/jKJdLue/19Rw+Xs3T1w8mKjzU6UhBw7ECISK5IrJRRNaJSIaH9SIifxWRLBHZICJDnMgZzAZ0juNnE3rzyaZCXrGur8Yhs5btZEXWfh6+oh9ndYh1Ok5QcfoM4gJVHaSqaR7WTQR6uh8zged8mswAcOvoroztlcCjH25hc4FNU2p8a82ugzy5qLZL65RhyU7HCTpOF4gTmQzM01pfAXEiYi1TPhYSIvzx2oHERYfzo/lrOVpZ7XQkEyRKj1Vxz/yvSWwdxW+/d7bdLe0AJwuEAotEZI2IzPSwPgnIq/N9vnvZfxGRmSKSISIZxcXFXooa3Nq1jOQvUwaRvf8IDy/Y5HQcEwRUlZ+/vYHCsgqenjaYVlHhTkcKSk4WiNGqOoTaS0l3i8h59dZ7+nPhOy2lqjpbVdNUNS0hIcEbOQ1wTo923D22B29k5PPeuj1OxzHN3D/Td/NR5j5+Or4Xg1PaOB0naDlWIFS1wP21CHgXGF5vk3yg7kXHzkCBb9IZT+69uCdpXdrwy3czbSgO4zXb9pXzyPubObdnO2ae283pOEHNkQIhIi1EJPab58AlQGa9zRYAN7t7M40ESlV1r4+jmjrCQkN4atpgQgTueMXaI0zTKz1WxZ2vrCE2Kpw/XTeIkBBrd3CSU2cQHYAVIrIeSAc+VNWPReQOEbnDvc1CIBvIAv4B3OVMVFNXUlw0T00bzNZ9ZTzwlt0fYZpOjUu597Wv2V1ylL9dP9gmAPIDYU7sVFWzgYEels+q81yBu32ZyzTOBb3a88D43vzu46307dSKu8b2cDqSaQaeXLSNz7YV8+iV/RnRra3TcQz+3c3V+LE7zu/GFQM78YdPtrFka6HTcUyAe399Ac9+vpNpw1O4cUSK03GMmxUIc1pEhN9fPYC+ia348fx17Cw+7HQkE6A2FZRy/1vrSevShl9P6mf3O/gRKxDmtEVHhDL75jQiwkL4/rwMyiqqnI5kAsyBw8eZOW8NbWIieO7GoUSE2a8kf2L/G+aMJMVF8+wNQ9h94Cj3vrbOJhkyjVZV4+KuV9ey//Bx/n7TUGuU9kNWIMwZG9GtLQ9N6seSrUU8uWib03FMgHj0g82syinhiavPZkDnOKfjGA8c6cVkmp8bR6SwuaCUZz/fSZ/E
VlwxsJPTkYwfey19N/NW7uL753blqsGdnY5jGmBnEKZJiAi/ntSftC5tuP+t9azPO+R0JOOnVmUf4P/ey+Tcnu342YTeTscxJ2AFwjSZiLAQnrtxKO1aRnLLi6vJtp5Npp4te8u4fV4GKfExPD1tMGGh9ivIn9n/jmlSCbGRzLu1dlitm+emU1RW4XAi4y/ySo4yfW46LSLCmHfbCOJiIpyOZE7CCoRpct0SWvLCjGGUHKlk+gurrfurqf1ZmJtORVUNL906nKS4aKcjmUawAmG8YmByHLNuHMqOwnJmzsugoqrG6UjGIUeOV3PLi6vZc+gYc2YMo1dHmzY0UFiBMF5z3lkJPHndQL7KLuF/Xrd7JIJRVY2LO19dy8b8Qzxz/RCGpcY7HcmcAisQxqsmD0ri/y7vy0eZ+3hoQaaN/hpEXC7lgbc2sGx7Mb/93tmM69vB6RkfcboAAA9nSURBVEjmFNl9EMbrbhvTleLy48xaupP2sVHcc1FPpyMZH3ji4628+/Ue7h/fiynDbAC+QGQFwvjEzyb0orj8OH9avJ22LSO4YUQXpyMZL5q9bCezl2Uz45xU7hrb3ek45jRZgTA+ISI8cfXZHDpayS/fzUQQrrdhnZulfyzL5vGFW7l8QCK/uryvjc4awKwNwvhMeGgIf7thCBf2bs+D727kpS9znY5kmtjfPsvisYVbuGxAIn+eYlOGBjqfFwgRSRaRz0Rki4hsEpEfe9hmrIiUisg69+NXvs5pvCMqPJRZNw7lkr4deGjBJv6xLNvpSKYJqCp/XrydP3yyjasGJ/HUlEGE213SAc+JS0zVwE9Uda2IxAJrRGSxqm6ut91yVb3cgXzGyyLCas8k7n19HY8t3EJljYu7L7BpSwOVqvL7T7bx3Oc7uXZoZ564egChdubQLPi8QKjqXmCv+3m5iGwBkoD6BcI0Y+GhITw1ZRARoSH84ZNtVFa7uPfinna9OsCoKr/5cAtzVuRww4gUHp3c3y4rNSOONlKLSCowGFjlYfUoEVkPFAA/VdVNDbzHTGAmQEqKNXoGkrDQEP547UDCQoSnPt1BZY2LB8b3siIRIFwu5eH3NzFv5S5mnJPKQ1dYg3Rz41iBEJGWwNvAvapaVm/1WqCLqh4WkUuBfwEeO8+r6mxgNkBaWprdhRVgQkOE3109gPCwEJ77fCeV1S7+97I+9ovGz7lcyoPvbuS11XnMPK8bv5jY2/7PmiFHCoSIhFNbHF5V1Xfqr69bMFR1oYg8KyLtVHW/L3Ma3wgJER67sj8RoSHMWZFD2bEqHrvqbJuf2E9VVNXwkzfX8+GGvfzwgh785JKzrDg0Uz4vEFL7kzQH2KKqf2pgm45AoaqqiAyntrfVAR/GND4mIjx0RV9aRYfz1093sLvkKLNuHEqbFjYktD8pKq/g+/PWsCH/EL+Y2JsfnG83wTVnTpxBjAZuAjaKyDr3sgeBFABVnQVcA9wpItXAMWCq2iA+zZ6IcN+4s+jWrgUPvLWBq579gjkzhtE9oaXT0Qy1k/3c9uJqDh6tYtaNQxnfr6PTkYyXSXP6vZuWlqYZGRlOxzBNYM2uEmbOW0NVjYvnbhzK6B7tnI4U1D7dUsg987+mZVQYc6YPo39Sa6cjmSYiImtUNc3TOrvIa/zS0C7x/Ovu0XRoFcX0uenMT9/tdKSgpKo8vzyb2+dl0DWhBe/dPcaKQxCxAmH8VnJ8DG/fdQ6je7TjF+9s5DcfbLY5JXyoqsbFg+9m8psPtzC+b0fe+MEoOraOcjqW8SErEMavtYoKZ870NGack8rzK3KYOS+DQ0crnY7V7O0/fJwZL9Seud01tjvP3jCEmAgb2zPYWIEwfi8sNISHJ/Xj0cn9WLq9mIlPLWflTuvU5i2fbStiwl+WsTr3IH+4ZgAPTOhtd0cHKSsQJmDcNCqVd+8aTXR4KNc//xW/+3grldUup2M1GxVVNTy8YBO3vLCati0ief+HY7g2LdnpWMZBViBMQDm7c2s+uGcMU9KSee7znVwz60ty9h9xOlbA27avnCv/9gUvfpnLjHNSee+Ho+nVMdbpWMZhViBMwImJCOOJqwfw3A1D2HXgKJf9dTlvrM6z+a5Pg6ry0pe5XPHMCvYfPs4Ltwzj4Un9iAoPdTqa8QPW6mQC1sSzExmUEsd9r6/ngbc38Pn2Ih6/6mziYuzu68bYf/g4D7y1gSVbi7igVwK/v2YgCbGRTscyfsQKhAloia2jeeX2Ecxels2Ti7axKruE+8f34tq0ZJuToAHVNS5eXbWbJxdto6LaxcNX9GX6Oak2npL5DruT2jQbmwpKeei9TWTsOsjZSa15eFJfhnaJdzqWX/kyaz+/fn8z2wrLGd2jLQ9f0Y+eHaytIZid6E5qKxCmWVFVFqwv4LcLt7KvrIKrBifx84m96dAquG/wyj94lMcXbmHhxn10bhPN/17Wh/H9OtpZgzlhgbBLTKZZEREmD0ri4j4dePbzLP6xLIdPNu3jRxf25NYxqUSGBVfj67HKGmYt3cmspTsRgfvGncXM87pZI7RpFDuDMM3argNH+M2HW1i8uZDUtjHcdUEPJg/q1OwLRUVVDW+vzefZz3ay59AxrhjYiV9M7E2nuGinoxk/Y5eYTNBbtr2Y3360lS17y2gfG8mM0ancMKILraPDnY7WpEqOVPLyyl3MW5nLgSOVDOzcmgcv7cOIbm2djmb8lBUIY6htn1iRtZ/Zy7JZvmM/LSJCmTIshVvHpNK5TYzT8c5I7v4jzFmRw5tr8qiocnFR7/Z8/7xujOgab+0M5oSsQBhTz+aCMp5fns2C9QUocNnZicwYncrg5LiA+YXqcikZuw4yd0UOn2zeR3hICFcNTuL2c7tazyTTaFYgjGlAwaFjvPhlLv9ctZvDx6tJiotmfL+OTDy7I0NT2vjdIHXVNS5W5x7k48y9fLxpH4Vlx2kdHc6NI1OYPiqV9kHeW8ucOisQxpxEWUUVizYV8tHGvSzfsZ/KGhcJsZGM79eBif0TGdE1nrBQZ0amqax2sTL7AB9n7mXRpkIOHKkkMiyEsb0SmNg/kXF9O9Ai0jokmtPjdwVCRCYATwGhwPOq+kS99ZHAPGAocACYoqq5J3tfKxCmKZRXVLFkaxEfZ+7j823FHKuqoU1MOMNS4+mf1Jr+Sa3o36m1V/5aV1X2lVWQuaeMzD2lbCooIz3nAGUV1bSICOXCPh2Y2L8jY3sl2PwMpkn4VYEQkVBgOzAOyAdWA9NUdXOdbe4CBqjqHSIyFbhKVaec7L2tQJimdqyyhqXbi1i0qZB1eYfIrjNybEJsJP07taJ/UmvO6hBLfIsIWkeH0zo6nFZR4cRGhX3nElWNSzlcUU3psSpKj1VRVlHFgSOVbN1bRmZBGZv2lHLgSO2ESCLQPaElg5LjGN+vI+f2bGf3L5gm5283yg0HslQ1G0BEXgMmA5vrbDMZeNj9/C3gGRERbU7Xw0xAiI4IZUL/RCb0TwRqzy627C0nc08pmQWlbNpTxtLtxXiaCVUEYiPDaBUdjmrtZazDx6vx9FMcFiL07BDLhb3
bf3uW0rtjK7t0ZBzlxE9fEpBX5/t8YERD26hqtYiUAm2B/fXfTERmAjMBUlJSvJHXmG/FRoUzvGs8w7v+Z4yniqoacvYf+c9ZQZ2vZe6zBQFaRYfT6tszjLDar9HhxMWEk9q2hZ0dGL/jRIHw1C2k/t9UjdmmdqHqbGA21F5iOrNoxpy6qPBQ+iS2cjqGMU3OiW4Z+UDdeQw7AwUNbSMiYUBroMQn6YwxxgDOFIjVQE8R6SoiEcBUYEG9bRYA093PrwGWWPuDMcb4ls8vMbnbFH4IfEJtN9e5qrpJRB4BMlR1ATAHeFlEsqg9c5jq65zGGBPsHOkioaoLgYX1lv2qzvMK4Fpf5zLGGPMfztwaaowxxu9ZgTDGGOORFQhjjDEeWYEwxhjjUbMazVVEioFdp/nydni4U9sPWK5TY7lOjeU6Nc0xVxdVTfC0olkViDMhIhkNDVjlJMt1aizXqbFcpybYctklJmOMMR5ZgTDGGOORFYj/mO10gAZYrlNjuU6N5To1QZXL2iCMMcZ4ZGcQxhhjPLICYYwxxqOgKxAiMkFEtolIloj83MP6SBF53b1+lYik+kmuGSJSLCLr3I/bfZBprogUiUhmA+tFRP7qzrxBRIZ4O1Mjc40VkdI6x+pXnrbzQq5kEflMRLaIyCYR+bGHbXx+zBqZy+fHTESiRCRdRNa7c/3awzY+/zw2MpfPP4919h0qIl+LyAce1jXt8VLVoHlQO7z4TqAbEAGsB/rW2+YuYJb7+VTgdT/JNQN4xsfH6zxgCJDZwPpLgY+onQFwJLDKT3KNBT5w4OcrERjifh4LbPfw/+jzY9bIXD4/Zu5j0NL9PBxYBYyst40Tn8fG5PL557HOvu8D/unp/6upj1ewnUEMB7JUNVtVK4HXgMn1tpkMvOR+/hZwkYh4mgLV17l8TlWXceKZ/CYD87TWV0CciCT6QS5HqOpeVV3rfl4ObKF2fvW6fH7MGpnL59zH4LD723D3o36vGZ9/HhuZyxEi0hm4DHi+gU2a9HgFW4FIAvLqfJ/Pdz8o326jqtVAKdDWD3IBXO2+LPGWiCR7WO9rjc3thFHuSwQfiUg/X+/cfWo/mNq/Puty9JidIBc4cMzcl0vWAUXAYlVt8Hj58PPYmFzgzOfxL8ADgKuB9U16vIKtQHiqpPX/MmjMNk2tMft8H0hV1QHAv/nPXwlOcuJYNcZaaseXGQg8DfzLlzsXkZbA28C9qlpWf7WHl/jkmJ0klyPHTFVrVHUQtXPTDxeR/vU2ceR4NSKXzz+PInI5UKSqa060mYdlp328gq1A5AN1K31noKChbUQkDGiN9y9nnDSXqh5Q1ePub/8BDPVypsZozPH0OVUt++YSgdbOXhguIu18sW8RCaf2l/CrqvqOh00cOWYny+XkMXPv8xDwOTCh3ionPo8nzeXQ53E0MElEcqm9DH2hiLxSb5smPV7BViBWAz1FpKuIRFDbiLOg3jYLgOnu59cAS9Td4uNkrnrXqSdRex3ZaQuAm909c0YCpaq61+lQItLxm+uuIjKc2p/zAz7Yr1A7n/oWVf1TA5v5/Jg1JpcTx0xEEkQkzv08GrgY2FpvM59/HhuTy4nPo6r+QlU7q2oqtb8jlqjqjfU2a9Lj5cic1E5R1WoR+SHwCbU9h+aq6iYReQTIUNUF1H6QXhaRLGor71Q/yXWPiEwCqt25Zng7l4jMp7Z3SzsRyQceorbBDlWdRe284pcCWcBR4BZvZ2pkrmuAO0WkGjgGTPVBkYfav/BuAja6r18DPAik1MnmxDFrTC4njlki8JKIhFJbkN5Q1Q+c/jw2MpfPP48N8ebxsqE2jDHGeBRsl5iMMcY0khUIY4wxHlmBMMYY45EVCGOMMR5ZgTDGGOORFQhjjDEeWYEwxhjjkRUIY7xARB4QkXvcz/8sIkvczy/yMDyCMX7JCoQx3rEMONf9PA1o6R4PaQyw3LFUxpwCKxDGeMcaYKiIxALHgZXUFopzsQJhAkRQjcVkjK+oapV71M1bgC+BDcAFQHf8Y6BFY07KziCM8Z5lwE/dX5cDdwDrfDRwoDFnzAqEMd6znNqRQVeqaiFQgV1eMgHERnM1xhjjkZ1BGGOM8cgKhDHGGI+sQBhjjPHICoQxxhiPrEAYY4zxyAqEMcYYj6xAGGOM8ej/AUETveONP1/CAAAAAElFTkSuQmCC\n", 309 | "text/plain": [ 310 | "
" 311 | ] 312 | }, 313 | "metadata": { 314 | "needs_background": "light" 315 | }, 316 | "output_type": "display_data" 317 | } 318 | ], 319 | "source": [ 320 | "import matplotlib.pyplot as plt\n", 321 | "plt.plot(w_list, mse_list)\n", 322 | "plt.ylabel('Loss')\n", 323 | "plt.xlabel('w')\n", 324 | "plt.show()" 325 | ] 326 | }, 327 | { 328 | "cell_type": "code", 329 | "execution_count": null, 330 | "metadata": {}, 331 | "outputs": [], 332 | "source": [] 333 | } 334 | ], 335 | "metadata": { 336 | "kernelspec": { 337 | "display_name": "Python 3", 338 | "language": "python", 339 | "name": "python3" 340 | }, 341 | "language_info": { 342 | "codemirror_mode": { 343 | "name": "ipython", 344 | "version": 3 345 | }, 346 | "file_extension": ".py", 347 | "mimetype": "text/x-python", 348 | "name": "python", 349 | "nbconvert_exporter": "python", 350 | "pygments_lexer": "ipython3", 351 | "version": "3.7.6" 352 | } 353 | }, 354 | "nbformat": 4, 355 | "nbformat_minor": 4 356 | } 357 | -------------------------------------------------------------------------------- /lecture_03.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Lecture: 03\n", 8 | "\n", 9 | "Gradient Descent\n", 10 | "\n", 11 | "* How the weights get update to ensure we reach minimum loss.\n", 12 | "* This method is called Gradient Descent." 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 1, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "import numpy as np\n", 22 | "import pandas as pd\n", 23 | "import matplotlib.pyplot as plt\n", 24 | "import torch" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 2, 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "w_list = []\n", 34 | "mse_list=[]" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 3, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "# Input data\n", 44 | "\n", 45 | "x_data = [1.0, 2.0, 3.0]\n", 46 | "y_data = [2.0, 4.0, 6.0]\n", 47 | "w = 1.0" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 4, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "# Function for forward pass to predict y\n", 57 | "def forward(x):\n", 58 | " return x*w" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 5, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "# Function to calcuate the loss of the model\n", 68 | "# Loss is the square of difference of prediction and actual value\n", 69 | "\n", 70 | "def loss(x,y):\n", 71 | " y_pred = forward(x)\n", 72 | " return (y_pred-y)**2" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "#### Gradient Descent\n", 80 | "\n", 81 | "We update the `w` such that loss is minimum. The factor by which `w` is updated is called `alpha(learning rate)`.\n", 82 | "\n", 83 | "New `w` is `w` minus `alpha` times derivative of `loss` against `w`\n", 84 | "\n", 85 | "$w=w-a*\\frac{d(loss)}{dw}$\n", 86 | "\n", 87 | "This equation is dependent on how the loss function has been defined. \n", 88 | "In the current case below formula will dictate how to update the value of w for each pass. 
\n", 89 | "\n", 90 | "$w = w - a*2x(xw-y)$\n" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 6, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "# Function to calcualte the gradient for w to be updated and get min loss.\n", 100 | "# y_pred closer to y\n", 101 | "\n", 102 | "# Gradient = derivative of the loss for constant x and y\n", 103 | "\n", 104 | "# We are going to use a as 0.01 for starters\n", 105 | "\n", 106 | "def gradient(x,y):\n", 107 | " return 2*x*(x*w-y)" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": 7, 113 | "metadata": { 114 | "collapsed": true 115 | }, 116 | "outputs": [ 117 | { 118 | "name": "stdout", 119 | "output_type": "stream", 120 | "text": [ 121 | "Predict (before training) 4 4.0\n", 122 | "\tgrad: 1.0 2.0 -2.0\n", 123 | "\tgrad: 2.0 4.0 -7.84\n", 124 | "\tgrad: 3.0 6.0 -16.2288\n", 125 | "Progress: 0 w= 1.260688 loss= 4.919240100095999\n", 126 | "\tgrad: 1.0 2.0 -1.478624\n", 127 | "\tgrad: 2.0 4.0 -5.796206079999999\n", 128 | "\tgrad: 3.0 6.0 -11.998146585599997\n", 129 | "Progress: 1 w= 1.453417766656 loss= 2.688769240265834\n", 130 | "\tgrad: 1.0 2.0 -1.093164466688\n", 131 | "\tgrad: 2.0 4.0 -4.285204709416961\n", 132 | "\tgrad: 3.0 6.0 -8.87037374849311\n", 133 | "Progress: 2 w= 1.5959051959019805 loss= 1.4696334962911515\n", 134 | "\tgrad: 1.0 2.0 -0.8081896081960389\n", 135 | "\tgrad: 2.0 4.0 -3.1681032641284723\n", 136 | "\tgrad: 3.0 6.0 -6.557973756745939\n", 137 | "Progress: 3 w= 1.701247862192685 loss= 0.8032755585999681\n", 138 | "\tgrad: 1.0 2.0 -0.59750427561463\n", 139 | "\tgrad: 2.0 4.0 -2.3422167604093502\n", 140 | "\tgrad: 3.0 6.0 -4.848388694047353\n", 141 | "Progress: 4 w= 1.7791289594933983 loss= 0.43905614881022015\n", 142 | "\tgrad: 1.0 2.0 -0.44174208101320334\n", 143 | "\tgrad: 2.0 4.0 -1.7316289575717576\n", 144 | "\tgrad: 3.0 6.0 -3.584471942173538\n", 145 | "Progress: 5 w= 1.836707389300983 loss= 0.2399802903801062\n", 146 | "\tgrad: 1.0 2.0 -0.3265852213980338\n", 147 | "\tgrad: 2.0 4.0 -1.2802140678802925\n", 148 | "\tgrad: 3.0 6.0 -2.650043120512205\n", 149 | "Progress: 6 w= 1.8792758133988885 loss= 0.1311689630744999\n", 150 | "\tgrad: 1.0 2.0 -0.241448373202223\n", 151 | "\tgrad: 2.0 4.0 -0.946477622952715\n", 152 | "\tgrad: 3.0 6.0 -1.9592086795121197\n", 153 | "Progress: 7 w= 1.910747160155559 loss= 0.07169462478267678\n", 154 | "\tgrad: 1.0 2.0 -0.17850567968888198\n", 155 | "\tgrad: 2.0 4.0 -0.6997422643804168\n", 156 | "\tgrad: 3.0 6.0 -1.4484664872674653\n", 157 | "Progress: 8 w= 1.9340143044689266 loss= 0.03918700813247573\n", 158 | "\tgrad: 1.0 2.0 -0.13197139106214673\n", 159 | "\tgrad: 2.0 4.0 -0.5173278529636143\n", 160 | "\tgrad: 3.0 6.0 -1.0708686556346834\n", 161 | "Progress: 9 w= 1.9512159834655312 loss= 0.021418922423117836\n", 162 | "\tgrad: 1.0 2.0 -0.09756803306893769\n", 163 | "\tgrad: 2.0 4.0 -0.38246668963023644\n", 164 | "\tgrad: 3.0 6.0 -0.7917060475345892\n", 165 | "Progress: 10 w= 1.9639333911678687 loss= 0.01170720245384975\n", 166 | "\tgrad: 1.0 2.0 -0.07213321766426262\n", 167 | "\tgrad: 2.0 4.0 -0.2827622132439096\n", 168 | "\tgrad: 3.0 6.0 -0.5853177814148953\n", 169 | "Progress: 11 w= 1.9733355232910992 loss= 0.006398948863435593\n", 170 | "\tgrad: 1.0 2.0 -0.05332895341780164\n", 171 | "\tgrad: 2.0 4.0 -0.2090494973977819\n", 172 | "\tgrad: 3.0 6.0 -0.4327324596134101\n", 173 | "Progress: 12 w= 1.9802866323953892 loss= 0.003497551760830656\n", 174 | "\tgrad: 1.0 2.0 -0.039426735209221686\n", 175 | "\tgrad: 2.0 4.0 
-0.15455280202014876\n", 176 | "\tgrad: 3.0 6.0 -0.3199243001817109\n", 177 | "Progress: 13 w= 1.9854256707695 loss= 0.001911699652671057\n", 178 | "\tgrad: 1.0 2.0 -0.02914865846100012\n", 179 | "\tgrad: 2.0 4.0 -0.11426274116712065\n", 180 | "\tgrad: 3.0 6.0 -0.2365238742159388\n", 181 | "Progress: 14 w= 1.9892250235079405 loss= 0.0010449010656399273\n", 182 | "\tgrad: 1.0 2.0 -0.021549952984118992\n", 183 | "\tgrad: 2.0 4.0 -0.08447581569774698\n", 184 | "\tgrad: 3.0 6.0 -0.17486493849433593\n", 185 | "Progress: 15 w= 1.9920339305797026 loss= 0.0005711243580809696\n", 186 | "\tgrad: 1.0 2.0 -0.015932138840594856\n", 187 | "\tgrad: 2.0 4.0 -0.062453984255132156\n", 188 | "\tgrad: 3.0 6.0 -0.12927974740812687\n", 189 | "Progress: 16 w= 1.994110589284741 loss= 0.0003121664271570621\n", 190 | "\tgrad: 1.0 2.0 -0.011778821430517894\n", 191 | "\tgrad: 2.0 4.0 -0.046172980007630926\n", 192 | "\tgrad: 3.0 6.0 -0.09557806861579543\n", 193 | "Progress: 17 w= 1.9956458879852805 loss= 0.0001706246229305199\n", 194 | "\tgrad: 1.0 2.0 -0.008708224029438938\n", 195 | "\tgrad: 2.0 4.0 -0.03413623819540135\n", 196 | "\tgrad: 3.0 6.0 -0.07066201306448505\n", 197 | "Progress: 18 w= 1.9967809527381737 loss= 9.326038746484765e-05\n", 198 | "\tgrad: 1.0 2.0 -0.006438094523652627\n", 199 | "\tgrad: 2.0 4.0 -0.02523733053271826\n", 200 | "\tgrad: 3.0 6.0 -0.052241274202728505\n", 201 | "Progress: 19 w= 1.9976201197307648 loss= 5.097447086306101e-05\n", 202 | "\tgrad: 1.0 2.0 -0.004759760538470381\n", 203 | "\tgrad: 2.0 4.0 -0.01865826131080439\n", 204 | "\tgrad: 3.0 6.0 -0.03862260091336722\n", 205 | "Progress: 20 w= 1.998240525958391 loss= 2.7861740127856012e-05\n", 206 | "\tgrad: 1.0 2.0 -0.0035189480832178432\n", 207 | "\tgrad: 2.0 4.0 -0.01379427648621423\n", 208 | "\tgrad: 3.0 6.0 -0.028554152326460525\n", 209 | "Progress: 21 w= 1.99869919972735 loss= 1.5228732143933469e-05\n", 210 | "\tgrad: 1.0 2.0 -0.002601600545300009\n", 211 | "\tgrad: 2.0 4.0 -0.01019827413757568\n", 212 | "\tgrad: 3.0 6.0 -0.021110427464781978\n", 213 | "Progress: 22 w= 1.9990383027488265 loss= 8.323754426231206e-06\n", 214 | "\tgrad: 1.0 2.0 -0.001923394502346909\n", 215 | "\tgrad: 2.0 4.0 -0.007539706449199102\n", 216 | "\tgrad: 3.0 6.0 -0.01560719234984198\n", 217 | "Progress: 23 w= 1.9992890056818404 loss= 4.549616284094891e-06\n", 218 | "\tgrad: 1.0 2.0 -0.0014219886363191492\n", 219 | "\tgrad: 2.0 4.0 -0.005574195454370212\n", 220 | "\tgrad: 3.0 6.0 -0.011538584590544687\n", 221 | "Progress: 24 w= 1.999474353368653 loss= 2.486739429417538e-06\n", 222 | "\tgrad: 1.0 2.0 -0.0010512932626940419\n", 223 | "\tgrad: 2.0 4.0 -0.004121069589761106\n", 224 | "\tgrad: 3.0 6.0 -0.008530614050808794\n", 225 | "Progress: 25 w= 1.9996113831376856 loss= 1.3592075910762856e-06\n", 226 | "\tgrad: 1.0 2.0 -0.0007772337246287897\n", 227 | "\tgrad: 2.0 4.0 -0.0030467562005451754\n", 228 | "\tgrad: 3.0 6.0 -0.006306785335127074\n", 229 | "Progress: 26 w= 1.9997126908902887 loss= 7.429187207079447e-07\n", 230 | "\tgrad: 1.0 2.0 -0.0005746182194226179\n", 231 | "\tgrad: 2.0 4.0 -0.002252503420136165\n", 232 | "\tgrad: 3.0 6.0 -0.00466268207967957\n", 233 | "Progress: 27 w= 1.9997875889274812 loss= 4.060661735575354e-07\n", 234 | "\tgrad: 1.0 2.0 -0.0004248221450375844\n", 235 | "\tgrad: 2.0 4.0 -0.0016653028085471533\n", 236 | "\tgrad: 3.0 6.0 -0.0034471768136938863\n", 237 | "Progress: 28 w= 1.9998429619451539 loss= 2.2194855602869353e-07\n", 238 | "\tgrad: 1.0 2.0 -0.00031407610969225175\n", 239 | "\tgrad: 2.0 4.0 -0.0012311783499932005\n", 240 | 
"\tgrad: 3.0 6.0 -0.0025485391844828342\n", 241 | "Progress: 29 w= 1.9998838998815958 loss= 1.213131374411496e-07\n", 242 | "\tgrad: 1.0 2.0 -0.00023220023680847746\n", 243 | "\tgrad: 2.0 4.0 -0.0009102249282886277\n", 244 | "\tgrad: 3.0 6.0 -0.0018841656015560204\n", 245 | "Progress: 30 w= 1.9999141657892625 loss= 6.630760559646474e-08\n", 246 | "\tgrad: 1.0 2.0 -0.00017166842147497974\n", 247 | "\tgrad: 2.0 4.0 -0.0006729402121816719\n", 248 | "\tgrad: 3.0 6.0 -0.0013929862392156878\n", 249 | "Progress: 31 w= 1.9999365417379913 loss= 3.624255915449335e-08\n", 250 | "\tgrad: 1.0 2.0 -0.0001269165240174175\n", 251 | "\tgrad: 2.0 4.0 -0.0004975127741477792\n", 252 | "\tgrad: 3.0 6.0 -0.0010298514424817995\n", 253 | "Progress: 32 w= 1.9999530845453979 loss= 1.9809538924707548e-08\n", 254 | "\tgrad: 1.0 2.0 -9.383090920422887e-05\n", 255 | "\tgrad: 2.0 4.0 -0.00036781716408107457\n", 256 | "\tgrad: 3.0 6.0 -0.0007613815296476645\n", 257 | "Progress: 33 w= 1.9999653148414271 loss= 1.0827542027017377e-08\n", 258 | "\tgrad: 1.0 2.0 -6.937031714571162e-05\n", 259 | "\tgrad: 2.0 4.0 -0.0002719316432120422\n", 260 | "\tgrad: 3.0 6.0 -0.0005628985014531906\n", 261 | "Progress: 34 w= 1.999974356846045 loss= 5.9181421028034105e-09\n", 262 | "\tgrad: 1.0 2.0 -5.1286307909848006e-05\n", 263 | "\tgrad: 2.0 4.0 -0.00020104232700646207\n", 264 | "\tgrad: 3.0 6.0 -0.0004161576169003922\n", 265 | "Progress: 35 w= 1.9999810417085633 loss= 3.2347513278475087e-09\n", 266 | "\tgrad: 1.0 2.0 -3.7916582873442906e-05\n", 267 | "\tgrad: 2.0 4.0 -0.0001486330048638962\n", 268 | "\tgrad: 3.0 6.0 -0.0003076703200690645\n", 269 | "Progress: 36 w= 1.9999859839076413 loss= 1.7680576050779005e-09\n", 270 | "\tgrad: 1.0 2.0 -2.8032184717474706e-05\n", 271 | "\tgrad: 2.0 4.0 -0.0001098861640933535\n", 272 | "\tgrad: 3.0 6.0 -0.00022746435967313516\n", 273 | "Progress: 37 w= 1.9999896377347262 loss= 9.6638887447731e-10\n", 274 | "\tgrad: 1.0 2.0 -2.0724530547688857e-05\n", 275 | "\tgrad: 2.0 4.0 -8.124015974608767e-05\n", 276 | "\tgrad: 3.0 6.0 -0.00016816713067413502\n", 277 | "Progress: 38 w= 1.999992339052936 loss= 5.282109892545845e-10\n", 278 | "\tgrad: 1.0 2.0 -1.5321894128117464e-05\n", 279 | "\tgrad: 2.0 4.0 -6.006182498197177e-05\n", 280 | "\tgrad: 3.0 6.0 -0.00012432797771566584\n", 281 | "Progress: 39 w= 1.9999943361699042 loss= 2.887107421958329e-10\n", 282 | "\tgrad: 1.0 2.0 -1.1327660191629008e-05\n", 283 | "\tgrad: 2.0 4.0 -4.4404427951505454e-05\n", 284 | "\tgrad: 3.0 6.0 -9.191716585732479e-05\n", 285 | "Progress: 40 w= 1.9999958126624442 loss= 1.5780416225633037e-10\n", 286 | "\tgrad: 1.0 2.0 -8.37467511161094e-06\n", 287 | "\tgrad: 2.0 4.0 -3.282872643772805e-05\n", 288 | "\tgrad: 3.0 6.0 -6.795546372551087e-05\n", 289 | "Progress: 41 w= 1.999996904251097 loss= 8.625295142578772e-11\n", 290 | "\tgrad: 1.0 2.0 -6.191497806007362e-06\n", 291 | "\tgrad: 2.0 4.0 -2.4270671399762023e-05\n", 292 | "\tgrad: 3.0 6.0 -5.0240289795056015e-05\n", 293 | "Progress: 42 w= 1.999997711275687 loss= 4.71443308235547e-11\n", 294 | "\tgrad: 1.0 2.0 -4.5774486259198e-06\n", 295 | "\tgrad: 2.0 4.0 -1.794359861406747e-05\n", 296 | "\tgrad: 3.0 6.0 -3.714324913239864e-05\n", 297 | "Progress: 43 w= 1.9999983079186507 loss= 2.5768253628059826e-11\n", 298 | "\tgrad: 1.0 2.0 -3.3841626985164908e-06\n", 299 | "\tgrad: 2.0 4.0 -1.326591777761621e-05\n", 300 | "\tgrad: 3.0 6.0 -2.7460449796734565e-05\n", 301 | "Progress: 44 w= 1.9999987490239537 loss= 1.4084469615916932e-11\n", 302 | "\tgrad: 1.0 2.0 -2.5019520926150562e-06\n", 303 | 
"\tgrad: 2.0 4.0 -9.807652203264183e-06\n", 304 | "\tgrad: 3.0 6.0 -2.0301840059744336e-05\n", 305 | "Progress: 45 w= 1.9999990751383971 loss= 7.698320862431846e-12\n", 306 | "\tgrad: 1.0 2.0 -1.8497232057157476e-06\n", 307 | "\tgrad: 2.0 4.0 -7.250914967116273e-06\n", 308 | "\tgrad: 3.0 6.0 -1.5009393983689279e-05\n", 309 | "Progress: 46 w= 1.9999993162387186 loss= 4.20776540913866e-12\n", 310 | "\tgrad: 1.0 2.0 -1.3675225627451937e-06\n", 311 | "\tgrad: 2.0 4.0 -5.3606884460322135e-06\n", 312 | "\tgrad: 3.0 6.0 -1.109662508014253e-05\n", 313 | "Progress: 47 w= 1.9999994944870796 loss= 2.299889814334344e-12\n", 314 | "\tgrad: 1.0 2.0 -1.0110258408246864e-06\n", 315 | "\tgrad: 2.0 4.0 -3.963221296032771e-06\n", 316 | "\tgrad: 3.0 6.0 -8.20386808086937e-06\n", 317 | "Progress: 48 w= 1.9999996262682318 loss= 1.2570789110540446e-12\n", 318 | "\tgrad: 1.0 2.0 -7.474635363990956e-07\n", 319 | "\tgrad: 2.0 4.0 -2.930057062755509e-06\n", 320 | "\tgrad: 3.0 6.0 -6.065218119744031e-06\n", 321 | "Progress: 49 w= 1.999999723695619 loss= 6.870969979249939e-13\n", 322 | "\tgrad: 1.0 2.0 -5.526087618612507e-07\n", 323 | "\tgrad: 2.0 4.0 -2.166226346744793e-06\n", 324 | "\tgrad: 3.0 6.0 -4.484088535150477e-06\n", 325 | "Progress: 50 w= 1.9999997957248556 loss= 3.7555501141274804e-13\n", 326 | "\tgrad: 1.0 2.0 -4.08550288710785e-07\n", 327 | "\tgrad: 2.0 4.0 -1.6015171322436572e-06\n", 328 | "\tgrad: 3.0 6.0 -3.3151404608133817e-06\n", 329 | "Progress: 51 w= 1.9999998489769344 loss= 2.052716967104274e-13\n", 330 | "\tgrad: 1.0 2.0 -3.020461312175371e-07\n" 331 | ] 332 | }, 333 | { 334 | "name": "stdout", 335 | "output_type": "stream", 336 | "text": [ 337 | "\tgrad: 2.0 4.0 -1.1840208351543424e-06\n", 338 | "\tgrad: 3.0 6.0 -2.4509231284497446e-06\n", 339 | "Progress: 52 w= 1.9999998883468353 loss= 1.1219786256679713e-13\n", 340 | "\tgrad: 1.0 2.0 -2.2330632942768602e-07\n", 341 | "\tgrad: 2.0 4.0 -8.753608113920563e-07\n", 342 | "\tgrad: 3.0 6.0 -1.811996877876254e-06\n", 343 | "Progress: 53 w= 1.9999999174534755 loss= 6.132535848018759e-14\n", 344 | "\tgrad: 1.0 2.0 -1.6509304900935717e-07\n", 345 | "\tgrad: 2.0 4.0 -6.471647520100987e-07\n", 346 | "\tgrad: 3.0 6.0 -1.3396310407642886e-06\n", 347 | "Progress: 54 w= 1.999999938972364 loss= 3.351935118167793e-14\n", 348 | "\tgrad: 1.0 2.0 -1.220552721115098e-07\n", 349 | "\tgrad: 2.0 4.0 -4.784566662863199e-07\n", 350 | "\tgrad: 3.0 6.0 -9.904052991061008e-07\n", 351 | "Progress: 55 w= 1.9999999548815364 loss= 1.8321081844499955e-14\n", 352 | "\tgrad: 1.0 2.0 -9.023692726373156e-08\n", 353 | "\tgrad: 2.0 4.0 -3.5372875473171916e-07\n", 354 | "\tgrad: 3.0 6.0 -7.322185204827747e-07\n", 355 | "Progress: 56 w= 1.9999999666433785 loss= 1.0013977760018664e-14\n", 356 | "\tgrad: 1.0 2.0 -6.671324292994996e-08\n", 357 | "\tgrad: 2.0 4.0 -2.615159129248923e-07\n", 358 | "\tgrad: 3.0 6.0 -5.413379398078177e-07\n", 359 | "Progress: 57 w= 1.9999999753390494 loss= 5.473462367088053e-15\n", 360 | "\tgrad: 1.0 2.0 -4.932190122985958e-08\n", 361 | "\tgrad: 2.0 4.0 -1.9334185274999527e-07\n", 362 | "\tgrad: 3.0 6.0 -4.002176350326181e-07\n", 363 | "Progress: 58 w= 1.9999999817678633 loss= 2.991697274308627e-15\n", 364 | "\tgrad: 1.0 2.0 -3.6464273378555845e-08\n", 365 | "\tgrad: 2.0 4.0 -1.429399514307761e-07\n", 366 | "\tgrad: 3.0 6.0 -2.9588569994132286e-07\n", 367 | "Progress: 59 w= 1.9999999865207625 loss= 1.6352086111474931e-15\n", 368 | "\tgrad: 1.0 2.0 -2.6958475007887728e-08\n", 369 | "\tgrad: 2.0 4.0 -1.0567722164012139e-07\n", 370 | "\tgrad: 3.0 6.0 
-2.1875184863517916e-07\n", 371 | "Progress: 60 w= 1.999999990034638 loss= 8.937759877335403e-16\n", 372 | "\tgrad: 1.0 2.0 -1.993072418216002e-08\n", 373 | "\tgrad: 2.0 4.0 -7.812843882959442e-08\n", 374 | "\tgrad: 3.0 6.0 -1.617258700292723e-07\n", 375 | "Progress: 61 w= 1.9999999926324883 loss= 4.885220495987371e-16\n", 376 | "\tgrad: 1.0 2.0 -1.473502342363986e-08\n", 377 | "\tgrad: 2.0 4.0 -5.7761292637792394e-08\n", 378 | "\tgrad: 3.0 6.0 -1.195658771990793e-07\n", 379 | "Progress: 62 w= 1.99999999455311 loss= 2.670175009618106e-16\n", 380 | "\tgrad: 1.0 2.0 -1.0893780100218464e-08\n", 381 | "\tgrad: 2.0 4.0 -4.270361841918202e-08\n", 382 | "\tgrad: 3.0 6.0 -8.839649012770678e-08\n", 383 | "Progress: 63 w= 1.9999999959730488 loss= 1.4594702493172377e-16\n", 384 | "\tgrad: 1.0 2.0 -8.05390243385773e-09\n", 385 | "\tgrad: 2.0 4.0 -3.1571296688071016e-08\n", 386 | "\tgrad: 3.0 6.0 -6.53525820126788e-08\n", 387 | "Progress: 64 w= 1.9999999970228268 loss= 7.977204100704301e-17\n", 388 | "\tgrad: 1.0 2.0 -5.9543463493128e-09\n", 389 | "\tgrad: 2.0 4.0 -2.334103754719763e-08\n", 390 | "\tgrad: 3.0 6.0 -4.8315948575350376e-08\n", 391 | "Progress: 65 w= 1.9999999977989402 loss= 4.360197735196887e-17\n", 392 | "\tgrad: 1.0 2.0 -4.402119557767037e-09\n", 393 | "\tgrad: 2.0 4.0 -1.725630838222969e-08\n", 394 | "\tgrad: 3.0 6.0 -3.5720557178819945e-08\n", 395 | "Progress: 66 w= 1.9999999983727301 loss= 2.3832065197304227e-17\n", 396 | "\tgrad: 1.0 2.0 -3.254539748809293e-09\n", 397 | "\tgrad: 2.0 4.0 -1.2757796596929438e-08\n", 398 | "\tgrad: 3.0 6.0 -2.6408640607655798e-08\n", 399 | "Progress: 67 w= 1.9999999987969397 loss= 1.3026183953845832e-17\n", 400 | "\tgrad: 1.0 2.0 -2.406120636067044e-09\n", 401 | "\tgrad: 2.0 4.0 -9.431992964437086e-09\n", 402 | "\tgrad: 3.0 6.0 -1.9524227568012975e-08\n", 403 | "Progress: 68 w= 1.999999999110563 loss= 7.11988308874388e-18\n", 404 | "\tgrad: 1.0 2.0 -1.7788739370416806e-09\n", 405 | "\tgrad: 2.0 4.0 -6.97318647269185e-09\n", 406 | "\tgrad: 3.0 6.0 -1.4434496264925656e-08\n", 407 | "Progress: 69 w= 1.9999999993424284 loss= 3.89160224698574e-18\n", 408 | "\tgrad: 1.0 2.0 -1.3151431055291596e-09\n", 409 | "\tgrad: 2.0 4.0 -5.155360582875801e-09\n", 410 | "\tgrad: 3.0 6.0 -1.067159693945996e-08\n", 411 | "Progress: 70 w= 1.9999999995138495 loss= 2.1270797208746147e-18\n", 412 | "\tgrad: 1.0 2.0 -9.72300906454393e-10\n", 413 | "\tgrad: 2.0 4.0 -3.811418736177075e-09\n", 414 | "\tgrad: 3.0 6.0 -7.88963561149103e-09\n", 415 | "Progress: 71 w= 1.9999999996405833 loss= 1.1626238773828175e-18\n", 416 | "\tgrad: 1.0 2.0 -7.18833437218791e-10\n", 417 | "\tgrad: 2.0 4.0 -2.8178277489132597e-09\n", 418 | "\tgrad: 3.0 6.0 -5.832902161273523e-09\n", 419 | "Progress: 72 w= 1.999999999734279 loss= 6.354692062078993e-19\n", 420 | "\tgrad: 1.0 2.0 -5.314420015167798e-10\n", 421 | "\tgrad: 2.0 4.0 -2.0832526814729135e-09\n", 422 | "\tgrad: 3.0 6.0 -4.31233715403323e-09\n", 423 | "Progress: 73 w= 1.9999999998035491 loss= 3.4733644793346653e-19\n", 424 | "\tgrad: 1.0 2.0 -3.92901711165905e-10\n", 425 | "\tgrad: 2.0 4.0 -1.5401742103904326e-09\n", 426 | "\tgrad: 3.0 6.0 -3.188159070077745e-09\n", 427 | "Progress: 74 w= 1.9999999998547615 loss= 1.8984796531526204e-19\n", 428 | "\tgrad: 1.0 2.0 -2.9047697580608656e-10\n", 429 | "\tgrad: 2.0 4.0 -1.1386696030513122e-09\n", 430 | "\tgrad: 3.0 6.0 -2.3570478902001923e-09\n", 431 | "Progress: 75 w= 1.9999999998926234 loss= 1.0376765851119951e-19\n", 432 | "\tgrad: 1.0 2.0 -2.1475310418850313e-10\n", 433 | "\tgrad: 2.0 4.0 
-8.418314934033333e-10\n", 434 | "\tgrad: 3.0 6.0 -1.7425900722400911e-09\n", 435 | "Progress: 76 w= 1.9999999999206153 loss= 5.671751114309842e-20\n", 436 | "\tgrad: 1.0 2.0 -1.5876944203796484e-10\n", 437 | "\tgrad: 2.0 4.0 -6.223768167501476e-10\n", 438 | "\tgrad: 3.0 6.0 -1.2883241140571045e-09\n", 439 | "Progress: 77 w= 1.9999999999413098 loss= 3.100089617511693e-20\n", 440 | "\tgrad: 1.0 2.0 -1.17380327679939e-10\n", 441 | "\tgrad: 2.0 4.0 -4.601314884666863e-10\n", 442 | "\tgrad: 3.0 6.0 -9.524754318590567e-10\n", 443 | "Progress: 78 w= 1.9999999999566096 loss= 1.6944600977692705e-20\n", 444 | "\tgrad: 1.0 2.0 -8.678080476443029e-11\n", 445 | "\tgrad: 2.0 4.0 -3.4018121652934497e-10\n", 446 | "\tgrad: 3.0 6.0 -7.041780492045291e-10\n", 447 | "Progress: 79 w= 1.9999999999679208 loss= 9.2616919156479e-21\n", 448 | "\tgrad: 1.0 2.0 -6.415845632545825e-11\n", 449 | "\tgrad: 2.0 4.0 -2.5150193039280566e-10\n", 450 | "\tgrad: 3.0 6.0 -5.206075570640678e-10\n", 451 | "Progress: 80 w= 1.9999999999762834 loss= 5.062350511130293e-21\n", 452 | "\tgrad: 1.0 2.0 -4.743316850408519e-11\n", 453 | "\tgrad: 2.0 4.0 -1.8593837580738182e-10\n", 454 | "\tgrad: 3.0 6.0 -3.8489211817704927e-10\n", 455 | "Progress: 81 w= 1.999999999982466 loss= 2.7669155644059242e-21\n", 456 | "\tgrad: 1.0 2.0 -3.5067948545020045e-11\n", 457 | "\tgrad: 2.0 4.0 -1.3746692673066718e-10\n", 458 | "\tgrad: 3.0 6.0 -2.845563784603655e-10\n", 459 | "Progress: 82 w= 1.9999999999870368 loss= 1.5124150106147723e-21\n", 460 | "\tgrad: 1.0 2.0 -2.5926372160256506e-11\n", 461 | "\tgrad: 2.0 4.0 -1.0163070385260653e-10\n", 462 | "\tgrad: 3.0 6.0 -2.1037571684701106e-10\n", 463 | "Progress: 83 w= 1.999999999990416 loss= 8.26683933105326e-22\n", 464 | "\tgrad: 1.0 2.0 -1.9167778475548403e-11\n", 465 | "\tgrad: 2.0 4.0 -7.51381179497912e-11\n", 466 | "\tgrad: 3.0 6.0 -1.5553425214420713e-10\n", 467 | "Progress: 84 w= 1.9999999999929146 loss= 4.518126871054872e-22\n", 468 | "\tgrad: 1.0 2.0 -1.4170886686315498e-11\n", 469 | "\tgrad: 2.0 4.0 -5.555023108172463e-11\n", 470 | "\tgrad: 3.0 6.0 -1.1499068364173581e-10\n", 471 | "Progress: 85 w= 1.9999999999947617 loss= 2.469467919185614e-22\n", 472 | "\tgrad: 1.0 2.0 -1.0476508549572827e-11\n", 473 | "\tgrad: 2.0 4.0 -4.106759377009439e-11\n", 474 | "\tgrad: 3.0 6.0 -8.500933290633839e-11\n", 475 | "Progress: 86 w= 1.9999999999961273 loss= 1.349840097651456e-22\n", 476 | "\tgrad: 1.0 2.0 -7.745359908994942e-12\n", 477 | "\tgrad: 2.0 4.0 -3.036149109902908e-11\n", 478 | "\tgrad: 3.0 6.0 -6.285105769165966e-11\n", 479 | "Progress: 87 w= 1.999999999997137 loss= 7.376551550022107e-23\n", 480 | "\tgrad: 1.0 2.0 -5.726086271806707e-12\n", 481 | "\tgrad: 2.0 4.0 -2.2446045022661565e-11\n", 482 | "\tgrad: 3.0 6.0 -4.646416584819235e-11\n", 483 | "Progress: 88 w= 1.9999999999978835 loss= 4.031726170507742e-23\n", 484 | "\tgrad: 1.0 2.0 -4.233058348290797e-12\n", 485 | "\tgrad: 2.0 4.0 -1.659294923683774e-11\n", 486 | "\tgrad: 3.0 6.0 -3.4351188560322043e-11\n", 487 | "Progress: 89 w= 1.9999999999984353 loss= 2.2033851437431755e-23\n", 488 | "\tgrad: 1.0 2.0 -3.1294966618133913e-12\n", 489 | "\tgrad: 2.0 4.0 -1.226752033289813e-11\n", 490 | "\tgrad: 3.0 6.0 -2.539835008974478e-11\n", 491 | "Progress: 90 w= 1.9999999999988431 loss= 1.2047849775995315e-23\n", 492 | "\tgrad: 1.0 2.0 -2.3137047833188262e-12\n", 493 | "\tgrad: 2.0 4.0 -9.070078021977679e-12\n", 494 | "\tgrad: 3.0 6.0 -1.8779644506139448e-11\n", 495 | "Progress: 91 w= 1.9999999999991447 loss= 6.5840863393251405e-24\n", 496 | "\tgrad: 1.0 2.0 
-1.7106316363424412e-12\n", 497 | "\tgrad: 2.0 4.0 -6.7057470687359455e-12\n", 498 | "\tgrad: 3.0 6.0 -1.3882228699912957e-11\n", 499 | "Progress: 92 w= 1.9999999999993676 loss= 3.5991747246272455e-24\n", 500 | "\tgrad: 1.0 2.0 -1.2647660696529783e-12\n", 501 | "\tgrad: 2.0 4.0 -4.957811938766099e-12\n", 502 | "\tgrad: 3.0 6.0 -1.0263789818054647e-11\n", 503 | "Progress: 93 w= 1.9999999999995324 loss= 1.969312363793734e-24\n", 504 | "\tgrad: 1.0 2.0 -9.352518759442319e-13\n", 505 | "\tgrad: 2.0 4.0 -3.666400516522117e-12\n", 506 | "\tgrad: 3.0 6.0 -7.58859641791787e-12\n", 507 | "Progress: 94 w= 1.9999999999996543 loss= 1.0761829795642296e-24\n", 508 | "\tgrad: 1.0 2.0 -6.914468997365475e-13\n", 509 | "\tgrad: 2.0 4.0 -2.7107205369247822e-12\n", 510 | "\tgrad: 3.0 6.0 -5.611511255665391e-12\n", 511 | "Progress: 95 w= 1.9999999999997444 loss= 5.875191475205477e-25\n", 512 | "\tgrad: 1.0 2.0 -5.111466805374221e-13\n", 513 | "\tgrad: 2.0 4.0 -2.0037305148434825e-12\n", 514 | "\tgrad: 3.0 6.0 -4.1460168631601846e-12\n", 515 | "Progress: 96 w= 1.999999999999811 loss= 3.2110109830478153e-25\n", 516 | "\tgrad: 1.0 2.0 -3.779199175824033e-13\n", 517 | "\tgrad: 2.0 4.0 -1.4814816040598089e-12\n", 518 | "\tgrad: 3.0 6.0 -3.064215547965432e-12\n", 519 | "Progress: 97 w= 1.9999999999998603 loss= 1.757455879087579e-25\n", 520 | "\tgrad: 1.0 2.0 -2.793321129956894e-13\n", 521 | "\tgrad: 2.0 4.0 -1.0942358130705543e-12\n", 522 | "\tgrad: 3.0 6.0 -2.2648549702353193e-12\n", 523 | "Progress: 98 w= 1.9999999999998967 loss= 9.608404711682446e-26\n", 524 | "\tgrad: 1.0 2.0 -2.0650148258027912e-13\n", 525 | "\tgrad: 2.0 4.0 -8.100187187665142e-13\n", 526 | "\tgrad: 3.0 6.0 -1.6786572132332367e-12\n", 527 | "Progress: 99 w= 1.9999999999999236 loss= 5.250973729513143e-26\n", 528 | "Predict (After training) 4 hours 7.9999999999996945\n" 529 | ] 530 | } 531 | ], 532 | "source": [ 533 | "# Training loop\n", 534 | "\n", 535 | "print('Predict (before training)', 4, forward(4))\n", 536 | "\n", 537 | "# Training loop\n", 538 | "\n", 539 | "for epoch in range(100):\n", 540 | " l_sum=0\n", 541 | " for x_val, y_val in zip(x_data, y_data):\n", 542 | " grad = gradient(x_val, y_val)\n", 543 | " w = w-0.01*grad\n", 544 | " print('\\tgrad: ', x_val, y_val, grad)\n", 545 | " l=loss(x_val, y_val)\n", 546 | " l_sum+=l\n", 547 | " \n", 548 | " print('Progress: ', epoch, 'w=', w, 'loss=', l)\n", 549 | " w_list.append(w)\n", 550 | " mse_list.append(l_sum/3)\n", 551 | " \n", 552 | " \n", 553 | "print('Predict (After training)', '4 hours', forward(4)) " 554 | ] 555 | }, 556 | { 557 | "cell_type": "code", 558 | "execution_count": 8, 559 | "metadata": { 560 | "scrolled": true 561 | }, 562 | "outputs": [ 563 | { 564 | "data": { 565 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3deXxU5d3+8c83eyAhgSRAgLCDiMgaNhGkLo+4FbRasSqrUkXrUtunfbrZxz7+ahd3RURlcalLXanVWhUFBAUCsoOALIKEXUJCyH7//pipTUOAQHJyZjLX+/WalzNz7sxcDpBrzrnPYs45REQkckX5HUBERPylIhARiXAqAhGRCKciEBGJcCoCEZEIF+N3gJOVnp7u2rdv73cMEZGwsnTp0n3OuYzqloVdEbRv356cnBy/Y4iIhBUz23asZdo0JCIS4VQEIiIRTkUgIhLhVAQiIhFORSAiEuFUBCIiEU5FICIS4SKmCPbmF/Pb2WsoLiv3O4qISEjxrAjMLMHMFpvZCjNbY2b/W82YeDN72cw2mdkiM2vvVZ7FWw4wc+FWfvrXlVRU6BoMIiL/4uUaQTFwrnOuF9AbGGFmg6qMmQh845zrDDwI/MGrMJf0zOSnF57G7BU7+cN76716GxGRsONZEbiAguDD2OCt6lfxkcCs4P1XgfPMzLzKNHl4J64d2JYn525m1sKtXr2NiEhY8XSOwMyizWw5sAd43zm3qMqQ1sB2AOdcGZAHpFXzOpPMLMfMcvbu3VubPNwzsgfnn96C3/5tDf9YveuUX0tEpKHwtAicc+XOud5AG2CAmfWoMqS6b/9HbcB3zk1zzmU757IzMqo9eV6NRUcZj17Th15tUrn9pc9Zuu1ArV5PRCTc1cteQ865g8DHwIgqi3YAWQBmFgOkAJ7/Zk6Mi+aZsdlkpiQwcVYOX+4tOPEPiYg0UF7uNZRhZqnB+4nA+UDVWdrZwNjg/SuBOc65etmlJy0pnlkTBhBtxtjpi9mTX1QfbysiEnK8XCPIBD4ys5XAEgJzBG+b2T1m9t3gmGeANDPbBPwY+LmHeY7SLq0x08f1Z39BCRNmLuFwcVl9vr2ISEiwevoCXmeys7NdXV+YZs763dwwK4ehXTJ4emw2sdERc5ydiEQIM1vqnMuubpl+4wHndmvBvZefydwNe/nF66sIt3IUEamNsLtUpVeuGdCW3INHeGTOJlqlJnLnBV39jiQiUi9UBJXceUFXduYV8fCHG8lMSWD0gLZ+RxIR8ZyKoBIz4/dXnMme/GJ++eZqWjRJ4DvdmvsdS0TEU5ojqCI2Ooop1/bl9MxkJr+wjJU7DvodSUTEUyqCaiTFxzB9XH/SkuKYMHMJX+0v9DuSiIhnVATH0Dw5gZnjB1BW4Rg7YzEHDpf4HUlExBMqguPo3DyJp8dk8/XBI0yctYQjJbqojYg0PCqCE8hu34xHRvdm+faD3PbS55TrojYi0sCoCGpgRI9M7r60O++v3c3ds1frgDMRaVC0+2gNjRvSgdy8Ip6ct5lWqYlMHt7Z70giInVCRXASfjaiGzvzivjjP74gMyWBy/u08TuSiEitqQhOQlSU8eererIvv5j/fnUlzZMTGNI53e9YIiK1ojmCkxQfE83U6/vRMT2JHz63lLU7D/kdSUSkVlQEpyAlMZYZ4/uTFB/D+JmL+frgEb8jiYicMhXBKWqVmsjMCf0pLC5n3PTF5BWW+h1JROSUqAhqoVvLJjw5ph9b9x/mxudyKCrVAWciEn5UBLV0Vqd0/nxVLxZvOcBdf11BhQ44E5Ewo72G6sDI3q3ZlVfE799dT2aTBH51aXe/I4mI1JiKoI5MGtaRnQeP8PQnW8hMTWTi2R38jiQiUiMqgjpiZvzmsjPYdaiI//v7WjJTErj4zEy/Y4mInJDmCOpQdJTx8Og+9G3blDteXs7iLQf8jiQickIqgjqWEBvN02OyadM0kRtmLWHj7ny/I4mIHJdnRWBmWWb2kZmtM7M1ZnZ7NWOGm1memS0P3n7jVZ761LRxHLPGDyAuJppxM5aw+1CR35FERI7JyzWCMuAu59zpwCDgFjOrbnea+c653sHbPR7mqVdZzRoxY1x/viksYdyMJeQX6YAzEQlNnhWBcy7XObcseD8fWAe09ur9QtGZbVKYcm1fNuzOZ/ILyygpq/A7kojIUepljsDM2gN9gEXVLB5sZivM7F0zO6M+8tSn4ac15/dXnMn8jfv4+esrdVEbEQk5nu8+amZJwGvAHc65qqfqXAa0c84VmNnFwJtAl2peYxIwCaBt27YeJ65738/OIvdgEQ9+sIFWKYn85MLT/I4kIvItT9cIzCyWQAm84Jx7vepy59wh51xB8P47QKyZHXWCf+fcNOdctnMuOyMjw8vInrntvM5cMyCLxz7axAuLtvkdR0TkW56tEZiZAc8A65xzDxxjTEtgt3POmdkAAsW036tMfjIzfjeyB7vyivj1m6tpnpzABd1b+B1LRMTTNYIhwPXAuZV2D73YzG4ys5uCY64EVpvZCuARYLRrwBvRY6KjeOwHfenROoUfvbiMz7/6xu9IIiJYuP3ezc7Odjk5OX7HqJV9BcVcMWUhBcVlvHbzWXRIb+x3JBFp4MxsqXMuu7plOrLYB+lJ8cyaMADnHONmLGZfQbHfkUQkgqkIfNIhvTHPjOvP7kNFTJy5hMKSMr8jiUiEUhH4qG/bpjx6TV9WfZ3HrX/5nLJyHXAmIvVPReCzC7q34H9H9mDO+j38+q3VOuBMROqdrkcQAq4f1I7cg0eY8vGXtEpJ5EfnHXVMnYiIZ1QEIeKnF57Grrwi7n9/Ay1TErgqO8vvSCISIVQEIcLMuO97PdmTX8z/vL6KFk0SGNY1PI+iFpHwojmCEBIXE8UT1/WlS4tkbn5+Kau/zvM7kohEABVBiElOiGXm+P6kJMYyfuYSth8o9DuSiDRwKoIQ1KJJArMmDKC4tJxxMxZzsLDE70gi0oCpCEJUlxbJPDUmm+0HjnDDrByKSsv9jiQiDZSKIIQN7JjGA1f3ImfbN9z58nLKK3SMgYjUPRVBiLu0Zyt+dcnpvLt6F797e60OOBOROqfdR8PADUM7kptXxDOfbKF1aiI3DuvodyQRaUBUBGHilxefzq68Iu59Zx0tUhL4bq9WfkcSkQZCRRAmoqKM+7/fi70FxfzklRVkJMUzuFOa37FEpAHQHEEYSYiN5qnrs2mb1ohJz+Xwxa58vyOJSAOgIggzKY1imTVhAImx0YybsZjcvCN+RxKRMKciCEOtUxOZMb4/+UVljJ+xhENFpX5HEpEwpiIIU2e0SuGJ6/qyaU8BNz23lJIyXdRGRE6NiiCMDe2SwR+v7MnCL/fz01dXUKEDzkTkFGivoTB3Rd825OYV8af3viAzJZGfX9TN70giEmZUBA3A5OGd2HnwCFPnfkmr1ATGDG7vdyQRCSMqggbAzLhnZA92Hyrm7tlraJ6cwIgeLf2OJSJhwrM5AjPLMrOPzGydma0xs9urGWNm9oiZbTKzlWbW16s8DV
10lPHoNX3o1SaV21/6nKXbDvgdSUTChJeTxWXAXc6504FBwC1m1r3KmIuALsHbJOAJD/M0eIlx0TwzNpvMlAQmzsrhy70FfkcSkTDgWRE453Kdc8uC9/OBdUDrKsNGAs+6gM+AVDPL9CpTJEhLimfWhAFEmzF2+mL25Bf5HUlEQly97D5qZu2BPsCiKotaA9srPd7B0WWBmU0ysxwzy9m7d69XMRuMdmmNmT6uP/sLSpg4M4fDxWV+RxKREOZ5EZhZEvAacIdz7lDVxdX8yFE7wzvnpjnnsp1z2RkZGV7EbHB6ZaXy+LV9WJt7iMkvLKO0XAeciUj1PC0CM4slUAIvOOder2bIDiCr0uM2wE4vM0WSc7u14P9G9WDuhr388o1VuqiNiFTLy72GDHgGWOece+AYw2YDY4J7Dw0C8pxzuV5likTXDGjLbed25pWcHTz0wUa/44hICPLyOIIhwPXAKjNbHnzuF0BbAOfcVOAd4GJgE1AIjPcwT8S684Ku5OYV8fCHG2mVmsDV/dv6HUlEQohnReCc+4Tq5wAqj3HALV5lkAAz4/9dcSa784v5xRuraZ6cwHe6Nfc7loiECJ10LkLERkcx5dq+nJ6ZzOQXlrFyx0G/I4lIiFARRJCk+Bimj+tPWlIcE2Yu4av9hX5HEpEQoCKIMM2TE5g5fgBlFY6xMxZz4HCJ35FExGcqggjUuXkST4/JZufBI0yctYQjJeV+RxIRH6kIIlR2+2Y8PLo3y7cf5PaXPqdcF7URiVgqggg2okcmd1/anX+u3c1vZ6/RAWciEUrXI4hw44Z0IDeviCfnbaZVaiI3D+/kdyQRqWcqAuFnI7qRm1fEH/6xnsyUBEb1Oeq8fyLSgKkIhKgo409X9WRvfjE/fXUFGcnxDOmc7ncsEaknmiMQAOJjopl6fT86pifxw+eWsnZn1RPFikhDpSKQb6UkxjJzQn+S4mMYP3MxXx884nckEakHKgL5D5kpicyc0J/C4nLGTV9MXmGp35FExGMqAjlKt5ZNeHJMP7buP8yk53IoLtMBZyINmYpAqnVWp3T+fFUvFm05wI9fWUGFDjgTabC015Ac08jerdmVV8Tv311Pq5QEfnlJd78jiYgHVARyXJOGdSQ3r4in5m8hMS6GO8/vQuDicyLSUKgI5LjMjF9f2p38ojIe+XAjW/Yd5k9X9iQhNtrvaCJSR2o0R2BmncwsPnh/uJndZmap3kaTUBEdZfz5qp7894jTeHvlTq5+8lP2HCryO5aI1JGaTha/BpSbWWcCF6TvAPzFs1QScsyMycM7M/W6fmzYXcDIxxew+us8v2OJSB2oaRFUOOfKgMuBh5xzdwKZ3sWSUHXhGS159ebBGHDV1E/5x+pcvyOJSC3VtAhKzewaYCzwdvC5WG8iSag7o1UKb946hNNaJnPT88t4bM5GncJaJIzVtAjGA4OBe51zW8ysA/C8d7Ek1DVPTuClSYMY1bsVf/7nBu58eTlFpTrwTCQc1WivIefcWuA2ADNrCiQ75+7zMpiEvoTYaB68ujddWiTzp/e+YOv+QqaN6Ufz5AS/o4nISajpXkMfm1kTM2sGrABmmNkDJ/iZ6Wa2x8xWH2P5cDPLM7PlwdtvTj6++M3MuOU7nZl6XV++2JXPqMcWsGanJpFFwklNNw2lOOcOAVcAM5xz/YDzT/AzM4ERJxgz3znXO3i7p4ZZJASN6JHJX28aTIWDK5/4lPfW7PI7kojUUE2LIMbMMoHv8+/J4uNyzs0DDpxqMAk/PVqnMPvWIXRtmcxNzy9lysebNIksEgZqWgT3AO8BXzrnlphZR2BjHbz/YDNbYWbvmtkZxxpkZpPMLMfMcvbu3VsHbytead4kgZcnDeLSnq344z++4K5XVujspSIhzrz8xmZm7YG3nXM9qlnWhMDxCQVmdjHwsHOuy4leMzs72+Xk5NR5VqlbzjkenbOJB97fQL92TXny+n6kJ8X7HUskYpnZUudcdnXLajpZ3MbM3ghO/u42s9fMrE1tQjnnDjnnCoL33wFizUwXym0gzIzbzuvC4z/oy5qdeYx8bAHrcnX5S5FQVNNNQzOA2UAroDXwt+Bzp8zMWlrwNJZmNiCYZX9tXlNCzyU9M3nlh4Mpq6jgyicW8sHa3X5HEpEqaloEGc65Gc65suBtJpBxvB8wsxeBT4HTzGyHmU00s5vM7KbgkCuB1Wa2AngEGO00s9gg9WyTylu3nE2n5knc+FwOU+d+qUlkkRBS09NQ7zOz64AXg4+v4QTf3p1z15xg+WPAYzV8fwlzLVMSeHnSYH7y1xXc9+56Nu0p4N7LexAfo9NZi/itpmsEEwjsOroLyCXwbX68V6GkYUqMi+bRa/pw+3ldeHXpDq57ehH7C4r9jiUS8WpUBM65r5xz33XOZTjnmjvnRhE4uEzkpERFGXde0JVHr+nDyh15jHx8AV/syvc7lkhEq83F639cZykk4lzWqxWv/HAwJWUVXDFlAXPWaxJZxC+1KQJduFZqpVdWKm/dOoQOGY2ZOCuHp+dv1iSyiA9qUwT6Fyu1lpmSyCs/HMyIM1ryf39fx89eW0lJWYXfsUQiynH3GjKzfKr/hW9AoieJJOI0iovh8R/05cEPNvDonE1s3V/I1Ov60axxnN/RRCLCcdcInHPJzrkm1dySnXM13fVU5ISiooy7/us0Hh7dm+XbDzLq8QVs3K1JZJH6UJtNQyJ1bmTv1rw8aRCFJeVcMWUhH32xx+9IIg2eikBCTp+2TZl96xDaNGvExJlLeOaTLZpEFvGQikBCUqvURF69aTDnn96C3729ll+8sUqTyCIeURFIyGocH8PU6/pxy3c68eLi7YyZvohvDpf4HUukwVERSEiLijJ+emE3Hvh+L5ZtO8ioKQvYtEeTyCJ1SUUgYeGKvm14cdIgDheXcfnjC5m7QVeqE6krKgIJG/3aNeXNW4bQumki42csZuYCTSKL1AUVgYSVNk0b8drNZ3Futxb89m9r+dWbqykt1ySySG2oCCTsNI6PYdr1/bjpnE68sOgrxk5fzMFCTSKLnCoVgYSlqCjj5xd1489X9SJn6zeMenwBX+4t8DuWSFhSEUhYu7JfG/5y40Dyi8oY9fgC5m/UJLLIyVIRSNjLbt+MN28ZQquURMbNWMKzn271O5JIWFERSIOQ1awRr00+i+FdM/jNW2v49ZurKdMkskiNqAikwUiKj2HamGwmDevIc59tY9yMJeQVlvodSyTkqQikQYmOMn5x8en88Xs9WbRlP5dPWcCWfYf9jiUS0lQE0iB9v38Wz08cyDeFJYx6fAELN+3zO5JIyFIRSIM1sGMab91yNs2T47l++mJeWLTN70giIcmzIjCz6Wa2x8xWH2O5mdkjZrbJzFaaWV+vskjkapvWiNcnn8WwLun88o3V/Hb2Gk0ii1Th5RrBTGDEcZZfBHQJ3iYBT3iYRSJYckIsT4/tz8SzOzBz4VbGz1xC3hFNIov8i2dF4JybBxw4zpCRwLMu4DMg1cwyvcojkS06yvj1pd2574oz+fTL/VwxZQFbNYksAvg7R
[... base64-encoded PNG data for the loss-vs-w plot truncated ...]\n",
566 | "text/plain": [
567 | "<Figure size 432x288 with 1 Axes>"
568 | ]
569 | }
570 | "metadata": {
571 | "needs_background": "light"
572 | },
573 | "output_type": "display_data"
574 | }
575 | ],
576 | "source": [
577 | "plt.plot(w_list, mse_list)\n",
578 | "plt.ylabel('Loss')\n",
579 | "plt.xlabel('w')\n",
580 | "plt.show()"
581 | ]
582 | },
583 | {
584 | "cell_type": "code",
585 | "execution_count": null,
586 | "metadata": {},
587 | "outputs": [],
588 | "source": []
589 | }
590 | ],
591 | "metadata": {
592 | "kernelspec": {
593 | "name": "python37664bitfastaiconda149f4ca18fae45818735beadf08062d0",
594 | "language": "python",
595 | "display_name": "Python 3.7.6 64-bit ('fastai': conda)"
596 | },
597 | "language_info": {
598 | "codemirror_mode": {
599 | "name": "ipython",
600 | "version": 3
601 | },
602 | "file_extension": ".py",
603 | "mimetype": "text/x-python",
604 | "name": "python",
605 | "nbconvert_exporter": "python",
606 | "pygments_lexer": "ipython3",
607 | "version": "3.7.6"
608 | }
609 | },
610 | "nbformat": 4,
611 | "nbformat_minor": 4
612 | }
--------------------------------------------------------------------------------
/lecture_05.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "## Lecture 05:\n",
8 | "\n",
9 | "**Linear Regression PyTorch way**\n",
10 | "\n",
11 | "There is a rhythm to PyTorch programs\n",
12 | "\n",
13 | "* Model and network\n",
14 | "    - Forward pass\n",
15 | "* Loss and Optimizer\n",
16 | "* Training loop\n",
17 | "\n",
18 | "\n",
19 | "We will use the same linear regression example as before for this lecture and use PyTorch natively for all the coding"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": 1,
25 | "metadata": {},
26 | "outputs": [],
27 | "source": [
28 | "import numpy as np\n",
29 | "import pandas as pd\n",
30 | "import matplotlib.pyplot as plt\n",
31 | "import torch"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 2,
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "x_data = torch.Tensor([[1.0], [2.0], [3.0]])\n",
41 | "y_data = torch.Tensor([[2.0], [4.0], [6.0]])"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 4,
47 | "metadata": {},
48 | "outputs": [],
49 | "source": [
50 | "## Model network and forward pass\n",
51 | "\n",
52 | "class Model(torch.nn.Module):\n",
53 | "    def __init__(self):\n",
54 | "        \"\"\"\n",
55 | "        In the constructor we instantiate one nn.Linear module\n",
56 | "        \"\"\"\n",
57 | "        super(Model, self).__init__()\n",
58 | "        self.linear = torch.nn.Linear(1, 1) # One feature in and one out for x and y\n",
59 | "    \n",
60 | "    def forward(self, x):\n",
61 | "        \"\"\"\n",
62 | "        In the forward function we accept the input variable and return the output variable.\n",
63 | "        We can use the modules defined in the constructor as well as arbitrary operations\n",
64 | "        on the variable.\"\"\"\n",
65 | "        y_pred = self.linear(x)\n",
66 | "        return y_pred\n",
67 | "\n",
68 | "# Our model\n",
69 | "model = Model()"
70 | ]
71 | },
72 | {
73 | "cell_type": "code",
74 | "execution_count": 7,
75 | "metadata": {},
76 | "outputs": [],
77 | "source": [
78 | "# Loss function and optimizer.\n",
79 | "# model.parameters() hands all the learnable weights in the network to the optimizer\n",
80 | "\n",
81 | "\n",
82 | "criterion = torch.nn.MSELoss(size_average = False)\n",
83 | "optimum = torch.optim.SGD(model.parameters(), lr = 0.01)"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | 
"execution_count": 15, 89 | "metadata": { 90 | "collapsed": true 91 | }, 92 | "outputs": [ 93 | { 94 | "name": "stdout", 95 | "output_type": "stream", 96 | "text": [ 97 | "Epoch: 0, Loss: 0.000493427854962647\n", 98 | "Epoch: 1, Loss: 0.00048634305130690336\n", 99 | "Epoch: 2, Loss: 0.00047935411566868424\n", 100 | "Epoch: 3, Loss: 0.00047246244503185153\n", 101 | "Epoch: 4, Loss: 0.0004656784294638783\n", 102 | "Epoch: 5, Loss: 0.0004589696181938052\n", 103 | "Epoch: 6, Loss: 0.0004523824609350413\n", 104 | "Epoch: 7, Loss: 0.0004458907642401755\n", 105 | "Epoch: 8, Loss: 0.0004394740972202271\n", 106 | "Epoch: 9, Loss: 0.0004331583040766418\n", 107 | "Epoch: 10, Loss: 0.00042693031718954444\n", 108 | "Epoch: 11, Loss: 0.00042080465937033296\n", 109 | "Epoch: 12, Loss: 0.00041475644684396684\n", 110 | "Epoch: 13, Loss: 0.00040878489380702376\n", 111 | "Epoch: 14, Loss: 0.00040291156619787216\n", 112 | "Epoch: 15, Loss: 0.0003971316618844867\n", 113 | "Epoch: 16, Loss: 0.00039141540764831007\n", 114 | "Epoch: 17, Loss: 0.0003857953997794539\n", 115 | "Epoch: 18, Loss: 0.00038024436798878014\n", 116 | "Epoch: 19, Loss: 0.00037479097954928875\n", 117 | "Epoch: 20, Loss: 0.00036940042627975345\n", 118 | "Epoch: 21, Loss: 0.0003640844370238483\n", 119 | "Epoch: 22, Loss: 0.00035885433317162097\n", 120 | "Epoch: 23, Loss: 0.0003536948934197426\n", 121 | "Epoch: 24, Loss: 0.0003486151108518243\n", 122 | "Epoch: 25, Loss: 0.0003436130646150559\n", 123 | "Epoch: 26, Loss: 0.0003386674798093736\n", 124 | "Epoch: 27, Loss: 0.00033380769309587777\n", 125 | "Epoch: 28, Loss: 0.0003290083259344101\n", 126 | "Epoch: 29, Loss: 0.00032428139820694923\n", 127 | "Epoch: 30, Loss: 0.00031960842898115516\n", 128 | "Epoch: 31, Loss: 0.00031501270132139325\n", 129 | "Epoch: 32, Loss: 0.00031049229437485337\n", 130 | "Epoch: 33, Loss: 0.00030603198683820665\n", 131 | "Epoch: 34, Loss: 0.0003016302362084389\n", 132 | "Epoch: 35, Loss: 0.00029729443485848606\n", 133 | "Epoch: 36, Loss: 0.0002930318296421319\n", 134 | "Epoch: 37, Loss: 0.00028881384059786797\n", 135 | "Epoch: 38, Loss: 0.0002846646821126342\n", 136 | "Epoch: 39, Loss: 0.00028057279996573925\n", 137 | "Epoch: 40, Loss: 0.0002765395911410451\n", 138 | "Epoch: 41, Loss: 0.00027256819885224104\n", 139 | "Epoch: 42, Loss: 0.00026865125983022153\n", 140 | "Epoch: 43, Loss: 0.0002647844376042485\n", 141 | "Epoch: 44, Loss: 0.0002609850780572742\n", 142 | "Epoch: 45, Loss: 0.00025722902501001954\n", 143 | "Epoch: 46, Loss: 0.000253535428782925\n", 144 | "Epoch: 47, Loss: 0.0002498896501492709\n", 145 | "Epoch: 48, Loss: 0.0002462957927491516\n", 146 | "Epoch: 49, Loss: 0.00024276424665004015\n", 147 | "Epoch: 50, Loss: 0.00023927078291308135\n", 148 | "Epoch: 51, Loss: 0.0002358347992412746\n", 149 | "Epoch: 52, Loss: 0.00023244312615133822\n", 150 | "Epoch: 53, Loss: 0.0002291033451911062\n", 151 | "Epoch: 54, Loss: 0.00022580692893825471\n", 152 | "Epoch: 55, Loss: 0.00022256042575463653\n", 153 | "Epoch: 56, Loss: 0.000219370995182544\n", 154 | "Epoch: 57, Loss: 0.0002162097516702488\n", 155 | "Epoch: 58, Loss: 0.0002131020009983331\n", 156 | "Epoch: 59, Loss: 0.00021004454174544662\n", 157 | "Epoch: 60, Loss: 0.00020702675101347268\n", 158 | "Epoch: 61, Loss: 0.0002040490653598681\n", 159 | "Epoch: 62, Loss: 0.0002011168544413522\n", 160 | "Epoch: 63, Loss: 0.00019823206821456552\n", 161 | "Epoch: 64, Loss: 0.00019538355991244316\n", 162 | "Epoch: 65, Loss: 0.00019257667008787394\n", 163 | "Epoch: 66, Loss: 0.00018980208551511168\n", 164 | "Epoch: 67, 
Loss: 0.00018707069102674723\n",
165 | "Epoch: 68, Loss: 0.0001843883073888719\n",
166 | "Epoch: 69, Loss: 0.00018173549324274063\n",
167 | "Epoch: 70, Loss: 0.00017912212933879346\n",
168 | "Epoch: 71, Loss: 0.00017654933617450297\n",
169 | "Epoch: 72, Loss: 0.000174012006027624\n",
170 | "Epoch: 73, Loss: 0.0001715158869046718\n",
171 | "Epoch: 74, Loss: 0.0001690446079010144\n",
172 | "Epoch: 75, Loss: 0.0001666130410740152\n",
173 | "Epoch: 76, Loss: 0.00016422067710664123\n",
174 | "Epoch: 77, Loss: 0.00016185821732506156\n",
175 | "Epoch: 78, Loss: 0.00015953781257849187\n",
176 | "Epoch: 79, Loss: 0.00015724146214779466\n",
177 | "Epoch: 80, Loss: 0.00015498421271331608\n",
178 | "Epoch: 81, Loss: 0.0001527576387161389\n",
179 | "Epoch: 82, Loss: 0.0001505643012933433\n",
180 | "Epoch: 83, Loss: 0.00014839674986433238\n",
181 | "Epoch: 84, Loss: 0.00014626598567701876\n",
182 | "Epoch: 85, Loss: 0.00014416183694265783\n",
183 | "Epoch: 86, Loss: 0.0001420860644429922\n",
184 | "Epoch: 87, Loss: 0.00014004806871525943\n",
185 | "Epoch: 88, Loss: 0.0001380343601340428\n",
186 | "Epoch: 89, Loss: 0.00013605151616502553\n",
187 | "Epoch: 90, Loss: 0.00013410118117462844\n",
188 | "Epoch: 91, Loss: 0.00013217095693107694\n",
189 | "Epoch: 92, Loss: 0.00013027130626142025\n",
190 | "Epoch: 93, Loss: 0.00012840254930779338\n",
191 | "Epoch: 94, Loss: 0.00012654860620386899\n",
192 | "Epoch: 95, Loss: 0.00012472760863602161\n",
193 | "Epoch: 96, Loss: 0.00012294366024434566\n",
194 | "Epoch: 97, Loss: 0.0001211712951771915\n",
195 | "Epoch: 98, Loss: 0.00011943148274440318\n",
196 | "Epoch: 99, Loss: 0.00011770993296522647\n",
197 | "Predict (after training) 4 7.987527847290039\n"
198 | ]
199 | }
200 | ],
201 | "source": [
202 | "# Training loop\n",
203 | "\n",
204 | "for epoch in range(100):\n",
205 | "    # Use the forward pass to calculate the prediction\n",
206 | "    y_pred = model(x_data)\n",
207 | "    \n",
208 | "    # Compute and print the loss\n",
209 | "    loss = criterion(y_pred, y_data)\n",
210 | "    print(f'Epoch: {epoch}, Loss: {loss.item()}')\n",
211 | "    \n",
212 | "    # Zero the gradients, do a backward pass to calculate them,\n",
213 | "    # and then update the weights\n",
214 | "    optimum.zero_grad()\n",
215 | "    loss.backward()\n",
216 | "    optimum.step()\n",
217 | "    \n",
218 | "    \n",
219 | "# After training\n",
220 | "new_val = torch.Tensor([4.0])\n",
221 | "print('Predict (after training)', 4, model(new_val).item())"
222 | ]
223 | }
224 | ],
225 | "metadata": {
226 | "kernelspec": {
227 | "display_name": "Python 3",
228 | "language": "python",
229 | "name": "python3"
230 | },
231 | "language_info": {
232 | "codemirror_mode": {
233 | "name": "ipython",
234 | "version": 3
235 | },
236 | "file_extension": ".py",
237 | "mimetype": "text/x-python",
238 | "name": "python",
239 | "nbconvert_exporter": "python",
240 | "pygments_lexer": "ipython3",
241 | "version": "3.7.6"
242 | }
243 | },
244 | "nbformat": 4,
245 | "nbformat_minor": 4
246 | }
247 | 
--------------------------------------------------------------------------------
/lecture_06.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "## Lecture 06: \n",
8 | "\n",
9 | "**Logistic Regression**\n",
10 | "\n",
11 | "* This is an extension to the linear model. 
We only have to add a `sigmoid function` on the output of our linear model that we have.\n", 12 | "\n", 13 | "* The sigmoid will define the threshold to move the output into 0 or 1\n", 14 | "\n", 15 | "* The loss will change from `MSE` to `Binary Cross Entropy`" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 3, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "import torch\n", 25 | "import numpy as np\n", 26 | "import pandas as pd\n", 27 | "import matplotlib.pyplot as plt" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 6, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "import torch.nn.functional as F" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 5, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "x_data = torch.Tensor([[1.0], [2.0], [3.0], [4.0]])\n", 46 | "y_data = torch.Tensor([[0.0], [0.0], [1.0], [1.0]])" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 9, 52 | "metadata": { 53 | "collapsed": true 54 | }, 55 | "outputs": [ 56 | { 57 | "name": "stdout", 58 | "output_type": "stream", 59 | "text": [ 60 | "Epoch: 0, Loss: 1.0488253831863403\n", 61 | "Epoch: 1, Loss: 0.803641676902771\n", 62 | "Epoch: 2, Loss: 0.6963463425636292\n", 63 | "Epoch: 3, Loss: 0.6555147171020508\n", 64 | "Epoch: 4, Loss: 0.6380110383033752\n", 65 | "Epoch: 5, Loss: 0.6283940076828003\n", 66 | "Epoch: 6, Loss: 0.6216136813163757\n", 67 | "Epoch: 7, Loss: 0.6159367561340332\n", 68 | "Epoch: 8, Loss: 0.6107307076454163\n", 69 | "Epoch: 9, Loss: 0.6057538390159607\n", 70 | "Epoch: 10, Loss: 0.6009101271629333\n", 71 | "Epoch: 11, Loss: 0.5961601138114929\n", 72 | "Epoch: 12, Loss: 0.5914872288703918\n", 73 | "Epoch: 13, Loss: 0.5868842005729675\n", 74 | "Epoch: 14, Loss: 0.5823471546173096\n", 75 | "Epoch: 15, Loss: 0.5778741836547852\n", 76 | "Epoch: 16, Loss: 0.573464035987854\n", 77 | "Epoch: 17, Loss: 0.5691152811050415\n", 78 | "Epoch: 18, Loss: 0.5648272037506104\n", 79 | "Epoch: 19, Loss: 0.5605987310409546\n", 80 | "Epoch: 20, Loss: 0.5564290285110474\n", 81 | "Epoch: 21, Loss: 0.5523171424865723\n", 82 | "Epoch: 22, Loss: 0.5482622981071472\n", 83 | "Epoch: 23, Loss: 0.5442636013031006\n", 84 | "Epoch: 24, Loss: 0.5403201580047607\n", 85 | "Epoch: 25, Loss: 0.536431074142456\n", 86 | "Epoch: 26, Loss: 0.532595694065094\n", 87 | "Epoch: 27, Loss: 0.5288130044937134\n", 88 | "Epoch: 28, Loss: 0.5250822305679321\n", 89 | "Epoch: 29, Loss: 0.5214025378227234\n", 90 | "Epoch: 30, Loss: 0.5177733302116394\n", 91 | "Epoch: 31, Loss: 0.5141934752464294\n", 92 | "Epoch: 32, Loss: 0.5106623768806458\n", 93 | "Epoch: 33, Loss: 0.507179319858551\n", 94 | "Epoch: 34, Loss: 0.5037433505058289\n", 95 | "Epoch: 35, Loss: 0.5003538727760315\n", 96 | "Epoch: 36, Loss: 0.49701017141342163\n", 97 | "Epoch: 37, Loss: 0.49371135234832764\n", 98 | "Epoch: 38, Loss: 0.4904567301273346\n", 99 | "Epoch: 39, Loss: 0.48724570870399475\n", 100 | "Epoch: 40, Loss: 0.484077513217926\n", 101 | "Epoch: 41, Loss: 0.4809514880180359\n", 102 | "Epoch: 42, Loss: 0.47786685824394226\n", 103 | "Epoch: 43, Loss: 0.4748230278491974\n", 104 | "Epoch: 44, Loss: 0.47181928157806396\n", 105 | "Epoch: 45, Loss: 0.46885496377944946\n", 106 | "Epoch: 46, Loss: 0.4659295082092285\n", 107 | "Epoch: 47, Loss: 0.463042289018631\n", 108 | "Epoch: 48, Loss: 0.4601925015449524\n", 109 | "Epoch: 49, Loss: 0.4573797285556793\n", 110 | "Epoch: 50, Loss: 0.4546033442020416\n", 111 | "Epoch: 51, Loss: 0.451862633228302\n", 
112 | "Epoch: 52, Loss: 0.44915708899497986\n", 113 | "Epoch: 53, Loss: 0.44648614525794983\n", 114 | "Epoch: 54, Loss: 0.44384920597076416\n", 115 | "Epoch: 55, Loss: 0.44124576449394226\n", 116 | "Epoch: 56, Loss: 0.43867525458335876\n", 117 | "Epoch: 57, Loss: 0.4361370801925659\n", 118 | "Epoch: 58, Loss: 0.4336307644844055\n", 119 | "Epoch: 59, Loss: 0.43115586042404175\n", 120 | "Epoch: 60, Loss: 0.4287116825580597\n", 121 | "Epoch: 61, Loss: 0.4262978434562683\n", 122 | "Epoch: 62, Loss: 0.4239138662815094\n", 123 | "Epoch: 63, Loss: 0.4215591847896576\n", 124 | "Epoch: 64, Loss: 0.41923344135284424\n", 125 | "Epoch: 65, Loss: 0.41693609952926636\n", 126 | "Epoch: 66, Loss: 0.41466662287712097\n", 127 | "Epoch: 67, Loss: 0.41242462396621704\n", 128 | "Epoch: 68, Loss: 0.41020965576171875\n", 129 | "Epoch: 69, Loss: 0.4080214500427246\n", 130 | "Epoch: 70, Loss: 0.4058592915534973\n", 131 | "Epoch: 71, Loss: 0.403722882270813\n", 132 | "Epoch: 72, Loss: 0.4016118347644806\n", 133 | "Epoch: 73, Loss: 0.3995257318019867\n", 134 | "Epoch: 74, Loss: 0.3974641263484955\n", 135 | "Epoch: 75, Loss: 0.3954267203807831\n", 136 | "Epoch: 76, Loss: 0.3934130370616913\n", 137 | "Epoch: 77, Loss: 0.39142274856567383\n", 138 | "Epoch: 78, Loss: 0.3894554674625397\n", 139 | "Epoch: 79, Loss: 0.38751083612442017\n", 140 | "Epoch: 80, Loss: 0.38558846712112427\n", 141 | "Epoch: 81, Loss: 0.3836880624294281\n", 142 | "Epoch: 82, Loss: 0.38180920481681824\n", 143 | "Epoch: 83, Loss: 0.3799516558647156\n", 144 | "Epoch: 84, Loss: 0.3781149983406067\n", 145 | "Epoch: 85, Loss: 0.3762989342212677\n", 146 | "Epoch: 86, Loss: 0.3745031952857971\n", 147 | "Epoch: 87, Loss: 0.37272730469703674\n", 148 | "Epoch: 88, Loss: 0.370971143245697\n", 149 | "Epoch: 89, Loss: 0.36923426389694214\n", 150 | "Epoch: 90, Loss: 0.3675164580345154\n", 151 | "Epoch: 91, Loss: 0.3658173978328705\n", 152 | "Epoch: 92, Loss: 0.3641367554664612\n", 153 | "Epoch: 93, Loss: 0.3624744415283203\n", 154 | "Epoch: 94, Loss: 0.3608299195766449\n", 155 | "Epoch: 95, Loss: 0.3592030107975006\n", 156 | "Epoch: 96, Loss: 0.35759350657463074\n", 157 | "Epoch: 97, Loss: 0.3560010492801666\n", 158 | "Epoch: 98, Loss: 0.35442546010017395\n", 159 | "Epoch: 99, Loss: 0.3528664708137512\n", 160 | "Predict for 1 hour 1.0 False\n", 161 | "Predict for 7 hour 7.0 True\n" 162 | ] 163 | } 164 | ], 165 | "source": [ 166 | "# Creating the model class and defining the elements of the network\n", 167 | "\n", 168 | "class Model(torch.nn.Module):\n", 169 | " def __init__(self):\n", 170 | " super(Model, self).__init__()\n", 171 | " self.linear = torch.nn.Linear(1,1)\n", 172 | " \n", 173 | " def forward(self, x):\n", 174 | " y_pred = F.sigmoid(self.linear(x))\n", 175 | " return y_pred\n", 176 | "\n", 177 | "# Defining the model object for the class\n", 178 | "model = Model()\n", 179 | "\n", 180 | "\n", 181 | "# Defining the loss and the optimizer function\n", 182 | "criterion = torch.nn.BCELoss(size_average=True)\n", 183 | "optimus = torch.optim.SGD(model.parameters(), lr = 0.2)\n", 184 | "\n", 185 | "\n", 186 | "# Training loop\n", 187 | "for epoch in range(100):\n", 188 | " # forward pass\n", 189 | " y_pred = model(x_data)\n", 190 | " \n", 191 | " # compute loss and print\n", 192 | " loss = criterion(y_pred, y_data)\n", 193 | " print(f'Epoch: {epoch}, Loss: {loss.item()}')\n", 194 | " \n", 195 | " # compute backward loop and update the values using optimizer\n", 196 | " optimus.zero_grad()\n", 197 | " loss.backward()\n", 198 | " optimus.step()\n", 199 | " 
\n", 200 | " \n", 201 | "# After training\n", 202 | "one_val = torch.Tensor([1.0])\n", 203 | "seven_val = torch. Tensor([7.0])\n", 204 | "print('Predict for 1 hour', 1.0, model(one_val).item()> 0.5)\n", 205 | "print('Predict for 7 hour', 7.0, model(seven_val).item() > 0.5)" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": {}, 212 | "outputs": [], 213 | "source": [] 214 | } 215 | ], 216 | "metadata": { 217 | "kernelspec": { 218 | "display_name": "Python 3", 219 | "language": "python", 220 | "name": "python3" 221 | }, 222 | "language_info": { 223 | "codemirror_mode": { 224 | "name": "ipython", 225 | "version": 3 226 | }, 227 | "file_extension": ".py", 228 | "mimetype": "text/x-python", 229 | "name": "python", 230 | "nbconvert_exporter": "python", 231 | "pygments_lexer": "ipython3", 232 | "version": "3.7.6" 233 | } 234 | }, 235 | "nbformat": 4, 236 | "nbformat_minor": 4 237 | } 238 | -------------------------------------------------------------------------------- /lecture_07.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Lecture 07:\n", 8 | "\n", 9 | "* Making wide and deep network with the diabetics dataset" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import pandas as pd\n", 19 | "import numpy as np\n", 20 | "import torch\n", 21 | "import matplotlib.pyplot as plt" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "metadata": {}, 28 | "outputs": [ 29 | { 30 | "name": "stdout", 31 | "output_type": "stream", 32 | "text": [ 33 | "X_data shape torch.Size([768, 8])\n", 34 | "y_data shape torch.Size([768, 1])\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "xy = np.loadtxt('./data/diabetes.csv', delimiter=',', skiprows=1, dtype = np.float32)\n", 40 | "x_data = torch.from_numpy(xy[:,0:-1])\n", 41 | "y_data = torch.from_numpy(xy[:,[-1]])\n", 42 | "\n", 43 | "print('X_data shape', x_data.shape)\n", 44 | "print('y_data shape', y_data.shape)" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 11, 50 | "metadata": {}, 51 | "outputs": [ 52 | { 53 | "name": "stdout", 54 | "output_type": "stream", 55 | "text": [ 56 | "Epoch: 0, Loss: 0.6973557472229004\n", 57 | "Epoch: 100, Loss: 0.6468027830123901\n", 58 | "Epoch: 200, Loss: 0.6467949151992798\n", 59 | "Epoch: 300, Loss: 0.6467936635017395\n", 60 | "Epoch: 400, Loss: 0.6467936635017395\n", 61 | "Epoch: 500, Loss: 0.6467936635017395\n", 62 | "Epoch: 600, Loss: 0.6467936635017395\n", 63 | "Epoch: 700, Loss: 0.6467936635017395\n", 64 | "Epoch: 800, Loss: 0.6467936635017395\n", 65 | "Epoch: 900, Loss: 0.6467936635017395\n" 66 | ] 67 | } 68 | ], 69 | "source": [ 70 | "class Model(torch.nn.Module):\n", 71 | " def __init__(self):\n", 72 | " super(Model, self).__init__()\n", 73 | " self.l1 = torch.nn.Linear(8,6)\n", 74 | " self.l2 = torch.nn.Linear(6,4)\n", 75 | " self.l3 = torch.nn.Linear(4,1)\n", 76 | " \n", 77 | " self.sigmoid = torch.nn.Sigmoid()\n", 78 | " \n", 79 | " def forward(self, x):\n", 80 | " out1 = self.sigmoid(self.l1(x))\n", 81 | " out2 = self.sigmoid(self.l2(out1))\n", 82 | " y_pred = self.sigmoid(self.l3(out2))\n", 83 | " return y_pred\n", 84 | " \n", 85 | " \n", 86 | "model = Model()\n", 87 | "\n", 88 | "criterion = torch.nn.BCELoss(size_average=True)\n", 89 | "optimus = torch.optim.Adam(model.parameters(), lr = 
0.3)\n", 90 | "\n", 91 | "\n", 92 | "for epoch in range(1000):\n", 93 | " y_pred = model(x_data)\n", 94 | " \n", 95 | " loss = criterion(y_pred, y_data)\n", 96 | " if epoch%100 == 0:\n", 97 | " print(f'Epoch: {epoch}, Loss: {loss.item()}')\n", 98 | " \n", 99 | " optimus.zero_grad()\n", 100 | " loss.backward()\n", 101 | " optimus.step()" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [] 110 | } 111 | ], 112 | "metadata": { 113 | "kernelspec": { 114 | "display_name": "Python 3", 115 | "language": "python", 116 | "name": "python3" 117 | }, 118 | "language_info": { 119 | "codemirror_mode": { 120 | "name": "ipython", 121 | "version": 3 122 | }, 123 | "file_extension": ".py", 124 | "mimetype": "text/x-python", 125 | "name": "python", 126 | "nbconvert_exporter": "python", 127 | "pygments_lexer": "ipython3", 128 | "version": "3.7.6" 129 | } 130 | }, 131 | "nbformat": 4, 132 | "nbformat_minor": 4 133 | } 134 | -------------------------------------------------------------------------------- /lecture_08.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Lecture 08:\n", 8 | "\n", 9 | "* **DataLoader** used to load the data to the model in batches. This is done because not all the data can be loaded to the network in 1 go. \n", 10 | "\n", 11 | "\n", 12 | "- **1 Epoch** is the passing of all the data through the network once. \n", 13 | "- **Batch Size** is the minimum amout of data that will pass through the network\n", 14 | "- **Iterations** No. of passes of data to be made to cover the complete training data. \n", 15 | "\n", 16 | "\n", 17 | "_For a training data of `1000 rows` and batch size of `500` the iteration is `2`_\n", 18 | " \n", 19 | "\n" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 1, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "import pandas as pd\n", 29 | "import numpy as np\n", 30 | "import matplotlib.pyplot as plt\n", 31 | "import torch" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 2, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "# Libraries for creating the dataloader\n", 41 | "from torch.utils.data import DataLoader, Dataset" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 3, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "class MyDataset(Dataset):\n", 51 | " \n", 52 | " # Initiatize your data, download process etc\n", 53 | " def __init__(self):\n", 54 | " xy = np.loadtxt('./data/diabetes.csv', delimiter=',', skiprows=1, dtype = np.float32)\n", 55 | " self.len = xy.shape[0]\n", 56 | " self.x_data = torch.from_numpy(xy[:,0:-1])\n", 57 | " self.y_data = torch.from_numpy(xy[:,-1])\n", 58 | " \n", 59 | " # This will return the element from the data, based on the index value \n", 60 | " def __getitem__(self, index):\n", 61 | " return self.x_data[index], self.y_data[index]\n", 62 | " \n", 63 | " # Return the length of the data\n", 64 | " def __len__(self):\n", 65 | " return self.len\n", 66 | " \n", 67 | "\n", 68 | "# Object for MyDataset class\n", 69 | "dataset = MyDataset()\n", 70 | "\n", 71 | "# Creating the loader\n", 72 | "train_loader = DataLoader(dataset = dataset, batch_size = 32, shuffle=True, num_workers=0)" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 4, 78 | "metadata": {}, 79 | "outputs": [], 80 | 
"source": [ 81 | "# Pytorch network and training\n", 82 | "\n", 83 | "class Model(torch.nn.Module):\n", 84 | " \n", 85 | " def __init__(self):\n", 86 | " \"\"\"\n", 87 | " Initialize the network with the layers\n", 88 | " \"\"\"\n", 89 | " super(Model, self).__init__()\n", 90 | " self.l1 = torch.nn.Linear(8,6)\n", 91 | " self.l2 = torch.nn.Linear(6,8)\n", 92 | " self.l3 = torch.nn.Linear(8,1)\n", 93 | " \n", 94 | " self.sigmoid = torch.nn.Sigmoid()\n", 95 | " \n", 96 | " def forward(self, x):\n", 97 | " \"\"\"\n", 98 | " Defining the forward pass of the data based on the layers created above\n", 99 | " \"\"\"\n", 100 | " out1 = self.sigmoid(self.l1(x))\n", 101 | " out2 = self.sigmoid(self.l2(out1))\n", 102 | " y_pred = self.sigmoid(self.l3(out2))\n", 103 | " return y_pred\n", 104 | "\n", 105 | "# Creating Model object\n", 106 | "model = Model()\n", 107 | "\n", 108 | "# Loss function called criterion. This is the binary corss entropy.\n", 109 | "# Also create the optimizer to update the gradient. Using the SGD here.\n", 110 | "criterion = torch.nn.BCELoss(reduction='mean')\n", 111 | "optimus = torch.optim.SGD(model.parameters(), lr = 0.1)" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 5, 117 | "metadata": { 118 | "scrolled": false 119 | }, 120 | "outputs": [ 121 | { 122 | "name": "stderr", 123 | "output_type": "stream", 124 | "text": [ 125 | "C:\\Users\\abhis\\Anaconda3\\envs\\fastai\\lib\\site-packages\\torch\\nn\\modules\\loss.py:498: UserWarning: Using a target size (torch.Size([32])) that is different to the input size (torch.Size([32, 1])) is deprecated. Please ensure they have the same size.\n", 126 | " return F.binary_cross_entropy(input, target, weight=self.weight, reduction=self.reduction)\n" 127 | ] 128 | }, 129 | { 130 | "name": "stdout", 131 | "output_type": "stream", 132 | "text": [ 133 | "Epoch: 0, Loss: 0.6850202679634094\n", 134 | "Epoch: 0, Loss: 0.7063078284263611\n", 135 | "Epoch: 0, Loss: 0.7715358138084412\n", 136 | "Epoch: 0, Loss: 0.7151461839675903\n", 137 | "Epoch: 0, Loss: 0.7090287208557129\n", 138 | "Epoch: 0, Loss: 0.6874685287475586\n", 139 | "Epoch: 0, Loss: 0.686786949634552\n", 140 | "Epoch: 0, Loss: 0.680263102054596\n", 141 | "Epoch: 0, Loss: 0.6956996321678162\n", 142 | "Epoch: 0, Loss: 0.662934422492981\n", 143 | "Epoch: 0, Loss: 0.6796655654907227\n", 144 | "Epoch: 0, Loss: 0.672856330871582\n", 145 | "Epoch: 0, Loss: 0.6612431406974792\n", 146 | "Epoch: 0, Loss: 0.6768894791603088\n", 147 | "Epoch: 0, Loss: 0.6287937164306641\n", 148 | "Epoch: 0, Loss: 0.6391973495483398\n", 149 | "Epoch: 0, Loss: 0.6554039120674133\n", 150 | "Epoch: 0, Loss: 0.6606767177581787\n", 151 | "Epoch: 0, Loss: 0.6127085089683533\n", 152 | "Epoch: 0, Loss: 0.6212042570114136\n", 153 | "Epoch: 0, Loss: 0.6492659449577332\n", 154 | "Epoch: 0, Loss: 0.6614421010017395\n", 155 | "Epoch: 0, Loss: 0.6139333844184875\n", 156 | "Epoch: 0, Loss: 0.6641162633895874\n", 157 | "Epoch: 1, Loss: 0.6617295145988464\n", 158 | "Epoch: 1, Loss: 0.6448819637298584\n", 159 | "Epoch: 1, Loss: 0.5934086441993713\n", 160 | "Epoch: 1, Loss: 0.6472995281219482\n", 161 | "Epoch: 1, Loss: 0.7720071077346802\n", 162 | "Epoch: 1, Loss: 0.7074853777885437\n", 163 | "Epoch: 1, Loss: 0.647002100944519\n", 164 | "Epoch: 1, Loss: 0.6644510626792908\n", 165 | "Epoch: 1, Loss: 0.6477363109588623\n", 166 | "Epoch: 1, Loss: 0.6591359972953796\n", 167 | "Epoch: 1, Loss: 0.6448429822921753\n", 168 | "Epoch: 1, Loss: 0.6117604374885559\n", 169 | "Epoch: 1, Loss: 0.6275312304496765\n", 
170 | "Epoch: 1, Loss: 0.6068578958511353\n", 171 | "Epoch: 1, Loss: 0.548807680606842\n", 172 | "Epoch: 1, Loss: 0.7080508470535278\n", 173 | "Epoch: 1, Loss: 0.6454963088035583\n", 174 | "Epoch: 1, Loss: 0.6226770281791687\n", 175 | "Epoch: 1, Loss: 0.6826448440551758\n", 176 | "Epoch: 1, Loss: 0.587816596031189\n", 177 | "Epoch: 1, Loss: 0.6265673041343689\n", 178 | "Epoch: 1, Loss: 0.7271870970726013\n", 179 | "Epoch: 1, Loss: 0.6611766219139099\n", 180 | "Epoch: 1, Loss: 0.6052882671356201\n" 181 | ] 182 | } 183 | ], 184 | "source": [ 185 | "for epoch in range(2):\n", 186 | " for i, data in enumerate(train_loader, 0):\n", 187 | " # getting the inputs and labels\n", 188 | " inputs, labels = data\n", 189 | " \n", 190 | " # forward pass\n", 191 | " y_pred = model(inputs)\n", 192 | " \n", 193 | " # Loss and print\n", 194 | " loss = criterion(y_pred, labels)\n", 195 | " print(f'Epoch: {epoch}, Loss: {loss.item()}')\n", 196 | " \n", 197 | " # Make optimizer items zero, back propagation \n", 198 | " # Then updating the wegihts using step\n", 199 | " optimus.zero_grad()\n", 200 | " loss.backward()\n", 201 | " optimus.step()\n", 202 | " " 203 | ] 204 | } 205 | ], 206 | "metadata": { 207 | "kernelspec": { 208 | "display_name": "Python 3", 209 | "language": "python", 210 | "name": "python3" 211 | }, 212 | "language_info": { 213 | "codemirror_mode": { 214 | "name": "ipython", 215 | "version": 3 216 | }, 217 | "file_extension": ".py", 218 | "mimetype": "text/x-python", 219 | "name": "python", 220 | "nbconvert_exporter": "python", 221 | "pygments_lexer": "ipython3", 222 | "version": "3.7.6" 223 | } 224 | }, 225 | "nbformat": 4, 226 | "nbformat_minor": 4 227 | } 228 | -------------------------------------------------------------------------------- /lecture_08_a.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pandas as pd\n", 11 | "import torch\n", 12 | "from torch.utils.data import DataLoader, Dataset" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 2, 18 | "metadata": {}, 19 | "outputs": [ 20 | { 21 | "data": { 22 | "text/plain": [ 23 | "(150, 4)" 24 | ] 25 | }, 26 | "execution_count": 2, 27 | "metadata": {}, 28 | "output_type": "execute_result" 29 | } 30 | ], 31 | "source": [ 32 | "xy_df = pd.read_csv('./data/iris.csv', delimiter=',')\n", 33 | "x_data = xy_df.iloc[:,0:-1].to_numpy()\n", 34 | "x_data.shape" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 3, 40 | "metadata": {}, 41 | "outputs": [ 42 | { 43 | "data": { 44 | "text/plain": [ 45 | "array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", 46 | " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n", 47 | " 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1.,\n", 48 | " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", 49 | " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", 50 | " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 2., 2.,\n", 51 | " 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.,\n", 52 | " 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.,\n", 53 | " 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2., 2.],\n", 54 | " dtype=float32)" 55 | ] 56 | }, 57 | "execution_count": 3, 58 | "metadata": {}, 59 | "output_type": 
"execute_result" 60 | } 61 | ], 62 | "source": [ 63 | " pd.get_dummies(xy_df.iloc[:,-1]).values.argmax(1).astype(np.float32)" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 4, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "# Creating Network\n", 73 | "\n", 74 | "class Model(torch.nn.Module):\n", 75 | " \n", 76 | " def __init__(self):\n", 77 | " \"\"\"\n", 78 | " Initialize the network with the layers\n", 79 | " \"\"\"\n", 80 | " super(Model, self).__init__()\n", 81 | " self.l1 = torch.nn.Linear(4, 10)\n", 82 | " self.l2 = torch.nn.Linear(10, 8)\n", 83 | " self.l3 = torch.nn.Linear(8, 1)\n", 84 | " \n", 85 | " self.sigmoid = torch.nn.Sigmoid()\n", 86 | " \n", 87 | " def forward(self, x):\n", 88 | " \"\"\"\n", 89 | " Defining the forward pass of the data based on the layers created above\n", 90 | " \"\"\"\n", 91 | " out1 = self.sigmoid(self.l1(x))\n", 92 | " out2 = self.sigmoid(self.l2(out1))\n", 93 | " y_pred = self.sigmoid(self.l3(out2))\n", 94 | " return y_pred" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": 5, 100 | "metadata": {}, 101 | "outputs": [], 102 | "source": [ 103 | "# Creating the Dataset and Dataloader\n", 104 | "\n", 105 | "class MyDataset(Dataset):\n", 106 | " \n", 107 | " def __init__(self):\n", 108 | " xy_df = pd.read_csv('./data/iris.csv', delimiter=',')\n", 109 | " self.len = xy_df.shape[0]\n", 110 | " self.x_data = torch.from_numpy(xy_df.iloc[:,0:-1].to_numpy(dtype=np.float32))\n", 111 | " self.y_data = torch.from_numpy((pd.get_dummies(xy_df.iloc[:,-1])).values.argmax(1).astype(np.float32))\n", 112 | " \n", 113 | " def __getitem__(self, index):\n", 114 | " return self.x_data[index], self.y_data[index]\n", 115 | " \n", 116 | " def __len__(self):\n", 117 | " return self.len " 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 6, 123 | "metadata": {}, 124 | "outputs": [], 125 | "source": [ 126 | "model = Model()\n", 127 | "my_dataset = MyDataset()" 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 7, 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "train_loader = DataLoader(dataset = my_dataset, batch_size=16, shuffle=True, num_workers=0)" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": 8, 142 | "metadata": {}, 143 | "outputs": [], 144 | "source": [ 145 | "criterion = torch.nn.BCELoss(reduction='mean')\n", 146 | "optimus = torch.optim.SGD(model.parameters(), lr = 0.05)" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 9, 152 | "metadata": { 153 | "scrolled": false 154 | }, 155 | "outputs": [ 156 | { 157 | "name": "stderr", 158 | "output_type": "stream", 159 | "text": [ 160 | "C:\\Users\\abhis\\Anaconda3\\envs\\fastai\\lib\\site-packages\\torch\\nn\\modules\\loss.py:498: UserWarning: Using a target size (torch.Size([16])) that is different to the input size (torch.Size([16, 1])) is deprecated. 
Please ensure they have the same size.\n", 161 | " return F.binary_cross_entropy(input, target, weight=self.weight, reduction=self.reduction)\n" 162 | ] 163 | }, 164 | { 165 | "name": "stdout", 166 | "output_type": "stream", 167 | "text": [ 168 | "Epoch: 0, Loss: 0.5405045747756958\n", 169 | "Epoch: 0, Loss: 0.4360932409763336\n", 170 | "Epoch: 0, Loss: 0.5628055930137634\n", 171 | "Epoch: 0, Loss: 0.41424405574798584\n", 172 | "Epoch: 0, Loss: 0.3848228454589844\n", 173 | "Epoch: 0, Loss: 0.2688494324684143\n", 174 | "Epoch: 0, Loss: 0.4241332709789276\n", 175 | "Epoch: 0, Loss: 0.5201851725578308\n", 176 | "Epoch: 0, Loss: 0.3002222776412964\n", 177 | "Epoch: 0, Loss: 0.3375948369503021\n", 178 | "Epoch: 1, Loss: 0.07833589613437653\n", 179 | "Epoch: 1, Loss: 0.29924270510673523\n", 180 | "Epoch: 1, Loss: 0.5644407272338867\n", 181 | "Epoch: 1, Loss: 0.2867211103439331\n", 182 | "Epoch: 1, Loss: 0.059754449874162674\n", 183 | "Epoch: 1, Loss: 0.48803412914276123\n", 184 | "Epoch: 1, Loss: 0.02433396875858307\n", 185 | "Epoch: 1, Loss: 0.07684843987226486\n", 186 | "Epoch: 1, Loss: 0.6535183191299438\n", 187 | "Epoch: 1, Loss: 0.23174096643924713\n" 188 | ] 189 | }, 190 | { 191 | "name": "stderr", 192 | "output_type": "stream", 193 | "text": [ 194 | "C:\\Users\\abhis\\Anaconda3\\envs\\fastai\\lib\\site-packages\\torch\\nn\\modules\\loss.py:498: UserWarning: Using a target size (torch.Size([6])) that is different to the input size (torch.Size([6, 1])) is deprecated. Please ensure they have the same size.\n", 195 | " return F.binary_cross_entropy(input, target, weight=self.weight, reduction=self.reduction)\n" 196 | ] 197 | } 198 | ], 199 | "source": [ 200 | "for epoch in range(2):\n", 201 | " for i, data in enumerate(train_loader, 0):\n", 202 | " # getting the inputs and labels\n", 203 | " inputs, labels = data\n", 204 | "# print(f'labels: {labels.shape}')\n", 205 | " # Predictions from the model\n", 206 | " y_pred = model(inputs)\n", 207 | "# print(labels.shape)\n", 208 | " \n", 209 | " # Get the loss\n", 210 | " loss = criterion(y_pred, labels)\n", 211 | " print(f'Epoch: {epoch}, Loss: {loss.item()}')\n", 212 | " \n", 213 | " # optimus roll out\n", 214 | " optimus.zero_grad()\n", 215 | " loss.backward()\n", 216 | " optimus.step()\n", 217 | " " 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": null, 223 | "metadata": {}, 224 | "outputs": [], 225 | "source": [] 226 | } 227 | ], 228 | "metadata": { 229 | "kernelspec": { 230 | "display_name": "Python 3", 231 | "language": "python", 232 | "name": "python3" 233 | }, 234 | "language_info": { 235 | "codemirror_mode": { 236 | "name": "ipython", 237 | "version": 3 238 | }, 239 | "file_extension": ".py", 240 | "mimetype": "text/x-python", 241 | "name": "python", 242 | "nbconvert_exporter": "python", 243 | "pygments_lexer": "ipython3", 244 | "version": "3.7.6" 245 | } 246 | }, 247 | "nbformat": 4, 248 | "nbformat_minor": 4 249 | } 250 | -------------------------------------------------------------------------------- /lecture_08_b.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 2, 4 | "metadata": { 5 | "language_info": { 6 | "name": "python", 7 | "codemirror_mode": { 8 | "name": "ipython", 9 | "version": 3 10 | }, 11 | "version": "3.7.1-final" 12 | }, 13 | "orig_nbformat": 2, 14 | "file_extension": ".py", 15 | "mimetype": "text/x-python", 16 | "name": "python", 17 | "npconvert_exporter": "python", 18 | "pygments_lexer": "ipython3", 19 | 
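Since the iris labels above take three values (0, 1, 2), squeezing them through a single sigmoid output with BCE is a poor fit, which is part of why the losses bounce around. Below is a minimal sketch of the usual multiclass setup with `CrossEntropyLoss` (the `IrisNet` name is hypothetical; it assumes the same `MyDataset`, with labels converted to `torch.long`):

```python
import torch

class IrisNet(torch.nn.Module):
    def __init__(self):
        super(IrisNet, self).__init__()
        self.l1 = torch.nn.Linear(4, 10)
        self.l2 = torch.nn.Linear(10, 8)
        self.l3 = torch.nn.Linear(8, 3)   # one raw logit per class, no sigmoid

    def forward(self, x):
        x = torch.relu(self.l1(x))
        x = torch.relu(self.l2(x))
        return self.l3(x)

model = IrisNet()
criterion = torch.nn.CrossEntropyLoss()   # applies log-softmax internally and
                                          # expects integer class labels
```
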
"version": 3, 20 | "kernelspec": { 21 | "name": "python37664bitfastaiconda149f4ca18fae45818735beadf08062d0", 22 | "display_name": "Python 3.7.6 64-bit ('fastai': conda)" 23 | } 24 | }, 25 | "cells": [ 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "### Practice File with Wine Dataset\n", 31 | "\n", 32 | "The result is shit. To be further improved with Softmax implementation" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 1, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "# Importing standard libraries\n", 42 | "\n", 43 | "import numpy as np\n", 44 | "import pandas as pd\n", 45 | "import torch\n", 46 | "import matplotlib.pyplot as plt" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 2, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "# Importing the dataloader and dataset library\n", 56 | "\n", 57 | "from torch.utils.data import DataLoader, Dataset" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 3, 63 | "metadata": {}, 64 | "outputs": [ 65 | { 66 | "output_type": "execute_result", 67 | "data": { 68 | "text/plain": "(4898, 12)" 69 | }, 70 | "metadata": {}, 71 | "execution_count": 3 72 | } 73 | ], 74 | "source": [ 75 | "# Analyzing the data real quick to see how it is structured\n", 76 | "\n", 77 | "wine = pd.read_csv('./data/winequality-white.csv')\n", 78 | "wine.shape" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": 4, 84 | "metadata": {}, 85 | "outputs": [ 86 | { 87 | "output_type": "execute_result", 88 | "data": { 89 | "text/plain": " fixed acidity volatile acidity citric acid residual sugar chlorides \\\n0 7.0 0.27 0.36 20.7 0.045 \n1 6.3 0.30 0.34 1.6 0.049 \n2 8.1 0.28 0.40 6.9 0.050 \n3 7.2 0.23 0.32 8.5 0.058 \n4 7.2 0.23 0.32 8.5 0.058 \n\n free sulfur dioxide total sulfur dioxide density pH sulphates \\\n0 45.0 170.0 1.0010 3.00 0.45 \n1 14.0 132.0 0.9940 3.30 0.49 \n2 30.0 97.0 0.9951 3.26 0.44 \n3 47.0 186.0 0.9956 3.19 0.40 \n4 47.0 186.0 0.9956 3.19 0.40 \n\n alcohol quality \n0 8.8 6 \n1 9.5 6 \n2 10.1 6 \n3 9.9 6 \n4 9.9 6 ", 90 | "text/html": "
\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
fixed acidityvolatile aciditycitric acidresidual sugarchloridesfree sulfur dioxidetotal sulfur dioxidedensitypHsulphatesalcoholquality
07.00.270.3620.70.04545.0170.01.00103.000.458.86
16.30.300.341.60.04914.0132.00.99403.300.499.56
28.10.280.406.90.05030.097.00.99513.260.4410.16
37.20.230.328.50.05847.0186.00.99563.190.409.96
47.20.230.328.50.05847.0186.00.99563.190.409.96
\n
" 91 | }, 92 | "metadata": {}, 93 | "execution_count": 4 94 | } 95 | ], 96 | "source": [ 97 | "wine.head()" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 15, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "# Creating the dataset class\n", 107 | "\n", 108 | "class Mydataset(Dataset):\n", 109 | "\n", 110 | " # Initialize the dataset class with the data\n", 111 | " def __init__(self):\n", 112 | " xy = np.loadtxt('./data/winequality-white.csv', skiprows=1, dtype=np.float32, delimiter=',')\n", 113 | " self.x_data = torch.from_numpy(xy[:,0:-1])\n", 114 | " self.y_data = torch.from_numpy(xy[:,-1])\n", 115 | " self.y_data = self.y_data.long()\n", 116 | " self.len = xy.shape[0]\n", 117 | "\n", 118 | " # Return item from the tensor based on the index value\n", 119 | " def __getitem__(self, index):\n", 120 | " return self.x_data[index], self.y_data[index]\n", 121 | "\n", 122 | " # Return the length of the tensor\n", 123 | " def __len__(self):\n", 124 | " return self.len" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": 16, 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [ 133 | "# Creating a dataset object based on the Mydataset class\n", 134 | "\n", 135 | "dataset = Mydataset()\n", 136 | "\n", 137 | "# Creating a loader file based on this dataset using the Dataloader utility\n", 138 | "\n", 139 | "train_loader = DataLoader(dataset=dataset, batch_size=32, num_workers=0, shuffle=True)" 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": 17, 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "# Creating a model for classification\n", 149 | "\n", 150 | "class Model(torch.nn.Module):\n", 151 | " # Initialize the Mymodel class\n", 152 | " def __init__(self):\n", 153 | " super(Model, self).__init__()\n", 154 | " self.l1 = torch.nn.Linear(11, 64)\n", 155 | " self.l2 = torch.nn.Linear(64, 32)\n", 156 | " self.l3 = torch.nn.Linear(32,16)\n", 157 | " self.l4 = torch.nn.Linear(16,10)\n", 158 | "\n", 159 | " self.relu = torch.nn.ReLU()\n", 160 | "\n", 161 | " def forward(self, x):\n", 162 | " out_1 = self.relu(self.l1(x))\n", 163 | " out_2 = self.relu(self.l2(out_1))\n", 164 | " out_3 = self.relu(self.l3(out_2))\n", 165 | " y_pred = self.l4(out_3)\n", 166 | " return y_pred\n", 167 | "\n", 168 | "\n", 169 | "model = Model()" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 20, 175 | "metadata": {}, 176 | "outputs": [], 177 | "source": [ 178 | "# Loss function and optimizer\n", 179 | "\n", 180 | "criterion = torch.nn.CrossEntropyLoss(reduction='mean')\n", 181 | "optimus = torch.optim.SGD(model.parameters(), lr = 0.01, momentum=0.05)" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 21, 187 | "metadata": {}, 188 | "outputs": [ 189 | { 190 | "output_type": "stream", 191 | "name": "stdout", 192 | "text": "Epoch: 0, | Loss: 1.756433367729187\nEpoch: 0, | Loss: 1.084341287612915\nEpoch: 0, | Loss: 1.2398041486740112\nEpoch: 0, | Loss: 1.2816483974456787\nEpoch: 0, | Loss: 1.1007722616195679\nEpoch: 0, | Loss: 1.3398348093032837\nEpoch: 0, | Loss: 1.2524685859680176\nEpoch: 0, | Loss: 1.2639210224151611\nEpoch: 0, | Loss: 1.282962441444397\nEpoch: 0, | Loss: 1.1008296012878418\nEpoch: 0, | Loss: 1.2006319761276245\nEpoch: 0, | Loss: 1.3502143621444702\nEpoch: 0, | Loss: 1.1859476566314697\nEpoch: 0, | Loss: 1.4054738283157349\nEpoch: 0, | Loss: 1.1343977451324463\nEpoch: 0, | Loss: 1.2453234195709229\nEpoch: 0, | Loss: 
1.3625078201293945\nEpoch: 0, | Loss: 1.3253774642944336\nEpoch: 0, | Loss: 1.3961822986602783\nEpoch: 0, | Loss: 1.3755953311920166\nEpoch: 0, | Loss: 1.0844221115112305\nEpoch: 0, | Loss: 1.3290928602218628\nEpoch: 0, | Loss: 1.7520118951797485\nEpoch: 0, | Loss: 1.248341679573059\nEpoch: 0, | Loss: 1.0518062114715576\nEpoch: 0, | Loss: 1.2712175846099854\nEpoch: 0, | Loss: 1.447879672050476\nEpoch: 0, | Loss: 1.2612885236740112\nEpoch: 0, | Loss: 1.422073245048523\nEpoch: 0, | Loss: 1.5215140581130981\nEpoch: 0, | Loss: 1.3892000913619995\nEpoch: 0, | Loss: 1.1072907447814941\nEpoch: 0, | Loss: 1.2334656715393066\nEpoch: 0, | Loss: 1.1808644533157349\nEpoch: 0, | Loss: 1.6566005945205688\nEpoch: 0, | Loss: 1.2929452657699585\nEpoch: 0, | Loss: 1.4340423345565796\nEpoch: 0, | Loss: 1.218409776687622\nEpoch: 0, | Loss: 1.463149905204773\nEpoch: 0, | Loss: 1.213962197303772\nEpoch: 0, | Loss: 1.2909905910491943\nEpoch: 0, | Loss: 1.3370096683502197\nEpoch: 0, | Loss: 1.2478351593017578\nEpoch: 0, | Loss: 1.2110052108764648\nEpoch: 0, | Loss: 1.6760051250457764\nEpoch: 0, | Loss: 1.1968765258789062\nEpoch: 0, | Loss: 1.2450307607650757\nEpoch: 0, | Loss: 1.1623587608337402\nEpoch: 0, | Loss: 1.4190447330474854\nEpoch: 0, | Loss: 1.3626271486282349\nEpoch: 0, | Loss: 1.188270092010498\nEpoch: 0, | Loss: 1.3932428359985352\nEpoch: 0, | Loss: 1.2388060092926025\nEpoch: 0, | Loss: 1.3176240921020508\nEpoch: 0, | Loss: 1.3304051160812378\nEpoch: 0, | Loss: 1.2774662971496582\nEpoch: 0, | Loss: 1.3109029531478882\nEpoch: 0, | Loss: 1.4816749095916748\nEpoch: 0, | Loss: 1.3870813846588135\nEpoch: 0, | Loss: 1.2540956735610962\nEpoch: 0, | Loss: 1.309913158416748\nEpoch: 0, | Loss: 1.2840979099273682\nEpoch: 0, | Loss: 1.1009389162063599\nEpoch: 0, | Loss: 1.1775392293930054\nEpoch: 0, | Loss: 1.0523645877838135\nEpoch: 0, | Loss: 1.5078932046890259\nEpoch: 0, | Loss: 1.4824919700622559\nEpoch: 0, | Loss: 1.1031649112701416\nEpoch: 0, | Loss: 1.0372084379196167\nEpoch: 0, | Loss: 1.4036259651184082\nEpoch: 0, | Loss: 1.1167181730270386\nEpoch: 0, | Loss: 1.3177129030227661\nEpoch: 0, | Loss: 1.4202280044555664\nEpoch: 0, | Loss: 1.4587438106536865\nEpoch: 0, | Loss: 1.1930605173110962\nEpoch: 0, | Loss: 1.1292318105697632\nEpoch: 0, | Loss: 1.0617456436157227\nEpoch: 0, | Loss: 1.178981065750122\nEpoch: 0, | Loss: 1.2181856632232666\nEpoch: 0, | Loss: 1.2632627487182617\nEpoch: 0, | Loss: 1.1817893981933594\nEpoch: 0, | Loss: 1.1683629751205444\nEpoch: 0, | Loss: 1.611451506614685\nEpoch: 0, | Loss: 1.3743221759796143\nEpoch: 0, | Loss: 1.2293481826782227\nEpoch: 0, | Loss: 1.3875333070755005\nEpoch: 0, | Loss: 1.2539887428283691\nEpoch: 0, | Loss: 1.2650847434997559\nEpoch: 0, | Loss: 1.1299192905426025\nEpoch: 0, | Loss: 1.391156554222107\nEpoch: 0, | Loss: 1.300539493560791\nEpoch: 0, | Loss: 1.0793477296829224\nEpoch: 0, | Loss: 1.6700665950775146\nEpoch: 0, | Loss: 1.251083254814148\nEpoch: 0, | Loss: 1.311794400215149\nEpoch: 0, | Loss: 1.2355707883834839\nEpoch: 0, | Loss: 1.7282485961914062\nEpoch: 0, | Loss: 1.1501705646514893\nEpoch: 0, | Loss: 1.3597657680511475\nEpoch: 0, | Loss: 1.1651781797409058\nEpoch: 0, | Loss: 1.3110222816467285\nEpoch: 0, | Loss: 1.6248635053634644\nEpoch: 0, | Loss: 1.1450632810592651\nEpoch: 0, | Loss: 1.1833020448684692\nEpoch: 0, | Loss: 1.5490548610687256\nEpoch: 0, | Loss: 1.2332658767700195\nEpoch: 0, | Loss: 1.2717041969299316\nEpoch: 0, | Loss: 1.499459981918335\nEpoch: 0, | Loss: 1.227103352546692\nEpoch: 0, | Loss: 1.193637728691101\nEpoch: 0, | Loss: 
1.244114637374878\nEpoch: 0, | Loss: 1.291919231414795\nEpoch: 0, | Loss: 1.2115942239761353\nEpoch: 0, | Loss: 1.4701423645019531\nEpoch: 0, | Loss: 1.4692375659942627\nEpoch: 0, | Loss: 1.1239627599716187\nEpoch: 0, | Loss: 1.3325225114822388\nEpoch: 0, | Loss: 1.3494784832000732\nEpoch: 0, | Loss: 1.2421330213546753\nEpoch: 0, | Loss: 1.5091298818588257\nEpoch: 0, | Loss: 1.297631025314331\nEpoch: 0, | Loss: 1.2244142293930054\nEpoch: 0, | Loss: 1.2834659814834595\nEpoch: 0, | Loss: 1.273996353149414\nEpoch: 0, | Loss: 1.4591169357299805\nEpoch: 0, | Loss: 1.183942437171936\nEpoch: 0, | Loss: 1.011520504951477\nEpoch: 0, | Loss: 1.2380011081695557\nEpoch: 0, | Loss: 1.4024224281311035\nEpoch: 0, | Loss: 1.139121174812317\nEpoch: 0, | Loss: 1.2269700765609741\nEpoch: 0, | Loss: 1.3007882833480835\nEpoch: 0, | Loss: 1.6031306982040405\nEpoch: 0, | Loss: 1.1335954666137695\nEpoch: 0, | Loss: 1.4247970581054688\nEpoch: 0, | Loss: 1.1235253810882568\nEpoch: 0, | Loss: 1.4010425806045532\nEpoch: 0, | Loss: 1.2210664749145508\nEpoch: 0, | Loss: 1.4678453207015991\nEpoch: 0, | Loss: 1.2090983390808105\nEpoch: 0, | Loss: 1.2411819696426392\nEpoch: 0, | Loss: 1.5199180841445923\nEpoch: 0, | Loss: 1.2131812572479248\nEpoch: 0, | Loss: 1.326535940170288\nEpoch: 0, | Loss: 1.6301542520523071\nEpoch: 0, | Loss: 1.3725135326385498\nEpoch: 0, | Loss: 1.2347798347473145\nEpoch: 0, | Loss: 1.1588331460952759\nEpoch: 0, | Loss: 1.1402033567428589\nEpoch: 0, | Loss: 1.3818988800048828\nEpoch: 0, | Loss: 1.558655023574829\nEpoch: 0, | Loss: 1.3235424757003784\nEpoch: 0, | Loss: 1.012758493423462\nEpoch: 0, | Loss: 1.4458292722702026\nEpoch: 1, | Loss: 1.1494163274765015\nEpoch: 1, | Loss: 1.5127333402633667\nEpoch: 1, | Loss: 1.0710644721984863\nEpoch: 1, | Loss: 1.325239896774292\nEpoch: 1, | Loss: 1.0804272890090942\nEpoch: 1, | Loss: 1.5886611938476562\nEpoch: 1, | Loss: 1.2305622100830078\nEpoch: 1, | Loss: 1.3778977394104004\nEpoch: 1, | Loss: 1.2961015701293945\nEpoch: 1, | Loss: 1.344267725944519\nEpoch: 1, | Loss: 1.2853035926818848\nEpoch: 1, | Loss: 1.462005615234375\nEpoch: 1, | Loss: 1.4919780492782593\nEpoch: 1, | Loss: 1.3722729682922363\nEpoch: 1, | Loss: 1.322454571723938\nEpoch: 1, | Loss: 1.5486449003219604\nEpoch: 1, | Loss: 1.1968246698379517\nEpoch: 1, | Loss: 1.1492953300476074\nEpoch: 1, | Loss: 1.1030819416046143\nEpoch: 1, | Loss: 1.3961551189422607\nEpoch: 1, | Loss: 1.2197537422180176\nEpoch: 1, | Loss: 1.3928762674331665\nEpoch: 1, | Loss: 1.307381272315979\nEpoch: 1, | Loss: 1.2360962629318237\nEpoch: 1, | Loss: 1.0710458755493164\nEpoch: 1, | Loss: 1.0596511363983154\nEpoch: 1, | Loss: 1.0853190422058105\nEpoch: 1, | Loss: 1.0965861082077026\nEpoch: 1, | Loss: 1.1520949602127075\nEpoch: 1, | Loss: 1.2259347438812256\nEpoch: 1, | Loss: 1.4113998413085938\nEpoch: 1, | Loss: 1.2449194192886353\nEpoch: 1, | Loss: 1.2817134857177734\nEpoch: 1, | Loss: 1.1083173751831055\nEpoch: 1, | Loss: 1.3955156803131104\nEpoch: 1, | Loss: 1.495963215827942\nEpoch: 1, | Loss: 1.423494815826416\nEpoch: 1, | Loss: 1.3785333633422852\nEpoch: 1, | Loss: 1.2990778684616089\nEpoch: 1, | Loss: 1.2252511978149414\nEpoch: 1, | Loss: 1.3543459177017212\nEpoch: 1, | Loss: 1.336092233657837\nEpoch: 1, | Loss: 1.394531011581421\nEpoch: 1, | Loss: 1.2090952396392822\nEpoch: 1, | Loss: 1.3966002464294434\nEpoch: 1, | Loss: 1.1067705154418945\nEpoch: 1, | Loss: 1.2832252979278564\nEpoch: 1, | Loss: 1.4224506616592407\nEpoch: 1, | Loss: 1.4573304653167725\nEpoch: 1, | Loss: 1.2919801473617554\nEpoch: 1, | Loss: 
1.3525831699371338\nEpoch: 1, | Loss: 1.113459825515747\nEpoch: 1, | Loss: 1.4404045343399048\nEpoch: 1, | Loss: 1.3068095445632935\nEpoch: 1, | Loss: 1.2566174268722534\nEpoch: 1, | Loss: 1.1975817680358887\nEpoch: 1, | Loss: 1.2946299314498901\nEpoch: 1, | Loss: 1.5031222105026245\nEpoch: 1, | Loss: 1.4052891731262207\nEpoch: 1, | Loss: 1.3215574026107788\nEpoch: 1, | Loss: 1.502802848815918\nEpoch: 1, | Loss: 1.2137442827224731\nEpoch: 1, | Loss: 1.2630358934402466\nEpoch: 1, | Loss: 1.2480052709579468\nEpoch: 1, | Loss: 1.1748147010803223\nEpoch: 1, | Loss: 1.3075768947601318\nEpoch: 1, | Loss: 1.4848074913024902\nEpoch: 1, | Loss: 1.359654188156128\nEpoch: 1, | Loss: 1.367064356803894\nEpoch: 1, | Loss: 1.3566868305206299\nEpoch: 1, | Loss: 1.1814059019088745\nEpoch: 1, | Loss: 1.382716178894043\nEpoch: 1, | Loss: 1.2418136596679688\nEpoch: 1, | Loss: 1.487998127937317\nEpoch: 1, | Loss: 1.1606115102767944\nEpoch: 1, | Loss: 1.210985541343689\nEpoch: 1, | Loss: 1.5726450681686401\nEpoch: 1, | Loss: 1.1975990533828735\nEpoch: 1, | Loss: 1.2851539850234985\nEpoch: 1, | Loss: 1.2811022996902466\nEpoch: 1, | Loss: 0.9745256304740906\nEpoch: 1, | Loss: 1.3818031549453735\nEpoch: 1, | Loss: 1.266993522644043\nEpoch: 1, | Loss: 1.281699776649475\nEpoch: 1, | Loss: 1.356113314628601\nEpoch: 1, | Loss: 1.031165361404419\nEpoch: 1, | Loss: 1.1052879095077515\nEpoch: 1, | Loss: 1.2047781944274902\nEpoch: 1, | Loss: 1.488179326057434\nEpoch: 1, | Loss: 1.4291750192642212\nEpoch: 1, | Loss: 1.198131799697876\nEpoch: 1, | Loss: 1.1763678789138794\nEpoch: 1, | Loss: 1.0646899938583374\nEpoch: 1, | Loss: 1.289359450340271\nEpoch: 1, | Loss: 1.4328956604003906\nEpoch: 1, | Loss: 1.1799676418304443\nEpoch: 1, | Loss: 1.3820618391036987\nEpoch: 1, | Loss: 1.1124380826950073\nEpoch: 1, | Loss: 1.1585581302642822\nEpoch: 1, | Loss: 1.2595069408416748\nEpoch: 1, | Loss: 1.4869842529296875\nEpoch: 1, | Loss: 1.3511043787002563\nEpoch: 1, | Loss: 1.302904486656189\nEpoch: 1, | Loss: 1.2984356880187988\nEpoch: 1, | Loss: 1.268978476524353\nEpoch: 1, | Loss: 1.3513026237487793\nEpoch: 1, | Loss: 1.346780776977539\nEpoch: 1, | Loss: 1.4619845151901245\nEpoch: 1, | Loss: 1.3158633708953857\nEpoch: 1, | Loss: 1.3570668697357178\nEpoch: 1, | Loss: 1.3115622997283936\nEpoch: 1, | Loss: 1.1120936870574951\nEpoch: 1, | Loss: 1.1535944938659668\nEpoch: 1, | Loss: 1.1250364780426025\nEpoch: 1, | Loss: 1.2397269010543823\nEpoch: 1, | Loss: 1.330161213874817\nEpoch: 1, | Loss: 1.4579788446426392\nEpoch: 1, | Loss: 1.153367280960083\nEpoch: 1, | Loss: 1.1978938579559326\nEpoch: 1, | Loss: 1.2818348407745361\nEpoch: 1, | Loss: 1.240523099899292\nEpoch: 1, | Loss: 1.283247470855713\nEpoch: 1, | Loss: 1.3392599821090698\nEpoch: 1, | Loss: 1.2881269454956055\nEpoch: 1, | Loss: 1.287299394607544\nEpoch: 1, | Loss: 1.2496486902236938\nEpoch: 1, | Loss: 1.3451167345046997\nEpoch: 1, | Loss: 1.514665961265564\nEpoch: 1, | Loss: 1.3301310539245605\nEpoch: 1, | Loss: 1.2946044206619263\nEpoch: 1, | Loss: 1.3693774938583374\nEpoch: 1, | Loss: 1.2106473445892334\nEpoch: 1, | Loss: 1.2141366004943848\nEpoch: 1, | Loss: 1.5447413921356201\nEpoch: 1, | Loss: 1.386259913444519\nEpoch: 1, | Loss: 1.4681321382522583\nEpoch: 1, | Loss: 1.4315094947814941\nEpoch: 1, | Loss: 1.389166235923767\nEpoch: 1, | Loss: 1.1746869087219238\nEpoch: 1, | Loss: 1.282170295715332\nEpoch: 1, | Loss: 1.3671268224716187\nEpoch: 1, | Loss: 1.6811981201171875\nEpoch: 1, | Loss: 1.1945055723190308\nEpoch: 1, | Loss: 1.1401394605636597\nEpoch: 1, | Loss: 
1.3543992042541504\nEpoch: 1, | Loss: 1.3394800424575806\nEpoch: 1, | Loss: 1.4498770236968994\nEpoch: 1, | Loss: 1.2716094255447388\nEpoch: 1, | Loss: 1.3174337148666382\nEpoch: 1, | Loss: 1.1227093935012817\nEpoch: 1, | Loss: 1.2315298318862915\nEpoch: 1, | Loss: 1.375878095626831\nEpoch: 1, | Loss: 1.3357716798782349\nEpoch: 1, | Loss: 1.0040476322174072\n"
193 | }
194 | ],
195 | "source": [
196 | "# Creating the training loop and printing the loss. For this experiment we use the whole dataset for training. \n",
197 | "\n",
198 | "for epoch in range(2):\n",
199 | "    for data in train_loader:\n",
200 | "        inputs, labels = data\n",
201 | "        # Forward pass through the network to get the prediction\n",
202 | "        y_pred = model(inputs)\n",
203 | "\n",
204 | "        # Calculate the loss\n",
205 | "        loss = criterion(y_pred, labels)\n",
206 | "\n",
207 | "        # Print the result\n",
208 | "        print(f'Epoch: {epoch}, | Loss: {loss}')\n",
209 | "\n",
210 | "        # Backpropagation and updating the weights\n",
211 | "        optimus.zero_grad()\n",
212 | "        loss.backward()\n",
213 | "        optimus.step()\n",
214 | ""
215 | ]
216 | },
217 | {
218 | "cell_type": "code",
219 | "execution_count": 79,
220 | "metadata": {},
221 | "outputs": [],
222 | "source": [
223 | "# As you can see, the loss barely improves. This is addressed with a CNN in lecture 10."
224 | ]
225 | }
226 | ]
227 | }
--------------------------------------------------------------------------------
/lecture_10.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 2,
4 | "metadata": {
5 | "language_info": {
6 | "name": "python",
7 | "codemirror_mode": {
8 | "name": "ipython",
9 | "version": 3
10 | },
11 | "version": "3.7.6-final"
12 | },
13 | "orig_nbformat": 2,
14 | "file_extension": ".py",
15 | "mimetype": "text/x-python",
16 | "name": "python",
17 | "npconvert_exporter": "python",
18 | "pygments_lexer": "ipython3",
19 | "version": 3,
20 | "kernelspec": {
21 | "name": "python37664bitfastaiconda149f4ca18fae45818735beadf08062d0",
22 | "language": "python",
23 | "display_name": "Python 3.7.6 64-bit ('fastai': conda)"
24 | }
25 | },
26 | "cells": [
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {},
30 | "source": [
31 | "## Lecture 10:\n",
32 | "\n",
33 | "\n",
34 | "* We are going to use a **_CNN_** in this lecture\n",
35 | "* The dataset to be used is the **_MNIST Dataset_**, provided by the torchvision package"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 2,
41 | "metadata": {},
42 | "outputs": [],
43 | "source": [
44 | "# Importing the stock libraries\n",
45 | "\n",
46 | "import numpy as np\n",
47 | "import pandas as pd\n",
48 | "import torch\n",
49 | "from torch.utils.data import DataLoader, Dataset\n",
50 | "from torchvision import datasets, transforms"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 3,
56 | "metadata": {},
57 | "outputs": [],
58 | "source": [
59 | "# Selecting the CUDA device if one is available\n",
60 | "\n",
61 | "from torch import cuda\n",
62 | "device = 'cuda' if cuda.is_available() else 'cpu'"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": 4,
68 | "metadata": {},
69 | "outputs": [],
70 | "source": [
71 | "# Creating the dataset for the model\n",
72 | "\n",
73 | "train_dataset = datasets.MNIST(root='./data/', train=True, transform=transforms.ToTensor(), download=True)\n",
74 | "test_dataset = datasets.MNIST(root='./data/', train=False, transform=transforms.ToTensor(), download=True)"
75 | ]
76 | },
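Before building the model it is worth confirming what the dataset cell above actually yields. A minimal sketch (not a cell from the original notebook) of the shapes to expect from `datasets.MNIST` with `ToTensor()`:

```python
import torch
from torch.utils.data import DataLoader
from torchvision import datasets, transforms

train_dataset = datasets.MNIST(root='./data/', train=True,
                               transform=transforms.ToTensor(), download=True)

# Each sample is a 1x28x28 float tensor scaled into [0, 1], plus an integer label 0-9
image, label = train_dataset[0]
print(image.shape)                              # torch.Size([1, 28, 28])
print(image.min().item(), image.max().item())   # 0.0 1.0
print(label)                                    # e.g. 5 for the first training digit

# A DataLoader with batch_size=32 stacks these into 4-D batches
loader = DataLoader(train_dataset, batch_size=32, shuffle=True)
images, labels = next(iter(loader))
print(images.shape)                             # torch.Size([32, 1, 28, 28])
print(labels.shape)                             # torch.Size([32])
```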
"cell_type": "code", 79 | "execution_count": 5, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "# Creating the dataloader for the input to the model\n", 84 | "\n", 85 | "train_dataloader = DataLoader(dataset=train_dataset, batch_size=32, shuffle=True)\n", 86 | "test_dataloader = DataLoader(dataset=test_dataset, batch_size=32, shuffle=False)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 6, 92 | "metadata": {}, 93 | "outputs": [ 94 | { 95 | "data": { 96 | "text/plain": "Model(\n (conv1): Conv2d(1, 10, kernel_size=(5, 5), stride=(1, 1))\n (conv2): Conv2d(10, 20, kernel_size=(5, 5), stride=(1, 1))\n (l1): Linear(in_features=320, out_features=10, bias=True)\n (max): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n)" 97 | }, 98 | "execution_count": 6, 99 | "metadata": {}, 100 | "output_type": "execute_result" 101 | } 102 | ], 103 | "source": [ 104 | "# Creating the Model\n", 105 | "\n", 106 | "class Model(torch.nn.Module):\n", 107 | "\n", 108 | " ## Initiatize\n", 109 | " def __init__(self):\n", 110 | " super(Model, self).__init__()\n", 111 | " self.conv1 = torch.nn.Conv2d(1,10,kernel_size=5)\n", 112 | " self.conv2 = torch.nn.Conv2d(10,20,kernel_size=5)\n", 113 | " self.l1 = torch.nn.Linear(320, 10)\n", 114 | "\n", 115 | " self.max = torch.nn.MaxPool2d(2)\n", 116 | "\n", 117 | "\n", 118 | " ## Forward\n", 119 | " def forward(self, x):\n", 120 | " in_size = x.size(0)\n", 121 | " x = torch.nn.functional.relu(self.max(self.conv1(x)))\n", 122 | " x = torch.nn.functional.relu(self.max(self.conv2(x)))\n", 123 | " x = x.view(in_size, -1)\n", 124 | " x = self.l1(x)\n", 125 | " return torch.nn.functional.log_softmax(x)\n", 126 | "\n", 127 | "model = Model()\n", 128 | "model.to(device)" 129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": 7, 134 | "metadata": {}, 135 | "outputs": [], 136 | "source": [ 137 | "# Define the loss function and optimizer\n", 138 | "\n", 139 | "criterion = torch.nn.CrossEntropyLoss(reduction='mean')\n", 140 | "optimus = torch.optim.SGD(model.parameters(), lr = 0.01, momentum=0.05)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 8, 146 | "metadata": {}, 147 | "outputs": [], 148 | "source": [ 149 | "# This is the code for training\n", 150 | "\n", 151 | "def train(epoch):\n", 152 | " model.train()\n", 153 | " for _, data in enumerate(train_dataloader, 0):\n", 154 | " inputs, labels = data\n", 155 | " inputs = inputs.to(device)\n", 156 | " labels = labels.to(device)\n", 157 | "\n", 158 | " # Forward pass\n", 159 | " predict = model(inputs)\n", 160 | "\n", 161 | " # loss\n", 162 | " loss = criterion(predict, labels)\n", 163 | " if _%500 == 0:\n", 164 | " print(f'Epoch: {epoch}, Loss: {loss.item()}')\n", 165 | "\n", 166 | " # zero optimus,back propagation, update optimus\n", 167 | " optimus.zero_grad()\n", 168 | " loss.backward()\n", 169 | " optimus.step()\n", 170 | "" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": 9, 176 | "metadata": {}, 177 | "outputs": [], 178 | "source": [ 179 | "# This code is for the validation step\n", 180 | "\n", 181 | "def valid(test_dataloader):\n", 182 | " model.eval()\n", 183 | " total=0; n_correct =0 \n", 184 | " with torch.no_grad():\n", 185 | " for _, data in enumerate(test_dataloader,0):\n", 186 | " inputs, labels = data\n", 187 | " inputs = inputs.to(device)\n", 188 | " labels = labels.to(device)\n", 189 | "\n", 190 | " if _%500 == 0:\n", 191 | " print(f'Validation completed for {_} samples')\n", 
143 | {
144 | "cell_type": "code",
145 | "execution_count": 8,
146 | "metadata": {},
147 | "outputs": [],
148 | "source": [
149 | "# This is the code for training\n",
150 | "\n",
151 | "def train(epoch):\n",
152 | "    model.train()\n",
153 | "    for batch_idx, data in enumerate(train_dataloader):\n",
154 | "        inputs, labels = data\n",
155 | "        inputs = inputs.to(device)\n",
156 | "        labels = labels.to(device)\n",
157 | "\n",
158 | "        # Forward pass\n",
159 | "        predict = model(inputs)\n",
160 | "\n",
161 | "        # Loss\n",
162 | "        loss = criterion(predict, labels)\n",
163 | "        if batch_idx % 500 == 0:\n",
164 | "            print(f'Epoch: {epoch}, Loss: {loss.item()}')\n",
165 | "\n",
166 | "        # Zero the gradients, backpropagate, update the weights\n",
167 | "        optimus.zero_grad()\n",
168 | "        loss.backward()\n",
169 | "        optimus.step()\n",
170 | ""
171 | ]
172 | },
173 | {
174 | "cell_type": "code",
175 | "execution_count": 9,
176 | "metadata": {},
177 | "outputs": [],
178 | "source": [
179 | "# This code is for the validation step\n",
180 | "\n",
181 | "def valid(test_dataloader):\n",
182 | "    model.eval()\n",
183 | "    total = 0; n_correct = 0\n",
184 | "    with torch.no_grad():\n",
185 | "        for batch_idx, data in enumerate(test_dataloader):\n",
186 | "            inputs, labels = data\n",
187 | "            inputs = inputs.to(device)\n",
188 | "            labels = labels.to(device)\n",
189 | "\n",
190 | "            if batch_idx % 500 == 0:\n",
191 | "                print(f'Validation completed for {batch_idx} batches')\n",
192 | "\n",
193 | "            predict = model(inputs)\n",
194 | "            total += labels.size(0)\n",
195 | "            big_val, big_idx = torch.max(predict.data, dim=1)\n",
196 | "            n_correct += (big_idx == labels).sum().item()\n",
197 | "\n",
198 | "    return (n_correct*100)/total"
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": 10,
204 | "metadata": {},
205 | "outputs": [
206 | {
207 | "name": "stdout",
208 | "output_type": "stream",
209 | "text": [
210 | "Starting the training\n",
211 | "Epoch: 0, Loss: 2.313748598098755\n",
212 | "Epoch: 0, Loss: 0.3788803517818451\n",
213 | "Epoch: 0, Loss: 0.35306981205940247\n",
214 | "Epoch: 0, Loss: 0.15131986141204834\n",
215 | "Epoch: 1, Loss: 0.48513269424438477\n",
216 | "Epoch: 1, Loss: 0.13334602117538452\n",
217 | "Epoch: 1, Loss: 0.1520206332206726\n",
218 | "Epoch: 1, Loss: 0.08754238486289978\n",
219 | "Training Completed\n",
220 | "+++++++++++++++++=================+++++++++++++++++=================++++++++++++++++\n",
221 | "Starting the testing\n",
222 | "Validation completed for 0 batches\n",
223 | "The accuracy of the model is: 96.57\n"
224 | ]
225 | }
233 | ],
234 | "source": [
235 | "# Calling the training function\n",
236 | "\n",
237 | "print('Starting the training')\n",
238 | "\n",
239 | "for epoch in range(2):\n",
240 | "    train(epoch)\n",
241 | "\n",
242 | "print('Training Completed')\n",
243 | "\n",
244 | "print('+++++++++++++++++=================+++++++++++++++++=================++++++++++++++++')\n",
245 | "\n",
246 | "print('Starting the testing')\n",
247 | "accu = valid(test_dataloader)\n",
248 | "print(f'The accuracy of the model is: {accu}')"
249 | ]
250 | },
251 | {
252 | "cell_type": "code",
253 | "execution_count": null,
254 | "metadata": {},
255 | "outputs": [],
256 | "source": []
257 | }
258 | ]
259 | }
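Where does the `Linear(320, 10)` in this notebook's model come from? A minimal sketch of the shape bookkeeping (not a cell from the original notebook), assuming the 28x28 MNIST inputs used above:

```python
import torch

# conv1 (kernel 5, no padding): 28 -> 24; maxpool(2): 24 -> 12
# conv2 (kernel 5, no padding): 12 -> 8;  maxpool(2): 8 -> 4
# flattened features: 20 channels * 4 * 4 = 320
x = torch.randn(1, 1, 28, 28)                 # one dummy MNIST-sized image
conv1 = torch.nn.Conv2d(1, 10, kernel_size=5)
conv2 = torch.nn.Conv2d(10, 20, kernel_size=5)
pool = torch.nn.MaxPool2d(2)

x = pool(conv1(x))
print(x.shape)                                # torch.Size([1, 10, 12, 12])
x = pool(conv2(x))
print(x.shape)                                # torch.Size([1, 20, 4, 4])
print(x.view(1, -1).shape)                    # torch.Size([1, 320])
```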
--------------------------------------------------------------------------------
/lecture_11.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 2,
4 | "metadata": {
5 | "language_info": {
6 | "name": "python",
7 | "codemirror_mode": {
8 | "name": "ipython",
9 | "version": 3
10 | },
11 | "version": "3.7.6-final"
12 | },
13 | "orig_nbformat": 2,
14 | "file_extension": ".py",
15 | "mimetype": "text/x-python",
16 | "name": "python",
17 | "npconvert_exporter": "python",
18 | "pygments_lexer": "ipython3",
19 | "version": 3,
20 | "kernelspec": {
21 | "name": "python37664bitfastaiconda149f4ca18fae45818735beadf08062d0",
22 | "display_name": "Python 3.7.6 64-bit ('fastai': conda)"
23 | }
24 | },
25 | "cells": [
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "## Lecture 11:\n",
31 | "\n",
32 | "\n",
33 | "* We are going to create toy inception modules and then use them in our model.\n",
34 | "* Everything else remains the same as in lecture 10.\n",
35 | "* The dataset to be used is the **_MNIST Dataset_**, provided by the torchvision package\n",
36 | "\n",
37 | "\n",
38 | "_It is worth noting that we can define a new network component as a class and then use objects of that class inside our core network. This is a very neat and useful pattern._"
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": 1,
44 | "metadata": {},
45 | "outputs": [],
46 | "source": [
47 | "# Importing the stock libraries\n",
48 | "\n",
49 | "import numpy as np\n",
50 | "import pandas as pd\n",
51 | "import torch\n",
52 | "from torch.utils.data import DataLoader, Dataset\n",
53 | "from torchvision import datasets, transforms"
54 | ]
55 | },
56 | {
57 | "cell_type": "code",
58 | "execution_count": 2,
59 | "metadata": {},
60 | "outputs": [],
61 | "source": [
62 | "# Selecting the CUDA device if one is available\n",
63 | "\n",
64 | "from torch import cuda\n",
65 | "device = 'cuda' if cuda.is_available() else 'cpu'"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": 3,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "# Creating the dataset for the model\n",
75 | "\n",
76 | "train_dataset = datasets.MNIST(root='./data/', train=True, transform=transforms.ToTensor(), download=True)\n",
77 | "test_dataset = datasets.MNIST(root='./data/', train=False, transform=transforms.ToTensor(), download=True)"
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "execution_count": 4,
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "# Creating the dataloaders that feed batches into the model\n",
87 | "\n",
88 | "train_dataloader = DataLoader(dataset=train_dataset, batch_size=32, shuffle=True)\n",
89 | "test_dataloader = DataLoader(dataset=test_dataset, batch_size=32, shuffle=False)"
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": 5,
95 | "metadata": {},
96 | "outputs": [],
97 | "source": [
98 | "# Here we define the inception component, which is a small network in itself. Once defined as a class, objects of this class can be reused as building blocks in other networks.\n",
99 | "\n",
100 | "class InceptionM(torch.nn.Module):\n",
101 | "\n",
102 | "    # Initialize\n",
103 | "    def __init__(self, in_channels):\n",
104 | "        super(InceptionM, self).__init__()\n",
105 | "        self.branch_01_02 = torch.nn.Conv2d(in_channels, 24, kernel_size=1)  # branch 1: 3x3 average pooling followed by a 1x1 conv (24 channels)\n",
106 | "        self.branch_02_01 = torch.nn.Conv2d(in_channels, 16, kernel_size=1)  # branch 2: a plain 1x1 conv (16 channels)\n",
107 | "\n",
108 | "        self.branch_03_01 = torch.nn.Conv2d(in_channels, 16, kernel_size=1)  # branch 3: 1x1 conv ...\n",
109 | "        self.branch_03_02 = torch.nn.Conv2d(16, 24, kernel_size=5, padding=2)  # ... followed by a 5x5 conv (24 channels)\n",
110 | "\n",
111 | "        self.branch_04_01 = torch.nn.Conv2d(in_channels, 16, kernel_size=1)  # branch 4: 1x1 conv ...\n",
112 | "        self.branch_04_02 = torch.nn.Conv2d(16, 24, kernel_size=3, padding=1)  # ... followed by two 3x3 convs\n",
113 | "        self.branch_04_03 = torch.nn.Conv2d(24, 24, kernel_size=3, padding=1)  # ... (24 channels)\n",
114 | "\n",
115 | "\n",
116 | "    def forward(self, x):\n",
117 | "        branch_01_02 = self.branch_01_02(torch.nn.functional.avg_pool2d(x, kernel_size=3, stride=1, padding=1))\n",
118 | "\n",
119 | "        branch_02_01 = self.branch_02_01(x)\n",
120 | "\n",
121 | "        branch_03_01 = self.branch_03_01(x)\n",
122 | "        branch_03_02 = self.branch_03_02(branch_03_01)\n",
123 | "\n",
124 | "        branch_04_01 = self.branch_04_01(x)\n",
125 | "        branch_04_02 = self.branch_04_02(branch_04_01)\n",
126 | "        branch_04_03 = self.branch_04_03(branch_04_02)\n",
127 | "\n",
128 | "        outputs = [branch_01_02, branch_02_01, branch_03_02, branch_04_03]\n",
129 | "\n",
130 | "        return torch.cat(outputs, 1)  # concatenate along channels: 24 + 16 + 24 + 24 = 88\n",
131 | ""
132 | ]
133 | },
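Every branch above preserves the spatial size (through its padding), so the four outputs can be concatenated along the channel dimension. A minimal check (not a cell from the original notebook) that the module emits the 24 + 16 + 24 + 24 = 88 channels the next conv layer expects:

```python
import torch

# Assumes the InceptionM class defined above is in scope
m = InceptionM(in_channels=10)
x = torch.randn(1, 10, 12, 12)   # e.g. the 10-channel 12x12 map after conv1 + pooling
print(m(x).shape)                # torch.Size([1, 88, 12, 12]): spatial size kept, 88 channels out
```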
134 | {
135 | "cell_type": "code",
136 | "execution_count": 6,
137 | "metadata": {},
138 | "outputs": [
139 | {
140 | "output_type": "execute_result",
141 | "data": {
142 | "text/plain": "Model(\n (conv1): Conv2d(1, 10, kernel_size=(5, 5), stride=(1, 1))\n (conv2): Conv2d(88, 20, kernel_size=(5, 5), stride=(1, 1))\n (incep_1): InceptionM(\n (branch_01_02): Conv2d(10, 24, kernel_size=(1, 1), stride=(1, 1))\n (branch_02_01): Conv2d(10, 16, kernel_size=(1, 1), stride=(1, 1))\n (branch_03_01): Conv2d(10, 16, kernel_size=(1, 1), stride=(1, 1))\n (branch_03_02): Conv2d(16, 24, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))\n (branch_04_01): Conv2d(10, 16, kernel_size=(1, 1), stride=(1, 1))\n (branch_04_02): Conv2d(16, 24, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (branch_04_03): Conv2d(24, 24, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n )\n (incep_2): InceptionM(\n (branch_01_02): Conv2d(20, 24, kernel_size=(1, 1), stride=(1, 1))\n (branch_02_01): Conv2d(20, 16, kernel_size=(1, 1), stride=(1, 1))\n (branch_03_01): Conv2d(20, 16, kernel_size=(1, 1), stride=(1, 1))\n (branch_03_02): Conv2d(16, 24, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))\n (branch_04_01): Conv2d(20, 16, kernel_size=(1, 1), stride=(1, 1))\n (branch_04_02): Conv2d(16, 24, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n (branch_04_03): Conv2d(24, 24, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n )\n (l1): Linear(in_features=1408, out_features=10, bias=True)\n (max): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n)"
143 | },
144 | "metadata": {},
145 | "execution_count": 6
146 | }
147 | ],
148 | "source": [
149 | "# Creating the Model\n",
150 | "\n",
151 | "class Model(torch.nn.Module):\n",
152 | "\n",
153 | "    ## Initialize\n",
154 | "    def __init__(self):\n",
155 | "        super(Model, self).__init__()\n",
156 | "        self.conv1 = torch.nn.Conv2d(1, 10, kernel_size=5)\n",
157 | "        self.conv2 = torch.nn.Conv2d(88, 20, kernel_size=5)  # 88 input channels: the output of incep_1\n",
158 | "\n",
159 | "        self.incep_1 = InceptionM(in_channels=10)\n",
160 | "        self.incep_2 = InceptionM(in_channels=20)\n",
161 | "\n",
162 | "        self.l1 = torch.nn.Linear(1408, 10)  # 88 channels * 4 * 4 = 1408 features\n",
163 | "\n",
164 | "        self.max = torch.nn.MaxPool2d(2)\n",
165 | "\n",
166 | "\n",
167 | "    ## Forward\n",
168 | "    def forward(self, x):\n",
169 | "        in_size = x.size(0)\n",
170 | "        x = torch.nn.functional.relu(self.max(self.conv1(x)))\n",
171 | "        x = self.incep_1(x)\n",
172 | "        x = torch.nn.functional.relu(self.max(self.conv2(x)))\n",
173 | "        x = self.incep_2(x)\n",
174 | "        x = x.view(in_size, -1)\n",
175 | "        x = self.l1(x)\n",
176 | "        return torch.nn.functional.log_softmax(x, dim=1)  # CrossEntropyLoss applies log-softmax internally, so returning raw logits would be the more standard choice\n",
177 | "\n",
178 | "model = Model()\n",
179 | "model.to(device)"
180 | ]
181 | },
182 | {
183 | "cell_type": "code",
184 | "execution_count": 7,
185 | "metadata": {},
186 | "outputs": [],
187 | "source": [
188 | "# Define the loss function and optimizer\n",
189 | "\n",
190 | "criterion = torch.nn.CrossEntropyLoss(reduction='mean')\n",
191 | "optimus = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.5)"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 8,
197 | "metadata": {},
198 | "outputs": [],
199 | "source": [
200 | "# This is the code for training\n",
201 | "\n",
202 | "def train(epoch):\n",
203 | "    model.train()\n",
204 | "    for batch_idx, data in enumerate(train_dataloader):\n",
205 | "        inputs, labels = data\n",
206 | "        inputs = inputs.to(device)\n",
207 | "        labels = labels.to(device)\n",
208 | "\n",
209 | "        # Forward pass\n",
210 | "        predict = model(inputs)\n",
211 | "\n",
212 | "        # Loss\n",
213 | "        loss = criterion(predict, labels)\n",
214 | "        if batch_idx % 500 == 0:\n",
215 | "            print(f'Epoch: {epoch}, Loss: {loss.item()}')\n",
216 | "\n",
217 | "        # Zero the gradients, backpropagate, update the weights\n",
218 | "        optimus.zero_grad()\n",
219 | "        loss.backward()\n",
220 | "        optimus.step()\n",
221 | ""
222 | ]
223 | },
224 | {
225 | "cell_type": "code",
226 | "execution_count": 9,
227 | "metadata": {},
228 | "outputs": [],
229 | "source": [
230 | "# This code is for the validation step\n",
231 | "\n",
232 | "def valid(test_dataloader):\n",
233 | "    model.eval()\n",
234 | "    total = 0; n_correct = 0\n",
235 | "    with torch.no_grad():\n",
236 | "        for batch_idx, data in enumerate(test_dataloader):\n",
237 | "            inputs, labels = data\n",
238 | "            inputs = inputs.to(device)\n",
239 | "            labels = labels.to(device)\n",
240 | "\n",
241 | "            if batch_idx % 500 == 0:\n",
242 | "                print(f'Validation completed for {batch_idx} batches')\n",
243 | "\n",
244 | "            predict = model(inputs)\n",
245 | "            total += labels.size(0)\n",
246 | "            big_val, big_idx = torch.max(predict.data, dim=1)\n",
247 | "            n_correct += (big_idx == labels).sum().item()\n",
248 | "\n",
249 | "    return (n_correct*100)/total"
250 | ]
251 | },
252 | {
253 | "cell_type": "code",
254 | "execution_count": 10,
255 | "metadata": {},
256 | "outputs": [
257 | {
258 | "output_type": "stream",
259 | "name": "stdout",
260 | "text": "Starting the training\nEpoch: 0, Loss: 2.3154919147491455\nEpoch: 0, Loss: 0.5073910355567932\nEpoch: 0, Loss: 0.13816455006599426\nEpoch: 0, Loss: 0.17782428860664368\nEpoch: 1, Loss: 0.0799398273229599\nEpoch: 1, Loss: 0.021845880895853043\nEpoch: 1, Loss: 0.24345053732395172\nEpoch: 1, Loss: 0.059964992105960846\nTraining Completed\n+++++++++++++++++=================+++++++++++++++++=================++++++++++++++++\nStarting the testing\nValidation completed for 0 batches\nThe accuracy of the model is: 97.76\n"
261 | }
262 | ],
263 | "source": [
264 | "# Calling the training function\n",
265 | "\n",
266 | "print('Starting the training')\n",
267 | "\n",
268 | "for epoch in range(2):\n",
269 | "    train(epoch)\n",
270 | "\n",
271 | "print('Training Completed')\n",
272 | "\n",
273 | "print('+++++++++++++++++=================+++++++++++++++++=================++++++++++++++++')\n",
274 | "\n",
275 | "print('Starting the testing')\n",
276 | "accu = valid(test_dataloader)\n",
277 | "print(f'The accuracy of the model is: {accu}')"
278 | ]
279 | },
280 | {
281 | "cell_type": "code",
282 | "execution_count": null,
283 | "metadata": {},
284 | "outputs": [],
285 | "source": []
286 | }
287 | ]
288 | }
--------------------------------------------------------------------------------
/lecture_12.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 2,
4 | "metadata": {
5 | "language_info": {
6 | "name": "python",
7 | "codemirror_mode": {
8 | "name": "ipython",
9 | "version": 3
10 | },
11 | "version": "3.7.6-final"
12 | },
13 | "orig_nbformat": 2,
14 | "file_extension": ".py",
15 | "mimetype": "text/x-python",
16 | "name": "python",
17 | "npconvert_exporter": "python",
18 | "pygments_lexer": "ipython3",
19 | "version": 3,
20 | "kernelspec": {
21 | "name": "python37664bitfastaiconda149f4ca18fae45818735beadf08062d0",
22 | "display_name": "Python 3.7.6 64-bit ('fastai': conda)"
23 | }
24 | },
25 | "cells": [
26 | {
27 | "cell_type": "markdown",
28 | "metadata": {},
29 | "source": [
30 | "## Lecture 12: \n",
31 | "\n",
32 | "* We are going to use RNNs in this session. \n",
33 | "* A very powerful concept: the hidden state from one step is fed into the next step as an additional input, alongside the new input. \n",
34 | "\n",
35 | "\n",
36 | "For `RNN` the arguments that are passed are:\n",
37 | "\n",
38 | "* `input_size`\n",
39 | "* `hidden_size`"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 1,
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "# Importing stock libraries\n",
49 | "\n",
50 | "import numpy as np\n",
51 | "import pandas as pd\n",
52 | "import torch\n",
53 | "from torch.utils.data import DataLoader, Dataset"
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "### Experiment 1: \n",
61 | "\n",
62 | "* Feeding `H E L L O` to an RNN\n",
63 | "* We will use one-hot vector encoding for this\n",
64 | "\n",
65 | "\n",
66 | "* $h = [1,0,0,0]$\n",
67 | "* $e = [0,1,0,0]$\n",
68 | "* $l = [0,0,1,0]$\n",
69 | "* $o = [0,0,0,1]$\n",
70 | "\n",
71 | "These are fed to the network/cell one at a time. Hence,\n",
72 | "* `input size = 4`\n",
73 | "* `hidden size = 2`\n"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 14,
79 | "metadata": {},
80 | "outputs": [],
81 | "source": [
82 | "h = [1,0,0,0]\n",
83 | "e = [0,1,0,0]\n",
84 | "l = [0,0,1,0]\n",
85 | "o = [0,0,0,1]"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": 25,
91 | "metadata": {},
92 | "outputs": [
93 | {
94 | "output_type": "stream",
95 | "name": "stdout",
96 | "text": "Input size torch.Size([1, 1, 4])\nOut tensor([[[0.6362, 0.4687]]])\nType torch.float32\nSize torch.Size([1, 1, 2])\nHidden tensor([[[0.6362, 0.4687]]])\n"
97 | }
98 | ],
99 | "source": [
100 | "# Create an RNN cell with the desired input size and hidden size\n",
101 | "\n",
102 | "cell = torch.nn.RNN(input_size=4, hidden_size=2, batch_first=True)\n",
103 | "\n",
104 | "# Creating the one-letter input\n",
105 | "h = [1,0,0,0]\n",
106 | "inputs = torch.Tensor([[h]])\n",
107 | "print('Input size', inputs.size())\n",
108 | "\n",
109 | "# Initialize the hidden state\n",
110 | "# (num_layers*num_directions, batch, hidden_size)\n",
111 | "\n",
112 | "hidden = torch.rand(1, 1, 2)\n",
113 | "\n",
114 | "# Feed one element at a time;\n",
115 | "# after each step, hidden contains the hidden state\n",
116 | "out, hidden = cell(inputs, hidden)\n",
117 | "print('Out', out.data)\n",
118 | "print('Type', out.dtype)\n",
119 | "print('Size', out.size())\n",
120 | "print('Hidden', hidden.data)"
121 | ]
122 | },
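The single-step output above can be reproduced by hand: `torch.nn.RNN` implements the Elman update out_t = tanh(x_t W_ih^T + b_ih + h_{t-1} W_hh^T + b_hh). A minimal sketch (not a cell from the original notebook) verifying this against the cell's own parameters:

```python
import torch

torch.manual_seed(0)
cell = torch.nn.RNN(input_size=4, hidden_size=2, batch_first=True)

x = torch.Tensor([[[1, 0, 0, 0]]])   # (batch=1, seq_len=1, input_size=4): the letter 'h'
h0 = torch.zeros(1, 1, 2)            # (num_layers*num_directions, batch, hidden_size)

out, hidden = cell(x, h0)

# Recompute the same step directly from the cell's learned parameters
manual = torch.tanh(x[0] @ cell.weight_ih_l0.T + cell.bias_ih_l0
                    + h0[0] @ cell.weight_hh_l0.T + cell.bias_hh_l0)
print(torch.allclose(out[0], manual))  # True
```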
123 | {
124 | "cell_type": "code",
125 | "execution_count": 24,
126 | "metadata": {},
127 | "outputs": [
128 | {
129 | "output_type": "stream",
130 | "name": "stdout",
131 | "text": "Input size torch.Size([1, 5, 4])\nOut tensor([[[ 0.4185, -0.8764],\n [ 0.5675, 0.2853],\n [ 0.3233, -0.7229],\n [ 0.2798, -0.4977],\n [ 0.2200, -0.6902]]])\nType torch.float32\nSize torch.Size([1, 5, 2])\nHidden tensor([[[ 0.2200, -0.6902]]])\n"
132 | }
133 | ],
134 | "source": [
135 | "# Create an RNN cell with the desired input size and hidden size; this time we feed more than one character\n",
136 | "\n",
137 | "cell = torch.nn.RNN(input_size=4, hidden_size=2, batch_first=True)\n",
138 | "\n",
139 | "# hidden_size = 2\n",
140 | "# batch_size = 1\n",
141 | "# sequence_length = 5\n",
142 | "\n",
143 | "inputs = torch.Tensor([[h,e,l,l,o]])\n",
144 | "print('Input size', inputs.size())\n",
145 | "\n",
146 | "# Initialize the hidden state\n",
147 | "# (num_layers*num_directions, batch, hidden_size)\n",
148 | "\n",
149 | "hidden = torch.rand(1, 1, 2)\n",
150 | "\n",
151 | "# Feed the whole sequence at once;\n",
152 | "# after the call, hidden contains the final hidden state\n",
153 | "out, hidden = cell(inputs, hidden)\n",
154 | "print('Out', out.data)\n",
155 | "print('Type', out.dtype)\n",
156 | "print('Size', out.size())\n",
157 | "print('Hidden', hidden.data)"
158 | ]
159 | },
160 | {
161 | "cell_type": "code",
162 | "execution_count": 23,
163 | "metadata": {},
164 | "outputs": [
165 | {
166 | "output_type": "stream",
167 | "name": "stdout",
168 | "text": "Input size torch.Size([3, 5, 4])\nOut tensor([[[-0.5312, -0.0883],\n [ 0.6064, -0.6325],\n [ 0.3647, -0.7075],\n [ 0.3591, -0.7897],\n [-0.5652, -0.8731]],\n\n [[ 0.7213, 0.2784],\n [ 0.6125, -0.4499],\n [ 0.4220, -0.6717],\n [-0.5384, -0.8517],\n [ 0.3825, -0.9386]],\n\n [[ 0.7266, -0.2211],\n [ 0.4819, -0.5763],\n [ 0.4050, -0.2343],\n [ 0.5114, -0.1682],\n [ 0.5114, -0.6541]]])\nType torch.float32\nSize torch.Size([3, 5, 2])\nHidden tensor([[[-0.5652, -0.8731],\n [ 0.3825, -0.9386],\n [ 0.5114, -0.6541]]])\n"
169 | }
170 | ],
171 | "source": [
172 | "# Create an RNN cell with the desired input size and hidden size; this time we feed a batch of more than one sequence. \n",
173 | "\n",
174 | "# hidden_size = 2\n",
175 | "# batch_size = 3\n",
176 | "# sequence_length = 5\n",
177 | "\n",
178 | "cell = torch.nn.RNN(input_size=4, hidden_size=2, batch_first=True)\n",
179 | "\n",
180 | "# Creating a batch of three five-letter sequences\n",
181 | "inputs = torch.Tensor(\n",
182 | "    [\n",
183 | "        [h,e,l,l,o],\n",
184 | "        [e,l,l,o,l],\n",
185 | "        [l,l,e,e,l]\n",
186 | "    ]\n",
187 | ")\n",
188 | "print('Input size', inputs.size())\n",
189 | "\n",
190 | "# Initialize the hidden state\n",
191 | "# (num_layers*num_directions, batch, hidden_size)\n",
192 | "\n",
193 | "hidden = torch.rand(1, 3, 2)\n",
194 | "\n",
195 | "# Feed the whole batch at once;\n",
196 | "# after the call, hidden contains the final hidden state of each sequence\n",
197 | "out, hidden = cell(inputs, hidden)\n",
198 | "print('Out', out.data)\n",
199 | "print('Type', out.dtype)\n",
200 | "print('Size', out.size())\n",
201 | "print('Hidden', hidden.data)"
202 | ]
203 | },
204 | {
205 | "cell_type": "markdown",
206 | "metadata": {},
207 | "source": [
208 | "### Experiment 2:\n",
209 | "\n",
210 | "* We will feed the string `hihell` to the network so that it outputs `ihello`, i.e. predicting the next character\n",
211 | "\n",
212 | "* This is a sequence classification task. "
213 | ]
214 | },
215 | {
216 | "cell_type": "code",
217 | "execution_count": 100,
218 | "metadata": {},
219 | "outputs": [],
220 | "source": [
221 | "# Creating a project to convert hihell -> ihello\n",
222 | "\n",
223 | "# Data preparation\n",
224 | "\n",
225 | "idx2char = ['h', 'i', 'e', 'l', 'o']\n",
226 | "\n",
229 | "x_data = [[0, 1, 0, 2, 3, 3]] # hihell\n",
230 | "x_one_hot = [[[1, 0, 0, 0, 0], # h 0\n",
231 | "              [0, 1, 0, 0, 0], # i 1\n",
232 | "              [1, 0, 0, 0, 0], # h 0\n",
233 | "              [0, 0, 1, 0, 0], # e 2\n",
234 | "              [0, 0, 0, 1, 0], # l 3\n",
235 | "              [0, 0, 0, 1, 0]]] # l 3\n",
236 | "\n",
237 | "y_data = [1, 0, 2, 3, 3, 4] # ihello\n",
238 | "\n",
239 | "\n",
240 | "inputs = torch.Tensor(x_one_hot)\n",
241 | "labels = torch.LongTensor(y_data)"
242 | ]
243 | },
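The hand-written `x_one_hot` table above can also be generated programmatically. A minimal sketch (not a cell from the original notebook) using `torch.nn.functional.one_hot`:

```python
import torch

x_data = [[0, 1, 0, 2, 3, 3]]   # hihell as indices into ['h', 'i', 'e', 'l', 'o']
inputs = torch.nn.functional.one_hot(
    torch.LongTensor(x_data), num_classes=5).float()
print(inputs.shape)              # torch.Size([1, 6, 5]): (batch, seq_len, one-hot size)
```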
244 | {
245 | "cell_type": "code",
246 | "execution_count": 96,
247 | "metadata": {},
248 | "outputs": [],
249 | "source": [
250 | "# Parameters\n",
251 | "num_classes = 5\n",
252 | "input_size = 5       # one-hot size\n",
253 | "hidden_size = 5      # output size of the cell\n",
254 | "batch_size = 1       # one sentence\n",
255 | "sequence_length = 6\n",
256 | "num_layers = 1       # one-layer RNN"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 97,
262 | "metadata": {},
263 | "outputs": [
264 | {
265 | "output_type": "stream",
266 | "name": "stdout",
267 | "text": "Model(\n (rnn): RNN(5, 5, batch_first=True)\n)\n"
268 | }
269 | ],
270 | "source": [
271 | "class Model(torch.nn.Module):\n",
272 | "\n",
273 | "    def __init__(self, num_classes, input_size, hidden_size, num_layers, sequence_length):\n",
274 | "        super(Model, self).__init__()\n",
275 | "        self.num_classes = num_classes\n",
276 | "        self.input_size = input_size\n",
277 | "        self.hidden_size = hidden_size\n",
278 | "        self.num_layers = num_layers\n",
279 | "        self.sequence_length = sequence_length\n",
280 | "\n",
281 | "        self.rnn = torch.nn.RNN(input_size=self.input_size, hidden_size=self.hidden_size, batch_first=True)\n",
282 | "\n",
283 | "    def forward(self, x):\n",
284 | "        h_0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size)\n",
285 | "        x = x.view(x.size(0), self.sequence_length, self.input_size)\n",
286 | "        outputs, hidden = self.rnn(x, h_0)\n",
287 | "        return outputs.view(-1, self.num_classes)  # hidden_size == num_classes here, so the RNN output is used directly as class scores\n",
288 | "\n",
289 | "\n",
290 | "model = Model(num_classes, input_size, hidden_size, num_layers, sequence_length)\n",
291 | "print(model)"
292 | ]
293 | },
294 | {
295 | "cell_type": "code",
296 | "execution_count": 98,
297 | "metadata": {},
298 | "outputs": [],
299 | "source": [
300 | "# Set loss and optimizer\n",
301 | "\n",
302 | "criterion = torch.nn.CrossEntropyLoss()\n",
303 | "optimus = torch.optim.SGD(model.parameters(), lr=0.1)"
304 | ]
305 | },
306 | {
307 | "cell_type": "code",
308 | "execution_count": 101,
309 | "metadata": {},
310 | "outputs": [
311 | {
312 | "output_type": "stream",
313 | "name": "stdout",
314 | "text": "epoch: 1, loss: 1.689\nPredicted string: oeoooo\nepoch: 11, loss: 1.448\nPredicted string: llllll\nepoch: 21, loss: 1.314\nPredicted string: llllll\nepoch: 31, loss: 1.202\nPredicted string: illlll\nepoch: 41, loss: 1.115\nPredicted string: ilelll\nepoch: 51, loss: 1.047\nPredicted string: ilelll\nepoch: 61, loss: 0.993\nPredicted string: ihelll\nepoch: 71, loss: 0.948\nPredicted string: ihelll\nepoch: 81, loss: 0.909\nPredicted string: ihelll\nepoch: 91, loss: 0.874\nPredicted string: ihelll\nLearning finished!\n"
315 | }
316 | ],
317 | "source": [
318 | "for epoch in range(100):\n",
319 | "    outputs = model(inputs)\n",
320 | "    optimus.zero_grad()\n",
321 | "    loss = criterion(outputs, labels)\n",
322 | "    loss.backward()\n",
323 | "    optimus.step()\n",
324 | "    _, idx = outputs.max(1)\n",
325 | "    idx = idx.data.numpy()\n",
326 | "    result_str = [idx2char[c] for c in idx.squeeze()]\n",
327 | "    if epoch % 10 == 0:\n",
328 | "        print(\"epoch: %d, loss: %1.3f\" % (epoch + 1, loss.item()))\n",
329 | "        print(\"Predicted string: \", ''.join(result_str))\n",
330 | "\n",
331 | "print(\"Learning finished!\")\n",
334 | ""
335 | ]
336 | },
337 | {
338 | "cell_type": "markdown",
339 | "metadata": {},
340 | "source": [
341 | "### Experiment 3:\n",
342 | "\n",
343 | "* We will repeat the experiment above, but instead of one-hot encoding we will use an `embedding layer`. "
344 | ]
345 | },
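What the embedding layer buys us: `nn.Embedding` is a learnable lookup table indexed by integer ids, so the explicit one-hot tensors of Experiment 2 are no longer needed. A minimal sketch (not a cell from the original notebook); feeding one-hot vectors through the embedding matrix gives exactly the same result as the lookup:

```python
import torch

emb = torch.nn.Embedding(num_embeddings=5, embedding_dim=10)
x = torch.LongTensor([[0, 1, 0, 2, 3, 3]])            # hihell as indices
print(emb(x).shape)                                   # torch.Size([1, 6, 10])

# Equivalent view: one-hot vectors multiplied by the embedding matrix
one_hot = torch.nn.functional.one_hot(x, num_classes=5).float()
print(torch.allclose(one_hot @ emb.weight, emb(x)))   # True
```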
" 344 | ] 345 | }, 346 | { 347 | "cell_type": "code", 348 | "execution_count": 133, 349 | "metadata": {}, 350 | "outputs": [], 351 | "source": [ 352 | "# Creating a project to convert hihell -> ihello\n", 353 | "\n", 354 | "# Data prepration\n", 355 | "\n", 356 | "\n", 357 | "x_data = [[0,1,0,2,3,3]] #hihell\n", 358 | "y_data = [1,0,2,3,3,4] #ihello\n", 359 | "\n", 360 | "\n", 361 | "inputs = torch.LongTensor(x_data)\n", 362 | "labels = torch.LongTensor(y_data)" 363 | ] 364 | }, 365 | { 366 | "cell_type": "code", 367 | "execution_count": 134, 368 | "metadata": {}, 369 | "outputs": [ 370 | { 371 | "output_type": "execute_result", 372 | "data": { 373 | "text/plain": "6" 374 | }, 375 | "metadata": {}, 376 | "execution_count": 134 377 | } 378 | ], 379 | "source": [ 380 | "labels.size(0)" 381 | ] 382 | }, 383 | { 384 | "cell_type": "code", 385 | "execution_count": 135, 386 | "metadata": {}, 387 | "outputs": [], 388 | "source": [ 389 | "# Parameters\n", 390 | "embedding_size = 10\n", 391 | "num_classes =5\n", 392 | "input_size =5 # One-hot size\n", 393 | "hidden_size = 5 # output from the cell\n", 394 | "batch_size=1 # one sentence\n", 395 | "sequence_length=6\n", 396 | "num_layers=1 # one layer run" 397 | ] 398 | }, 399 | { 400 | "cell_type": "code", 401 | "execution_count": 136, 402 | "metadata": {}, 403 | "outputs": [ 404 | { 405 | "output_type": "stream", 406 | "name": "stdout", 407 | "text": "Model(\n (embedding): Embedding(5, 10)\n (rnn): RNN(10, 5, batch_first=True)\n (fc): Linear(in_features=5, out_features=5, bias=True)\n)\n" 408 | } 409 | ], 410 | "source": [ 411 | "class Model(torch.nn.Module):\n", 412 | "\n", 413 | " def __init__(self):\n", 414 | " super(Model, self).__init__()\n", 415 | " # self.num_classes = num_classes\n", 416 | " # self.input_size = input_size\n", 417 | " # self.hidden_size = hidden_size\n", 418 | " # self.num_layers = num_layers\n", 419 | " # self.sequence_length = sequence_length\n", 420 | "\n", 421 | " \n", 422 | " self.embedding = torch.nn.Embedding(input_size, embedding_size)\n", 423 | " self.rnn = torch.nn.RNN(input_size=embedding_size, hidden_size=5, batch_first=True)\n", 424 | " self.fc = torch.nn.Linear(hidden_size, num_classes)\n", 425 | "\n", 426 | " def forward(self,x):\n", 427 | " h_0 = torch.zeros(num_layers, x.size(0),hidden_size)\n", 428 | " emb = self.embedding(x)\n", 429 | " emb = emb.view(batch_size, sequence_length, -1)\n", 430 | " outputs, hidden = self.rnn(emb,h_0)\n", 431 | " return self.fc(outputs.view(-1, num_classes))\n", 432 | "\n", 433 | "\n", 434 | "model = Model()\n", 435 | "print(model)" 436 | ] 437 | }, 438 | { 439 | "cell_type": "code", 440 | "execution_count": 137, 441 | "metadata": {}, 442 | "outputs": [], 443 | "source": [ 444 | "# Set loss and optimizer\n", 445 | "\n", 446 | "criterion = torch.nn.CrossEntropyLoss()\n", 447 | "optimus = torch.optim.Adam(model.parameters(), lr=0.1)" 448 | ] 449 | }, 450 | { 451 | "cell_type": "code", 452 | "execution_count": 138, 453 | "metadata": {}, 454 | "outputs": [ 455 | { 456 | "output_type": "stream", 457 | "name": "stdout", 458 | "text": "epoch: 1, loss: 1.808\nPredicted string: oooooo\nepoch: 6, loss: 0.765\nPredicted string: ehello\nepoch: 11, loss: 0.362\nPredicted string: ihello\nepoch: 16, loss: 0.166\nPredicted string: ihello\nepoch: 21, loss: 0.077\nPredicted string: ihello\nepoch: 26, loss: 0.040\nPredicted string: ihello\nepoch: 31, loss: 0.025\nPredicted string: ihello\nepoch: 36, loss: 0.017\nPredicted string: ihello\nepoch: 41, loss: 0.013\nPredicted string: ihello\nepoch: 
46, loss: 0.010\nPredicted string: ihello\nepoch: 51, loss: 0.009\nPredicted string: ihello\nepoch: 56, loss: 0.008\nPredicted string: ihello\nepoch: 61, loss: 0.007\nPredicted string: ihello\nepoch: 66, loss: 0.006\nPredicted string: ihello\nepoch: 71, loss: 0.006\nPredicted string: ihello\nepoch: 76, loss: 0.005\nPredicted string: ihello\nepoch: 81, loss: 0.005\nPredicted string: ihello\nepoch: 86, loss: 0.005\nPredicted string: ihello\nepoch: 91, loss: 0.005\nPredicted string: ihello\nepoch: 96, loss: 0.004\nPredicted string: ihello\nLearning finished!\n"
459 | }
460 | ],
461 | "source": [
462 | "for epoch in range(100):\n",
463 | "    outputs = model(inputs)\n",
464 | "    optimus.zero_grad()\n",
465 | "    loss = criterion(outputs, labels)\n",
466 | "    loss.backward()\n",
467 | "    optimus.step()\n",
468 | "    _, idx = outputs.max(1)\n",
469 | "    idx = idx.data.numpy()\n",
470 | "    result_str = [idx2char[c] for c in idx.squeeze()]\n",
471 | "    if epoch % 5 == 0:\n",
472 | "        print(\"epoch: %d, loss: %1.3f\" % (epoch + 1, loss.item()))\n",
473 | "        print(\"Predicted string: \", ''.join(result_str))\n",
474 | "\n",
475 | "print(\"Learning finished!\")"
476 | ]
477 | },
478 | {
479 | "cell_type": "code",
480 | "execution_count": null,
481 | "metadata": {},
482 | "outputs": [],
483 | "source": []
484 | }
485 | ]
486 | }
--------------------------------------------------------------------------------
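To close the lecture, the trained model can be reused for a plain inference pass. A minimal sketch (not a cell from the original notebook), assuming the Experiment 3 `model`, `inputs`, and `idx2char` defined above are still in scope:

```python
import torch

model.eval()
with torch.no_grad():
    outputs = model(inputs)        # shape (6, 5): one row of class scores per character
    idx = outputs.argmax(dim=1)    # pick the most likely next character at each step

print(''.join(idx2char[i] for i in idx.tolist()))   # 'ihello' after training
```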