├── .gitignore ├── LICENSE.txt ├── README.md ├── lesson1-pytorch.ipynb ├── lesson4-pytorch.ipynb ├── lesson5-pytorch.ipynb ├── lesson6-pytorch.ipynb └── lesson7-pytorch.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | data/* 2 | .idea/* 3 | .ipynb_checkpoints/* -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Practical Deep Learning for Coders (fast.ai courses), PyTorch Port
2 |
3 | This is a PyTorch port of part 1 of Rachel and Jeremy's deep learning MOOC, Practical Deep Learning for Coders. For the original version, written in Keras 1.2.2, please see [here](https://github.com/fastai/courses).
4 |
5 | ## Acknowledgments
6 |
7 | I would like to start by thanking Rachel and Jeremy for all the time they have put into this free MOOC. This is by far the best practical Machine Learning course available today and it has proven to be an invaluable resource. I [wrote in my blog](https://rodrigo.red/blog/picmatix-pragmatic-machine-learning-company/) that open collaboration is an undisputed reason for the success of Machine Learning, and I am convinced that the educational work Rachel and Jeremy are doing is a perfect example of this mindset. Thank you very much!
8 |
9 | The notebooks in this repo would not be possible without @ncullen93's amazing work on [torchsample](https://github.com/ncullen93/torchsample), a high-level package that includes Keras-like training methods and many other gems.
10 |
11 | Last but not least, I would like to thank all Keras and PyTorch maintainers and contributors for pushing the boundaries of what's possible.
12 |
13 | ## Motivation
14 |
15 | Keras is a very well designed Machine Learning library; it is no surprise that Google is making it TensorFlow's default high-level API. Nonetheless, in Keras many important details are hidden away behind easy-to-use APIs and high-level abstractions. This is perfect when you are looking for good results quickly, but from a learning perspective I believe we can do better. Moreover, for certain types of architectures Keras may not be the best fit, and it would make sense to learn and use a different framework.
16 |
17 | PyTorch is the new kid on the block, promising easier-to-debug networks, flexible architectures, and state-of-the-art results. Who could say no to such a shiny new ML framework?
18 |
19 | Porting the course notebooks to PyTorch has meant a lot of hard work figuring out the small details (e.g. why is my network not converging as fast as it should?), but the payoff has been an amazing learning and hands-on experience. I recommend you do the same, and I hope you find value in this work.
20 |
21 | Finally, these notebooks can also be used as a direct comparison between Keras and PyTorch. (Yet Another PyTorch vs. Keras post.)
22 |
23 | ## Notebooks & Contributions
24 |
25 | - Lesson 1 is significantly different from the original notebook because 1) I wasn't planning on making this work public when I started the course, and 2) PyTorch's CNN/vision features are different enough that an exact port would be hard.
26 | - Lessons 2 and 3 haven't been ported to PyTorch. Feel free to submit a PR.
27 | - Lessons 4-6 are almost line-by-line equivalents of the original notebooks, with a few loose ends here and there (such as the missing LSTM network in Lesson 5 and no Theano implementation in Lesson 6).
28 | - Lesson 7 will be available in early June 2017.
29 |
30 | If you have a better way of doing XYZ, please let me know through a GitHub issue.
31 |
32 | ## Additional Requirements
33 |
34 | - Python 3.X
35 | - PyTorch v0.1.12
36 | - [Torchsample](https://github.com/ncullen93/torchsample)
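You can sanity-check your environment against these requirements with a short snippet (a hypothetical check, not part of the original repo):

```python
# Quick environment check (hypothetical; adjust to your setup).
import sys
import torch
import torchsample  # just verifying the package imports

assert sys.version_info[0] == 3, "these notebooks target Python 3"
print("PyTorch version:", torch.__version__)         # this port targets 0.1.12
print("CUDA available:", torch.cuda.is_available())  # a GPU is strongly recommended for lesson 1
```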
37 |
38 | ## License
39 |
40 | Apache 2.0
--------------------------------------------------------------------------------
/lesson1-pytorch.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Using Convolutional Neural Networks"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "Welcome to the first week of the first deep learning certificate! 
We're going to use convolutional neural networks (CNNs) to allow our computer to see - something that is only possible thanks to deep learning."
15 | ]
16 | },
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {},
20 | "source": [
21 | "## Introduction to this week's task: 'Dogs vs Cats'"
22 | ]
23 | },
24 | {
25 | "cell_type": "markdown",
26 | "metadata": {},
27 | "source": [
28 | "We're going to try to create a model to enter the [Dogs vs Cats](https://www.kaggle.com/c/dogs-vs-cats) competition at Kaggle. There are 25,000 labelled dog and cat photos available for training, and 12,500 in the test set that we have to try to label for this competition. According to the Kaggle web-site, when this competition was launched (end of 2013): *\"**State of the art**: The current literature suggests machine classifiers can score above 80% accuracy on this task\"*. So if we can beat 80%, then we will be at the cutting edge as of 2013!"
29 | ]
30 | },
31 | {
32 | "cell_type": "markdown",
33 | "metadata": {},
34 | "source": [
35 | "## Basic setup"
36 | ]
37 | },
38 | {
39 | "cell_type": "markdown",
40 | "metadata": {},
41 | "source": [
42 | "There isn't too much to do to get started - just a few simple configuration steps.\n",
43 | "\n",
44 | "This imports all dependencies and shows plots in the web page itself - we always want to use this when working in a Jupyter notebook:"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": 2,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "import torch\n",
54 | "import torchvision.models as models\n",
55 | "import torchvision.transforms as transforms\n",
56 | "import torchvision.datasets as datasets\n",
57 | "from torchvision.utils import make_grid\n",
58 | "from PIL import Image\n",
59 | "import matplotlib.pyplot as plt\n",
60 | "import torch.nn as nn\n",
61 | "import torch.optim as optim\n",
62 | "import torch.utils.trainer as trainer\n",
63 | "import torch.utils.trainer.plugins\n",
64 | "from torch.autograd import Variable\n",
65 | "import numpy as np\n",
66 | "import os\n",
67 | "\n",
68 | "from torchsample.modules import ModuleTrainer\n",
69 | "from torchsample.metrics import CategoricalAccuracy\n",
70 | "\n",
71 | "%matplotlib inline"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 3,
77 | "metadata": {
78 | "collapsed": true
79 | },
80 | "outputs": [],
81 | "source": [
82 | "def show(img):\n",
83 | " npimg = img.numpy()\n",
84 | " plt.imshow(np.transpose(npimg, (1,2,0)), interpolation='nearest')"
85 | ]
86 | },
87 | {
88 | "cell_type": "markdown",
89 | "metadata": {},
90 | "source": [
91 | "Define path to data: (It's a good idea to put it in a subdirectory of your notebooks folder, and then exclude that directory from git control by adding it to .gitignore.) Additionally, set use_cuda = True to use a GPU for training and prediction."
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": 11,
97 | "metadata": {},
98 | "outputs": [
99 | {
100 | "name": "stdout",
101 | "output_type": "stream",
102 | "text": [
103 | "Using CUDA: False\n"
104 | ]
105 | }
106 | ],
107 | "source": [
108 | "data_path = \"data/dogscats/\"\n",
109 | "# data_path = \"data/dogscats/sample/\"\n",
110 | "use_cuda = True\n",
111 | "batch_size = 64\n",
112 | "print('Using CUDA:', use_cuda)"
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "metadata": {},
118 | "source": [
119 | "# Use a pretrained VGG model with torchvision's **vgg16**"
120 | ]
121 | },
122 | {
123 | "cell_type": "markdown",
124 | "metadata": {},
125 | "source": [
126 | "Our first step is simply to use a model that has been fully created for us, which can recognise a wide variety (1,000 categories) of images. We will use 'VGG', which won the 2014 Imagenet competition, and is a very simple model to create and understand. The VGG Imagenet team created both a larger, slower, slightly more accurate model (*VGG 19*) and a smaller, faster model (*VGG 16*). We will be using VGG 16 since the much slower performance of VGG19 is generally not worth the very minor improvement in accuracy.\n",
127 | "\n",
128 | "PyTorch's torchvision package includes a pretrained *vgg16* model, which makes using the VGG 16 architecture very straightforward. "
129 | ]
130 | },
131 | {
132 | "cell_type": "markdown",
133 | "metadata": {},
134 | "source": [
135 | "## The punchline: state of the art custom model in 7 lines of code\n",
136 | "\n",
137 | "Here's everything you need to do to get >97% accuracy on the Dogs vs Cats dataset - we won't analyze how it works behind the scenes yet, since at this stage we're just going to focus on the minimum necessary to actually do useful work."
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": 5,
143 | "metadata": {
144 | "collapsed": true
145 | },
146 | "outputs": [],
147 | "source": [
148 | "# TODO refactor the code below and put it in utils.py to allow creating a custom model in 7 lines of code"
149 | ]
150 | },
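The promised seven lines are still a TODO in this port. As a rough sketch only, here is a hypothetical condensation of the cells that appear below (it assumes the notebook's imports and the train_loader/val_loader built in the next section, and omits the CUDA transfers shown later):

```python
# Hypothetical condensation of the finetuning cells shown later in this notebook.
import types
model = models.vgg16(pretrained=True)                                  # 1. load ImageNet weights
for p in model.parameters(): p.requires_grad = False                   # 2. freeze all layers
model.classifier = nn.Sequential(nn.Linear(512 * 7 * 7, 4096), nn.ReLU(True), nn.Dropout(),
                                 nn.Linear(4096, 4096), nn.ReLU(True), nn.Dropout(),
                                 nn.Linear(4096, 2))                   # 3. new 2-class head
model.parameters = types.MethodType(lambda self: filter(
    lambda p: p.requires_grad, nn.Module.parameters(self)), model)     # 4. expose trainable weights only
trainer = ModuleTrainer(model)                                         # 5. torchsample's Keras-like trainer
trainer.set_optimizer(optim.Adam, lr=1e-3); trainer.set_loss(nn.CrossEntropyLoss())  # 6. optimizer + loss
trainer.fit_loader(train_loader, val_loader=val_loader, nb_epoch=2)    # 7. train
```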
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "## Use vgg16 for basic image recognition\n",
156 | "\n",
157 | "Let's start off by using the pretrained *vgg16* model to recognise the main imagenet category for each image.\n",
158 | "\n",
159 | "We won't be able to enter the Dogs vs Cats competition with an Imagenet model alone, since 'cat' and 'dog' are not categories in Imagenet - instead each individual breed is a separate category. However, we can use it to see how well it can recognise the images, which is a good first step.\n",
160 | "\n",
161 | "First create a DataLoader which will read the images from disk, resize them, convert them into tensors and normalize them the same way the VGG 16 network was trained (using ImageNet's RGB mean and std)."
162 | ]
163 | },
164 | {
165 | "cell_type": "code",
166 | "execution_count": 12,
167 | "metadata": {},
168 | "outputs": [
169 | {
170 | "name": "stdout",
171 | "output_type": "stream",
172 | "text": [
173 | "Images in test folder: 6\n"
174 | ]
175 | }
176 | ],
177 | "source": [
178 | "# Data loading code\n",
179 | "traindir = os.path.join(data_path, 'train')\n",
180 | "valdir = os.path.join(data_path, 'valid') \n",
181 | "# cd data/dogscats && mkdir -p test && mv test1 test/\n",
182 | "testdir = os.path.join(data_path, 'test')\n",
183 | "\n",
184 | "# pytorch way of implementing fastai's get_batches, (utils.py)\n",
185 | "def get_data_loader(dirname, shuffle=True, batch_size = 64):\n",
186 | " # pytorch's VGG requires images to be 224x224 and normalized using https://github.com/pytorch/vision#models\n",
187 | " normalize = transforms.Compose([\n",
188 | " transforms.Lambda(lambda img: img.resize((224, 224), Image.BILINEAR)),\n",
189 | " transforms.ToTensor(),\n",
190 | " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n",
191 | " ])\n",
192 | " \n",
193 | " image_folder = datasets.ImageFolder(dirname, normalize)\n",
194 | " return torch.utils.data.DataLoader(image_folder, batch_size=batch_size, \n",
195 | " shuffle=shuffle, pin_memory=use_cuda), image_folder\n",
196 | "\n",
197 | "train_loader, folder = get_data_loader(traindir, batch_size=batch_size)\n",
198 | "val_loader, folder = get_data_loader(valdir, shuffle=False, batch_size=batch_size)\n",
199 | "test_loader, testfolder = get_data_loader(testdir, shuffle=False, batch_size=batch_size)\n",
200 | "\n",
201 | "print('Images in test folder:', len(testfolder.imgs))"
202 | ]
203 | },
204 | {
205 | "cell_type": "markdown",
206 | "metadata": {},
207 | "source": [
208 | "Then, create a pretrained VGG 16 model:"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": 13,
214 | "metadata": {},
215 | "outputs": [],
216 | "source": [
217 | "# Load the model\n",
218 | "model = models.vgg16(pretrained=True)"
219 | ]
220 | },
221 | {
222 | "cell_type": "markdown",
223 | "metadata": {},
224 | "source": [
225 | "Then *finetune* the model so that it is trained on the data in the batches provided - in this case, to predict either 'dog' or 'cat'."
226 | ]
227 | },
228 | {
229 | "cell_type": "code",
230 | "execution_count": 22,
231 | "metadata": {},
232 | "outputs": [
233 | {
234 | "name": "stdout",
235 | "output_type": "stream",
236 | "text": [
237 | "Using 2 classes: ['cats', 'dogs']\n"
238 | ]
239 | }
240 | ],
241 | "source": [
242 | "# Finetune by replacing the last fully connected layer and freezing all network parameters\n",
243 | "for param in model.parameters():\n",
244 | " param.requires_grad = False\n",
245 | "\n",
246 | "# Replace the last fully-connected layer matching the new class count\n",
247 | "classes = train_loader.dataset.classes\n",
248 | "num_classes = len(classes)\n",
249 | "print('Using {:d} classes: {}'.format(num_classes, classes))\n",
250 | "model.classifier = nn.Sequential(\n",
251 | " nn.Linear(512 * 7 * 7, 4096),\n",
252 | " nn.ReLU(True),\n",
253 | " nn.Dropout(),\n",
254 | " nn.Linear(4096, 4096),\n",
255 | " nn.ReLU(True),\n",
256 | " nn.Dropout(),\n",
257 | " nn.Linear(4096, num_classes),\n",
258 | " )\n",
259 | "\n",
260 | "# Monkey patch the parameters() to return trainable weights only\n",
261 | "import types\n",
262 | "\n",
263 | "def parameters(self):\n",
264 | " p = filter(lambda p: p.requires_grad, nn.Module.parameters(self))\n",
265 | " return p\n",
266 | "\n",
267 | "model.parameters = types.MethodType(parameters, model)"
268 | ]
269 | },
270 | {
271 | "cell_type": "code",
272 | "execution_count": 23,
273 | "metadata": {},
274 | "outputs": [],
275 | "source": [
276 | "# define loss function (criterion) and optimizer\n",
277 | "criterion = nn.CrossEntropyLoss()\n",
278 | "# enable cuda if available\n",
279 | "if(use_cuda):\n",
280 | " model.cuda()\n",
281 | " criterion.cuda()"
282 | ]
283 | },
284 | {
285 | "cell_type": "code",
286 | "execution_count": 24,
287 | "metadata": {
288 | "collapsed": true
289 | },
290 | "outputs": [],
291 | "source": [
292 | "def getTrainer(lr):\n",
293 | " trainer = ModuleTrainer(model)\n",
294 | " trainer.set_optimizer(optim.Adam, lr=lr)\n",
295 | " trainer.set_loss(criterion)\n",
296 | " trainer.set_metrics([CategoricalAccuracy()])\n",
297 | " \n",
298 | " return trainer"
299 | ]
300 | },
301 | {
302 | "cell_type": "markdown",
303 | "metadata": {},
304 | "source": [
305 | "Finally, we fit() the parameters of the model using the training data, reporting the accuracy on the validation set after every epoch. (An epoch is one full pass through the training data.)"
306 | ]
307 | },
308 | {
309 | "cell_type": "code",
310 | "execution_count": 25,
311 | "metadata": {},
312 | "outputs": [
313 | {
314 | "name": "stderr",
315 | "output_type": "stream",
316 | "text": [
317 | "Epoch 1/2: 4 batches [08:03, 114.11s/ batches, val_acc=50.31, val_loss=15.9782, loss=15.0138, acc=50.62]\n",
318 | "Epoch 2/2: 4 batches [10:48, 161.16s/ batches, val_acc=61.56, val_loss=1.3137, loss=8.2031, acc=64.38]\n"
319 | ]
320 | }
321 | ],
322 | "source": [
323 | "trainer = getTrainer(lr=1e-3)\n",
324 | "trainer.fit_loader(train_loader, val_loader=val_loader, nb_epoch=2)\n",
325 | "# This gets a validation accuracy of 98.9 when using the whole dataset"
326 | ]
327 | },
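With training done, the finetuned model can make predictions directly. A minimal inference sketch (hypothetical code, not an original cell; the helper functions in the next section wrap the same idea):

```python
# Minimal inference sketch (assumed; uses PyTorch 0.1.x idioms to match the notebook).
model.eval()                              # put dropout layers in evaluation mode
images, labels = next(iter(val_loader))   # grab one batch from the validation loader
if use_cuda:
    images = images.cuda()
predictions = model(Variable(images, volatile=True))      # volatile=True skips gradient tracking
predicted_ids = predictions.cpu().data.numpy().argmax(1)  # winning class index per image
print([classes[i] for i in predicted_ids[:4]])            # e.g. ['cats', 'dogs', 'cats', 'dogs']
```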
328 | {
329 | "cell_type": "markdown",
330 | "metadata": {},
331 | "source": [
332 | "That shows all of the steps involved in finetuning a pretrained VGG 16 network to create an image recognition model using whatever labels you are interested in. For instance, this process could classify paintings by style, or leaves by type of disease, or satellite photos by type of crop, and so forth.\n",
333 | "Next up, we'll dig one level deeper to see what's going on inside the VGG 16 model."
334 | ]
335 | },
336 | {
337 | "cell_type": "markdown",
338 | "metadata": {},
339 | "source": [
340 | "## Visually validate the classifier"
341 | ]
342 | },
343 | {
344 | "cell_type": "code",
345 | "execution_count": 19,
346 | "metadata": {},
347 | "outputs": [],
348 | "source": [
349 | "# Define some helper functions\n",
350 | "\n",
351 | "def denorm(tensor):\n",
352 | " # Undo the image normalization + clamp between 0 and 1 to avoid image artifacts\n",
353 | " for t, m, s in zip(tensor, [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]): \n",
354 | " t.mul_(s).add_(m).clamp_(0, 1)\n",
355 | " return tensor\n",
356 | "\n",
357 | "def get_images_to_plot(images_tensor):\n",
358 | " denormalize = transforms.Compose([\n",
359 | " transforms.Lambda(denorm)\n",
360 | " ])\n",
361 | " return denormalize(images_tensor)\n",
362 | "\n",
363 | "def get_classes_strings(classes, labels_ids):\n",
364 | " # returns the classes in string format\n",
365 | " return [classes[label_id] for label_id in labels_ids]\n",
366 | "\n",
367 | "def get_prediction_classes_ids(predictions):\n",
368 | " # returns the predictions in id format\n",
369 | " predictions_ids = predictions.cpu().data.numpy().argmax(1)\n",
370 | " return predictions_ids\n",
371 | "\n",
372 | "def get_prediction_classes_strings(classes, predictions):\n",
373 | " # returns the predictions in string format\n",
374 | " return get_classes_strings(classes, get_prediction_classes_ids(predictions))"
375 | ]
376 | },
377 | {
378 | "cell_type": "code",
379 | "execution_count": 30,
380 | "metadata": {},
381 | "outputs": [
382 | {
383 | "name": "stdout",
384 | "output_type": "stream",
385 | "text": [
386 | "['cats', 'dogs', 'cats', 'dogs']\n"
387 | ]
388 | },
389 | {
390 | "data": {
391 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXoAAAB2CAYAAADGFVhfAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXt8XFW58P9d+75nZk8yk2TSTNqkJWkbaCut0kKrLZdK\nAUHpEVBRQEHU40FUkJ/HG3LwvF5+6hEVPHgQeZUDelRQREEBQQStSMGitCWVBknbpO2kmVxmJntm\n9t5rvX9MkqbV88rx0JxXPvP9fPZnZta+zjNrPetZz3rWM0IpRZ06derUeemi/U8/QJ06derUObLU\nFX2dOnXqvMSpK/o6derUeYlTV/R16tSp8xKnrujr1KlT5yVOXdHXqVOnzkucI6LohRCnCyF2CCF2\nCiE+dCTuUadOnTp1XhjixY6jF0LowB+AU4E9wGbgfKXU9hf1RnXq1KlT5wVxJCz6VcBOpdRzSqkq\n8B/A2UfgPnXq1KlT5wVgHIFrtgO7Z3zeAxx/+EFCiHcC75z8+Ioj8Bx16tSp81LngFKq5S8ddCQU\n/QtCKXUTcBOAEKKeh6FOnTp1/uv0v5CDjoTrZgCYN+Pz3MmyOnXq1KnzP8CRUPSbgYVCiAVCCAt4\nE3D3EbhPnTp16tR5AbzorhulVCiEeA9wH6ADtyiltr3Y96lTp06dOi+MFz288q96iLqPvk6dOnX+\nGp5USh33lw6qr4ytU6dOnZc4dUVfp06dOi9x6oq+Tp06dV7i1BV9nTp16rzEqSv6OnXq1HmJU1f0\nderUqfMSp67o69SpU+clzv9Yrpv/LoZ7WB6fmesBhIY87HgpJYauE8nD90ydrhAKlJi8hBCHHiB0\nXM1myZLF/H77U2i6RSRqPeXUMgANHV3XiaIAIQSaFIdcRwkB2qFlQtXurQSYpkmlUql9VgopJeXx\n2cke8fymG3BjMSrlMgDVahXXa0RKiWlZCCFQSh2UszKoBlXitkkQBJNfRCKlRDcMZBQRidp3MzUD\nGdXKw6D2/QKloesK0xSYmkmpWOTZPUX+/9s2449NYNgW+w+MkohZKL9KKuXhJpoYLg7TYMdpTyQ5\n/U2v4dneHex4rJfWtIlUitGKTtEvUo4kVSHRhIatC0wRseiYo/mnT1wzK/J83yevR4iDv7WOmJbh\nVNnM16l9Sik0TUMIiZqsjH9uqYsiAtTk/tqrULV6rtCZuT5GTtb5AFm7Vyin7wUgVVA7b7pMgNKQ\nUhJFUe2YMERKOb0BBEHAnTf/y4sotT/PtxZ9ka7uLsxMEi+doqW1lWw2i5duJ5VO46Y9UpkM6fZW\nXNfBCSHjtYPnYbplWvAJ/QEe3bKTtSvWk7vvDsIf38Kd1yT4+sbFpNvbcYICOx68A9f08UiTXbMM\nw1vMSHIlG849i0/2ZnGvKzHSuZN86BH2z+e88cdYftFNvPEf38cT99/HtXO38NEn09xz6SmcfVaO\nG1/fyJVn7eTyz16KHd/O2We9j91vuJmb1uzm5NO6WZx7kPRFc/jU5xtpcbv54D+894jJ8G/Wotc0\n7ZANIWobwOSLYRoHN+O/36dlGmKsXrmEpoZmZCQPqfiHNwIApWtEgunt8P1TCCHQNA3XdTFNE8uy\nMAwD0zT/28/8QrFtGyUl1qRSj8fjGKZJFEUoKQmqVYBphYAW4rgGkawgtAhNq8nfcV1kFCGEwLSs\nyatLNE0iowrxRAKAZEMSy7IIgpAwCABIJeNU/SImUBorYGs6KJBRRGtTC7ISYgQhlVKBkXCcp369\nldtv+zHdxx5DebxCbkKxf3iIspRUiXB0E0SVhYsWctrpGzG0xKzJs6asxSHK/vD9U4p2qlOfKgdg\nSvHDn71OqVQ6qMyVDkon0iA6rEVPdRwAhgQ9OrRzgEONGiEEzOgEpp7z8M4JQNf1/6pY/iq8pIfr\nujiui5dM4rouqXSaVDo9vS/punhJD8d1SWfbccMCbjLEcw2G3B5y88+iY8UqUqsypNd08tQtXXz9\nfPj0zd9g7WkbWX3WuVx69T+xeN2ZZNZtwM8sJ2zNkO3s594tP+Xs9s14X2kmFZ5GOb2KcP/zGME4\nnZ2dFAoFQlzedPWNfPKNGTZcsobj+B6DrOCTD6YpJDfx7jX9rOy1+extX+Lks04jdD3uDdvZsXMz\nKdekxxs/ojL8m7XopyrcVCWcalgAUin0ycqtpJo+XgGmZSIjCUodYt1PVfCZ1z6kgWoap5+8msLQ\nEGefsYav3XY34aRFrwkQmgAmrd7J8YTSDzZSTQgMoaEEhzRqKWtWVhRGlEql6c8zv89soNkOZd/H\ndhxM00JJiYwiHMdBaBqarmPoDsoIUVRREpTU0UwHNWX5aTpVqbATCapBFSoSM+ZBtQyaThSGjI2O\nY7kWlXIJ3TAwDQcVKmzHI+OYjA77tKSSVIIKhhUjqIY4DXF27x3AiMcoy4BWx6bdMtn6u8184hMf\n5Ntfv4F5yQ4KxSK2ZpJsn0dheDdRFOLYMboWd3PPj+6CajRr8tQFaEJjdGg/cdeAZCtCRQh1sG6Z\npnmw4xQKiUBoBl2tCcIgoK/vj5iNbaCig1b7ZJ1IxJPApMIWYe03lDpCaQhN1DroqY5ESgRMj3Jn\nKm6o7dCFRqhq8hGahlC19hMEQc1u0jS0yQ5pqt5Od0qzgGGauK4DQCqVosVsxUt6eJ4HrkkqncZI\nG7iui2eakAqAPHm68W86gx1XtJB+94cIvz/IcVeU+Nz6C1jeeRmZB1bxlqtOh8J+ntp0F1nTpCuT\nYvGTH2FdmOeh607mFPcxNo+nyT2ylKtvWU/2kru56vVtvPyqr5J6sp1y2eeEnf388MHvsfR995Ld\nfCmfuf0Wjok+SG9hPz/bcyxfP+MR7qy+k7ubnuIbP3oPjlugc1kX2d5e3MyVfKTtuiMrvyN69SOK\nhqbXrA0lFQfrrUQTgpopqA4d9yqBrIZEmolhGuhBzcWipCSSEiWigw1AaYAAUWsejgWukUCqCoXh\nUYRmYqmIEIlUQKSICGvuGq3W2ISUNcsMidI0QiFATVp7k01P6AZRFE2PTKY7q//ExXSk0HQdL5mc\nHr7ruk44OcTXdX2yIxToukk1qBBUBKYTAAaaphGFIcak4vry7Q/wxOO9XP+5D+AFB4i0BELTiSdM\nRvDR9AhN1wiDAN3QqAhFPrD45WNP0ZjSGNi3F9N2CYIQ2zQoT0wQ6HFc3eeo+Vn27h2C5rksnGPw\nv79xOzGrhcCKGA8Er157AlUpmHf8cdx//12cdcZZ3Hv33Si/gqvPnmLSNQcELFpyLOOjB7BlCV+a\nSN0+xJqespiVphGLSszxEoyNV1BVnQXzF7J7dPyg0TJjBDB1npQSoR10Cc10/0zVoamymaOCma6b\nqfOmFLecNI6mlLkSgigMax3+jO84W3U0nUmTzqTwCGnEJxOamJ5DOuMCIV3d7RTCPC4hI5ikXQPD\nW4ZrerzmZsnV+1/Htz70TtauyHBO8G2+/tVPsfL6DF88
/XxWn5ehEBboas+w+tp+Ntz0FS6/ZCNh\n+79z9lVDXOGdxM+/+DWy/Svpvewq3mE8wmW3vpPXjVXZvG0YzCROIsfQUz47duY54asB/98Vt3D9\nh0fgiwOcvf5d/NtzG3n6m2/hwz3NfPGkj3HTDwwKpo8xPsjPzk6y4dIHuOja9/BB/uGIyfBvVtF3\nzu+cfi+lpFQqUSwWqVbK/+lQEwRC1xEqgiACJofPQqBrGpKZDVCvdRKaBkpxysrVmF4cZ0KAsmlO\nxRkayaOhI2f6Q5WCKIIINCGIpETXBbqhTzc0KSW6mFLsIbUR8MGGKUTNInsx3E0vlEgqLCtGGFRA\nhARhGUNPIERExS+iCQvdgVAqhGZhWAGGbk72owo7ZmJFDjf/8D56e8dItCzm4g98gyiKMC2LKKp1\nGlITmGEFS3d51fEtXHD+KVzwD1/FjiUIQoEWCdrntjE4OAZGhQk/RNN0SqUCqcYMhojhF0OyR3Uw\nv2M+27ZvJ3R19oxWsVE88dvHCEVAy5xzWHfyGTz02CZGikWa7AS2OYvVXQl6tz6Kd+wriXsemuZg\nFYZ5ru9JYm4DMa8VZTkIUUUFgsrwHlo7u/DLPsJ00awAFepok/UT/nSkeVBBG4d00NMjRn1S+Uvj\n4MgBDukkAKTS0XQDpj+Hf/J1xAxX02yPNl1/AMd3CfxxigWTRV0GXrI2oslkMgSFPvIDA+TcNEvW\nnwtA1nya77z+bG7MfwvaV3DBCedReHwTX73qfZzy4Rvx0x7poIvOTJpcIYdhGLjtnVxw7Sdxw3V4\nbj/svxPjxMtYft71PLXpJhh4hE+ve4xXnHElD328hUVXfJDsHo9CXz/+I330zfG4/a57yJ26iIGd\nnRjpXm4dW89DvaO8p/k7eNl2/uB+ldd0X4HJTrYf93mqsRMY/saprObpIyrDv1kf/fz581mwYAEd\nHR10dHRwdM8ijlown/b2LAnPQzNMdM1A6AboOkqruU2UoOZr0QRCU5MWewTaZCUWOpqoKYTaENbE\nUDbz53ZiTUQYiTSOa/KqFcuISBBx6HzBTORkhyMlBNWIsBoRBpIoUoRSEUSSalghlAFSVImoIIkI\nZTDZv8yeq0FJOe2eMTQXlIlUBQzdI5bowE02YRi1uQ4hBLblEYU6USQxDBtZ8fFjDfzooQEGR4oM\nj+QQsopragSVMoXxmg8yqFQoIZnQK9z7q12864Pfp6VlLs0NzaRMHd1Kkk0YnPCyRl62YC7xuE2q\ncQ6Z1lZ838dLefR0dfL0T+/n3h/eTaYxja00oiiiagqqmomsCoIgYPu27RQGc6QTDeiGwWBu36zJ\nU4iA8bEhfvbA95nT0ELC9EiYHk1uitj+nbQzROOBZ6n+7nEaiztJhQcYyw2g6xCLO7R4jTiuCzNG\nIX8uAeHUaGumxf6fTfbOvIYQAl2vBQ9MKf4pBW4YxiF1+vBrzqaSBzDMkPJ9mzg2b8Kapbj9veTG\nx3EJSbkmoeHghWmOW3cKGTek1zC4NiP5+LbjWb3C58Cj/XR0Zbjmqzez4XM3s7W3n6zRTiqzir5c\nSIo0YSEJpFnSuYyjOnvIjefZfNflMJ6j48Q1LDnvYyxfczEruzfy+atu45zkg+Tu28zK1d3gelz9\nlfdy+fnv5Je7/gM3gPL8bt59e5x9xafZ9subcHpCOpIZjl1xCo/i81jHG5j3ipMZuOsmls73CVvb\nj6gM/2YVved5NDY20tzcTHNzM+lkgrlzWpjX0cExRy9m+cuW0t7aQsK1MKasaa22abo2/aobOtrk\nfk3TaoEMM6JLQi1C0yGOSRQ3EZUqmu7Q2pyiLRXDFLVzZk60Tin8P6f4oygiDEKqlSqVcoWgAkFF\nUa1EhIEiCIL/dNL2SOLYLkHFBy1CEmFaDogY1bBMGBWJwqCm2EMNQzfA1EBTaLqJTkjRamfjBR/D\n1CXtLSmqpRJuzKIcTCApYjmKiWIJoipxwyIojhNWBL9/5hkGhsd5fMvvsRoTHN3m0dzagWm3YZom\nqaRHW9rkmGySOQ0xBIJKsYKyPU45eQPjhQkQ4KIhZMj4eAHsGD/72QPMSXdhN3SQaV/J6EScmNcw\na/JUmiCRnMtQbh/Fwn5Cv8ie/X8kacGc+QsYGx0l2eDQvWgeuq9IZzppbIoTlUNksUC1OsHwnh1o\nSjtYNznUfXP4hKqo7Zg+XkkNlH6Icj9k/mdGPZ/ZWRzu0pk6R9O0WvOYvOds+ejzN90CnSFPhgEt\nYTuDrZ1ku1exZcsWfN+HXJnOM1dilnPsePoXfPoPD/OVhR8i2biCD/4wRn7VKnb15TG6zuTeX4DZ\n2UXBhTDtcdzp55LuXM7iFafQ0bOSEd8lJE8qbUBhMzs2P4hpGGQyrQTJdlLdyzG61/PEjl18fH2a\nBS0pHv3SBxi85Xu85vQz+d3SN0OY421XbeOkY1/Fj97l8sVPf4ltm1xG9oc819vLynCAyy/4At/9\n3nZ60kGtQz/C/M26blzHRUqJbdtIFaFEiGNbxKSgUp5AhSFJx6VU8cnlRxgeHqZcrSKkQCIPGboK\nIRC6mJwUVWiImoUvIaYpjlu0CC1uEZUqVKMqumNDWGDlscfw4C8fI9IUUVibF1CahooODrenmkVt\n3kAc4uap7RYEoYRAIoRC0yVCaGiaQIR/OoQ+UkSyilJQ9QPceBwZRthODBlKhKbXLH5hoeuCKKoC\nEttxgSoHgkZe++ZrSHoNlAgZ272PeS0NjBYLxG0dx22iOFbAanDJj5UolkZoTrkMDEnmZlp51bFH\nkc8mmZudx84dz9BgNFEuC9xEC+2ihAwjqlYjSlTYsXUbw7lRNEujc8fTeHEdQzMYjkLa5y5gz/N/\nIKhWCWTIQ5t+SmvLUezL7aIhHSeuqrMmT4SgrX0uA89tRxV245ehM9NAFHl4yTgTO3ZimRaWFyfd\nvpgwZlEeH8alwsj4HtxCxODjD+O98g1gmkQyOsSSFpM++EMU/oy69efcl4f78Wcq9anjZvr3pwID\npu4zcyJ25nWPNI9dsZaRVzhgPsh3n7uPvvCNeLdn+ETxVtL+9ZDMUhjP47e73HnTP7H8F8tZveIh\nTl11Fdt6M/j5fv7j1l9z5fcHOeeiD/HU03fhZVIsWbOSkd4cBT8k1Z6GMEkqmWZki4HntJPN5ujr\nvYTe/jvJdnfzRHs3/c9s4oIVDzJn4xnILedzwZkdfPkta+nMrODcH3+V//jgv3LPha/GPe99GJ9/\nljdevRR+/kM6Op8l7e5k0bpVrO65n+UPt9PcP8SSZXdQBtwj7Kb9m7XoLdPCMk2EkMgoRNMdlDIw\nTYt43MV1HWzHoSERp2NOhs62NpoTCaJJq2bmsHWmlaOZGroGulDYGkSapOuodsbzQ1gtGWKNjYRS\nIqsRSijiidjk+TWfqGXHMGwb3bQmy/Wav19oM5T/QSQhQkiEBgiJlBBFkkoloFyePcWkmzaWkyTu\nNRIFkjCUCMO
ZXlegGQYIHwjQNBOpW2DFQDd588X/SBT6KKr44xM4psZ4sUihELJ/f4Hx0QKu7TA6\nUkBFkE54RFWXSJbp7kzw3K69PLd3nIcf/w1/2Luf/l3PoRswXhihd3cfv9r+e36z+VdsfXYnFWUg\nYg5VJI89+Rva53ZS9gVuKk15dBevXn8ShBGmFmdOZhHliTy5oT+CprF3rDhr8hRCEPOSpFqa2f3s\ndlwroFAooKsAGQhSyRQTSlL1x0kumEc4VmSiXMYfHydhKfLlMZaddBJCVBGoycn7g8pYoUCrRZKJ\nyY3JyLL/GzM7hinrfLpDmLrOYZO3h7trpt7N1qjTCAt46QA/8MEfwP/FfQTf+ih/f/V2Fq35Aidf\n+EuO/azk0ge+yKnf+TCf+uhX+LsrV3Pzt/sJvK246U7u3ZTnqre+hf6nf0xhYAuuX2YkB8t7VrJ0\n1UrSnZ24mU5MIwVGGjcNdz7yPfp6e3G9ENNwybopyuGXyOe38urVPYTJdn7ynTs5c+OFfP3627jt\nM1+i68FL6R95iLec917SnR4jO8v4hR4604AJQ3Tysvz13BdezYV//znKRgYHA+MI23R/sxa9YUEU\nKgzTxnFd/LECtm1jxmOgQkQIQbxKEJYplUrEYwm8mI0a3M9YsXTI8BQORhiEUmIIgeNalAslTOnS\nmm6gkPdR1RClCyzXpTIiaWpo5KRXvpKf/PxhIg765B3HqTUUKaYXnURRSBhWJi37GvIwC2wm01bY\nEZbjFBW/jOMYhFGEaZlokYYKJ1BSoGwNpUDqNqAjpUBVyigZ8fMnd7C/aBLXI8IwoFoNSDYlGcyN\nUiyXMU2DKJA8XxpGkzrZlkYqpTEKUnD8wjZkpPPH3fuoygDDitExv4e9AzsZHBrAcVwSbpIGz2Pv\ngVHS6SbKgSIMq0RRxP4D4/xxZBdvvvTN3HLzDyhpime2DhC3OoiMcQb2/h7HipPNzGXv/h0ckzzy\nQ+QppJSYpomXTJJIJLCr46TnHY00bYzIpG1OB7oTUSlVSWWyCK0BdWAfweCz2KpMqRJhBRXKw3tw\nm7sQ6KAftMong8EQiOkQ4qkY+v/sz4QOUfKHvc58P2XFT32PQ6J2mPJuqlmz6LNGgZHeTWTTIX2b\n7qIwMEC2vZdy9zpcP0em0MuG+Wt45fHX4W+9gMsvvQz/wh189ycVeH6MN95wIRt++SlOWXcmybAA\nIwNg+rhGD6HrEhRC8j74QT+u2c5I4dcsWNjMolf8I9rYoxj9j5NZcyZGrpXC0zvJ9Q6y/YlXcsxr\n76HrHedz3HkbuHzDCH1vbuPeU9/LYzfeyob3zSdbMOnLZRks+wRzAh6OX0fmkfX89sMLKHgZ2lvb\nGSgUSKVdCsaRXTPzFy16IcQtQoicEGLrjLK0EOIBIcSzk6+pyXIhhPiyEGKnEOL3QoiXH6kHtzTQ\nVYQmIwgDHM/FdSxMBJZuI2wTO2ajGyZ2IonjxmhsamLFsmOY25JGmhq6ZaEJB0FtAlZGYGo6kVCU\nqhHKjfHy416GoarEXItqVMVyHLRygGHrNDoGDS2NvOa0U1m2ZBmuHaNcruCXClT8EkFQQQiF5To4\n8ZqP2I65WI6FZhqgGSihUDPWe/3fFtkcSQzDRSnQDUEYVgBJUC2jmxU0BVEIUmgYloll6eiaRjGS\nfO76OyEMsA2dRDwGwsAPAop+QGmiChIqSAw0BBGjo2N4yUa6sq2MjU/Q3NJCoKpEQtEyJ82u/j6K\nfpFksomWVAwnEcN0Y7S3tdOYaEWoJpINXcRjMWzbpu/Z3Tx+3/c46dgy7bE5WJUDpLwxXF3R4MZ5\nRSMsECO02wZ+NHuT20LWwnZNQ0NEAaV9wySU4qH776DzmCxu61xC6aA5Hg8/8Ci/+813+NW9d2KI\nCcrDeczKGGEQ0mSV4alHqFb2o6jVeWDSmldI1PRCqUMW6x2mwKWshR1rk8p65na4T34q3BemAg3E\n1JDhTyJ3ZoORfD+On8PM5cgEBq6/mZUrlvONL1zCh686j1ed1kmzkeJn191I7vEtnPy6FH13f5En\nApeP/GiId3/qanxS3POld3DcmqWsXd9N386nGdx6Dzm3EzIpOs1OKBv4+7dynBtw+5fu4I2vu5zP\nfv5/sWvLU/Rt/TVLV5xCR7iCbQ/ewkMPlrn4nz/F2z50E88/dCvXX3khv5yj8Y3LH+bvcyGFTU/j\nmyvx+vN89iPXsPL827nry18hKPSy1PVIuzkaXZcWL43neXhJ74jK8IVY9N8AbgBunVH2IeBBpdRn\nhBAfmvz8j8AZwMLJ7XjgxsnXFx3NctCFjkEtwsJxY0hVxbRtIiUxTZvqRIlYPI4VBYRC4GgGQRiy\nuGs+QW9Aya9QQmEKizAMJ5VrzRJTEXixBKtPOJ7q6H4qjsu8jjaCqiQs+5RHQmJxEy+dIWWP0uzq\nZJsaeXjTb5gI5HSIZBiGyEoVTWgIYRB3k8gowrIlkQwJwwgpo5orSKnp5eizuRgFQCdChQFSc5CR\nwLQMNKMBqSrokyF8pq4TBZO+W0+y8ezPU6goYpZCNwXDuTEmKhWceS1IhjEMg2qkKBTKNKeSWIbA\nsmG8DIP5CUoTRfYVd+BHAa1tc3i+/3nCKKTVcylUJMcuaCZQIel4M5VQ0r83T7kaoUIbSYiMNMYm\nqvzk8Z10zWtlaeo5do1lsAOfuOmSTMQYIKCQH2XunDYSsjJ7AhWK4vgo1cI+qmYSq3MhhfwAf7d2\nPcPP7+GZx37Fkp6jGQsCGieG6TzmVcxr7Ec3dcKmhYiJEUSxiBPqBIkhou2/wtcsrGNPJxYJfDNE\nqEmDYFL5a2LSfz69FkQdYo1PrViGQ632aWU/Y18UhpM9gSSK5MHcIPz50cCRZPF8j7VdaXbltjD4\nzFYM0yCXy2G2dmGaaTwjxZJlPQS39vLUcyGPFtaymAxzlp1KrmcNux7fxFdv+ShvyQzQedZluK7L\n8jXrWJxxcSlg+GVy/fdgGnkCDO7/bS833H4/X47bnLdiI089cgm9jz/I2Zesg56TIbmRdOpzXHLa\nMVyxJ8arP9FLc3Mn/Z8c42fuDfzc+wWje15G25w+Fi/N8Ohd32b1JSeiLfsMjzzwU97W1QsM8Bvt\nu5jXvxUIjrgM/6I2UUo9AuQPKz4b+Obk+28CG2eU36pqPAY0CiHaXqyHnYnUdJxYAsMwJ5fVC9xY\nLYVAzHUxdB3bdmq+zSjE1MAyyzhmhWTc5fhly+g5agG2W1umbxgmtu2QSMRZ2L2Qro5Oztm4kdZM\nM50Ll9DSHMeSo4jSEDFHEZ/TRKwljWOaaIaL0h3mtmY44eUrWLFiBQ0NDVimRWs6xVs3vpqLzn41\n737z35Gd08BEeYTSxDhhEGCZNjE3gRuL47gx3FgM07TRdQNdm50l5gC61YgVz2AYNqZtI4WGkAVU\nMIqYjEaSkUToOqZl8cP7+jA9jTAMcGMuXjKBZVpEUcRQbpQwrE3qhWE0
meYgBAVBIIiikMGBfTi2\nyb59OYKqIqwqNGEQVhWhkaBtTjMjZYOxMQvLNqiO5uiZ30bcESQbExwYHseNxdCEYCII6d09yJMj\nEX37+jCdCCsh2Drq4wSKdEMTz/Tv55mRidmTp64zPJRj/74RrAVLsQ2L56s2lXiaVDJFqnUu40VJ\ned8w3cetRI/rxLKLKFppSqPDqEqJIAQzEUc3JEJTGIV9VDfdSVEbxwxnTp7WHCqHL7Y7ZFR42OTr\n1ETr4e6c6UVYU1E81PR/bZ9EzXDpzNao84w1KzDx8TIG55y3hrXrTuSMs87Cy/awev2b6Ohew+c/\n/RnCwiDb91xIZ8bj/Zd+lOcf+goffv1RXNbwe855/CO846IL2brp22S9DJsfvI8dW35K/+ZN+LkB\n+p+/j8GdWynnQvpGQk464418c1MZefTlbH7wx2x75McMDg7Quf58ulZ2cs7bjueXP/kaK9MZyk+P\n89qPvpPGAxu486Z/Ab+fsFCgwzXIHL2Gh2Ov5hWvvZz8d4/nOGMnTtqlUCjguAGO4+K5abz/Rydj\nW5VSeyff7wNaJ9+3A7tnHLdnsuxPEEK8UwjxhBDiib/mAQzdwNYMrFgM03HRDRMjFkc3alaOrZtI\nKdDRMAw+wQxqAAAgAElEQVQdparoUkNWI/RQokRIi2fSk22mu62JnnmtvGJJN6esPoGVS5bwsiUL\naUhYWLqgNDFCJShTLJVRmsSLx4nH0sQcl7ir05JO0drcTKa1lbntGRa2pnjT+pO48PRXcfqaV6CH\nClVWFIeGmN8xHyUMUIKoGlIqjlMojlGNAnTLwLITODEPJ5bEdGYvN0sQFFHSpxqUCcMAFYWAgWF4\nhJGJrltMzjgjwypfuuEmoqpem/eQFqoMITpKRZT8oBa5BIRhiOmYlCplpKYzVvYpThSIJwwKhSJC\nCNa+8myKhSLjhQrlapmBwb0cGB4m7wcgyoyPjtE5bw5SVYnrOiPDu2lty1AqFTFdk1K1glQhA/tC\nlvYswNfiPLM7x+KmGJ5hEgQ+J89PI8LZDFkNsRyHchAxdmAfjtdA59EvZ9RswLc9jj3zAvr2HsDt\nWI6W6aaiNRA1tjJnwctomr+AmNtMQ1sbwd5daNUy0i/UjBGryvCD3ySkiBQSXUYoJdGVgUQhhESh\nodBAWZOdQI2ZoZHwpy6bmZE1tf0gIzG5SnwSIdDk7IVWApgGLFq1HsJ2gr7xaTdHV1cPmzf1sePx\nB3nDmlVccdZSOjPtfOubF7Gk0+eO97+fExPjrE334Ry9gq/d+u/senon2fkZ8CGfe4hHbz2HXf5d\n3Nufo9pQoiQk9mm/5MsHNvKeV7ybS9/xbt5zxaX0Pn4X9/zg23SduI5sz2X0dPpcd9WJBAYM9m7m\nkRtuoenKV5Mqj5BKL+Ohp/vhsm+wdcW9tD38cb55Ftz2vc9D9ypCc4BUu8u2vkfIpNvpNDKsvu6P\nR1SG/+1fS9Vqy395DKeUukkpdZx6Af9g/ucwhEQG5enJT9dpwNBjaMJCE/q0G0SJAKVq/kkLDUc3\nMVwb3bHwwypt6WaymTTz57Zx9MJFtLdmiDsmjQmHuG1QKpUIqgHxeJxqtUoymaRSrdLQnCYE/Akf\nU1Mk4za2HbF4boa0YeIXRjHsOIlUBsNuwHTTaMJi587npqN+hCYQugZSEVZ9xsfyjI3lCcJaBknH\ncf4a0fxV6LpOtVJBw0LXbHTDBUMD20JoFSJZqqnu4gEKxEml2sjlcliOTrniE0URpWKJTKYFw9Bp\naEjS2NCA4zhIKXFiDr7v45cCZKSTSCRxrAbmzj0WKRwKlSKNjR5COSTiaSq+pFqG+V1LaGxuZ9+4\nTrEsaEmliJmgKhO8bF4DDoLWdJqEleaYrmYa0zrP7h3HdhJsP1BiUE+SGxXsw+RApTxr8gRIpZtY\ntPwEtu0ZZU8J/LEcmjIY8U325UrMWX4qhVgzpUAyPOZDFFIZfY7q2AhShhzY8we0Sh4tjIgbLg1O\nDLMqSWsW+Z9+g+FH7yC//SFGNz9Aoe8xxMgfoVrADEYxg1EkBaRUhyQfm1LqM8sOD8X8cwuk5Ay/\nPWJ2J2NT6TRBEPCq9etoy7ayZOlSDMMgPzKC7/ssX7aMCy68mMBPQpDhug/dSOzsV+JmVtDVZLPy\n2nsxe87n/h9vIpcb5M7v3cFI3xae+Om3+eHnL+WV0VG8duEbefOGO8icfBcfHx3lx5+/ntesW0/W\nS+P5ISP997Bj033c/8wI2Ys+xq83BXzr6nMYSRrccO0X+OhnrgJvnK6uFEMD+/GDXh7+msnbX38s\nbzy/h7TbiesenHANgfFCAdc1gCSXX/0fR1SGf+14Yb8Qok0ptXfSNZObLB8A5s04bu5k2YtObTLI\nRw8NDD2JpoUoFRIZUC1XaxOIUiOsKiwEqVSSCb+AqEry4zXF1NI8h0qlQizVTENDI4mkjSZsYpbN\n+Oh+NCJQBpoRYmouGBLNSqFFI4iwlr7XEhp+tUxUDahWyxRz+9DQSDS3EAYKw4gjXZfyeBFPWewe\nHEBTB60pTYFCADpC1BpSWK4SivKsTsjKSohtWQRTDVhKFCaBX0TTDQzbRhohmtbCpoceZqxYRiqB\nY7tUyj6NTWlGq4pqaQIn7hJFFXRTJ2E7SCVAmYwXi+iGoLkhSWPbQpYtXcvbLtnIGRs2oEUWhhEn\n2RBhRgU6Ohazb6jIb7Y8S2NzEzo2jl4iZnjMa24GpTMeFNEsjWbPZGGrQIoyKaOJo7q72Na7g+yc\nLCqfJ7AaKYQNFIs7Z02eY+UJkk0xRvM2YTDBk1u30dq0lpSYID8eIUYmaGnJoEUT7B8cQthJjFIe\n23AYm9jP2L69RAeeRzNtwqiI0DRCGSFsDUuY2NUiWnkfyh+iUvWBBYw/t4WJYogRtwgwiM1bQLz7\neAJhoIsq6Gat3sma62UqXYIWKSpE00r9cMU/lXp6KmWx0gTI2VP0nudRKBRwA5c/+AEnu+0sP209\nKTfLuy9eSSHfS35oJ+HAUgbZQse7vkwh9wg/+u6Xufakc/i33deTlN2wYgXnXPYpsp0pVq7ooWCu\n4HXfWc6Pwr3c+/mPctrXI7w7B3j/MUu4+aLX8cW738Trlq7gwL4eLv++zxOb72NpLk8YGnzkN5LM\n0tPoCv6Va+/7Nr8NP8fy3OOMmAb59D4G/3gu1V/+ho+873racyYGIYPB04zkV5EyQsx0Jw888hA5\nQtLtGUbDo4HtR0yGf61Ffzfw1sn3bwV+OKP8osnomxOAsRkunhcZHdAJghBtsjKWy2Wqk+l0gyCA\nqIJjajQ3pShWQkZLJsWKgx1PEks2YntJmuZkmd/Rzty2VhxTJ2brGEQkvTQy0giLRcr5Kk5yHkk3\njSkU6BqGY9HYUEsCFovFcF0X2zIIdRcvOw8rliKVmYchFXq1Qsq1mbCoZQHUtENi+A9ZsTsjpl9o\ns6foMV0izULoteXvQtMol8v
o2uQqyzBEFiOiaomf3L+l1vBcl9HRMUzDoFgqoaTCsi3amjIkbIcW\nW9CQ1DFMQdn30TTBxRdcyh0/vpt/u/FfuOTijaRS8P4rPkIQKAxTEXPjmOlu9o9XMWyNBe1tNMQs\nXEPRlm1HaQbNLRlS6RSphhZeuaidY9KQTGY5pusV7M9PIEf/gKECguFdLDluEWXG2TO0k6M6Ov+y\nHF4kftr3XdSEyYJFxxBpNgPbNvOr3/6e4QlJzHWwXcnI2AAHCuNk0g5trs/I3p08u+UJyrv/QHmw\nF69pDtHoOK5m1DJ9WhYJQ0eXVVzbwtVNDAlzmtuY68VQqkJbtglXl/h7nyUY/iNj992Ctb+XaqE0\nnaZ4ZtqEKabKpiz96dW1M+Lpp+vsYat0jzhmnmynS0iBN5z3PhafeBpb+/N4XpJsezsrT99IpmMZ\ni3uW8vq/+xhnnr2U12w4jTsfe5ief/s4b7/xfl57zmrOPO1cgjBgJD+C030uLSsuJnP6tWRXXMLZ\nn/osi7MeV16gcaa8nsqT1/DU9Z9k+8Pf4eOf+yxuZiUEtU4ntz9Huvs0PvLwPAo3X8wp3d3c8NZr\nOLVQ4LbHH+RtH36Ovn/+Fzp7cmxYfxqBH2CQIpVsJ5VOQboLfzzHG//1y6z792Y6/9fDvNx/+IiK\n8C9a9EKIbwMnAc1CiD3ANcBngO8KId4O9ANvmDz8XuA1wE5gArj4CDwzADIUOHYcYyqDpVK1nOlC\noKKan9ExdXQlyQ/tp6wMGtuasW0L23JQChoakshIEowfwDYFGibJZCOl0TEs2yOKImTMZPHLVvD9\n7/xv1p51LoU9gzS1tJAv1Vbfln2foBThWjaGIWjIzAXLRRYqSCyEjGhwbQrDee5+9Ge1BFQcDPP7\nk7hmMbWYRc1eQwLCsIimx6mUQ+KJGEopbE0hzASosBZm51io6iiD+QMEkY6QVZJuY23lpuZgWiFl\nPySWsAlVIzFTEpSG0XSFDCO6uxdz30OP8PTW5zj3rFNZvXYtUoNz33wSA7n3sPlXj7Jv7z4EVcql\ncRJxl56eLGXfYGBoiJH8OKEMqAQSw3LIJJJ41ggTwsGxXKRpoCyT/SN5OhIxDpQEP/v5Ywz7Y9iW\nw0Q4e5OxhhtSMUCVKxy7YiXbZMjWX/2EXduforu7m6MXL6YaSVauO5stP7uTeKGPpBEiczlirVlk\nMs7Yvl2YiTiJhMfQ4D6EJhgbH6NaCYl0HV2YNLSmqRZHGT7QT3PTHMbGJpgoTxBLNKJGxjFcj72b\n70fpDu0rTiRq7kHTBFJj2tceqeiQVbVTlrqm1XII6ZpGOBmxM7X2Y+YK2SON55kUCgN0dXdh5ENy\n4zmWrjkNgDCAwMsSmv/Oji1b2bDuMrp+/jEKYZZefytrT9yA8dNH+fnKkL74WXjuZpYvW8rysz7E\njn4XBvohzED3UpaGmzAz3dz6zY/xg9QVeI2j3PrsdzA6u/AL7RzXtY7Ud5+gY343g799BM66kO0D\neTr+cAfrbl6HvvsXuEGeTDLJ3bkU56w5j/6jbwRMQlxCkkCKpGuy/YzvcO6q72Fc+BEe8SvkzvwK\nZN90xGT4QqJuzldKtSmlTKXUXKXU15VSw0qp9UqphUqpVyul8pPHKqXUZUqpLqXUMqXUXzXR+kIQ\nIkIzLSzXxS+N1X7xKCSoRhRLExTGx9ANm0izcNItuI6GVZlAm8iDXyJmmJiaoNGN49oGSkY4Vgzf\nD5jwR7FjGnrCwYlCiOn0DvyRu+57lGrgY7kpjLA25dXQ0EBLSxYZVYmUwBISEUEQVBno20G14lOe\nKLJ9cCf50gS2AXHXxLZ1dINaPL1+aAIpAE0Y6Nrs/fGIadpEgcR1a3npozCkGgS1EL3J3PRKq1A0\n04yMhLU/KlGSeCKBZVvouj5ptWsExRwff/vJbDihk0xDF61NrZz7+jfxu+3P0rf7Wfp2P8dnb/gC\nW5/qp5QDXcL73/smTj3zIo5feyatrYuY0zafSrnIU9ueQ4QVKmN7kaUDxLUKjTGDt7/7Ms58zXmU\nJ0z2DCmGJiQi7lD1S8xvNmlqirFoTgsnzO1g2ZLjec+lF5FNzuLktgr43fObMI0YMbeBJcevp2Px\nyxkdzvH0U0+iFQ6Q0DVicZuUU8Qa24+IJEbCZbQ4jlKKZEMD6ZZGCoU8iZYEjqPjT/gowyGogNaQ\nYjg3xu6hEfSKgar4NMYcErZOU9KhKeliqIAF2Q7mpeLseuBbDP/2LrRoHGMypcKUv36mX35mJM5M\nCx8OzVw5W66bsh8ALoP5QQbzO8EcgLCX5etX47YuZqQ/B0aajkwaJ+PhGZ10ZA162peya+dWzG6P\nbFeGvp13YXg9pDrXsO3pPtJuyI58Adwy3kCaxRe9k8HcTzkqFbJ2fZoNa9aQNXoIC1Dw03Su2kiY\nzzGyP0/H0etY3tnN8hXLeL6wnx9d/zE2dOfpffnV/PDxu+gwe8klA3Y90wueh+8W6Gg/k5QBj227\nnR/c/s8c5/2Cp/ofpzCYI5tOH1EZ/s2mQDAkiFBSqVQwTRPDMCiMjxMEAWG1jG3WhpoNDQ04MY+m\nTJZEQwbTaUCXAaKwF1UqUvTHCYOQKIpwbBtZmUBXAcWxPBPjIyipQ6VKwjYZzT/DnIWLqUyMkm3P\n4jguhYkKFX8EKTWQJpGsELMlzQ2NNMcNquND5IaHefDpbQjLQNNrlpCpgedYmEJiCjnd0AzDmG5c\nsxtLb2E7LkJG2JYNmoXpJiZzrNTyzUeR5L3v/yhRFCBlhFI6Pgq/MMForsjcVJz7b7mEf/vMm1m4\nqJ3Xv3Ytn77mRNywytZte9GiCi2NKQZze9GMJP07/oCeAM0Aw4CL33Y8gRZD08vkRw7Q1DIXGcG+\n3AAqDNDNgDmpFO/4hys5qnMxRx29hA984nPc8K9f5YzTz+VNF7yDFcevJVQ2xeoESB/hTJBwqgzt\nyXPGazf+ZTG8SGhaxDOFJwnR0IRO3LJ5+fEncd7br+Do5SeSPXYNHd3z+f2PvkZ1aD+JbCeVIKA5\n3UI8nqQhmcY2TKqFIpauoymJrhTFapEGrwHL0GmIJ2iIQ3ZOG2bcJgon8CdyJOMphGZSnQhINTUy\nkt/PcH6YTMdRGLldRDt+iZATh7hnptyJMOnGmfzXtpl1cNptIwTa1H8UzAItrRk6OzvxkmlS6VY6\nsytwnG5yQZKTz7uYLi+DR8iIO07PyjO59F3/QDgCnrmVG798G0Ho8M/Xvpfbbvoojz64lZUrTqPg\n+ww8/zzZ9nZ25X1SPSshvYy3vO96Vh+9lJRv0pIJ8dwuPK+bH+7cyTnnncdxq1ZxzukrKQzsZNvW\np+nry5Mfgd/9fpSMex4jP7idsfe/jbVeL0YQ0rulH6+1AMDqNat4gke4bugapHY7V16WJrvidJYv\nXU8603NEZfg3q+iFJgkrY1SLpZqFo2CiGjAxUaYplaIpk8XzklQmSpihRAgbOx4jnmzC
iSUo+3lM\nG2zXxk4k0YVGEATYRoSMatmLVSVA6RqFA/t57alvIF30EcpFRVXGRkfQ4jrJthTDu/pxG5NEURnX\njqE0wURpFNd0sRJJHn3qcRyhETdchGZhOQl0w0bJmrJP2CaerZGwBIYmMXWFY2kkYtZfFsSLRBRJ\npFJUQwWGTSgMgiCsKXmhCJG898PXsftAxOh4gBAa6VQjohoyN9PK2845g9v+9QOYXjtKa0aYFjLZ\nRMKdy3lvOI2Rif2YjsPEREh7IsPSrmNZdcLxCKu26rZSrqVDL/sF9u56HkOTzO9ooyPbgqmPYcZ8\n5rUdxXuv/QIt2W5MAclMDNONo0yN4165FmHovPaSt3LahnNJOga+5pPDoXv+MbTNb2ffrl2zJk+l\ndIJonCf+uGlyxKajazqr1ryaqz72Qfp+8zDPPXwP7S1x0qkMsXiCeCxGsVjEMHQCqeF4DWiGiWWa\nqGpAaXSUOY0t+IU8Cc9lYryAWVE0NsQZy+8nUA6ZuYswEnEcRyfdlEQWxojZOs0NHonGJE4iTjA2\nwp6f3Vn75ylx0KAQMxT7zGyZ05krZ7oSayfOiiy7unrItneS9bpItmdw2tOsXLmR9mXH8ejOrRQY\nwQgh684nFw4xEvgsWdmKQZoPfuBKhvKjeCPdXPXuf6RQ3knQnmb5inVs6+1lx5bHSQX78fP9hIZH\n17LzyJ7/McyXn8JQaxfXP3IHv+4vsHL9WaRSKTKZDF2dPXT1tFMY8PnE26/kqd9u4YnHe/nezgPk\nc1/gXT/oZMN51/DRT3yGdPd+TP80trke7/6XHN7Rn+X9YoCVq0uk0t1kuzvJplPg/r8ZR/8/ju4m\nUJqFsGJoKmBiYgI3FsO1NAxdECmFpklMEyIVURrdi04VL2bgxlxisRiGYWBZJrFEHM/zSHgJAilo\nam2nWCyRSqf+D3vvHh1Xed77f/be777NzJ4ZjaSxJdmWbcm2AgZswBC7AQoulwSSQEKgNJeSkELT\nNiVJKbm0tCUkDaEhl5Ibl1BKSAiHECBcEiCGxBBDbIINtkEGCyzbkq2RNKOZPTP7vvf5YwuTnHN+\n67S/Vesc1jrPWlrSktYajZ7Zevc73/f7fL4YhkEYhCw96kiWLzkGJbZRzJRlEwcKkeOi5/M4ToDj\nu0hquhvKWoI4sZnxHPa3m8hG5hDnOwxDvCAmEQaxrKLoGVRVRVVVsrpKztAwTQ1VncOoNqECEYoA\nr9VEiyOSWEpnEZQMaDK7XpuhXrMJ4jg9BBcybqvFB9/9Ac47XUckIVHkEccxQQBKw8Vvu7ztxC6q\n41NYhSJZHf74rWdxbO8iphot4klo2+nvVQC7to++/gXMzy+hpLe5/E8v4Kqrvs1Xr/khn7j8c1Cb\nII59/GyKapc1iMJUkpFndefeZW9BThTMnMS0HbJp0694acdW3HjuIHEJIVMH97Np+yNpvJ8k8Nsz\n6InPrbdeT27RfLpXHUucKdK9uAdkBcdpEssKmqqgRDZus4pQZKQ4IW777D14AMdrEksyupElak4S\nmIK9u3cRuCGWIRMFLq3JKfRMlmajieu4tBMNVzYQcYyl60TV/Yj2GIE9CdLvv3s8ZAYgjRdUZIVY\nkhBKir343dyFuZNu3DQYxDRxnIC9ToOgZNAw++g3HZzQAQGqajAwsIgVbxnkAx9Kjwcv/sj76O7L\nsWv3Ru798j+xvEuwUGtRHd2KMVTiL6/5R0aqLpOYOKbFs7uHsRs2vb197Ni+g+5ymdEG2JQBUIXA\nMWFgaIiBUplL3v42pp99ips/+A9M76xz2l9eQVW9g7/65Ee45/YBbvzLf+IrVy/lmsse45L1fZTv\nPYYz5m1g2/aNCHoh6MA0zcMOHXvTLvRhlCCrGigqceinkk0QIKSE0GkSNiYJAg+kGMdro0o+ge+l\naU+yQjabQ9XEIQZIEAR4rofQUvIkJPi+j+/7mBmT5sw4HUvnM3VwP23PQ9M0EhQK8/qRivMo9A0y\nb/5SPE+h3W7Tas2gFUvc+8SvyehZsiKd2k2SJJWaNA0UQYxCTErS1DQNTcjomkCo8qEYw7moOIkJ\nwxDHqacMINkgY1kIReA6AW6iEKPjhzKyKmPlcrRbbVavOoITjm6gF5YSyzJRLUjdGZKJn7hohoHu\nd3LtP17AH607DlUr8NjWJ1hm5ahOuWgK1MYj2jPpxXj8sW/huGPP54gj8nz0nX/LtGuQ+C6JnDLw\nndikNgP1cRfHgaiZoCsacgykw7dYuTIKUMhZiGZIs13nkd++yKbfHLYjo/+p5DigZ9UKIrnFdL1J\nFEeMH3yVv7rsPDJGGbcdo0gZUHK4zTZec4YgTN1iMzMztOozNGdquK6DJEnU6g06uspEQqfYWSYM\nIlrtdmpAiEMsU6ezsxNd15g/r49Ws4muGxQKRSxDozOfpdVqMT01hbAKKFqWvT/7DqHvHtLqfzev\n+I1hKg755n/v70vSG8FclGV1MFmpIUyTIHTp6O3DLHdQKvezY9OjmJZFrdrAypk4rkNHqcyO4R2s\nXbcWuzHG+y88G8sSPHHfPfzkrh9x29ev5W0nFhkY+ggXvOfTHHPc6Xzko5/i0U1P46iwd3SMHTuG\nefJXm3hiw0Z2juyh0nBwZudbnty8hbe/57188Yc30di0hSPbgnce08vn/u793HBjhb3VPuxShWrl\nQ1x37XbuuK+Xuya3cNkn3sNJp/QTlE9mVd8QhCVMM48QKmb+/zDU7P/WatoNvAjCJMAJJVzHJwxi\n9EwGq1jGKs0n9AJa9RaSXwctS+A0iMKQwJnC6OxD0y2ymkqS+CStClLioMkhMzPThKGH74cEQidI\npFmrpUu77eBUbZwwQbeK3HH9v/Cjjb/kthuuodqwiWiiC4nOeYs4aDsIVWDqGWQ1/Wd6PaFJkSQ0\nRU5vGElCqCgEsgyaAaqOpprI0twdxiYIkkhCV4sIWaHZnMJt1giDJobk8IPvb8BxQ0wzgxQnyGT5\n8/NP5J///BjMXCdJApKiQUZHUgyELggDFdt2aCUtlnTUOW/9fDrVCZYOzaelNFFfe4Ybr/osv33m\nXmTXo7nPIfECNj10NRddeBm5o/vp7h0glDtxhUySyZPtylMsanRaBhqgaBKRBBFpOmQEdJdLqJpJ\ntdpgT20fYSiTi0Pc5txhir0oJvQCjILMgw//gN88/QT5wiL6F69g76uvIjIWiaqQBG2mqzXagUwU\nxsR2G7daR5FUsrkiKjqqBk3XwdR0CrpJa6ZK7eAYoRsRtVoUMgXMbJ6WnxArCrGI6Cp10r2oTKa7\ngOx7tKYqKFHAvFIRQ8+z/MhVLFk8QOXXPyGIvEPaO1IafylJKTQtBanNXiO/y82RJBJ5biBxds3E\nEBZCOIBBP/046jyqToVxxwbVYulR61h0win0BiE7fz3M2nPPpdKoUtm6iZlnRzi9OMA9zhg77/4+\nvevO4eabNjA5UuG7N97O9pd3c89PHuD667/
LI/c/xfve9dc8/atRnt44ytWfu4ktm3bw+CObCIFA\nhbJa4t//5Tb051/hwjPO4GO3fJv8+9fzD9+5hXec82G2VcoYTh/hmquxr3gbl9x3G3d87RrmtW4k\nfOx63n2Uys2bNuCufASHgJASjtU4rD180y70nucxMzNDEASzu3GXKIowrSJ6Ng9yhK7rdHaWaDR9\nZLd1KCFHCkISVSOKPTyvRdxoIAxBEicIVaGjs0RPTw+TlQqGaVCrVgkScGfDspFklCjht48/xvz5\nS1GmqizNdbJ0xTI0RSBrJl4i8dRvt2HqWWRZRlU1VFXFNDNomo6qqr8fP5iohIFEEKcfXkSql89R\nyQIkkRCEIMkpYTOJBBIGklZAt9Q01U6SWDi/i1uuvYCL3nkaaukIZC0iiaHddFBkgyCMqFXrJLFM\nHElEoYYUFxGRzWevOItnntvBvU/ezJ3P3MKG0ef4yk1f5Z3vO42bv/yviPaLfPyv/4liaQHCBV0G\nVQGRQOJCuwoigCgBOYDQgygGZFD1dJh3unKAUjGF02VyOUpZlXJWR5Ezc9ZPKZHxWg6aobBnbDtJ\nBMXOEgNHHc1zW9Ic2wPVCtPtNlGmSKJqSHKC2jMfu1XHqVRo2HUgYebgJFGUHpLqioQchSiyhKoI\nfDcFtQVBQKsyTmv8IK16HbdaZXLkZdzxcYzQpVTooDh/HmEcoocN6uOv4ksamjuDQpTKS0ASR/A7\n1t74dTAas9GaSkwavqlw2/d/Pie9XLFiCHMWMR0GIU5epTTYyxnrzwQBjWoN1bQIrX5WnXIu2zZ9\nnycfGWVgsI+7vvVF3nHKSn72oRsY2BJSbVTpG1xNLZjHWUOTfO3v/p6F2nEc3GVwzw0vsPvh7Zz+\nh6eycH4PN3z9G/z0vvvYVQ256is38ezmzdgNG8syeXL7Vt71tpP4syfu518efYpHxyp89vSP89VP\nXsyp516KWV7LlT91ePydB7mwej1/PlTiko++yKorz2TVpRdyWqnKu6qDgIOT76USHF6E9puWR++4\n7VQG0VXaLYdarcb8nh50VaXZOIBGmmavqiqqmcFtOsSqTjh2gM6uIrLvEyoxktsAGWSyyIlEFKU4\nXl3X6Chk8ewmVjZPIdeF5jdRXBMiiUmm6Zrfxfp3nsTZgY9u6EyMjRFEMrgBTc+hWq1RKBSJYwnP\nS/BDakoAACAASURBVNnsYRgSxxGapuO6Dpqmk8kI4jgiilL9XhGCKPRRDzOj+ndLRiCpJgkeTtvB\nyGaQEo8w9JE0idd27iajSFiWxuf/9gP095RItAKh54GkEkcenu9hZPJUp6uYRo4olIlCSCRBrJTI\n5XIE7QZf+uzJ3Pfg85RiQRgEBBFIcchL0zvISDlW0IPvg6mDKkEsQeylh7WmNhv5a6SHuIoBSgBI\nEIagaECSEEQKSWhTzGh0JDF5vRO7ve9/14b/soqjCFXEBFGMsSRDaVGGndt3smzFIKViJy8+v523\nvu0UFCPEa7VRBAjVoPrqa3R2dZFoAtnxidw2QRhQ6OxCUSS8ICLwA2RF4DguhUIBu9HANE26urqo\n1Wq4tRa5QgfZUhFFSFidJRJUJveO4vkOUcci2rUK0eQYOaFiv/oi5tLVyLGcNjKOkaQ3yJaSosxe\ntzGxBKGvcfu3bgI1Pye9vP/JhxgQA5SEINwGQjiYosSjGzdTdd7BSX1L2TW8k96+bgjOZHTrdt5x\n1plUKIMp2LX5Ic6Tz0YcWMSqwQEW7T6TgffdwrM7q6yaXwK1yrPDO6juqGKbz/PwDV/gY1cGPGua\nfPTKL3PB1glevm4Jly5+Bruym3JpNbs2b+eMczpRpu7iXa0Wud/u58Rze9k5YlMuO/SuH+LEb1zL\nSP/FdKzuZbJ/kErDoaP8MHd9cpjKNWM8cM2HYHiAx29dx6pvHRaAwKF60+7oFdlA13L4XoJhZA/5\ngZ36FFlVx3PbhG4br2WTMw3cWEHLdFJrzlCpVGjaTdr1JpHIgWLixz5xkk7TJs0KodcmcJtUxvcg\n6yauotCuO1iaycTUNP2Dy+lb1E99apTW9D4OjOykNTNB1QupNhvc/9CDFApFFEWZPWhNJRrTNFFV\nbTYG0TjkS06SNC821fHj37O7zUV5ngt+ALGKUFOQlet5JErCZz59LV5YR7fg1KEBBpaWCaKE2Kuj\nqtosgtnENIq0m22kSMZptnDDEDdu47oOcRDTbscEooBlraDRtNn26iT1xGd+vkBWz9GMJKy4E0RE\nIqX+ej8GVFBTORsMQE4lGiUPiQaJCrF8iKqL3bKpTk0R+DJ79+0n1hReac4gzSGPnjAiI2cJJY/O\nwRLPjDzDM8/+BkU2OGLVcezY8Qw/ufcOqoGPUegEpQQ5k8mR3ex9ZR9+s0221EHG0tl7cBrfaaFp\nMoqSoCoQOG0WL1hI6HokqkJrpsHul18kV8zQf9RKuvoXkM1myHUUCb2YoFFFSWL6lw6hKjqdfYvo\nXX40k56NM70HEQFyTCLFIP8O3pj0pvU6JTPys9z+7ZuBBPqXz0kr+3p72Ts6CsCRK1di2ynYbGBo\nEIARu0bH0FKqOLh5Qc22ue3Wb+BUS1iDq7njkWeZkvdyf6nMv+7u5+G7P827nK3c+BuNP773Lva9\n9hgXfugi3rvmgyyMFtI7dDYf+vCXWHXyOTzz8iSOmMcR2SNxnTyCEpVKhZPWr+f968/myKmEG97z\nZwzNaIS2zdPbN/GJ95/KT699N2tPHuSkc67GFScQhCmKOAxDRs1Bdm3exnnvWMYlHz+Kd12mcvsN\n/29H/7+sIIhQFJVCoUir1cI0cwSuR27hAgxNQ9KzhPYU05UJNFWlo68fIaskiqDebBEj01nMM7+n\nAy9MkKIW7WYLSctBsw6eixwFBJ4PskBOJAo9vdRtmyVHH4GuGkhGQNOLaLY97FqTZgLtxhQ//8Uv\nkRWdjKoDqTshZ2XxPTcdKAoCCsUCTnuWFJkkCKHOjpkL2u0WuVyOMDz8nOrXSzENPL+NIr9xSeiq\nxIQvmHAS9u+Kueov/5pFSxRElCNSPVStgNeeQZIkfC9AUdLzh1gxUFWVMJRQpBBZ02nZ9TTfV5YJ\nQx9TgQXzu5hutcjn8yiaz1TdoeL4DC1ewrxeiALQgThKD/9IUskmVgAtTWdk9rw6ikE10sPY0df2\nIDJ5tFBn8cpFvLznVVb1ZAmznXPWT99zaToOkiyhOj6TB2pIlW6+9IUvkhEanltnYk+Nl48+hTWr\nB6hrDewd45SXD9CuVzmwdz8T+8bQDI1I0dHLnVRH95ErFZFD6O3rIA5DMpkMrclJuvv7yObzRLJE\ne6aebh4UQRi0EHFC6NjpjVCSidr7UQOTdiKRhAaq4yAIcePf98wHQfB7Nss4gdv+7Y707ZTrwkub\n56SXj2/aSP/ifp7ctgV2hAyM9FK9eZxV1jpO+5O/4fG7R1jTezau3cARsGqwTCDy7Nxd5fh1x/Pk\nhu
(remainder of base64-encoded PNG image data elided)\n", 392 | "text/plain": [ 393 | "" 394 | ] 395 | }, 396 | "metadata": {}, 397 | "output_type": "display_data" 398 | } 399 | ], 400 | "source": [ 401 | "# display a sample set of images and their labels\n", 402 | "loader, folder = get_data_loader(valdir, batch_size = 4)\n", 403 | "images, labels = next(iter(loader))\n", 404 | "show(make_grid(get_images_to_plot(images), padding=100))\n", 405 | "labels_string = get_classes_strings(classes, labels.numpy())\n", 406 | "print(labels_string)" 407 | ] 408 | }, 409 | { 410 | "cell_type": "code", 411 | "execution_count": 31, 412 | "metadata": {}, 413 | "outputs": [ 414 | { 415 | "name": "stdout", 416 | "output_type": "stream", 417 | "text": [ 418 | "['cats', 'dogs', 'cats', 'dogs']\n" 419 | ] 420 | } 421 | ], 422 | "source": [ 423 | "# display the predictions for the images above\n", 424 | "if use_cuda:\n", 425 | " images = images.cuda()\n", 426 | "predictions = model(Variable(images))\n", 427 | "predictions_string = get_prediction_classes_strings(classes, predictions)\n", 428 | "print(predictions_string)" 429 | ] 430 | } 431 | ], 432 | "metadata": { 433 | "kernelspec": { 434 | "display_name": "Python 3", 435 | "language": "python", 436 | "name": "python3" 437 | }, 438 | "language_info": { 439 | "codemirror_mode": { 440 | "name": "ipython", 441 | "version": 3 442 | }, 443 | "file_extension": ".py", 444 | "mimetype": "text/x-python", 445 | "name": "python", 446 | "nbconvert_exporter": "python", 447 | "pygments_lexer": "ipython3", 448 | "version": "3.5.2" 449 | } 450 | }, 451 | "nbformat": 4, 452 | "nbformat_minor": 2 453 | } 454 | -------------------------------------------------------------------------------- /lesson5-pytorch.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "/Users/rodrigo/Libs/torchsample/torchsample/datasets.py:16: UserWarning: Cant import nibabel.. Cant load brain images\n", 13 | " warnings.warn('Cant import nibabel.. 
Cant load brain images')\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "%load_ext autoreload\n", 19 | "\n", 20 | "import os\n", 21 | "import numpy as np\n", 22 | "import pandas as pd\n", 23 | "import torch\n", 24 | "import torch.nn as nn\n", 25 | "import torch.optim as optim\n", 26 | "import torch.utils.data\n", 27 | "from torch.autograd import Variable\n", 28 | "from sklearn.utils import shuffle\n", 29 | "from torchsample.initializers import Uniform, XavierUniform\n", 30 | "from torchsample.modules import ModuleTrainer\n", 31 | "from torchsample.metrics import CategoricalAccuracy\n", 32 | "\n", 33 | "%aimport torchsample.modules\n", 34 | "\n", 35 | "%matplotlib inline" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 3, 41 | "metadata": { 42 | "collapsed": true 43 | }, 44 | "outputs": [], 45 | "source": [ 46 | "use_cuda = False\n", 47 | "batch_size = 64" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "metadata": {}, 53 | "source": [ 54 | "# Setup data" 55 | ] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "metadata": {}, 60 | "source": [ 61 | "We're going to look at the IMDB dataset, which contains movie reviews from IMDB, along with their sentiment. Keras comes with some helpers for this dataset." 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 4, 67 | "metadata": {}, 68 | "outputs": [ 69 | { 70 | "name": "stderr", 71 | "output_type": "stream", 72 | "text": [ 73 | "Using Theano backend.\n" 74 | ] 75 | } 76 | ], 77 | "source": [ 78 | "from keras.datasets import imdb\n", 79 | "idx = imdb.get_word_index()" 80 | ] 81 | }, 82 | { 83 | "cell_type": "markdown", 84 | "metadata": {}, 85 | "source": [ 86 | "This is the word list:" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 5, 92 | "metadata": {}, 93 | "outputs": [ 94 | { 95 | "data": { 96 | "text/plain": [ 97 | "['the', 'and', 'a', 'of', 'to', 'is', 'br', 'in', 'it', 'i']" 98 | ] 99 | }, 100 | "execution_count": 5, 101 | "metadata": {}, 102 | "output_type": "execute_result" 103 | } 104 | ], 105 | "source": [ 106 | "idx_arr = sorted(idx, key=idx.get)\n", 107 | "idx_arr[:10]" 108 | ] 109 | }, 110 | { 111 | "cell_type": "markdown", 112 | "metadata": {}, 113 | "source": [ 114 | "...and this is the mapping from id to word" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": 6, 120 | "metadata": { 121 | "collapsed": true 122 | }, 123 | "outputs": [], 124 | "source": [ 125 | "idx2word = {v: k for k, v in idx.items()}" 126 | ] 127 | }, 128 | { 129 | "cell_type": "markdown", 130 | "metadata": {}, 131 | "source": [ 132 | "We download the reviews using code copied from keras.datasets:" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 7, 138 | "metadata": { 139 | "collapsed": true 140 | }, 141 | "outputs": [], 142 | "source": [ 143 | "from keras.utils.data_utils import get_file\n", 144 | "import pickle\n", 145 | "\n", 146 | "path = get_file('imdb_full.pkl',\n", 147 | " origin='https://s3.amazonaws.com/text-datasets/imdb_full.pkl',\n", 148 | " md5_hash='d091312047c43cf9e4e38fef92437263')\n", 149 | "f = open(path, 'rb')\n", 150 | "(x_train, labels_train), (x_test, labels_test) = pickle.load(f)" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": 8, 156 | "metadata": {}, 157 | "outputs": [ 158 | { 159 | "data": { 160 | "text/plain": [ 161 | "25000" 162 | ] 163 | }, 164 | "execution_count": 8, 165 | "metadata": {}, 166 | "output_type": "execute_result" 167 | } 168 | ], 169 | "source": [ 170 | "len(x_train)" 171 | 
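] }, { "cell_type": "markdown", "metadata": {}, "source": [ "As a quick sanity check (an added aside, not part of the original lesson): the IMDB benchmark ships 25,000 reviews for training and 25,000 for testing, so the test split should be the same size." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# added sanity check: the test split should also contain 25,000 reviews\n", "len(x_test)"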
] 172 | }, 173 | { 174 | "cell_type": "markdown", 175 | "metadata": {}, 176 | "source": [ 177 | "Here's the 1st review. As you see, the words have been replaced by ids. The ids can be looked up in idx2word." 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": 8, 183 | "metadata": {}, 184 | "outputs": [ 185 | { 186 | "data": { 187 | "text/plain": [ 188 | "'23022, 309, 6, 3, 1069, 209, 9, 2175, 30, 1, 169, 55, 14, 46, 82, 5869, 41, 393, 110, 138, 14, 5359, 58, 4477, 150, 8, 1, 5032, 5948, 482, 69, 5, 261, 12, 23022, 73935, 2003, 6, 73, 2436, 5, 632, 71, 6, 5359, 1, 25279, 5, 2004, 10471, 1, 5941, 1534, 34, 67, 64, 205, 140, 65, 1232, 63526, 21145, 1, 49265, 4, 1, 223, 901, 29, 3024, 69, 4, 1, 5863, 10, 694, 2, 65, 1534, 51, 10, 216, 1, 387, 8, 60, 3, 1472, 3724, 802, 5, 3521, 177, 1, 393, 10, 1238, 14030, 30, 309, 3, 353, 344, 2989, 143, 130, 5, 7804, 28, 4, 126, 5359, 1472, 2375, 5, 23022, 309, 10, 532, 12, 108, 1470, 4, 58, 556, 101, 12, 23022, 309, 6, 227, 4187, 48, 3, 2237, 12, 9, 215'" 189 | ] 190 | }, 191 | "execution_count": 8, 192 | "metadata": {}, 193 | "output_type": "execute_result" 194 | } 195 | ], 196 | "source": [ 197 | "', '.join(map(str, x_train[0]))" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": {}, 203 | "source": [ 204 | "The first word of the first review is 23022. Let's see what that is." 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": 9, 210 | "metadata": {}, 211 | "outputs": [ 212 | { 213 | "data": { 214 | "text/plain": [ 215 | "'bromwell'" 216 | ] 217 | }, 218 | "execution_count": 9, 219 | "metadata": {}, 220 | "output_type": "execute_result" 221 | } 222 | ], 223 | "source": [ 224 | "idx2word[23022]" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": {}, 230 | "source": [ 231 | "Here's the whole review, mapped from ids to words." 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": 10, 237 | "metadata": {}, 238 | "outputs": [ 239 | { 240 | "data": { 241 | "text/plain": [ 242 | "\"bromwell high is a cartoon comedy it ran at the same time as some other programs about school life such as teachers my 35 years in the teaching profession lead me to believe that bromwell high's satire is much closer to reality than is teachers the scramble to survive financially the insightful students who can see right through their pathetic teachers' pomp the pettiness of the whole situation all remind me of the schools i knew and their students when i saw the episode in which a student repeatedly tried to burn down the school i immediately recalled at high a classic line inspector i'm here to sack one of your teachers student welcome to bromwell high i expect that many adults of my age think that bromwell high is far fetched what a pity that it isn't\"" 243 | ] 244 | }, 245 | "execution_count": 10, 246 | "metadata": {}, 247 | "output_type": "execute_result" 248 | } 249 | ], 250 | "source": [ 251 | "' '.join([idx2word[o] for o in x_train[0]])" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "metadata": {}, 257 | "source": [ 258 | "The labels are 1 for positive, 0 for negative." 
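] }, { "cell_type": "markdown", "metadata": {}, "source": [ "As an added check (not in the original notebook), counting the labels confirms the split is balanced: IMDB has 12,500 negative and 12,500 positive reviews per split." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# added check: occurrences of label 0 (negative) and label 1 (positive)\n", "np.bincount(labels_train)"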
259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": 9, 264 | "metadata": {}, 265 | "outputs": [ 266 | { 267 | "data": { 268 | "text/plain": [ 269 | "[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]" 270 | ] 271 | }, 272 | "execution_count": 9, 273 | "metadata": {}, 274 | "output_type": "execute_result" 275 | } 276 | ], 277 | "source": [ 278 | "labels_train_tensor = torch.from_numpy(np.array(labels_train))\n", 279 | "labels_test_tensor = torch.from_numpy(np.array(labels_test))\n", 280 | "labels_train[:10]" 281 | ] 282 | }, 283 | { 284 | "cell_type": "markdown", 285 | "metadata": {}, 286 | "source": [ 287 | "Reduce vocab size by setting rare words to max index." 288 | ] 289 | }, 290 | { 291 | "cell_type": "code", 292 | "execution_count": 10, 293 | "metadata": { 294 | "collapsed": true 295 | }, 296 | "outputs": [], 297 | "source": [ 298 | "vocab_size = 5000\n", 299 | "\n", 300 | "trn = [np.array([i if i < vocab_size - 1 else vocab_size - 1 for i in s]) for s in x_train]\n", 301 | "test = [np.array([i if i < vocab_size - 1 else vocab_size - 1 for i in s]) for s in x_test]" 302 | ] 303 | }, 304 | { 305 | "cell_type": "markdown", 306 | "metadata": {}, 307 | "source": [ 308 | "Look at distribution of lengths of sentences." 309 | ] 310 | }, 311 | { 312 | "cell_type": "code", 313 | "execution_count": 11, 314 | "metadata": {}, 315 | "outputs": [ 316 | { 317 | "data": { 318 | "text/plain": [ 319 | "(2493, 10, 237.71364)" 320 | ] 321 | }, 322 | "execution_count": 11, 323 | "metadata": {}, 324 | "output_type": "execute_result" 325 | } 326 | ], 327 | "source": [ 328 | "lens = np.array(list(map(len, trn)))\n", 329 | "(lens.max(), lens.min(), lens.mean())" 330 | ] 331 | }, 332 | { 333 | "cell_type": "markdown", 334 | "metadata": {}, 335 | "source": [ 336 | "Pad (with zero) or truncate each sentence to make consistent length." 337 | ] 338 | }, 339 | { 340 | "cell_type": "code", 341 | "execution_count": 12, 342 | "metadata": { 343 | "collapsed": true 344 | }, 345 | "outputs": [], 346 | "source": [ 347 | "seq_len = 500\n", 348 | "\n", 349 | "from keras.preprocessing import sequence\n", 350 | "\n", 351 | "trn = sequence.pad_sequences(trn, maxlen=seq_len, value=0)\n", 352 | "test = sequence.pad_sequences(test, maxlen=seq_len, value=0)\n", 353 | "\n", 354 | "trn_tensor = torch.from_numpy(trn).long()\n", 355 | "test_tensor = torch.from_numpy(test).long()" 356 | ] 357 | }, 358 | { 359 | "cell_type": "markdown", 360 | "metadata": {}, 361 | "source": [ 362 | "This results in nice rectangular matrices that can be passed to ML algorithms. Reviews shorter than 500 words are pre-padded with zeros, those greater are truncated." 363 | ] 364 | }, 365 | { 366 | "cell_type": "code", 367 | "execution_count": 15, 368 | "metadata": {}, 369 | "outputs": [ 370 | { 371 | "data": { 372 | "text/plain": [ 373 | "torch.Size([25000, 500])" 374 | ] 375 | }, 376 | "execution_count": 15, 377 | "metadata": {}, 378 | "output_type": "execute_result" 379 | } 380 | ], 381 | "source": [ 382 | "trn_tensor.size()" 383 | ] 384 | }, 385 | { 386 | "cell_type": "markdown", 387 | "metadata": {}, 388 | "source": [ 389 | "## Create simple models" 390 | ] 391 | }, 392 | { 393 | "cell_type": "markdown", 394 | "metadata": {}, 395 | "source": [ 396 | "### Single hidden layer NN" 397 | ] 398 | }, 399 | { 400 | "cell_type": "markdown", 401 | "metadata": {}, 402 | "source": [ 403 | "The simplest model that tends to give reasonable results is a single hidden layer net. So let's try that. 
Note that we can't expect to get any useful results by feeding word ids directly into a neural net - so instead we use an embedding to replace them with a vector of 32 (initially random) floats for each word in the vocab." 404 | ] 405 | }, 406 | { 407 | "cell_type": "code", 408 | "execution_count": 16, 409 | "metadata": { 410 | "collapsed": true 411 | }, 412 | "outputs": [], 413 | "source": [ 414 | "import torch.nn as nn\n", 415 | "import torch.nn.functional as F\n", 416 | "\n", 417 | "class SingleHiddenLayerModule(nn.Module):\n", 418 | " def __init__(self):\n", 419 | " super().__init__()\n", 420 | " num_dimensions = 32\n", 421 | " self.embedding = nn.Embedding(vocab_size, num_dimensions)\n", 422 | " self.fc1 = nn.Linear(seq_len * num_dimensions, 100)\n", 423 | " self.dropout = nn.Dropout(0.7)\n", 424 | " self.fc2 = nn.Linear(100, 2)\n", 425 | " self.init()\n", 426 | "\n", 427 | " def forward(self, words_ids):\n", 428 | " x = self.embedding(words_ids) # x => torch.Size([64, 500, 32])\n", 429 | " x = x.view(x.size(0), -1) # x => torch.Size([64, 16000])\n", 430 | " x = self.fc1(x)\n", 431 | " x = F.relu(x, True)\n", 432 | " x = self.dropout(x)\n", 433 | " x = self.fc2(x)\n", 434 | " # result = F.sigmoid(x)\n", 435 | " result = x\n", 436 | " return result\n", 437 | " \n", 438 | " def init(self):\n", 439 | " torch.nn.init.constant(self.fc1.bias, val=0.0)\n", 440 | " torch.nn.init.constant(self.fc2.bias, val=0.0)" 441 | ] 442 | }, 443 | { 444 | "cell_type": "code", 445 | "execution_count": 17, 446 | "metadata": {}, 447 | "outputs": [ 448 | { 449 | "data": { 450 | "text/plain": [ 451 | "SingleHiddenLayerModule (\n", 452 | " (embedding): Embedding(5000, 32)\n", 453 | " (fc1): Linear (16000 -> 100)\n", 454 | " (dropout): Dropout (p = 0.7)\n", 455 | " (fc2): Linear (100 -> 2)\n", 456 | ")" 457 | ] 458 | }, 459 | "execution_count": 17, 460 | "metadata": {}, 461 | "output_type": "execute_result" 462 | } 463 | ], 464 | "source": [ 465 | "%autoreload 2\n", 466 | "\n", 467 | "# criterion = nn.BCELoss()\n", 468 | "criterion = nn.CrossEntropyLoss()\n", 469 | "model = SingleHiddenLayerModule()\n", 470 | "if(use_cuda):\n", 471 | " model.cuda()\n", 472 | " criterion.cuda()\n", 473 | "trainer = ModuleTrainer(model)\n", 474 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 475 | "trainer.set_loss(criterion)\n", 476 | "trainer.set_initializers([Uniform(module_filter=\"embedding*\", a=-0.05, b=0.05), XavierUniform(module_filter=\"fc*\")])\n", 477 | "trainer.set_metrics([CategoricalAccuracy()])\n", 478 | "\n", 479 | "# trainer.summary((trn_tensor.size(0), labels_train_tensor.size(0)))\n", 480 | "model" 481 | ] 482 | }, 483 | { 484 | "cell_type": "code", 485 | "execution_count": 18, 486 | "metadata": {}, 487 | "outputs": [ 488 | { 489 | "name": "stderr", 490 | "output_type": "stream", 491 | "text": [ 492 | "Epoch 1/2: 392 batches [01:19, 2.72s/ batches, val_acc=81.47, val_loss=0.2871, acc=75.19, loss=0.4612]\n", 493 | "Epoch 2/2: 392 batches [01:40, 6.92s/ batches, val_acc=89.91, val_loss=0.3095, acc=92.56, loss=0.1991]\n" 494 | ] 495 | } 496 | ], 497 | "source": [ 498 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 499 | " nb_epoch=2, batch_size=batch_size, shuffle=True)" 500 | ] 501 | }, 502 | { 503 | "cell_type": "markdown", 504 | "metadata": {}, 505 | "source": [ 506 | "The [stanford paper](http://ai.stanford.edu/~amaas/papers/wvSent_acl2011.pdf) that this dataset is from cites a state of the art accuracy (without unlabelled data) of 0.883. 
~~So we're short of that, but on the right track.~~ We've already beaten the state of the art in 2011 with a simple Neural Net." 507 | ] 508 | }, 509 | { 510 | "cell_type": "markdown", 511 | "metadata": {}, 512 | "source": [ 513 | "### Single conv layer with max pooling" 514 | ] 515 | }, 516 | { 517 | "cell_type": "markdown", 518 | "metadata": {}, 519 | "source": [ 520 | "A CNN is likely to work better, since it's designed to take advantage of ordered data. We'll need to use a 1D CNN, since a sequence of words is 1D." 521 | ] 522 | }, 523 | { 524 | "cell_type": "code", 525 | "execution_count": 48, 526 | "metadata": { 527 | "collapsed": true 528 | }, 529 | "outputs": [], 530 | "source": [ 531 | "import torch.nn as nn\n", 532 | "import torch.nn.functional as F\n", 533 | "\n", 534 | "class CnnMaxPoolingModule(nn.Module):\n", 535 | " def __init__(self):\n", 536 | " super().__init__()\n", 537 | " num_dimensions = 32\n", 538 | " self.embedding = nn.Embedding(vocab_size, num_dimensions)\n", 539 | " self.drop1 = nn.Dropout(0.2)\n", 540 | " self.conv1 = nn.Conv1d(in_channels=32, out_channels=64, kernel_size=5, padding=2, groups=1)\n", 541 | " self.fc1 = nn.Linear(seq_len * num_dimensions, 100)\n", 542 | " self.dropout = nn.Dropout(0.7)\n", 543 | " self.fc2 = nn.Linear(100, 2)\n", 544 | " self.init()\n", 545 | "\n", 546 | " def forward(self, words_ids):\n", 547 | " x = self.embedding(words_ids) # x => torch.Size([B, 500, 32])\n", 548 | " x = x.permute(0, 2, 1)\n", 549 | " # print('emb', x.size())\n", 550 | " x = self.drop1(x) # x => torch.Size([B, 500, 32])\n", 551 | " x = self.conv1(x) # x => torch.Size([B, 500, 64])\n", 552 | " x = F.relu(x, True)\n", 553 | " # print('conv1', x.size())\n", 554 | " x = self.drop1(x) # x => torch.Size([B, 500, 64])\n", 555 | " x = F.max_pool1d(x, kernel_size=2)\n", 556 | " # print('max', x.size())\n", 557 | " \n", 558 | " x = x.view(x.size(0), -1)\n", 559 | " # print(x.size())\n", 560 | " \n", 561 | " x = self.fc1(x)\n", 562 | " x = F.relu(x, True)\n", 563 | " x = self.dropout(x)\n", 564 | " x = self.fc2(x)\n", 565 | " # result = F.sigmoid(x)\n", 566 | " result = x\n", 567 | " \n", 568 | " #raise 'Error'\n", 569 | " \n", 570 | " return result\n", 571 | " \n", 572 | " def init(self):\n", 573 | " torch.nn.init.constant(self.conv1.bias, val=0.0)\n", 574 | " torch.nn.init.constant(self.fc1.bias, val=0.0)\n", 575 | " torch.nn.init.constant(self.fc2.bias, val=0.0)" 576 | ] 577 | }, 578 | { 579 | "cell_type": "code", 580 | "execution_count": 20, 581 | "metadata": {}, 582 | "outputs": [ 583 | { 584 | "data": { 585 | "text/plain": [ 586 | "CnnMaxPoolingModule (\n", 587 | " (embedding): Embedding(5000, 32)\n", 588 | " (drop1): Dropout (p = 0.2)\n", 589 | " (conv1): Conv1d(32, 64, kernel_size=(5,), stride=(1,), padding=(2,), groups=2)\n", 590 | " (fc1): Linear (16000 -> 100)\n", 591 | " (dropout): Dropout (p = 0.7)\n", 592 | " (fc2): Linear (100 -> 2)\n", 593 | ")" 594 | ] 595 | }, 596 | "execution_count": 20, 597 | "metadata": {}, 598 | "output_type": "execute_result" 599 | } 600 | ], 601 | "source": [ 602 | "%autoreload 2\n", 603 | "\n", 604 | "# criterion = nn.BCELoss()\n", 605 | "criterion = nn.CrossEntropyLoss()\n", 606 | "model = CnnMaxPoolingModule()\n", 607 | "if(use_cuda):\n", 608 | " model.cuda()\n", 609 | " criterion.cuda()\n", 610 | "trainer = ModuleTrainer(model)\n", 611 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 612 | "trainer.set_loss(criterion)\n", 613 | "trainer.set_initializers([Uniform(module_filter=\"embedding*\", a=-0.05, b=0.05), 
XavierUniform(module_filter=\"fc*\"), XavierUniform(module_filter=\"conv*\")])\n", 614 | "trainer.set_metrics([CategoricalAccuracy()])\n", 615 | "\n", 616 | "# trainer.summary((trn_tensor.size(0), labels_train_tensor.size(0)))\n", 617 | "model" 618 | ] 619 | }, 620 | { 621 | "cell_type": "code", 622 | "execution_count": 22, 623 | "metadata": {}, 624 | "outputs": [ 625 | { 626 | "name": "stderr", 627 | "output_type": "stream", 628 | "text": [ 629 | "Epoch 1/2: 392 batches [04:24, 18.87s/ batches, val_acc=80.01, val_loss=0.2810, acc=71.68, loss=0.5062]\n", 630 | "Epoch 2/2: 392 batches [03:50, 17.43s/ batches, val_acc=89.87, val_loss=0.2576, acc=90.39, loss=0.2602]\n" 631 | ] 632 | } 633 | ], 634 | "source": [ 635 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 636 | " nb_epoch=2, batch_size=batch_size, shuffle=True)" 637 | ] 638 | }, 639 | { 640 | "cell_type": "code", 641 | "execution_count": 23, 642 | "metadata": {}, 643 | "outputs": [ 644 | { 645 | "name": "stderr", 646 | "output_type": "stream", 647 | "text": [ 648 | "Epoch 1/4: 392 batches [02:59, 7.73s/ batches, val_acc=77.54, val_loss=0.2922, acc=67.17, loss=0.5545]\n", 649 | "Epoch 2/4: 392 batches [02:03, 7.54s/ batches, val_acc=89.12, val_loss=0.2622, acc=89.18, loss=0.2754]\n", 650 | "Epoch 3/4: 392 batches [02:30, 9.34s/ batches, val_acc=90.64, val_loss=0.2588, acc=92.10, loss=0.2103]\n", 651 | "Epoch 4/4: 392 batches [02:40, 9.55s/ batches, val_acc=91.03, val_loss=0.2911, acc=93.66, loss=0.1727]\n" 652 | ] 653 | } 654 | ], 655 | "source": [ 656 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 657 | " nb_epoch=4, batch_size=batch_size, shuffle=True)" 658 | ] 659 | }, 660 | { 661 | "cell_type": "markdown", 662 | "metadata": {}, 663 | "source": [ 664 | "### Pre-trained vectors" 665 | ] 666 | }, 667 | { 668 | "cell_type": "markdown", 669 | "metadata": {}, 670 | "source": [ 671 | "You may want to look at wordvectors.ipynb before moving on.\n", 672 | "\n", 673 | "In this section, we replicate the previous CNN, but using pre-trained embeddings." 674 | ] 675 | }, 676 | { 677 | "cell_type": "code", 678 | "execution_count": 39, 679 | "metadata": {}, 680 | "outputs": [ 681 | { 682 | "name": "stderr", 683 | "output_type": "stream", 684 | "text": [ 685 | "loading word vectors from ./glove.6B.50d.txt: 100%|██████████| 400000/400000 [00:10<00:00, 38319.17it/s]\n" 686 | ] 687 | }, 688 | { 689 | "name": "stdout", 690 | "output_type": "stream", 691 | "text": [ 692 | "Loaded 400000 words\n" 693 | ] 694 | } 695 | ], 696 | "source": [ 697 | "import torch\n", 698 | "import re\n", 699 | "from torchtext.vocab import load_word_vectors\n", 700 | "\n", 701 | "wv_dict, wv_arr, wv_size = load_word_vectors('.', 'glove.6B', 50)\n", 702 | "\n", 703 | "print('Loaded', len(wv_arr), 'words')" 704 | ] 705 | }, 706 | { 707 | "cell_type": "markdown", 708 | "metadata": {}, 709 | "source": [ 710 | "\n", 711 | "The glove word ids and imdb word ids use different indexes. So we create a simple function that creates an embedding matrix using the indexes from imdb, and the embeddings from glove (where they exist)." 
712 | ] 713 | }, 714 | { 715 | "cell_type": "code", 716 | "execution_count": 146, 717 | "metadata": { 718 | "collapsed": true 719 | }, 720 | "outputs": [], 721 | "source": [ 722 | "def get_word(word):\n", 723 | " return wv_arr[wv_dict[word]]\n", 724 | "\n", 725 | "def create_emb():\n", 726 | " num_dimensions_glove = wv_arr.size()[1]\n", 727 | " \n", 728 | " embedding = nn.Embedding(vocab_size, num_dimensions_glove)\n", 729 | " # If we can't find the word in glove, randomly initialize\n", 730 | " torch.nn.init.uniform(embedding.weight, a=-0.05, b=0.05)\n", 731 | "\n", 732 | " num_found, num_not_found = 0, 0\n", 733 | " \n", 734 | " for i in range(1,len(embedding.weight)):\n", 735 | " word = idx2word[i]\n", 736 | " if word and re.match(r\"^[a-zA-Z0-9\\-]*$\", word):\n", 737 | " embedding.weight.data[i] = get_word(word)\n", 738 | " num_found += 1\n", 739 | " else:\n", 740 | " num_not_found +=1\n", 741 | "\n", 742 | " # This is our \"rare word\" id - we want to randomly initialize\n", 743 | " torch.nn.init.uniform(embedding.weight.data[-1], a=-0.05, b=0.05)\n", 744 | " embedding.weight.requires_grad = False\n", 745 | " \n", 746 | " # This speeds up training. Can it be replaced by BatchNorm1d?\n", 747 | " embedding.weight.data /= 3\n", 748 | " \n", 749 | " print(\"Words found: {}, not found: {}\".format(num_found, num_not_found))\n", 750 | " \n", 751 | " return embedding" 752 | ] 753 | }, 754 | { 755 | "cell_type": "markdown", 756 | "metadata": {}, 757 | "source": [ 758 | "\n", 759 | "We pass our embedding matrix to the Embedding constructor, and set it to non-trainable." 760 | ] 761 | }, 762 | { 763 | "cell_type": "code", 764 | "execution_count": 147, 765 | "metadata": { 766 | "collapsed": true 767 | }, 768 | "outputs": [], 769 | "source": [ 770 | "import torch.nn as nn\n", 771 | "import torch.nn.functional as F\n", 772 | "\n", 773 | "class CnnMaxPoolingModuleWithEmbedding(nn.Module):\n", 774 | " def __init__(self, embedding):\n", 775 | " super().__init__()\n", 776 | " num_dimensions = 32\n", 777 | " self.embedding = embedding\n", 778 | " self.drop1 = nn.Dropout(0.25)\n", 779 | " self.batchnorm = nn.BatchNorm1d(500)\n", 780 | " self.conv1 = nn.Conv1d(in_channels=embedding.weight.size()[1], out_channels=64, kernel_size=5, padding=2, groups=1)\n", 781 | " self.fc1 = nn.Linear(seq_len * num_dimensions, 100)\n", 782 | " self.dropout = nn.Dropout(0.7)\n", 783 | " self.fc2 = nn.Linear(100, 2)\n", 784 | " self.init()\n", 785 | "\n", 786 | " def forward(self, words_ids):\n", 787 | " x = self.embedding(words_ids)\n", 788 | " # x = self.batchnorm(x)\n", 789 | " x = x.permute(0, 2, 1)\n", 790 | " x = self.drop1(x)\n", 791 | " x = self.conv1(x)\n", 792 | " x = F.relu(x, True)\n", 793 | " x = self.drop1(x)\n", 794 | " x = F.max_pool1d(x, kernel_size=2)\n", 795 | " \n", 796 | " x = x.view(x.size(0), -1)\n", 797 | " \n", 798 | " x = self.fc1(x)\n", 799 | " x = F.relu(x, True)\n", 800 | " x = self.dropout(x)\n", 801 | " x = self.fc2(x)\n", 802 | " result = x\n", 803 | " \n", 804 | " return result\n", 805 | " \n", 806 | " def init(self):\n", 807 | " torch.nn.init.constant(self.conv1.bias, val=0.0)\n", 808 | " torch.nn.init.constant(self.fc1.bias, val=0.0)\n", 809 | " torch.nn.init.constant(self.fc2.bias, val=0.0)\n", 810 | " \n", 811 | " def parameters(self):\n", 812 | " p = filter(lambda p: p.requires_grad, nn.Module.parameters(self))\n", 813 | " return p" 814 | ] 815 | }, 816 | { 817 | "cell_type": "code", 818 | "execution_count": 148, 819 | "metadata": {}, 820 | "outputs": [ 821 | { 822 | "name": "stdout", 
823 | "output_type": "stream", 824 | "text": [ 825 | "Words found: 4914, not found: 85\n" 826 | ] 827 | } 828 | ], 829 | "source": [ 830 | "%autoreload 2\n", 831 | "\n", 832 | "emb = create_emb()\n", 833 | "\n", 834 | "# criterion = nn.BCELoss()\n", 835 | "criterion = nn.CrossEntropyLoss()\n", 836 | "model = CnnMaxPoolingModuleWithEmbedding(emb)\n", 837 | "if(use_cuda):\n", 838 | " model.cuda()\n", 839 | " criterion.cuda()\n", 840 | "trainer = ModuleTrainer(model)\n", 841 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 842 | "trainer.set_loss(criterion)\n", 843 | "trainer.set_initializers([XavierUniform(module_filter=\"fc*\"), XavierUniform(module_filter=\"conv*\")])\n", 844 | "trainer.set_metrics([CategoricalAccuracy()])\n", 845 | "\n", 846 | "# trainer.summary((trn_tensor.size(0), labels_train_tensor.size(0)))" 847 | ] 848 | }, 849 | { 850 | "cell_type": "code", 851 | "execution_count": 149, 852 | "metadata": {}, 853 | "outputs": [ 854 | { 855 | "name": "stderr", 856 | "output_type": "stream", 857 | "text": [ 858 | "Epoch 1/10: 392 batches [05:23, 28.35s/ batches, loss=0.6532, acc=61.48, val_loss=0.5240, val_acc=69.12]\n", 859 | "Epoch 2/10: 392 batches [04:36, 18.35s/ batches, loss=0.5188, acc=75.36, val_loss=0.4707, val_acc=77.29]\n", 860 | "Epoch 3/10: 392 batches [04:55, 18.07s/ batches, loss=0.4685, acc=78.90, val_loss=0.4274, val_acc=80.36]\n", 861 | "Epoch 4/10: 392 batches [03:10, 14.13s/ batches, loss=0.4317, acc=80.46, val_loss=0.3984, val_acc=81.39]\n", 862 | "Epoch 5/10: 392 batches [02:58, 13.89s/ batches, loss=0.4150, acc=81.47, val_loss=0.3811, val_acc=82.45]\n", 863 | "Epoch 6/10: 392 batches [03:30, 15.72s/ batches, loss=0.3967, acc=82.54, val_loss=0.3822, val_acc=83.00]\n", 864 | "Epoch 7/10: 392 batches [03:31, 14.43s/ batches, loss=0.3758, acc=83.47, val_loss=0.3835, val_acc=83.19]\n", 865 | "Epoch 8/10: 392 batches [03:06, 14.12s/ batches, loss=0.3543, acc=84.56, val_loss=0.3692, val_acc=84.13]\n", 866 | "Epoch 9/10: 392 batches [02:55, 10.51s/ batches, loss=0.3453, acc=84.74, val_loss=0.3665, val_acc=84.38]\n", 867 | "Epoch 10/10: 392 batches [03:03, 12.91s/ batches, loss=0.3295, acc=85.66, val_loss=0.3823, val_acc=84.21]\n" 868 | ] 869 | } 870 | ], 871 | "source": [ 872 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 873 | " nb_epoch=10, batch_size=batch_size, shuffle=True)" 874 | ] 875 | }, 876 | { 877 | "cell_type": "markdown", 878 | "metadata": {}, 879 | "source": [ 880 | "We already have beaten our previous model! But let's fine-tune the embedding weights - especially since the words we couldn't find in glove just have random embeddings." 
881 | ] 882 | }, 883 | { 884 | "cell_type": "code", 885 | "execution_count": 151, 886 | "metadata": { 887 | "collapsed": true 888 | }, 889 | "outputs": [], 890 | "source": [ 891 | "model.embedding.weight.requires_grad = True\n", 892 | "trainer = ModuleTrainer(model)\n", 893 | "trainer.set_optimizer(optim.Adam, lr=1e-4)\n", 894 | "trainer.set_loss(criterion)\n", 895 | "trainer.set_metrics([CategoricalAccuracy()])" 896 | ] 897 | }, 898 | { 899 | "cell_type": "code", 900 | "execution_count": 152, 901 | "metadata": {}, 902 | "outputs": [ 903 | { 904 | "name": "stderr", 905 | "output_type": "stream", 906 | "text": [ 907 | "Epoch 1/1: 392 batches [01:50, 6.58s/ batches, loss=0.3015, acc=87.19, val_loss=0.3479, val_acc=86.01]\n" 908 | ] 909 | } 910 | ], 911 | "source": [ 912 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 913 | " nb_epoch=1, batch_size=batch_size, shuffle=True)" 914 | ] 915 | }, 916 | { 917 | "cell_type": "markdown", 918 | "metadata": {}, 919 | "source": [ 920 | "### Multi-size CNN" 921 | ] 922 | }, 923 | { 924 | "cell_type": "markdown", 925 | "metadata": {}, 926 | "source": [ 927 | "This is an implementation of a multi-size CNN as shown in Ben Bowles' excellent blog post." 928 | ] 929 | }, 930 | { 931 | "cell_type": "markdown", 932 | "metadata": {}, 933 | "source": [ 934 | "We create multiple conv layers of different sizes, and then concatenate them." 935 | ] 936 | }, 937 | { 938 | "cell_type": "code", 939 | "execution_count": 173, 940 | "metadata": { 941 | "collapsed": true 942 | }, 943 | "outputs": [], 944 | "source": [ 945 | "import torch.nn as nn\n", 946 | "import torch.nn.functional as F\n", 947 | "\n", 948 | "class CnnMaxPoolingModuleMultiSizeWithEmbedding(nn.Module):\n", 949 | " def __init__(self, embedding):\n", 950 | " super().__init__()\n", 951 | " num_dimensions = 32\n", 952 | " self.embedding = embedding\n", 953 | " self.drop1 = nn.Dropout(0.25)\n", 954 | " self.batchnorm = nn.BatchNorm1d(500)\n", " # nn.ModuleList registers each conv as a sub-module, so its weights get trained and moved to the GPU\n", 955 | " self.convs = nn.ModuleList([self.create_conv(embedding, fsz) for fsz in range(3, 6)])\n", " # 3 conv branches x 64 channels x 250 steps after pooling = 48000 features\n", 956 | " self.fc1 = nn.Linear(48000, 100)\n", 957 | " self.dropout = nn.Dropout(0.7)\n", 958 | " self.fc2 = nn.Linear(100, 2)\n", 959 | " self.init()\n", 960 | " \n", 961 | " def create_conv(self, embedding, fsz):\n", " # one branch per filter size (3, 4 and 5), padded to roughly preserve the sequence length\n", 962 | " return nn.Conv1d(in_channels=embedding.weight.size()[1], out_channels=64, kernel_size=fsz, padding=fsz // 2, groups=1)\n", 963 | " \n", 964 | " def conv(self, c, x):\n", 965 | " x = c(x)\n", 966 | " x = F.relu(x, True)\n", 967 | " x = self.drop1(x)\n", 968 | " x = F.max_pool1d(x, kernel_size=2)\n", 969 | " return x\n", 970 | "\n", 971 | " def forward(self, words_ids):\n", 972 | " x = self.embedding(words_ids)\n", 973 | " x = x.permute(0, 2, 1)\n", 974 | " x = self.drop1(x)\n", 975 | " convs = [self.conv(conv, x) for conv in self.convs]\n", 976 | " \n", " # concatenate the pooled feature maps of all branches along the channel dimension\n", 977 | " x = torch.cat(convs, dim=1)\n", 978 | " \n", 979 | " x = x.view(x.size(0), -1)\n", 980 | " \n", 981 | " x = self.fc1(x)\n", 982 | " x = F.relu(x, True)\n", 983 | " x = self.dropout(x)\n", 984 | " x = self.fc2(x)\n", 985 | " result = x\n", 986 | " \n", 987 | " return result\n", 988 | " \n", 989 | " def init(self):\n", 990 | " torch.nn.init.constant(self.fc1.bias, val=0.0)\n", 991 | " torch.nn.init.constant(self.fc2.bias, val=0.0)\n", 992 | " for conv in self.convs:\n", 993 | " torch.nn.init.xavier_uniform(conv.weight.data, gain=1.0)\n", 994 | " torch.nn.init.constant(conv.bias, val=0.0)\n", 995 | " \n", 996 | " def parameters(self):\n", 997 | " p = filter(lambda p: 
p.requires_grad, nn.Module.parameters(self))\n", 998 | " return p" 999 | ] 1000 | }, 1001 | { 1002 | "cell_type": "code", 1003 | "execution_count": 174, 1004 | "metadata": {}, 1005 | "outputs": [ 1006 | { 1007 | "name": "stdout", 1008 | "output_type": "stream", 1009 | "text": [ 1010 | "Words found: 4914, not found: 85\n" 1011 | ] 1012 | } 1013 | ], 1014 | "source": [ 1015 | "%autoreload 2\n", 1016 | "\n", 1017 | "emb = create_emb()\n", 1018 | "\n", 1019 | "criterion = nn.CrossEntropyLoss()\n", 1020 | "model = CnnMaxPoolingModuleMultiSizeWithEmbedding(emb)\n", 1021 | "model.embedding.weight.requires_grad = True\n", 1022 | "if(use_cuda):\n", 1023 | " model.cuda()\n", 1024 | " criterion.cuda()\n", 1025 | "trainer = ModuleTrainer(model)\n", 1026 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 1027 | "trainer.set_loss(criterion)\n", 1028 | "trainer.set_initializers([XavierUniform(module_filter=\"fc*\")])\n", 1029 | "trainer.set_metrics([CategoricalAccuracy()])" 1030 | ] 1031 | }, 1032 | { 1033 | "cell_type": "code", 1034 | "execution_count": 175, 1035 | "metadata": {}, 1036 | "outputs": [ 1037 | { 1038 | "name": "stderr", 1039 | "output_type": "stream", 1040 | "text": [ 1041 | "Epoch 1/10: 392 batches [10:42, 73.36s/ batches, loss=0.6308, acc=61.92, val_loss=0.4106, val_acc=72.10]\n", 1042 | "Epoch 2/10: 392 batches [13:00, 70.60s/ batches, loss=0.4033, acc=81.30, val_loss=0.3117, val_acc=84.12]\n", 1043 | "Epoch 3/10: 392 batches [08:08, 28.02s/ batches, loss=0.2956, acc=87.68, val_loss=0.2967, val_acc=87.63]\n", 1044 | "Epoch 4/10: 392 batches [05:14, 33.21s/ batches, loss=0.2316, acc=90.32, val_loss=0.3037, val_acc=88.94]\n", 1045 | "Epoch 5/10: 392 batches [05:00, 23.83s/ batches, loss=0.1827, acc=92.86, val_loss=0.3306, val_acc=90.25]\n", 1046 | "Epoch 6/10: 392 batches [04:18, 25.20s/ batches, loss=0.1464, acc=94.25, val_loss=0.3538, val_acc=90.86]\n", 1047 | "Epoch 7/10: 392 batches [04:17, 21.28s/ batches, loss=0.1203, acc=95.12, val_loss=0.3973, val_acc=91.11]\n", 1048 | "Epoch 8/10: 392 batches [04:28, 27.56s/ batches, loss=0.0926, acc=96.44, val_loss=0.4414, val_acc=91.88]\n", 1049 | "Epoch 9/10: 392 batches [04:46, 29.97s/ batches, loss=0.0766, acc=97.03, val_loss=0.4905, val_acc=92.07]\n", 1050 | "Epoch 10/10: 392 batches [05:13, 32.26s/ batches, loss=0.0649, acc=97.38, val_loss=0.5613, val_acc=92.25]\n" 1051 | ] 1052 | } 1053 | ], 1054 | "source": [ 1055 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 1056 | " nb_epoch=10, batch_size=batch_size, shuffle=True)" 1057 | ] 1058 | }, 1059 | { 1060 | "cell_type": "markdown", 1061 | "metadata": {}, 1062 | "source": [ 1063 | "This is clearly over-fitting. But it does get the highest accuracy on the validation set." 1064 | ] 1065 | }, 1066 | { 1067 | "cell_type": "markdown", 1068 | "metadata": {}, 1069 | "source": [ 1070 | "### LSTM" 1071 | ] 1072 | }, 1073 | { 1074 | "cell_type": "markdown", 1075 | "metadata": {}, 1076 | "source": [ 1077 | "We haven't covered this bit yet!\n",
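"\n", "For reference, these are the shape conventions `nn.LSTM` uses with `batch_first=True` (a generic sketch with placeholder names, not code from this notebook):\n", "\n", "```python\n", "# x:          (batch, seq_len, input_size)\n", "# h0, c0:     (num_layers, batch, hidden_size) each, passed as a tuple\n", "# out:        (batch, seq_len, hidden_size) - one output vector per timestep\n", "# (h_n, c_n): the hidden/cell state after the last timestep\n", "out, (h_n, c_n) = lstm(x, (h0, c0))\n", "```"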
1078 | ] 1079 | }, 1080 | { 1081 | "cell_type": "code", 1082 | "execution_count": 317, 1083 | "metadata": { 1084 | "collapsed": true 1085 | }, 1086 | "outputs": [], 1087 | "source": [ 1088 | "import torch.nn as nn\n", 1089 | "import torch.nn.functional as F\n", 1090 | "\n", 1091 | "class LstmEmbeddingModule(nn.Module):\n", 1092 | " def __init__(self):\n", 1093 | " super().__init__()\n", 1094 | " num_dimensions = 32\n", 1095 | " self.num_hidden = 100\n", 1096 | " self.embedding = nn.Embedding(vocab_size, num_dimensions)\n", 1097 | " self.drop1 = nn.Dropout(0.2)\n", 1098 | " self.lstm1 = nn.LSTM(input_size=num_dimensions, hidden_size=self.num_hidden, num_layers=1, batch_first=True)\n", 1099 | " self.fc1 = nn.Linear(50000, 2) # 500 timesteps * num_hidden (100)\n", 1100 | " self.hidden = self.init_hidden(batch_size)\n", 1101 | " self.init()\n", 1102 | "\n", 1103 | " def forward(self, words_ids):\n", 1104 | " \n", 1105 | " # We detach the hidden state from how it was previously produced.\n", 1106 | " # If we didn't, the model would try backpropagating all the way to start of the dataset.\n", 1107 | " # self.hidden = self.repackage_hidden(self.hidden)\n", 1108 | " \n", 1109 | " x = self.embedding(words_ids)\n", 1110 | " x = self.drop1(x)\n", 1111 | " #print('embd', x.size())\n", 1112 | " \n", 1113 | " self.hidden = self.init_hidden(x.size(0))\n", 1114 | " \n", 1115 | " #lengths = [vocab_size for _ in range(x.size(0))]\n", 1116 | " #x = torch.nn.utils.rnn.pack_padded_sequence(x, lengths, batch_first=True)\n", 1117 | " \n", 1118 | " #print('pack', x.data.size())\n", 1119 | " \n", 1120 | " x, self.hidden = self.lstm1(x, self.hidden)\n", 1121 | " \n", 1122 | " #print('lstm', x.data.size())\n", 1123 | " \n", 1124 | " #x, _ = torch.nn.utils.rnn.pad_packed_sequence(x, batch_first=True)\n", 1125 | " \n", 1126 | " #print('unpk', x.size())\n", 1127 | " \n", 1128 | " # print(self.hidden)\n", 1129 | " # TODO can we get rid of contiguous?\n", 1130 | " x = x.contiguous().view(x.size(0), -1)\n", 1131 | " #print('view', x.size())\n", 1132 | " \n", 1133 | " x = self.fc1(x)\n", 1134 | " # no activation here: CrossEntropyLoss expects raw logits\n", 1135 | " \n", 1136 | " return x\n", 1137 | " \n", 1138 | " def init(self):\n", 1139 | " torch.nn.init.constant(self.fc1.bias, val=0.0)\n", 1140 | " \n", 1141 | " def init_hidden(self, batch_size):\n", 1142 | " num_layers = 1\n", 1143 | " weight = next(self.parameters()).data\n", 1144 | " return (Variable(weight.new(num_layers, batch_size, self.num_hidden).zero_()),\n", 1145 | " Variable(weight.new(num_layers, batch_size, self.num_hidden).zero_()))\n", 1146 | "\n", 1147 | " def repackage_hidden(self, h):\n", 1148 | " \"\"\"Wraps hidden states in new Variables, to detach them from their history.\"\"\"\n", 1149 | " if type(h) == Variable:\n", 1150 | " return Variable(h.data)\n", 1151 | " else:\n", 1152 | " return tuple(self.repackage_hidden(v) for v in h)" 1153 | ] 1154 | }, 1155 | { 1156 | "cell_type": "code", 1157 | "execution_count": 318, 1158 | "metadata": { 1159 | "collapsed": true 1160 | }, 1161 | "outputs": [], 1162 | "source": [ 1163 | "%autoreload 2\n", 1164 | "\n", 1165 | "criterion = nn.CrossEntropyLoss()\n", 1166 | "model = LstmEmbeddingModule()\n", 1167 | "if(use_cuda):\n", 1168 | " model.cuda()\n", 1169 | " criterion.cuda()\n", 1170 | "trainer = ModuleTrainer(model)\n", 1171 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 1172 | "trainer.set_loss(criterion)\n", 1173 | "# TODO init LSTM\n", 1174 | "trainer.set_initializers([Uniform(module_filter=\"embedding*\", a=-0.05, b=0.05), XavierUniform(module_filter=\"fc*\"), 
XavierUniform(module_filter=\"conv*\")])\n", 1175 | "trainer.set_metrics([CategoricalAccuracy()])" 1176 | ] 1177 | }, 1178 | { 1179 | "cell_type": "code", 1180 | "execution_count": 319, 1181 | "metadata": {}, 1182 | "outputs": [ 1183 | { 1184 | "name": "stderr", 1185 | "output_type": "stream", 1186 | "text": [ 1187 | "Epoch 1/5: 392 batches [40:42, 66.02s/ batches, loss=0.6943, acc=50.00, val_loss=0.6931, val_acc=50.00]\n", 1188 | "Epoch 2/5: 1%|▏ | 5/391 [00:10<12:52, 2.00s/ batches, loss=0.6931, acc=51.95]\n" 1189 | ] 1190 | }, 1191 | { 1192 | "ename": "KeyboardInterrupt", 1193 | "evalue": "", 1194 | "output_type": "error", 1195 | "traceback": [ 1196 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 1197 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", 1198 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n\u001b[0;32m----> 2\u001b[0;31m nb_epoch=5, batch_size=batch_size, shuffle=True)\n\u001b[0m", 1199 | "\u001b[0;32m/Users/rodrigo/Libs/torchsample/torchsample/modules/module_trainer.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, inputs, targets, validation_data, nb_epoch, batch_size, shuffle, cuda_device, verbose)\u001b[0m\n\u001b[1;32m 403\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 404\u001b[0m \u001b[0;31m# backward pass and optimizer step\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 405\u001b[0;31m \u001b[0mloss\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 406\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_optimizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 407\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", 1200 | "\u001b[0;32m/Users/rodrigo/Libs/pytorch/torch/autograd/variable.py\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(self, gradient, retain_variables)\u001b[0m\n\u001b[1;32m 142\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"gradient has to be a Tensor, Variable or None\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 143\u001b[0m \u001b[0mgradient\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgradient\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvolatile\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 144\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_execution_engine\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_backward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mgradient\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mretain_variables\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 145\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 146\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mregister_hook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 1201 | 
"\u001b[0;31mKeyboardInterrupt\u001b[0m: " 1202 | ] 1203 | } 1204 | ], 1205 | "source": [ 1206 | "# TODO figure out how to do this in PyTorch\n", 1207 | "trainer.fit(trn_tensor, labels_train_tensor, validation_data=(test_tensor, labels_test_tensor), \n", 1208 | " nb_epoch=5, batch_size=batch_size, shuffle=True)" 1209 | ] 1210 | } 1211 | ], 1212 | "metadata": { 1213 | "kernelspec": { 1214 | "display_name": "Python 3", 1215 | "language": "python", 1216 | "name": "python3" 1217 | }, 1218 | "language_info": { 1219 | "codemirror_mode": { 1220 | "name": "ipython", 1221 | "version": 3 1222 | }, 1223 | "file_extension": ".py", 1224 | "mimetype": "text/x-python", 1225 | "name": "python", 1226 | "nbconvert_exporter": "python", 1227 | "pygments_lexer": "ipython3", 1228 | "version": "3.5.2" 1229 | } 1230 | }, 1231 | "nbformat": 4, 1232 | "nbformat_minor": 1 1233 | } 1234 | -------------------------------------------------------------------------------- /lesson6-pytorch.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stderr", 10 | "output_type": "stream", 11 | "text": [ 12 | "/Users/rodrigo/Libs/torchsample/torchsample/datasets.py:16: UserWarning: Cant import nibabel.. Cant load brain images\n", 13 | " warnings.warn('Cant import nibabel.. Cant load brain images')\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "%load_ext autoreload\n", 19 | "\n", 20 | "import os\n", 21 | "import numpy as np\n", 22 | "import pandas as pd\n", 23 | "import torch\n", 24 | "import torch.nn as nn\n", 25 | "import torch.optim as optim\n", 26 | "import torch.utils.data\n", 27 | "from torch.autograd import Variable\n", 28 | "from sklearn.utils import shuffle\n", 29 | "from torchsample.initializers import Uniform\n", 30 | "from torchsample.modules import ModuleTrainer\n", 31 | "from torchsample.metrics import CategoricalAccuracy\n", 32 | "\n", 33 | "%aimport torchsample.modules\n", 34 | "\n", 35 | "%matplotlib inline" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 2, 41 | "metadata": { 42 | "collapsed": true 43 | }, 44 | "outputs": [], 45 | "source": [ 46 | "use_cuda = False\n", 47 | "batch_size = 64" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "metadata": {}, 53 | "source": [ 54 | "## Setup" 55 | ] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "metadata": {}, 60 | "source": [ 61 | "We're going to download the collected works of Nietzsche to use as our data for this class." 
62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 3, 67 | "metadata": {}, 68 | "outputs": [ 69 | { 70 | "name": "stderr", 71 | "output_type": "stream", 72 | "text": [ 73 | "Using TensorFlow backend.\n" 74 | ] 75 | }, 76 | { 77 | "name": "stdout", 78 | "output_type": "stream", 79 | "text": [ 80 | "corpus length: 600893\n" 81 | ] 82 | } 83 | ], 84 | "source": [ 85 | "from keras.utils.data_utils import get_file\n", 86 | "\n", 87 | "path = get_file('nietzsche.txt', origin=\"https://s3.amazonaws.com/text-datasets/nietzsche.txt\")\n", 88 | "text = open(path).read()\n", 89 | "print('corpus length:', len(text))" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 4, 95 | "metadata": {}, 96 | "outputs": [ 97 | { 98 | "name": "stdout", 99 | "output_type": "stream", 100 | "text": [ 101 | "total chars: 85\n" 102 | ] 103 | } 104 | ], 105 | "source": [ 106 | "chars = sorted(list(set(text)))\n", 107 | "chars.insert(0, \"\\0\")\n", 108 | "vocab_size = len(chars)\n", 109 | "print('total chars:', vocab_size)" 110 | ] 111 | }, 112 | { 113 | "cell_type": "markdown", 114 | "metadata": {}, 115 | "source": [ 116 | "Sometimes it's useful to have a zero value in the dataset, e.g. for padding" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": 5, 122 | "metadata": {}, 123 | "outputs": [ 124 | { 125 | "data": { 126 | "text/plain": [ 127 | "'\\x00\\n !\"\\'(),-.0123456789:;=?ABCDEFGHIJKLMNOPQRSTUVWXYZ[]_abcdefghijklmnopqrstuvwxyzÆäæéë'" 128 | ] 129 | }, 130 | "execution_count": 5, 131 | "metadata": {}, 132 | "output_type": "execute_result" 133 | } 134 | ], 135 | "source": [ 136 | "''.join(chars)" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "metadata": {}, 142 | "source": [ 143 | "Map from chars to indices and back again" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": 6, 149 | "metadata": { 150 | "collapsed": true 151 | }, 152 | "outputs": [], 153 | "source": [ 154 | "char_indices = dict((c, i) for i, c in enumerate(chars))\n", 155 | "indices_char = dict((i, c) for i, c in enumerate(chars))" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "idx will be the data we use from now on - it simply converts all the characters to their index (based on the mapping above)" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 7, 168 | "metadata": { 169 | "collapsed": true 170 | }, 171 | "outputs": [], 172 | "source": [ 173 | "idx = [char_indices[c] for c in text]" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 8, 179 | "metadata": {}, 180 | "outputs": [ 181 | { 182 | "data": { 183 | "text/plain": [ 184 | "[40, 42, 29, 30, 25, 27, 29, 1, 1, 1]" 185 | ] 186 | }, 187 | "execution_count": 8, 188 | "metadata": {}, 189 | "output_type": "execute_result" 190 | } 191 | ], 192 | "source": [ 193 | "idx[:10]" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 9, 199 | "metadata": {}, 200 | "outputs": [ 201 | { 202 | "data": { 203 | "text/plain": [ 204 | "'PREFACE\\n\\n\\nSUPPOSING that Truth is a woman--what then? 
Is there not gro'" 205 | ] 206 | }, 207 | "execution_count": 9, 208 | "metadata": {}, 209 | "output_type": "execute_result" 210 | } 211 | ], 212 | "source": [ 213 | "''.join(indices_char[i] for i in idx[:70])" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "## 3 char model" 221 | ] 222 | }, 223 | { 224 | "cell_type": "markdown", 225 | "metadata": {}, 226 | "source": [ 227 | "### Create inputs" 228 | ] 229 | }, 230 | { 231 | "cell_type": "markdown", 232 | "metadata": {}, 233 | "source": [ 234 | "Create a list of every 4th character, starting at the 0th, 1st, 2nd, then 3rd characters" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": 10, 240 | "metadata": { 241 | "collapsed": true 242 | }, 243 | "outputs": [], 244 | "source": [ 245 | "cs=3\n", 246 | "c1_dat = [idx[i] for i in range(0, len(idx)-1-cs, cs)]\n", 247 | "c2_dat = [idx[i+1] for i in range(0, len(idx)-1-cs, cs)]\n", 248 | "c3_dat = [idx[i+2] for i in range(0, len(idx)-1-cs, cs)]\n", 249 | "c4_dat = [idx[i+3] for i in range(0, len(idx)-1-cs, cs)]" 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": 11, 255 | "metadata": {}, 256 | "outputs": [ 257 | { 258 | "data": { 259 | "text/plain": [ 260 | "(200297,)" 261 | ] 262 | }, 263 | "execution_count": 11, 264 | "metadata": {}, 265 | "output_type": "execute_result" 266 | } 267 | ], 268 | "source": [ 269 | "x1 = np.stack(c1_dat)\n", 270 | "x2 = np.stack(c2_dat)\n", 271 | "x3 = np.stack(c3_dat)\n", 272 | "x3.shape" 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "metadata": {}, 278 | "source": [ 279 | "Our output" 280 | ] 281 | }, 282 | { 283 | "cell_type": "code", 284 | "execution_count": 12, 285 | "metadata": {}, 286 | "outputs": [ 287 | { 288 | "data": { 289 | "text/plain": [ 290 | "(200297,)" 291 | ] 292 | }, 293 | "execution_count": 12, 294 | "metadata": {}, 295 | "output_type": "execute_result" 296 | } 297 | ], 298 | "source": [ 299 | "y = np.stack(c4_dat)\n", 300 | "y.shape" 301 | ] 302 | }, 303 | { 304 | "cell_type": "markdown", 305 | "metadata": {}, 306 | "source": [ 307 | "The first 4 inputs and outputs" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": 13, 313 | "metadata": {}, 314 | "outputs": [ 315 | { 316 | "data": { 317 | "text/plain": [ 318 | "(array([40, 30, 29, 1]), array([42, 25, 1, 43]), array([29, 27, 1, 45]))" 319 | ] 320 | }, 321 | "execution_count": 13, 322 | "metadata": {}, 323 | "output_type": "execute_result" 324 | } 325 | ], 326 | "source": [ 327 | "x1[:4], x2[:4], x3[:4]" 328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": 14, 333 | "metadata": {}, 334 | "outputs": [ 335 | { 336 | "data": { 337 | "text/plain": [ 338 | "array([30, 29, 1, 40])" 339 | ] 340 | }, 341 | "execution_count": 14, 342 | "metadata": {}, 343 | "output_type": "execute_result" 344 | } 345 | ], 346 | "source": [ 347 | "y[:4]" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": 15, 353 | "metadata": {}, 354 | "outputs": [ 355 | { 356 | "data": { 357 | "text/plain": [ 358 | "((200297,), (200297,))" 359 | ] 360 | }, 361 | "execution_count": 15, 362 | "metadata": {}, 363 | "output_type": "execute_result" 364 | } 365 | ], 366 | "source": [ 367 | "x1.shape, y.shape" 368 | ] 369 | }, 370 | { 371 | "cell_type": "markdown", 372 | "metadata": {}, 373 | "source": [ 374 | "### Create and train model" 375 | ] 376 | }, 377 | { 378 | "cell_type": "markdown", 379 | "metadata": {}, 380 | "source": [ 381 | "The number 
of latent factors to create (i.e. the size of the embedding matrix). Pick a size for our hidden state" 382 | ] 383 | }, 384 | { 385 | "cell_type": "code", 386 | "execution_count": 16, 387 | "metadata": { 388 | "collapsed": true 389 | }, 390 | "outputs": [], 391 | "source": [ 392 | "n_fac = 42\n", 393 | "n_hidden = 256" 394 | ] 395 | }, 396 | { 397 | "cell_type": "code", 398 | "execution_count": 17, 399 | "metadata": { 400 | "collapsed": true 401 | }, 402 | "outputs": [], 403 | "source": [ 404 | "import torch.nn as nn\n", 405 | "import torch.nn.functional as F\n", 406 | "\n", 407 | "seq_len = 3\n", 408 | "\n", 409 | "def tensor(from_int):\n", 410 | " return torch.from_numpy(np.array(from_int)).long()\n", 411 | "\n", 412 | "class SimpleRnn3Chars(nn.Module):\n", 413 | "\n", 414 | " def __init__(self):\n", 415 | " super().__init__()\n", 416 | " self.embedding = nn.Embedding(vocab_size, n_fac)\n", 417 | " self.dense_in_lin = nn.Linear(n_fac, n_hidden)\n", 418 | " self.dense_hidden_lin = nn.Linear(n_hidden, n_hidden)\n", 419 | " self.dense_out = nn.Linear(n_hidden, vocab_size)\n", 420 | " self.init()\n", 421 | " # print(self.embedding(Variable(tensor([10]))))\n", 422 | " # print(self.dense_in_lin.bias)\n", 423 | " \n", 424 | " def dense_in(self, x):\n", 425 | " x = x.view(x.size(0), -1)\n", 426 | " x = self.dense_in_lin(x)\n", 427 | " x = F.relu(x, True)\n", 428 | " return x\n", 429 | " \n", 430 | " def dense_hidden(self, x):\n", 431 | " x = self.dense_hidden_lin(x)\n", 432 | " x = F.tanh(x)\n", 433 | " return x\n", 434 | "\n", 435 | " def forward(self, c1, c2, c3):\n", 436 | " c1_in = self.embedding(c1) # x => torch.Size([B, 3, n_fac])\n", 437 | " c2_in = self.embedding(c2)\n", 438 | " c3_in = self.embedding(c3)\n", 439 | " \n", 440 | " c1_hidden = self.dense_in(c1_in)\n", 441 | " \n", 442 | " c2_dense = self.dense_in(c2_in)\n", 443 | " hidden_2 = self.dense_hidden(c1_hidden)\n", 444 | " c2_hidden = c2_dense + hidden_2\n", 445 | " \n", 446 | " c3_dense = self.dense_in(c3_in)\n", 447 | " hidden_3 = self.dense_hidden(c2_hidden)\n", 448 | " c3_hidden = c3_dense + hidden_3\n", 449 | " \n", 450 | " c4_out = self.dense_out(c3_hidden)\n", 451 | " \n", 452 | " return c4_out\n", 453 | " \n", 454 | " def init(self):\n", 455 | " torch.nn.init.uniform(self.embedding.weight, a=-0.05, b=0.05)\n", 456 | " torch.nn.init.xavier_uniform(self.dense_in_lin.weight)\n", 457 | " torch.nn.init.constant(self.dense_in_lin.bias, val=0.0)\n", 458 | " torch.nn.init.eye(self.dense_hidden_lin.weight)\n", 459 | " torch.nn.init.constant(self.dense_hidden_lin.bias, val=0.0)\n", 460 | " torch.nn.init.xavier_uniform(self.dense_out.weight)\n", 461 | " torch.nn.init.constant(self.dense_out.bias, val=0.0)" 462 | ] 463 | }, 464 | { 465 | "cell_type": "code", 466 | "execution_count": 18, 467 | "metadata": {}, 468 | "outputs": [ 469 | { 470 | "data": { 471 | "text/plain": [ 472 | "SimpleRnn3Chars (\n", 473 | " (embedding): Embedding(85, 42)\n", 474 | " (dense_in_lin): Linear (42 -> 256)\n", 475 | " (dense_hidden_lin): Linear (256 -> 256)\n", 476 | " (dense_out): Linear (256 -> 85)\n", 477 | ")" 478 | ] 479 | }, 480 | "execution_count": 18, 481 | "metadata": {}, 482 | "output_type": "execute_result" 483 | } 484 | ], 485 | "source": [ 486 | "%autoreload 2\n", 487 | "\n", 488 | "criterion = nn.CrossEntropyLoss()\n", 489 | "model = SimpleRnn3Chars()\n", 490 | "if(use_cuda):\n", 491 | " model.cuda()\n", 492 | " criterion.cuda()\n", 493 | "trainer = ModuleTrainer(model)\n", 494 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 495 | 
"trainer.set_loss(criterion)\n", 496 | "\n", 497 | "model" 498 | ] 499 | }, 500 | { 501 | "cell_type": "code", 502 | "execution_count": 19, 503 | "metadata": {}, 504 | "outputs": [ 505 | { 506 | "name": "stderr", 507 | "output_type": "stream", 508 | "text": [ 509 | "Epoch 1/4: 3131 batches [00:21, 142.92 batches/s, loss=2.4394] \n", 510 | "Epoch 2/4: 3131 batches [00:20, 151.08 batches/s, loss=2.2161] \n", 511 | "Epoch 3/4: 3131 batches [00:20, 150.22 batches/s, loss=2.1526] \n", 512 | "Epoch 4/4: 3131 batches [00:20, 149.20 batches/s, loss=2.1232] \n" 513 | ] 514 | } 515 | ], 516 | "source": [ 517 | "trainer.fit([tensor(x1), tensor(x2), tensor(x3)], tensor(y), nb_epoch=4, batch_size=batch_size, shuffle=True)" 518 | ] 519 | }, 520 | { 521 | "cell_type": "markdown", 522 | "metadata": {}, 523 | "source": [ 524 | "### Test model" 525 | ] 526 | }, 527 | { 528 | "cell_type": "code", 529 | "execution_count": 20, 530 | "metadata": { 531 | "collapsed": true 532 | }, 533 | "outputs": [], 534 | "source": [ 535 | "def get_next(inp):\n", 536 | " idxs = [char_indices[c] for c in inp]\n", 537 | " arrs = [tensor([i]) for i in idxs]\n", 538 | " p = trainer.predict(arrs)\n", 539 | " # torch doesn't have an argmax function. See https://discuss.pytorch.org/t/argmax-with-pytorch/1528\n", 540 | " v, i = torch.max(p, 1) # i is the result Tensor with the index locations of the maximum values\n", 541 | " i = torch.max(i.data) # find any index (they are all max)\n", 542 | " return chars[i]" 543 | ] 544 | }, 545 | { 546 | "cell_type": "code", 547 | "execution_count": 21, 548 | "metadata": {}, 549 | "outputs": [ 550 | { 551 | "data": { 552 | "text/plain": [ 553 | "'l'" 554 | ] 555 | }, 556 | "execution_count": 21, 557 | "metadata": {}, 558 | "output_type": "execute_result" 559 | } 560 | ], 561 | "source": [ 562 | "get_next('phi')" 563 | ] 564 | }, 565 | { 566 | "cell_type": "code", 567 | "execution_count": 22, 568 | "metadata": {}, 569 | "outputs": [ 570 | { 571 | "data": { 572 | "text/plain": [ 573 | "'e'" 574 | ] 575 | }, 576 | "execution_count": 22, 577 | "metadata": {}, 578 | "output_type": "execute_result" 579 | } 580 | ], 581 | "source": [ 582 | "get_next(' th')" 583 | ] 584 | }, 585 | { 586 | "cell_type": "code", 587 | "execution_count": 23, 588 | "metadata": {}, 589 | "outputs": [ 590 | { 591 | "data": { 592 | "text/plain": [ 593 | "'d'" 594 | ] 595 | }, 596 | "execution_count": 23, 597 | "metadata": {}, 598 | "output_type": "execute_result" 599 | } 600 | ], 601 | "source": [ 602 | "get_next(' an')" 603 | ] 604 | }, 605 | { 606 | "cell_type": "markdown", 607 | "metadata": {}, 608 | "source": [ 609 | "## Our first RNN!" 610 | ] 611 | }, 612 | { 613 | "cell_type": "markdown", 614 | "metadata": {}, 615 | "source": [ 616 | "### Create inputs" 617 | ] 618 | }, 619 | { 620 | "cell_type": "markdown", 621 | "metadata": {}, 622 | "source": [ 623 | "This is the size of our unrolled RNN." 624 | ] 625 | }, 626 | { 627 | "cell_type": "code", 628 | "execution_count": 24, 629 | "metadata": { 630 | "collapsed": true 631 | }, 632 | "outputs": [], 633 | "source": [ 634 | "cs=8" 635 | ] 636 | }, 637 | { 638 | "cell_type": "markdown", 639 | "metadata": {}, 640 | "source": [ 641 | "For each of 0 through 7, create a list of every 8th character with that starting point. These will be the 8 inputs to out model." 
642 | ] 643 | }, 644 | { 645 | "cell_type": "code", 646 | "execution_count": 25, 647 | "metadata": {}, 648 | "outputs": [ 649 | { 650 | "data": { 651 | "text/plain": [ 652 | "(8, 75111)" 653 | ] 654 | }, 655 | "execution_count": 25, 656 | "metadata": {}, 657 | "output_type": "execute_result" 658 | } 659 | ], 660 | "source": [ 661 | "c_in_dat = [[idx[i+n] for i in range(0, len(idx)-1-cs, cs)]\n", 662 | " for n in range(cs)]\n", 663 | "len(c_in_dat), len(c_in_dat[0])" 664 | ] 665 | }, 666 | { 667 | "cell_type": "markdown", 668 | "metadata": {}, 669 | "source": [ 670 | "Then create a list of the next character in each of these series. This will be the labels for our model." 671 | ] 672 | }, 673 | { 674 | "cell_type": "code", 675 | "execution_count": 26, 676 | "metadata": {}, 677 | "outputs": [ 678 | { 679 | "data": { 680 | "text/plain": [ 681 | "(8, (75111,))" 682 | ] 683 | }, 684 | "execution_count": 26, 685 | "metadata": {}, 686 | "output_type": "execute_result" 687 | } 688 | ], 689 | "source": [ 690 | "c_out_dat = [idx[i+cs] for i in range(0, len(idx)-1-cs, cs)]\n", 691 | "xs = [np.stack(c) for c in c_in_dat]\n", 692 | "len(xs), xs[0].shape" 693 | ] 694 | }, 695 | { 696 | "cell_type": "code", 697 | "execution_count": 27, 698 | "metadata": { 699 | "collapsed": true 700 | }, 701 | "outputs": [], 702 | "source": [ 703 | "y = np.stack(c_out_dat)" 704 | ] 705 | }, 706 | { 707 | "cell_type": "markdown", 708 | "metadata": {}, 709 | "source": [ 710 | "So each column below is one series of 8 characters from the text." 711 | ] 712 | }, 713 | { 714 | "cell_type": "code", 715 | "execution_count": 28, 716 | "metadata": {}, 717 | "outputs": [ 718 | { 719 | "data": { 720 | "text/plain": [ 721 | "[array([40, 1, 33, 2, 72, 67, 73, 2]),\n", 722 | " array([42, 1, 38, 44, 2, 9, 61, 73]),\n", 723 | " array([29, 43, 31, 71, 54, 9, 58, 61]),\n", 724 | " array([30, 45, 2, 74, 2, 76, 67, 58]),\n", 725 | " array([25, 40, 73, 73, 76, 61, 24, 71]),\n", 726 | " array([27, 40, 61, 61, 68, 54, 2, 58]),\n", 727 | " array([29, 39, 54, 2, 66, 73, 33, 2]),\n", 728 | " array([ 1, 43, 73, 62, 54, 2, 72, 67])]" 729 | ] 730 | }, 731 | "execution_count": 28, 732 | "metadata": {}, 733 | "output_type": "execute_result" 734 | } 735 | ], 736 | "source": [ 737 | "[xs[n][:cs] for n in range(cs)]" 738 | ] 739 | }, 740 | { 741 | "cell_type": "markdown", 742 | "metadata": {}, 743 | "source": [ 744 | "...and this is the next character after each sequence." 
745 | ] 746 | }, 747 | { 748 | "cell_type": "code", 749 | "execution_count": 29, 750 | "metadata": {}, 751 | "outputs": [ 752 | { 753 | "data": { 754 | "text/plain": [ 755 | "array([ 1, 33, 2, 72, 67, 73, 2, 68])" 756 | ] 757 | }, 758 | "execution_count": 29, 759 | "metadata": {}, 760 | "output_type": "execute_result" 761 | } 762 | ], 763 | "source": [ 764 | "y[:cs]" 765 | ] 766 | }, 767 | { 768 | "cell_type": "markdown", 769 | "metadata": {}, 770 | "source": [ 771 | "### Create and train model" 772 | ] 773 | }, 774 | { 775 | "cell_type": "code", 776 | "execution_count": 30, 777 | "metadata": { 778 | "collapsed": true 779 | }, 780 | "outputs": [], 781 | "source": [ 782 | "import torch.nn as nn\n", 783 | "import torch.nn.functional as F\n", 784 | "\n", 785 | "def each_tensor(items):\n", 786 | " return [tensor(item) for item in items] \n", 787 | "\n", 788 | "class RnnMultiChar(nn.Module):\n", 789 | "\n", 790 | " def __init__(self):\n", 791 | " super().__init__()\n", 792 | " self.embedding = nn.Embedding(vocab_size, n_fac)\n", 793 | " self.dense_in_lin = nn.Linear(n_fac, n_hidden)\n", 794 | " self.dense_hidden_lin = nn.Linear(n_hidden, n_hidden)\n", 795 | " self.dense_out = nn.Linear(n_hidden, vocab_size)\n", 796 | " self.init()\n", 797 | " \n", 798 | " def dense_in(self, x):\n", 799 | " x = x.view(x.size(0), -1)\n", 800 | " x = self.dense_in_lin(x)\n", 801 | " x = F.relu(x, True)\n", 802 | " return x\n", 803 | " \n", 804 | " def dense_hidden(self, x):\n", 805 | " x = self.dense_hidden_lin(x)\n", 806 | " x = F.relu(x)\n", 807 | " return x\n", 808 | "\n", 809 | " def forward(self, *c):\n", 810 | " c_in = self.embedding(c[0])\n", 811 | " hidden = self.dense_in(c_in)\n", 812 | " \n", 813 | " for i in range(1,cs):\n", 814 | " c_in = self.embedding(c[i]) # x => torch.Size([B, 1, n_fac])\n", 815 | " c_dense = self.dense_in(c_in)\n", 816 | " hidden = self.dense_hidden(hidden)\n", 817 | " hidden.add_(c_dense)\n", 818 | " \n", 819 | " c_out = self.dense_out(hidden)\n", 820 | " \n", 821 | " return c_out\n", 822 | " \n", 823 | " def init(self):\n", 824 | " torch.nn.init.uniform(self.embedding.weight, a=-0.05, b=0.05)\n", 825 | " torch.nn.init.xavier_uniform(self.dense_in_lin.weight)\n", 826 | " torch.nn.init.constant(self.dense_in_lin.bias, val=0.0)\n", 827 | " torch.nn.init.eye(self.dense_hidden_lin.weight)\n", 828 | " torch.nn.init.constant(self.dense_hidden_lin.bias, val=0.0)\n", 829 | " torch.nn.init.xavier_uniform(self.dense_out.weight)\n", 830 | " torch.nn.init.constant(self.dense_out.bias, val=0.0)" 831 | ] 832 | }, 833 | { 834 | "cell_type": "code", 835 | "execution_count": 31, 836 | "metadata": {}, 837 | "outputs": [ 838 | { 839 | "data": { 840 | "text/plain": [ 841 | "RnnMultiChar (\n", 842 | " (embedding): Embedding(85, 42)\n", 843 | " (dense_in_lin): Linear (42 -> 256)\n", 844 | " (dense_hidden_lin): Linear (256 -> 256)\n", 845 | " (dense_out): Linear (256 -> 85)\n", 846 | ")" 847 | ] 848 | }, 849 | "execution_count": 31, 850 | "metadata": {}, 851 | "output_type": "execute_result" 852 | } 853 | ], 854 | "source": [ 855 | "%autoreload 2\n", 856 | "\n", 857 | "criterion = nn.CrossEntropyLoss()\n", 858 | "model = RnnMultiChar()\n", 859 | "if(use_cuda):\n", 860 | " model.cuda()\n", 861 | " criterion.cuda()\n", 862 | "trainer = ModuleTrainer(model)\n", 863 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 864 | "trainer.set_loss(criterion)\n", 865 | "\n", 866 | "model" 867 | ] 868 | }, 869 | { 870 | "cell_type": "code", 871 | "execution_count": 32, 872 | "metadata": {}, 873 | "outputs": [ 874 | { 
875 | "name": "stderr", 876 | "output_type": "stream", 877 | "text": [ 878 | "Epoch 1/4: 1175 batches [00:16, 70.66 batches/s, loss=2.6259] \n", 879 | "Epoch 2/4: 1175 batches [00:17, 66.64 batches/s, loss=2.2948] \n", 880 | "Epoch 3/4: 1175 batches [00:17, 66.26 batches/s, loss=2.1843] \n", 881 | "Epoch 4/4: 1175 batches [00:17, 65.76 batches/s, loss=2.1106] \n" 882 | ] 883 | } 884 | ], 885 | "source": [ 886 | "trainer.fit(each_tensor(xs), tensor(y), nb_epoch=4, batch_size=batch_size, shuffle=True)" 887 | ] 888 | }, 889 | { 890 | "cell_type": "markdown", 891 | "metadata": {}, 892 | "source": [ 893 | "### Test model" 894 | ] 895 | }, 896 | { 897 | "cell_type": "code", 898 | "execution_count": 33, 899 | "metadata": {}, 900 | "outputs": [ 901 | { 902 | "data": { 903 | "text/plain": [ 904 | "'e'" 905 | ] 906 | }, 907 | "execution_count": 33, 908 | "metadata": {}, 909 | "output_type": "execute_result" 910 | } 911 | ], 912 | "source": [ 913 | "get_next('for ther')" 914 | ] 915 | }, 916 | { 917 | "cell_type": "code", 918 | "execution_count": 34, 919 | "metadata": {}, 920 | "outputs": [ 921 | { 922 | "data": { 923 | "text/plain": [ 924 | "'t'" 925 | ] 926 | }, 927 | "execution_count": 34, 928 | "metadata": {}, 929 | "output_type": "execute_result" 930 | } 931 | ], 932 | "source": [ 933 | "get_next('part of ')" 934 | ] 935 | }, 936 | { 937 | "cell_type": "code", 938 | "execution_count": 35, 939 | "metadata": {}, 940 | "outputs": [ 941 | { 942 | "data": { 943 | "text/plain": [ 944 | "'n'" 945 | ] 946 | }, 947 | "execution_count": 35, 948 | "metadata": {}, 949 | "output_type": "execute_result" 950 | } 951 | ], 952 | "source": [ 953 | "get_next('queens a')" 954 | ] 955 | }, 956 | { 957 | "cell_type": "markdown", 958 | "metadata": {}, 959 | "source": [ 960 | "## Our first RNN with PyTorch!" 961 | ] 962 | }, 963 | { 964 | "cell_type": "markdown", 965 | "metadata": {}, 966 | "source": [ 967 | "The SimpleRNN layer does not exist in PyTorch (yet?)" 968 | ] 969 | }, 970 | { 971 | "cell_type": "code", 972 | "execution_count": 36, 973 | "metadata": {}, 974 | "outputs": [ 975 | { 976 | "data": { 977 | "text/plain": [ 978 | "(256, 42, 8, 85)" 979 | ] 980 | }, 981 | "execution_count": 36, 982 | "metadata": {}, 983 | "output_type": "execute_result" 984 | } 985 | ], 986 | "source": [ 987 | "n_hidden, n_fac, cs, vocab_size" 988 | ] 989 | }, 990 | { 991 | "cell_type": "markdown", 992 | "metadata": {}, 993 | "source": [ 994 | "This is nearly exactly equivalent to the RNN we built ourselves in the previous section." 
995 | ] 996 | }, 997 | { 998 | "cell_type": "code", 999 | "execution_count": 37, 1000 | "metadata": { 1001 | "collapsed": true 1002 | }, 1003 | "outputs": [], 1004 | "source": [ 1005 | "import torch.nn as nn\n", 1006 | "import torch.nn.functional as F\n", 1007 | "\n", 1008 | "class RnnMultiCharPytorch(nn.Module):\n", 1009 | "\n", 1010 | " def __init__(self):\n", 1011 | " super().__init__()\n", 1012 | " self.embedding = nn.Embedding(vocab_size, n_fac)\n", 1013 | " self.rnn = nn.RNNCell(input_size=n_fac, hidden_size=n_hidden, nonlinearity='relu')\n", 1014 | " self.dense_out = nn.Linear(n_hidden, vocab_size)\n", 1015 | " self.init()\n", 1016 | "\n", 1017 | " def forward(self, *c):\n", 1018 | " batch_size = c[0].size(0)\n", 1019 | " hidden = Variable(torch.zeros(batch_size, n_hidden))\n", 1020 | " # F.relu(F.linear(input, w_ih, b_ih)\n", 1021 | " for ci in c:\n", 1022 | " c_in = self.embedding(ci)\n", 1023 | " c_in = c_in.view(c_in.size(0), -1) # torch.Size([64, 42])\n", 1024 | " hidden = self.rnn(c_in, hidden)\n", 1025 | " \n", 1026 | " c_out = self.dense_out(hidden)\n", 1027 | " return c_out\n", 1028 | " \n", 1029 | " def init(self):\n", 1030 | " torch.nn.init.uniform(self.embedding.weight, a=-0.05, b=0.05)\n", 1031 | " torch.nn.init.xavier_uniform(self.rnn.weight_ih)\n", 1032 | " torch.nn.init.constant(self.rnn.bias_ih, val=0.0)\n", 1033 | " torch.nn.init.eye(self.rnn.weight_hh)\n", 1034 | " torch.nn.init.constant(self.rnn.bias_hh, val=0.0)\n", 1035 | " torch.nn.init.xavier_uniform(self.dense_out.weight)\n", 1036 | " torch.nn.init.constant(self.dense_out.bias, val=0.0)" 1037 | ] 1038 | }, 1039 | { 1040 | "cell_type": "code", 1041 | "execution_count": 38, 1042 | "metadata": {}, 1043 | "outputs": [ 1044 | { 1045 | "data": { 1046 | "text/plain": [ 1047 | "RnnMultiCharPytorch (\n", 1048 | " (embedding): Embedding(85, 42)\n", 1049 | " (rnn): RNNCell(42, 256, nonlinearity=relu)\n", 1050 | " (dense_out): Linear (256 -> 85)\n", 1051 | ")" 1052 | ] 1053 | }, 1054 | "execution_count": 38, 1055 | "metadata": {}, 1056 | "output_type": "execute_result" 1057 | } 1058 | ], 1059 | "source": [ 1060 | "%autoreload 2\n", 1061 | "\n", 1062 | "criterion = nn.CrossEntropyLoss()\n", 1063 | "model = RnnMultiCharPytorch()\n", 1064 | "if(use_cuda):\n", 1065 | " model.cuda()\n", 1066 | " criterion.cuda()\n", 1067 | "trainer = ModuleTrainer(model)\n", 1068 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 1069 | "trainer.set_loss(criterion)\n", 1070 | "\n", 1071 | "model" 1072 | ] 1073 | }, 1074 | { 1075 | "cell_type": "code", 1076 | "execution_count": 39, 1077 | "metadata": {}, 1078 | "outputs": [ 1079 | { 1080 | "name": "stderr", 1081 | "output_type": "stream", 1082 | "text": [ 1083 | "Epoch 1/4: 1175 batches [00:17, 68.95 batches/s, loss=2.7699] \n", 1084 | "Epoch 2/4: 1175 batches [00:20, 54.34 batches/s, loss=2.2835] \n", 1085 | "Epoch 3/4: 1175 batches [00:20, 57.30 batches/s, loss=2.0904] \n", 1086 | "Epoch 4/4: 1175 batches [00:29, 39.95 batches/s, loss=1.9617] \n" 1087 | ] 1088 | } 1089 | ], 1090 | "source": [ 1091 | "trainer.fit(each_tensor(xs), tensor(y), nb_epoch=4, batch_size=batch_size, shuffle=True)" 1092 | ] 1093 | }, 1094 | { 1095 | "cell_type": "code", 1096 | "execution_count": 40, 1097 | "metadata": {}, 1098 | "outputs": [ 1099 | { 1100 | "data": { 1101 | "text/plain": [ 1102 | "'e'" 1103 | ] 1104 | }, 1105 | "execution_count": 40, 1106 | "metadata": {}, 1107 | "output_type": "execute_result" 1108 | } 1109 | ], 1110 | "source": [ 1111 | "get_next('for ther')" 1112 | ] 1113 | }, 1114 | { 1115 | 
"cell_type": "code", 1116 | "execution_count": 41, 1117 | "metadata": {}, 1118 | "outputs": [ 1119 | { 1120 | "data": { 1121 | "text/plain": [ 1122 | "'t'" 1123 | ] 1124 | }, 1125 | "execution_count": 41, 1126 | "metadata": {}, 1127 | "output_type": "execute_result" 1128 | } 1129 | ], 1130 | "source": [ 1131 | "get_next('part of ')" 1132 | ] 1133 | }, 1134 | { 1135 | "cell_type": "code", 1136 | "execution_count": 42, 1137 | "metadata": {}, 1138 | "outputs": [ 1139 | { 1140 | "data": { 1141 | "text/plain": [ 1142 | "'n'" 1143 | ] 1144 | }, 1145 | "execution_count": 42, 1146 | "metadata": {}, 1147 | "output_type": "execute_result" 1148 | } 1149 | ], 1150 | "source": [ 1151 | "get_next('queens a')" 1152 | ] 1153 | }, 1154 | { 1155 | "cell_type": "markdown", 1156 | "metadata": {}, 1157 | "source": [ 1158 | "## Returning sequences" 1159 | ] 1160 | }, 1161 | { 1162 | "cell_type": "markdown", 1163 | "metadata": {}, 1164 | "source": [ 1165 | "## Create inputs" 1166 | ] 1167 | }, 1168 | { 1169 | "cell_type": "markdown", 1170 | "metadata": {}, 1171 | "source": [ 1172 | "To use a sequence model, we can leave our input unchanged - but we have to change our output to a sequence (of course!)\n", 1173 | "\n", 1174 | "Here, c_out_dat is identical to c_in_dat, but moved across 1 character." 1175 | ] 1176 | }, 1177 | { 1178 | "cell_type": "code", 1179 | "execution_count": 43, 1180 | "metadata": { 1181 | "collapsed": true 1182 | }, 1183 | "outputs": [], 1184 | "source": [ 1185 | "#c_in_dat = [[idx[i+n] for i in range(0, len(idx)-1-cs, cs)]\n", 1186 | "# for n in range(cs)]\n", 1187 | "c_out_dat = [[idx[i+n] for i in range(1, len(idx)-cs, cs)]\n", 1188 | " for n in range(cs)]" 1189 | ] 1190 | }, 1191 | { 1192 | "cell_type": "code", 1193 | "execution_count": 44, 1194 | "metadata": {}, 1195 | "outputs": [ 1196 | { 1197 | "data": { 1198 | "text/plain": [ 1199 | "(8, (75111,))" 1200 | ] 1201 | }, 1202 | "execution_count": 44, 1203 | "metadata": {}, 1204 | "output_type": "execute_result" 1205 | } 1206 | ], 1207 | "source": [ 1208 | "ys = [np.stack(c) for c in c_out_dat]\n", 1209 | "len(ys), ys[0].shape" 1210 | ] 1211 | }, 1212 | { 1213 | "cell_type": "markdown", 1214 | "metadata": {}, 1215 | "source": [ 1216 | "Reading down each column shows one set of inputs and outputs." 
1217 | ] 1218 | }, 1219 | { 1220 | "cell_type": "code", 1221 | "execution_count": 45, 1222 | "metadata": {}, 1223 | "outputs": [ 1224 | { 1225 | "data": { 1226 | "text/plain": [ 1227 | "(8, (75111,))" 1228 | ] 1229 | }, 1230 | "execution_count": 45, 1231 | "metadata": {}, 1232 | "output_type": "execute_result" 1233 | } 1234 | ], 1235 | "source": [ 1236 | "[xs[n][:cs] for n in range(cs)]\n", 1237 | "len(xs), xs[0].shape" 1238 | ] 1239 | }, 1240 | { 1241 | "cell_type": "code", 1242 | "execution_count": 46, 1243 | "metadata": {}, 1244 | "outputs": [ 1245 | { 1246 | "data": { 1247 | "text/plain": [ 1248 | "(8, (75111,))" 1249 | ] 1250 | }, 1251 | "execution_count": 46, 1252 | "metadata": {}, 1253 | "output_type": "execute_result" 1254 | } 1255 | ], 1256 | "source": [ 1257 | "[ys[n][:cs] for n in range(cs)]\n", 1258 | "len(ys), ys[0].shape" 1259 | ] 1260 | }, 1261 | { 1262 | "cell_type": "markdown", 1263 | "metadata": {}, 1264 | "source": [ 1265 | "### Create and train model" 1266 | ] 1267 | }, 1268 | { 1269 | "cell_type": "code", 1270 | "execution_count": 47, 1271 | "metadata": { 1272 | "collapsed": true 1273 | }, 1274 | "outputs": [], 1275 | "source": [ 1276 | "import torch.nn as nn\n", 1277 | "import torch.nn.functional as F\n", 1278 | "\n", 1279 | "class RnnMultiOutput(nn.Module):\n", 1280 | "\n", 1281 | " def __init__(self):\n", 1282 | " super().__init__()\n", 1283 | " self.embedding = nn.Embedding(vocab_size, n_fac)\n", 1284 | " self.dense_in_lin = nn.Linear(n_fac, n_hidden)\n", 1285 | " self.dense_hidden_lin = nn.Linear(n_hidden, n_hidden)\n", 1286 | " self.dense_out = nn.Linear(n_hidden, vocab_size)\n", 1287 | " self.init()\n", 1288 | " \n", 1289 | " def dense_in(self, x):\n", 1290 | " x = x.view(x.size(0), -1)\n", 1291 | " x = self.dense_in_lin(x)\n", 1292 | " x = F.relu(x, True)\n", 1293 | " return x\n", 1294 | " \n", 1295 | " def dense_hidden(self, x):\n", 1296 | " x = self.dense_hidden_lin(x)\n", 1297 | " x = F.relu(x)\n", 1298 | " return x\n", 1299 | "\n", 1300 | " def forward(self, *c):\n", 1301 | " c_in = self.embedding(c[0])\n", 1302 | " hidden = self.dense_in(c_in)\n", 1303 | " \n", 1304 | " out = [self.dense_out(hidden)]\n", 1305 | " \n", 1306 | " for i in range(1,cs):\n", 1307 | " c_in = self.embedding(c[i]) # x => torch.Size([B, 1, n_fac])\n", 1308 | " c_dense = self.dense_in(c_in)\n", 1309 | " hidden = self.dense_hidden(hidden)\n", 1310 | " hidden.add_(c_dense)\n", 1311 | " out.append(self.dense_out(hidden))\n", 1312 | " \n", 1313 | " return out\n", 1314 | " \n", 1315 | " def init(self):\n", 1316 | " torch.nn.init.uniform(self.embedding.weight, a=-0.05, b=0.05)\n", 1317 | " torch.nn.init.xavier_uniform(self.dense_in_lin.weight)\n", 1318 | " torch.nn.init.constant(self.dense_in_lin.bias, val=0.0)\n", 1319 | " torch.nn.init.eye(self.dense_hidden_lin.weight)\n", 1320 | " torch.nn.init.constant(self.dense_hidden_lin.bias, val=0.0)\n", 1321 | " torch.nn.init.xavier_uniform(self.dense_out.weight)\n", 1322 | " torch.nn.init.constant(self.dense_out.bias, val=0.0)" 1323 | ] 1324 | }, 1325 | { 1326 | "cell_type": "code", 1327 | "execution_count": 65, 1328 | "metadata": {}, 1329 | "outputs": [ 1330 | { 1331 | "data": { 1332 | "text/plain": [ 1333 | "RnnMultiOutput (\n", 1334 | " (embedding): Embedding(85, 42)\n", 1335 | " (dense_in_lin): Linear (42 -> 256)\n", 1336 | " (dense_hidden_lin): Linear (256 -> 256)\n", 1337 | " (dense_out): Linear (256 -> 85)\n", 1338 | ")" 1339 | ] 1340 | }, 1341 | "execution_count": 65, 1342 | "metadata": {}, 1343 | "output_type": "execute_result" 1344 | } 
1345 | ], 1346 | "source": [ 1347 | "%autoreload 2\n", 1348 | "\n", 1349 | "criterion = nn.CrossEntropyLoss()\n", 1350 | "model = RnnMultiOutput()\n", 1351 | "if(use_cuda):\n", 1352 | " model.cuda()\n", 1353 | " criterion.cuda()\n", 1354 | "trainer = ModuleTrainer(model)\n", 1355 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 1356 | "trainer.set_loss(criterion)\n", 1357 | "\n", 1358 | "# Bug in torchsample?\n", 1359 | "trainer._has_multiple_loss_fns = False\n", 1360 | "\n", 1361 | "model" 1362 | ] 1363 | }, 1364 | { 1365 | "cell_type": "code", 1366 | "execution_count": 66, 1367 | "metadata": {}, 1368 | "outputs": [ 1369 | { 1370 | "name": "stderr", 1371 | "output_type": "stream", 1372 | "text": [ 1373 | "Epoch 1/4: 1175 batches [00:22, 52.20 batches/s, loss=19.6869] \n", 1374 | "Epoch 2/4: 1175 batches [00:23, 49.30 batches/s, loss=17.6566] \n", 1375 | "Epoch 3/4: 1175 batches [00:24, 47.88 batches/s, loss=17.1526] \n", 1376 | "Epoch 4/4: 1175 batches [00:24, 48.41 batches/s, loss=16.8499] \n" 1377 | ] 1378 | } 1379 | ], 1380 | "source": [ 1381 | "# TODO print each loss separately\n", 1382 | "trainer.fit(each_tensor(xs), each_tensor(ys), nb_epoch=4, batch_size=batch_size, shuffle=True)" 1383 | ] 1384 | }, 1385 | { 1386 | "cell_type": "markdown", 1387 | "metadata": {}, 1388 | "source": [ 1389 | "### Test model" 1390 | ] 1391 | }, 1392 | { 1393 | "cell_type": "code", 1394 | "execution_count": 68, 1395 | "metadata": { 1396 | "collapsed": true 1397 | }, 1398 | "outputs": [], 1399 | "source": [ 1400 | "%autoreload 2\n", 1401 | "\n", 1402 | "def char_argmax(p):\n", 1403 | " # print(p.size())\n", 1404 | " v, i = torch.max(p, 0) # i is the result Tensor with the index locations of the maximum values\n", 1405 | " i = torch.max(i.data) # find any index (they are all max)\n", 1406 | " return chars[i]\n", 1407 | "\n", 1408 | "def get_nexts_multiple(inp):\n", 1409 | " idxs = [char_indices[c] for c in inp]\n", 1410 | " arrs = [tensor([i]) for i in idxs]\n", 1411 | " ps = trainer.predict(arrs)\n", 1412 | " print(list(inp))\n", 1413 | " return [char_argmax(p[0]) for p in ps]" 1414 | ] 1415 | }, 1416 | { 1417 | "cell_type": "code", 1418 | "execution_count": 69, 1419 | "metadata": {}, 1420 | "outputs": [ 1421 | { 1422 | "name": "stdout", 1423 | "output_type": "stream", 1424 | "text": [ 1425 | "[' ', 't', 'h', 'i', 's', ' ', 'i', 's']\n" 1426 | ] 1427 | }, 1428 | { 1429 | "data": { 1430 | "text/plain": [ 1431 | "['t', 'h', 'e', 't', ' ', 'c', 'n', ' ']" 1432 | ] 1433 | }, 1434 | "execution_count": 69, 1435 | "metadata": {}, 1436 | "output_type": "execute_result" 1437 | } 1438 | ], 1439 | "source": [ 1440 | "get_nexts_multiple(' this is')" 1441 | ] 1442 | }, 1443 | { 1444 | "cell_type": "code", 1445 | "execution_count": 70, 1446 | "metadata": {}, 1447 | "outputs": [ 1448 | { 1449 | "name": "stdout", 1450 | "output_type": "stream", 1451 | "text": [ 1452 | "[' ', 'p', 'a', 'r', 't', ' ', 'o', 'f']\n" 1453 | ] 1454 | }, 1455 | { 1456 | "data": { 1457 | "text/plain": [ 1458 | "['t', 'o', 'r', 't', 'i', 'o', 'f', ' ']" 1459 | ] 1460 | }, 1461 | "execution_count": 70, 1462 | "metadata": {}, 1463 | "output_type": "execute_result" 1464 | } 1465 | ], 1466 | "source": [ 1467 | "get_nexts_multiple(' part of')" 1468 | ] 1469 | }, 1470 | { 1471 | "cell_type": "markdown", 1472 | "metadata": {}, 1473 | "source": [ 1474 | "## Sequence model with PyTorch" 1475 | ] 1476 | }, 1477 | { 1478 | "cell_type": "code", 1479 | "execution_count": 71, 1480 | "metadata": {}, 1481 | "outputs": [ 1482 | { 1483 | "data": { 1484 | 
"text/plain": [ 1485 | "(256, 42, 8, 85)" 1486 | ] 1487 | }, 1488 | "execution_count": 71, 1489 | "metadata": {}, 1490 | "output_type": "execute_result" 1491 | } 1492 | ], 1493 | "source": [ 1494 | "n_hidden, n_fac, cs, vocab_size" 1495 | ] 1496 | }, 1497 | { 1498 | "cell_type": "markdown", 1499 | "metadata": {}, 1500 | "source": [ 1501 | "To convert our previous PyTorch model into a sequence model, simply return multiple outputs instead of a single one" 1502 | ] 1503 | }, 1504 | { 1505 | "cell_type": "code", 1506 | "execution_count": 87, 1507 | "metadata": { 1508 | "collapsed": true 1509 | }, 1510 | "outputs": [], 1511 | "source": [ 1512 | "import torch.nn as nn\n", 1513 | "import torch.nn.functional as F\n", 1514 | "\n", 1515 | "class RnnCellMultiOutput(nn.Module):\n", 1516 | "\n", 1517 | " def __init__(self):\n", 1518 | " super().__init__()\n", 1519 | " self.embedding = nn.Embedding(vocab_size, n_fac)\n", 1520 | " self.rnn = nn.RNNCell(input_size=n_fac, hidden_size=n_hidden, nonlinearity='relu')\n", 1521 | " self.dense_out = nn.Linear(n_hidden, vocab_size)\n", 1522 | " self.init()\n", 1523 | "\n", 1524 | " def forward(self, *c):\n", 1525 | " batch_size = c[0].size(0)\n", 1526 | " hidden = Variable(torch.zeros(batch_size, n_hidden))\n", 1527 | " \n", 1528 | " out = []\n", 1529 | " \n", 1530 | " for ci in c:\n", 1531 | " c_in = self.embedding(ci)\n", 1532 | " c_in = c_in.view(c_in.size(0), -1)\n", 1533 | " hidden = self.rnn(c_in, hidden)\n", 1534 | " out.append(self.dense_out(hidden))\n", 1535 | " \n", 1536 | " return out\n", 1537 | " \n", 1538 | " def init(self):\n", 1539 | " torch.nn.init.uniform(self.embedding.weight, a=-0.05, b=0.05)\n", 1540 | " torch.nn.init.xavier_uniform(self.rnn.weight_ih)\n", 1541 | " torch.nn.init.constant(self.rnn.bias_ih, val=0.0)\n", 1542 | " torch.nn.init.eye(self.rnn.weight_hh)\n", 1543 | " torch.nn.init.constant(self.rnn.bias_hh, val=0.0)\n", 1544 | " torch.nn.init.xavier_uniform(self.dense_out.weight)\n", 1545 | " torch.nn.init.constant(self.dense_out.bias, val=0.0)" 1546 | ] 1547 | }, 1548 | { 1549 | "cell_type": "code", 1550 | "execution_count": 88, 1551 | "metadata": {}, 1552 | "outputs": [ 1553 | { 1554 | "data": { 1555 | "text/plain": [ 1556 | "RnnCellMultiOutput (\n", 1557 | " (embedding): Embedding(85, 42)\n", 1558 | " (rnn): RNNCell(42, 256, nonlinearity=relu)\n", 1559 | " (dense_out): Linear (256 -> 85)\n", 1560 | ")" 1561 | ] 1562 | }, 1563 | "execution_count": 88, 1564 | "metadata": {}, 1565 | "output_type": "execute_result" 1566 | } 1567 | ], 1568 | "source": [ 1569 | "%autoreload 2\n", 1570 | "\n", 1571 | "criterion = nn.CrossEntropyLoss()\n", 1572 | "model = RnnCellMultiOutput()\n", 1573 | "if(use_cuda):\n", 1574 | " model.cuda()\n", 1575 | " criterion.cuda()\n", 1576 | "trainer = ModuleTrainer(model)\n", 1577 | "trainer.set_optimizer(optim.Adam, lr=1e-3)\n", 1578 | "trainer.set_loss(criterion)\n", 1579 | "\n", 1580 | "# Bug in torchsample?\n", 1581 | "trainer._has_multiple_loss_fns = False\n", 1582 | "\n", 1583 | "model" 1584 | ] 1585 | }, 1586 | { 1587 | "cell_type": "code", 1588 | "execution_count": 89, 1589 | "metadata": {}, 1590 | "outputs": [ 1591 | { 1592 | "name": "stderr", 1593 | "output_type": "stream", 1594 | "text": [ 1595 | "Epoch 1/4: 1175 batches [00:22, 53.17 batches/s, loss=19.3179] \n", 1596 | "Epoch 2/4: 1175 batches [00:21, 53.67 batches/s, loss=15.9803] \n", 1597 | "Epoch 3/4: 1175 batches [00:22, 51.81 batches/s, loss=15.0784] \n", 1598 | "Epoch 4/4: 1175 batches [00:22, 53.04 batches/s, loss=14.6023] \n" 1599 | ] 1600 
| } 1601 | ], 1602 | "source": [ 1603 | "# TODO print each loss separately\n", 1604 | "trainer.fit(each_tensor(xs), each_tensor(ys), nb_epoch=4, batch_size=batch_size, shuffle=True)" 1605 | ] 1606 | }, 1607 | { 1608 | "cell_type": "code", 1609 | "execution_count": 91, 1610 | "metadata": {}, 1611 | "outputs": [ 1612 | { 1613 | "name": "stdout", 1614 | "output_type": "stream", 1615 | "text": [ 1616 | "[' ', 't', 'h', 'i', 's', ' ', 'i', 's']\n" 1617 | ] 1618 | }, 1619 | { 1620 | "data": { 1621 | "text/plain": [ 1622 | "['t', 'h', 'e', 's', ' ', 'c', 'n', ' ']" 1623 | ] 1624 | }, 1625 | "execution_count": 91, 1626 | "metadata": {}, 1627 | "output_type": "execute_result" 1628 | } 1629 | ], 1630 | "source": [ 1631 | "get_nexts_multiple(' this is')" 1632 | ] 1633 | }, 1634 | { 1635 | "cell_type": "markdown", 1636 | "metadata": {}, 1637 | "source": [ 1638 | "## Stateful model with PyTorch" 1639 | ] 1640 | }, 1641 | { 1642 | "cell_type": "code", 1643 | "execution_count": null, 1644 | "metadata": { 1645 | "collapsed": true 1646 | }, 1647 | "outputs": [], 1648 | "source": [ 1649 | "# TODO" 1650 | ] 1651 | } 1652 | ], 1653 | "metadata": { 1654 | "kernelspec": { 1655 | "display_name": "Python 3", 1656 | "language": "python", 1657 | "name": "python3" 1658 | }, 1659 | "language_info": { 1660 | "codemirror_mode": { 1661 | "name": "ipython", 1662 | "version": 3 1663 | }, 1664 | "file_extension": ".py", 1665 | "mimetype": "text/x-python", 1666 | "name": "python", 1667 | "nbconvert_exporter": "python", 1668 | "pygments_lexer": "ipython3", 1669 | "version": "3.5.2" 1670 | } 1671 | }, 1672 | "nbformat": 4, 1673 | "nbformat_minor": 2 1674 | } 1675 | --------------------------------------------------------------------------------