├── .gitignore ├── LICENSE ├── README.md ├── dataprocessing.py ├── dcgan_autoencoder_notebook.ipynb ├── requirements.txt └── utils ├── __init__.py ├── activations.py ├── config.py ├── costs.py ├── cv2_utils.py ├── data_utils.py ├── inits.py ├── metrics.py ├── ops.py ├── rng.py ├── theano_utils.py ├── updates.py └── vis.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *,cover 45 | .hypothesis/ 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 mikesj-public 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dcgan-autoencoder 2 | 3 | I recommend you look at the [write-up of this repo](https://swarbrickjones.wordpress.com/2016/01/24/generative-adversarial-autoencoders-in-theano/) before proceeding. 4 | 5 | This is a Theano implementation of a convolutional autoencoder trained with an adversarial network loss function. This structure was used to try to upscale some grainy images of celebrities, [written up here](https://swarbrickjones.wordpress.com/2016/01/13/enhancing-images-using-deep-convolutional-generative-adversarial-networks-dcgans/).
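In other words, the autoencoder (acting as the "generator") is penalised both for pixel-level reconstruction error and for producing outputs that a separate discriminator network can tell apart from real images. The NumPy sketch below is only meant to illustrate how those two terms can combine into a single generator objective; the function names and the `adversarial_weight` knob are illustrative assumptions rather than code from this repository (the notebook builds the equivalent expressions in Theano, where it defines an `mse` helper and `bce = T.nnet.binary_crossentropy`), and the exact weighting used in training may differ.

```python
import numpy as np

def reconstruction_loss(reconstruction, target):
    # pixel-wise squared error, summed per example and averaged over the batch
    diff = (reconstruction - target).reshape(len(target), -1)
    return np.mean(np.sum(diff ** 2, axis=1))

def adversarial_loss(d_scores_on_reconstructions):
    # binary cross-entropy rewarding reconstructions the discriminator scores as "real" (close to 1)
    p = np.clip(d_scores_on_reconstructions, 1e-7, 1.0 - 1e-7)
    return np.mean(-np.log(p))

def generator_loss(reconstruction, target, d_scores, adversarial_weight=1.0):
    # the autoencoder minimises both terms; adversarial_weight is a hypothetical balance factor
    return reconstruction_loss(reconstruction, target) + adversarial_weight * adversarial_loss(d_scores)
```

Training then alternates between updating the discriminator (to separate real images from reconstructions) and updating the autoencoder against a combined loss of roughly this shape.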
6 | 7 | Example output: for each triple of images, the one on the left is the original image, the middle one is the grainy version given to the autoencoder, and the one on the right is the neural network's attempt to reconstruct the original. ![My image](https://swarbrickjones.files.wordpress.com/2016/01/1452706493.png) 8 | 9 | The code in the IPython notebook closely follows the implementation given by [Alec Radford et al](https://github.com/Newmu/dcgan_code). 10 | 11 | ## How to run 12 | 13 | I assume familiarity with IPython (Jupyter), pip and virtualenv (none of which are complicated to learn). The following should work on Unix systems. Working in a virtualenv, run 14 | 15 | ```pip install -r /path/to/requirements.txt``` 16 | 17 | Download the CelebA dataset from the [project website](http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html) (you're looking for a file called img_align_celeba.zip). Unzip it into this directory, then run 18 | 19 | ``` ./dataprocessing.py ``` 20 | 21 | This will crop the images to the right sizes and store them in HDF5 format (faces.hdf5). 22 | 23 | Next, run the dcgan notebook. 24 | -------------------------------------------------------------------------------- /dataprocessing.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from PIL import Image 4 | from os import listdir 5 | from os.path import isfile, join 6 | import numpy as np 7 | import pickle 8 | from time import time 9 | import sys 10 | import h5py 11 | from tqdm import tqdm 12 | 13 | 14 | image_dir = 'img_align_celeba/' 15 | try: 16 | image_locs = [join(image_dir,f) for f in listdir(image_dir) if isfile(join(image_dir,f))] 17 | except: 18 | print "expected aligned images directory, see README" 19 | 20 | total_imgs = len(image_locs) 21 | print "found %i images in directory" %total_imgs 22 | 23 | def process_image(im):  # make the small 'input' image and the larger 'target' crop (shapes match the HDF5 datasets below) 24 | if im.mode != "RGB": 25 | im = im.convert("RGB") 26 | new_size = [int(i/1.3) for i in im.size] 27 | im.thumbnail(new_size, Image.ANTIALIAS) 28 | target = np.array(im)[3:-3,4:-4,:] 29 | im = Image.fromarray(target) 30 | new_size = [i/4 for i in im.size] 31 | im.thumbnail(new_size, Image.ANTIALIAS) 32 | input = np.array(im) 33 | return input, target 34 | 35 | 36 | def proc_loc(loc): 37 | try: 38 | i = Image.open(loc) 39 | input, target = process_image(i) 40 | return (input, target) 41 | except KeyboardInterrupt: 42 | raise 43 | except: 44 | return None 45 | 46 | 47 | try: 48 | hf = h5py.File('faces.hdf5','r+') 49 | except: 50 | hf = h5py.File('faces.hdf5','w') 51 | 52 | 53 | try: 54 | dset_t = hf.create_dataset("target", (1,160,128,3), 55 | maxshape= (1e6,160,128,3), chunks = (1,160,128,3), compression = "gzip") 56 | except: 57 | dset_t = hf['target'] 58 | 59 | try: 60 | dset_i = hf.create_dataset("input", (1, 40, 32, 3), 61 | maxshape= (1e6, 40, 32, 3), chunks = (1, 40, 32, 3), compression = "gzip") 62 | except: 63 | dset_i = hf['input'] 64 | 65 | batch_size = 1024 66 | num_iter = total_imgs / batch_size 67 | 68 | insert_point = 0 69 | print "STARTING PROCESSING IN BATCHES OF %i" %batch_size 70 | 71 | for i in tqdm(range(num_iter)): 72 | sys.stdout.flush() 73 | 74 | X_in = [] 75 | X_ta = [] 76 | 77 | a = time() 78 | locs = image_locs[i * batch_size : (i + 1) * batch_size] 79 | 80 | proc = [proc_loc(loc) for loc in locs] 81 | 82 | for pair in proc: 83 | if pair is not None: 84 | input, target = pair 85 | X_in.append(input) 86 | X_ta.append(target) 87 | 88 | X_in = np.array(X_in) 89 | X_ta = np.array(X_ta) 90 | 91 | dset_i.resize((insert_point + 
len(X_in),40, 32, 3)) 92 | dset_t.resize((insert_point + len(X_in),160,128,3)) 93 | 94 | dset_i[insert_point:insert_point + len(X_in)] = X_in 95 | dset_t[insert_point:insert_point + len(X_in)] = X_ta 96 | 97 | insert_point += len(X_in) 98 | 99 | hf.close() 100 | -------------------------------------------------------------------------------- /dcgan_autoencoder_notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Imports" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": { 14 | "collapsed": false 15 | }, 16 | "outputs": [ 17 | { 18 | "name": "stderr", 19 | "output_type": "stream", 20 | "text": [ 21 | "Using gpu device 0: Graphics Device (CNMeM is disabled)\n" 22 | ] 23 | } 24 | ], 25 | "source": [ 26 | "import sys\n", 27 | "sys.path.append('..')\n", 28 | "\n", 29 | "import os\n", 30 | "import json\n", 31 | "from time import time\n", 32 | "import numpy as np\n", 33 | "from tqdm import tqdm\n", 34 | "\n", 35 | "import theano\n", 36 | "import theano.tensor as T\n", 37 | "from theano.sandbox.cuda.dnn import dnn_conv\n", 38 | "\n", 39 | "from PIL import Image" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "N.B. The code from the following imports is lifted from the original [dcgan project](https://github.com/Newmu/dcgan_code)" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 2, 52 | "metadata": { 53 | "collapsed": true 54 | }, 55 | "outputs": [], 56 | "source": [ 57 | "from lib import activations\n", 58 | "from lib import updates\n", 59 | "from lib import inits\n", 60 | "from lib.rng import py_rng, np_rng\n", 61 | "from lib.ops import batchnorm, conv_cond_concat, deconv, dropout, l2normalize\n", 62 | "from lib.metrics import nnc_score, nnd_score\n", 63 | "from lib.theano_utils import floatX, sharedX\n", 64 | "from lib.data_utils import OneHot, shuffle, iter_data, center_crop, patch" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 3, 70 | "metadata": { 71 | "collapsed": true 72 | }, 73 | "outputs": [], 74 | "source": [ 75 | "from fuel.datasets.hdf5 import H5PYDataset\n", 76 | "from fuel.schemes import ShuffledScheme, SequentialScheme\n", 77 | "from fuel.streams import DataStream" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "# Data Stuff" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 30, 90 | "metadata": { 91 | "collapsed": false 92 | }, 93 | "outputs": [ 94 | { 95 | "name": "stdout", 96 | "output_type": "stream", 97 | "text": [ 98 | "number of samples in dataset : 9216\n" 99 | ] 100 | } 101 | ], 102 | "source": [ 103 | "import h5py\n", 104 | "try:\n", 105 | " hf[\"target\"].shape\n", 106 | "except:\n", 107 | " hf = h5py.File('faces.hdf5','r+')\n", 108 | "num_samples = hf[\"input\"].shape[0]\n", 109 | "\n", 110 | "print \"number of samples in dataset : %i\" %num_samples" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": 32, 116 | "metadata": { 117 | "collapsed": false 118 | }, 119 | "outputs": [], 120 | "source": [ 121 | "split_dict = {\n", 122 | " 'train': {'input': (2000, num_samples), 'target': (2000, num_samples)},\n", 123 | " 'test': {'input': (0, 1000), 'target': (0, 1000)},\n", 124 | " 'val': {'input': (1000, 2000), 'target': (1000, 2000)}\n", 125 | "}\n", 126 | "hf.attrs['split'] = 
H5PYDataset.create_split_array(split_dict)\n", 127 | "train_set = H5PYDataset('faces.hdf5', which_sets=('train',))\n", 128 | "test_set = H5PYDataset('faces.hdf5', which_sets=('test',))\n", 129 | "val_set = H5PYDataset('faces.hdf5', which_sets=('val',))\n", 130 | "\n", 131 | "batch_size = 128\n", 132 | "\n", 133 | "#TODO : use shuffledscheme instead? Seems slower, might have screwed up the chunksize in the HDF5 files?\n", 134 | "\n", 135 | "tr_scheme = SequentialScheme(examples=train_set.num_examples, batch_size=batch_size)\n", 136 | "tr_stream = DataStream(train_set, iteration_scheme=tr_scheme)\n", 137 | "\n", 138 | "val_scheme = SequentialScheme(examples=val_set.num_examples, batch_size=batch_size)\n", 139 | "val_stream = DataStream(val_set, iteration_scheme=val_scheme)\n", 140 | "\n", 141 | "test_scheme = SequentialScheme(examples=test_set.num_examples, batch_size=batch_size)\n", 142 | "test_stream = DataStream(test_set, iteration_scheme=test_scheme)" 143 | ] 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "metadata": {}, 148 | "source": [ 149 | "## Check data looks sensible" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 6, 155 | "metadata": { 156 | "collapsed": false 157 | }, 158 | "outputs": [ 159 | { 160 | "name": "stdout", 161 | "output_type": "stream", 162 | "text": [ 163 | "EXAMPLE TARGET IMAGE:\n" 164 | ] 165 | }, 166 | { 167 | "data": { 168 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAIAAAACgCAIAAABL8POqAAB9tklEQVR4nOz9aZRl2XUeiO29z3Cn\nN8acEZFz5VAjqgpVhUIRAAmKBEGCIAVSdGtoURbVGlpuanB30172Wv5ht91uS7bcGlrWarWl7hYp\nki1RFCkKJAiCBEEMRGGoAmrMyjkyMyIyhhdvutM5Z2//uC8iswZQEEkJWl44K/PFfe/dd999Z5+z\nz7e/PRzE1Q9QVUg5lSoXXwB4wADCAAKAAIigEQgAYdZYgAUCQEDQAAT3Gr/1AAWRCDUziwgACChA\nBTpCnYCJwUTaphhFzhrQGrQWHUFkwSZgLegYtRVRgIioKKCEIMCCDMAQPAGzrxWIQRAfDEgHfexz\n9pUFb6DUoVAhp5AbDFYB+5rFKxQURwqIkAiQkEiMVVqTUqgUaqO0IkWqaQCstVZKkUIi1JqU0kop\nItIEIiIiKICIgGhIESIREZEial503nsXPEtVu6Isy9rlZenqAET3d9+327egfVsA3+JGiIiI/+YT\n/502kUZBNa2Ztt/sZxFAgFlYhLnRnG95UxAJCZkZAUWAhQlJZm2mW4VZDj8rjcY8vKXfo39EpNEi\nswMRmX1U8PCz2Pye+9t9V/j2DPgWt28L4Fvcvi2Ab3H7tgC+xe3fvQDk/iUH77Mn7p0BCG9aPOUd\nTgEAwDefiYfXxzedJu+0ZOKbL/P2b/hWtX8PM0BEEJEQCbFBJQIi0hh0wiICIiRIcniGCDADA8p9\nwhBgDgDhsJsFEAGQSDXPCAlJy6GUDj8EiogA2LNCJAAlQO8wCL6phtgAnns/7PfZJYhEBIjM/O9N\nBX2DsQ/39/E7nPHm+5PDs/jNL+JbPi1076IIiHLvDr7ViHvWjrDot9eAb3H7tgC+xe3bAvgWt28L\n4FvcdPMHEWXGwBw+ity3vs3Oaf4CIMgfbD2b4Yd7TEmDhd7075u5DAgINDj0kGPBt8DYhpkhPARf\nCIgoIvQmGkz+sJbnBuIeXVpE8Pfsqm8wA75FWOHfETy/h/uPYNR/MJbAt1XQt7h9WwDf4vZtAXyL\n2/9fCuAPtoL9+13/9B/SdeRtBwD4jRe6Gfg4dEp9c5inwWdyj4cQAQACYRAEBgiILAhCiArAN1zN\nIRphRAQSYUEQQEEGaJxljUtdhEQImmNAudeO7vNtxyDfhLga9CUiIPB2j9gflgDgnWXwh9q+0W/l\nQ5aHQvPd2ADT+8kyETlEqCLAJIDIBIjCBIgg1IBrERIgAPr3BZP+sASAb+PI/sNtCCB42MEIjf3D\nCAzAh0/x35d2/oN8yzvSi/e9IvjO3Py/64azL2+O5fBG5Bu4Cr617Q8u5re7+L+xSP5dNnz7Nx+2\n/1CMrndqfygqqOn3I+pCAOgtRP+/z/amteh+6d83G45OOHp6b6Lgv8XU/YP/Qn20Js/ooJl2FJxR\nGIiH93K4ps3oFACUe4zQEZfSBNrw7Bc3F/4m7lNm1zrSwXTEHzTfhYhwHxSBe4+HNw8AICzCIIjC\nAILAIqHxOiGG5iMNywVyhEVmvjVorteAMjz8XnjLQXM4i/xB1ESzE3Dm+TuigJpIoYbpUkRMwsJH\nCKo5U5jvmwH3y1zuCeD37Ld31DZH8+D31Zo7e8cLIKAAEonw4bcdjt5DMTcyocNIzeYiRESkETwQ\nYXMRISQhQKLmnxAhNR2GQESalFYaCYlIKZy987bOIKIQQjN6mJmIZhMNAA6dsYc3IBDugZQjhKYb\naPvWbvy3Xq2O5gG9efi/NVTt8B054g3v58pmN3cEGt/yOZzNkBn6bsBKM5hAhO+D6IgESESKtCZj\nOWgERURCCCwQSAhJEJEIlEIk0Rq1IaVIqeZAK1CNABrZHHblTA7NED4MoZNDR7UAiOAR8G/u5V53\nz6wHufeTNXNQh8bCEUv8TQ7hN6O1o/PpPjxK+PaOvOcJFhAWmSlnOVQB39jZjYCCAIgkwDONBdgE\nHDYyYRYWJhBBVKQIlEZNyigUBUwAKDyjr0kIkQiIkJQoDUrNBKCUUlqRKCRURI0wEYCQAJq5MpPB\noVIGECFEEWFholncI4sc9S3
zmzC6HHaBBkBmBuF70R7f9BLUjIn7ev9wON+bq9/oQnL4gPctLnLf\naHrH0BEBATkc7YACLNJYT0ioRCGgELHSCBpFoVKolGgNQZMYFABEZhREAZypnUYGgkqUUkqhnolB\nESMhISIhUDPq6XBVu6eujqL4UQAI8b5RJLN+R2x6X+5bsZpLKED9Fv1z6HT5ZhbOb9yEAOQdqIj7\nAYocRpfcb6K+veG9ydjIRkCADyfRTBIAIA0yiKyNRBQ4g2AVxIBGgLznUHlgjaJIESEwADICKKWM\nUaQYCbShZkJoTUbpprsJiRRgE3grSAoPQ/rlMNRWjlYHRBIJwCB4yFUcPr5pXiMQEpEIiEYOwEGE\nEbgRM8gsZGX29BAFHF5ANfMbZkzI21X1W3t7NkVBEMLh2wHYQSAgRRzAO0ACaWKXA8hs7RAWYUEM\ngoQgJBJCEABQCoAPaQciFI2iiVTwCsUosjpW7MqyqkNIjF3o9xbnuomlIp+OBntlnSsj1trI6NpV\no2Ii3kVaaabE2ii2SOglAHitiUh55xShJtAItS8xhCSOjAbmCiAVVAggKDTDhEFQAIFRAD0RKFEE\nKCAkjMIEQAiKiJu1RwIjB2SPwkdDEwEFqeGyDsEGyr1OVb+HKw0RZTYGDvu/ybdBfwj+GBCk6UnS\nih2w8mJIuBk3wgIsKCgswgE1AbAwCId2luZV5Sc5xLGQBkSFoCBIqNnVRgFAHXxt4mx+bnll8aGz\nZ06dWDu2ujA/P9clgDKfTMejoipAS5omSWLHo4ODwW4+Oti6s7GzvTnY3yvEk6DWgahyzBQlAWtC\nccF7V0c6sgoUMUkgIiaFrIUEQZgBGRQppCAkQAEUIgEykIigIDAiKwKtSECIwDPqw8l9qJ74UAH/\nvsx2BJxhxG/0PgQAEglwGAM3W/yFGzAnCAQkSI0FgkCNdhEE1DCejJUxcbdVVzV51goVsgapudYI\nx1eX33Xu+Nn1pfPnHjh5Yi2ylpgjoyKtOHh2rt/rEhxHhWgUByaU4Kvga43C4usqr8tiMj7Y29sb\nDO5eu/7q3c3NvHQgJIihytMoVRYBg+MKWJQhVEDkhZtRxhKYyDQInsAoQQIE8IA8U/pEgYQa9YOg\nUDRIEBYAwHtK6p1R+DfT5BCOyJGKn/X8W2LZDs+WwByQGYUFG62CMosQUIBasPkVBCKMrLJEKl/m\nRaIojkwop1U+nlucf+KZZ971+IUnHn10rZ8YZk1IiAABRSDUdWj0NgIqH3zwoAJ5H4QDCCBpVIoI\njI1txJ3e6rF1AXBP5t+xe3ezzIuDwWAyPBju7u7vbk+mIwV1t9VXiupqrKRA9Eo1CRosxEREQACK\n0Cg2BChYANaAhCSABNSooCAMRKCBAZiBBWd4iAEE5Ztiur+hDOD+3r+Pk7i3LM9MWWFmZuTQDCAR\nQGhACQHOHhGVEIKwMHAxieJWlphisO+nBxdOn3jfc9//wQ8+MrewXHnfylQsHgrnKy8iisi52mgT\nx1HwngMDkdE2OHYeFBpGhQSA6DgoVKQR0HtB7wILsWp35u38klk5HmKjlcjoYLBx48rlSy/f3boN\njlvJgsJ95FwgiHhEVEQIQUSUEDEKKBACUtKYytBAKkAMiKgIUUAf2koMEuCQL/lDIHHu9f6bXr1P\nAAEwgEiDKZtFC2b4BgEVkAIBJBQJKATAYIjIYqjHB6NTi/M//mM/8v6nzydxFCSIr9IEh8NRCG4u\na5nIGmuR8GB/NB6NJGRZqw1aWLisaudYa4NkQgDSBEGqICoIABMRI2qtCXQIIWq3O61oMq6LybjI\n825v+YnVk488+eydW9dfeeXl7dtXXVkb1AI1QylSaQUiQCzMAcUTIIBuxtURD9F8i0YMIiSgZ2AO\n7tmehxD9PmP0TQbaO7Qm0ubtQjh8u4E1h5JurtOoR+RDkNbgQgaBBi9A7VQ7DnVl0wQARIJR6KoS\ngvzoRz/8Z370e/uRwQpiZBeCYEAH+XDfWKPSSDiwdyLK1TUSKa1CCMwsAETaWEYQJGYVEAUUAEtA\n1lqVtQeASV5HsdFRVDgfptLrtBYWW6ORe/GFl4yCM6dPrp55cH7t5MHO5uYbL23fvnJn6yaL2Fgz\nVMC1EjEEwI5AGFnAN1HgfB+5RQ1yEtSAAiIo3IAPaNg4fLs6B0Q6MhPu73qYWQ8zEgoQ4U3JcoeY\nHd+slGZcjRwiYDzE9yLCSJBGUZnncTvhYqwjqwjK8Wh9afE/+uEf/tD7Ho1rUUEUBahDbJhJqqpc\n7HelrPYGBwoVEZEySplWOwJA5xwSkdLMSASMHKQCAi9eBLTVVVVVLhS1S5MEicfTIaO3xrLAcMp7\nQ+62s5W11c2t7Upwa3s7a6XthdX1xRP5aPD6Gy+88upX9oe3QEATxkrY1QbqOFG+zIMoRgwsPnBg\nJCJmL9wgDLlniOE9iuAPpoJmvX//jGo0fjMXA0gToP8mmqiZBLOMcATgEOoqzpKyGBuCCKGe5s89\n/uiP/cgPPvXwqpsEYdQaAb1w8CwigVAhQ6vVMUqxgDAHQWYBQUFEUgIYvDAGYgYdGosVsRmAKBja\nnTTz9d7eKEvTwtdg8GA4NNoMBACISOskPXZ8HbWxccuzlEBSk1IL5x56//Hzj12++rUXXvjMaP9W\nMCFWyK7y42FkNGEcAobD1bVhqhC5uShC73HIJxgKYgdyLwNCUPObwD7ely9/TxcdzYAGfYqwSLiP\nC0IABmxeCSA8g0NoAQ1QBGjRxGhTiVOJUrAJ2AyTjJIUSYn4VidBqS3CH/vhH/yRj3xPglAUZUzY\na0f5aEzARinhIABNhkYIToQBkAgFCAGRFOKMsxJGAQYlqO5xAw25xN672m/f3Znr9ed6XVC0M9rX\nWh8cjBEJhPb3D3r9fr/XjSOrrSqKaTktsSCtNGJAEwAKVw9e+PJnXn7hc8pPVvpxKMZGsQITvDBz\nw5seGcUISEQaWUQEGwrv3iSYxWzeL4B/G2TaUBF4yF8eUpSzV4+InkPV1HTFbCVq+DVoKGKfT5bn\n2n/xP/nT3/XcI35QqyBdTSx+NBpFlkSwCp6kWfoIFAXRAQMJsCAgkiBwk6Fx+LMRRUC4GTcEACSg\nEJO4tTvemQ733/P4xZ29AiRggCiKTqzNTcb5wXCc2mwyLsvCeecWlxdanazTzyTjugqTYiK1JHHb\nZOm7n/2BXn/lpec/PRjfXegshGrCztHMpUEA0ky5GSQieotHbJZawvdjx3+rhkikDsfgzB2Bh4Gx\nKMLCDZtzuDLPvhf48GlDMIiI96D5ofNn//Kf//FTxxfH+5MuKyvMnh04keDBMLAAIBIyAZN3AppY\nQcPSAQsKEzYp1KHJqCYBLxQECVAa3QPkKhfKop11WkkHGfptS0a2tqvhYLq0tBLbbL6bSA/3B4Oy\nKidlefv2zmLN3V5LKcGEYt0KLI4lL8sIWo889oGl/uKXPvtrd29dnmunSpfsyyb2uSHw
hBu1h0iA\n0H4YygmGEsWj+KM1AMmw3C+e+yHQoSJCOaR24cgAng1wASJCarxjXqRh4T0LCyMDAlogCxijiVHH\nbGNIMzAWbBK1umR0FJv3PP3En/pffezEsflqchAbZWqfaMsUvAS0KogEDoq0r9mS9XVAJC8B9Iw9\nmdFYIg14IEQiJCQGDDOKk+kweBmFtaKDwcFcv3swGNzZ3PKk19dX0qxTB1GonfMMEJjzstrbHTAz\naOjPt3v9HildlQGYNRH6whXDhY6ZDm79zm/92p3rr/dsTWEapCYSZoeCIAoEEZUiRGifg2ICvkbw\nOBuSLIAIEYB68+B+KwadxTG9dRLM/ONE1HjqmJ1IEAgkDYmPgWm2BqgEKAJlIIkhTUiZVq8XmNN2\n+rEf/ugP/sCHWrFCrik4UkioA8uM7UYkIu+90booylaWOeerslTGoqLmRzSONYLZ2qXUDMmJKKDD\nYQLQWBta6Ybun0yno9GIUK+srztXIyEoCcF7z0S6rNw0d2Xpp5MqL3KPfmV1qdfpKSKDCsQp8ciF\nd3krVqPh7m//5q9NN15KZcwwIVUw1cKa2KJEBFYr0O8U6Y+A3xwX9E7mMgI1/1FmSH/m1AXEGWHe\nqKYmK5KIKCgCRaSQEDhUkY3+13/qT37Xd74/TnSVjxSyhqBIOXbM9wyOxvioq7rdbo8nE6VU1m6V\ntZtNRcGZTU6IpKCp4cAzv1kIiIJIh95tBpYAwEgQJely1gKGqqyCeEQgISQwhgDQGqNaJrGiULH4\nsfO7e4PpOO93u912GxmYg0KlKGLEdm/p/R/8vhd+sxrceZVDxeSNBgISRxiQgBBFH3pF8G2ulbf1\n9tvQ6dtXifsi7w7d04dWwiyUD+459A4dE0iEqA0AAIom9eHv+74Pfe8HvJfR4MBoiCLFwbvgBRTz\nYSGNQwFEUXTr1i0fgnOu1Wq1Wt1Gu6qZNwVFkEWQG7ObkUEIhVRjDeFhHBYDCrP4hohqclqJAzOz\n4uYmAQmEhUjFsWaJAqRSY14Uw/GoQThZHFutSRmueZKX1lC7u/DsBz/8/Gfk+tUXe3EMYeKrIkJQ\nCkAYiTQw3ovtu2f6flNEkBw93CcCvuecO0TZEhqaGQGEBUiOjAA8dIKjUW46TTvdhx68+LGP/dDd\n7b0kSRRBEkcc6hA4ICPQLG4TpJEokRqNJ9s7excuXJhMJxs3b7SyfHVtLTgWDQqVAhKE0KQkNx9j\nCCLAPJM/HblTmpoqCgkFGssQqUFPQUJgAdaRZRYAVhqSROuorQqjtBoNx8PxsKrLuW5vrt9VOkIT\nO+/q0luFvf7aI89+dyXhzvUXu3FsFCCzIhQWbKphiTSjgY/YsoYOxbd291tH/JsSqWfDnhFxlkM9\nmwccGA47gAEFmGfmP/DMemAOVUHWkMZTJ9eRvVZAGLx3VSVFkbeyGEE7z03UbOMLVqSq2n3yU7/1\nx/+jjxVl/cC5ta3tu4OD0fqJ07WrDpFc49dgaFzV0jCNwBya1G4M0sDkEAIiNrWwtNZWKW2UxhhQ\nEKF0VVmVUjokAmDnA5HSxqYQiyKl1XDvYDqd+tp577vtVprGpDPnqlrg9mC0vHr+kWd4PB7ydNtE\nFspchBGCAOhvRN/f59c6cnzCWw4a1fKm16CJ8EFBEBYEEhKBIMIgQYBhltKlBPiw+hmLBK2MbqVZ\nmj77nmf2d3edrxcX57IsFRHnvA/gXA0CqDSLoDASIcFwPPnOD77/zHpnMIVxnnvPJ0+d8l6IVGD2\n7InFWKOMEebK1USoFAmAJhtCaMqoSRCtdavd8c7XzpVlVVVj9rUBareyOI4DexcCKfIhaEQRrrwD\nAW0jZdPY2siaSNmtza3JZOS9L8pyrj+XxgliVPhadGt3yq2Fk+9+74e/+plfmUy2u1EaqimhAIEG\nEZAg0BirLIfjRsD/GwMXBQ4DKe4nfmZSwtCs5kEED+27mfOFZtwcByEGAFIUKcy3Np/82MfOnTmD\nBKPhcPfu1jTNev1eFMWuDozEIIaIvXfOK0WC+MaVKw89dOHSrem1a7cuX760urZGyhR5aSx5H4L3\ngmKEueAoMs57pXRiaTqeaK2NiZI0BsQiLw5Gw43bG94HpZQxxjlfl3md54jY6XTSLE2zpNPpFtN6\nNC1aWdJudytfVlVNwSILArY7KdHK3q7e3x/c3d0pq7rdbs/1FkhFhZTi0XtYXDv/4LtGL33xk7kf\nGRUBOqMUoj0r9RTAIfh7AmjMtndYCd48A/D3CO0/9CQTciNdCTgjBBFAAVpQMehM2URFKURmLmv9\nV3/rb87N9dqt1HnHwFevXK6cO3HqlNKaBQSQlD6y4wOHnZ2dq1euPvveZ59//vlut/vIw4/u3d3r\ntrtKoTQOBAAB8cGzcKvVspEpi0ITWW3G4/GtW7dGo9Hi0uLi4lIznLQxcRQba61WWWzHk+lg/yBJ\n01Yn857jmDbu7O3ubAXm/lx3aWmlrEKWtliCc66R3P5gf3PzbhynNoqJdLfbL3weR6Ybm3J/+1jH\nvPTFT734/CeW+1ph6YspojkNfgpSA4R7AvjmoiIQ4B1D0o9ClwBAGnUEoeFcD6+qAKNGAMYmoI1D\n9YM/9qM/8Wf+VLfdmkzHSBBZ69jvHQxu3dlqdTorx9ZCmF2UiJRSdV0D4ng02t/fL8uy2++vrqzE\nOgIWEfbsGwGUVWkjS0oB8mAwcM6hyGh/sLKyHMexNbbX78WxrSqnSGmjFSnn3Xg8DsEfO7Y4mVR3\n7+4JkjVR5Vy73Wm11bRwe3u7rq77nXlXuSg2nV4HCauqEpHNrbtV5avai2CadrJuLOwjNOTqrgUp\nd770uX813r9scaK4QNTHxecIDsFLI4D7FPrv2Rpa4/ca/03MhgjKjPQOAgJAAArIAkWgM2MzVpq1\n+Qv/2V9+9OGL7Sw5eeK453pa5ICUZK2DyeTytev9/nyv30ciEWBmJPTOa62Louh0OnmeR3EcWevy\nQqPyHAiRFAphHEVCcuPmjYPhqKrLOIkePnexEydFkcdxHEXxeDxWShYWFra2d5m53WoprRkECCaT\nSZK2jbJFxUqbsij2dgcmMcsrSyA8GY81487WbaWItK5cOb+42Ov16tpNi2pnd+CcjIbjrGNXlper\nqY9NUo+Hc23Y337lK1/4ZTe5tdw2M48YwiGHfJihA/wNO/eokw8ptLd3Ph5qqIYI0LMrv4PV3OBJ\nWTt5/PTpU0uLi5defQXEnTixZhWhtmVZpmnr8cef+PpLL+elW1pc1FofRgORCCZxqkinScbMeV5Q\nYK1RK7KxDRyqur5+8/buYNBqJ+snev35dVKY74/GddXtdn0IuzvbIqK13tvdn+/3mHk4HLq6DihT\nVyHQq29cffWVS5Nptbp6fOXYyu7O/s3bG5PJ5OSJ9cceeeTkwuKFs2eVplu3Nyfj4Wg4uLVx08TJ\nyurq0tKSD5xl6d7Orclwn7BVsQSVbQ72Wtn
c4trprWs7AQKiOSa+xJkKahisBtvwN6OFvtFMObLI\nBEiwoTQYOQAACAkqIAtoQWc2ade1e/R97/2p//1Pbd25s7S8OBzs5fnkxMkTvf78aDyJ0ySOUxPH\nX/zKVyKbHD9xIjRRBEAiUnvfarVqVwMIEXDtCSVN08FwOJ2Oy3zKgsdWl9fXVpXGuq6dq+o8RKRF\nGJUiwqqqkNTm5p2rV68abU6fPNntdp14j4GhsZlpOBxvbGy12t1uby5rt27dunN3Z2thYb6ftc6c\nOpkkEbM3hoQ9s7+7szuZFlXlPIelhaWqmgz2DhYWTqbZXFm46Xin3aqr4ZU3XvgkD29pEd+EiRx5\nzw/RvXrHnn2HMfxmjHoYC3a/H7NxRTCCKKGZP55BAJGARJGRxZVuzbkDtz3Yy9JseWFxY3PTxtnp\nkydcManyaSiGzz39yNdfef3WxpXzFx86GI5Ja6MjTzIpptaaEBwDtvrtunb7k8n27rZBeviRh63S\nSRQP9g98Va2vrkwqarejzd2DW5ubqOlgOFTWtDvt9bWTj86v7Gzf1WlvXLhWJ7t1+6pz5buffKqV\nqoXemSvXT92+vWmiiDQsr8zt7m1/6jO/vXD65CdffvHsyVMPXzh36thyPRlV0/LUyROT0UGaJVcv\nX97bvNrqLiwtrb726qunz56fm1/aG7Ibw+L8+WT+zu5ghEBzIB7EA3g4zA5o3Ch/ODlfCEBNJqPg\njJhEACtowWSok+BJd7Of/D/81ceefJJIl85NprmvfRIn9XSydmzpxNqyFp5Oxslct6jqS29cneb1\nY48/6T3XLgRhbXRVVyx+rt/bvL11+/amtfbxxx4u8tL7MD/XUmQQRILs7t7N8ypt93cOxoyctTt5\nVWzcuQ0AnXZHXD0ejo4fW9VEVV2Mp6MzZ46naQpBFhd602k5mZQvvfpaUXmTtG7fujOYTKcaRFE3\nzQ7ubj968cEPPPu01IUlBezrYrK0tBBcmNZS5v7mrY3XX79y+sz59tzC9vadbgvrwc27lz6LgD2A\npvcZ5Z7bhd8xsvz3JwFqGO5mYQNCjWRdQMQE4w6LeuCRR37q//x/7C3O7+7tjaZT73hj49apUyce\nfejBOzdvnjqxajTM9XrT8UEU2ShK72zevXLl5oWHHkKgIKINoaKDg/3tra1+q3d8va911wUuyrzb\n6+4ODib5JEC4cfNW1m4NRwME3e3PodKtdqvybnd/gOKffvz8pctbG9c3xPv3PfdcFNGXvvxiUUwe\ne/RRZM6yVpaqz37uBUY4febC+QdXb90a3Nra3zrY/c3f+lxdl2vHlvudziMPPXj65Fo5zZfn2+04\nvn1razIenzh+fG8w7vfmdnYPSidO4Nbtjcj45RbfefV3EKgL4hrL6L5ya/KHJ4DDhKV7AlAMWtCi\nyqL2vBf80Ec/+sd/4scHo1HW6QxHo+Fokmbqzp27T77rifl+7/Ibr6+vrpxYPwblJDgX2dh58KG+\nuXFrdfUUAByMhoODvTiyc/PzS725YlraCOfm5waTYnc4mFblpMpFqVavszsYfPH55+9u77SSzmPv\neqw31w/M48k4TZLzZ0+yx6osU2P73f5g/6AJGtq8czuOo/3d3VdeffX7PvSRxx5aGhQwHAcywnXR\n6ra3doa//MufGozHly6/XpTFo488/BN/5scvvfKaETm1dryVppPJMPjQbndqL8NJGVDf3bk73N88\nvhjtXvsygu6AOBQ+yrcFgEP3Cr89J+TfON7fQprKLI2CEaABpITEYEzSCcqSyapp/bEf/zMf+dEf\nuXN3p3K1Uvr93/Xs+lrrX/7S58aj4fHj66dOnTjY39cKz6wuJlrnRWUU1S7k+eja1ds2TlrtrDfX\nbbU6SFiNp8eW+nc29/OqqIILiu/uD+NWQib6zc/89urx9QcunNrdGRli5+DFr730nmeetta++MJX\n11bWzpw+u7a6/PWvvrS7s7O+vr64sFhMJu1WurW5RSTtVifL2ldvXLu9uZu2e0tLx9Iko1AvLcZp\nJ9var/7Fv/74lRvX7ty59df+yl/RQn5aKBbxfnmx38pi7zkw6ah1MJ6OptP9vc1+yjK6gWTaLA45\n3E+BogDQ72f4vyUM+1AATcoBz5hMVIExynperMl6Osr+0k/+1YeeeHLvYLiysnzr9u12L6urkOcT\nbeixxx7bG+xlWTodj8vp6MELD/Q63el4ao1WBHd3RlqppBXXPmhjfAia8Otf/UpZufUTJ1vd7sLy\nws//wi/u7Ow+8fRTeweDRx595JEHFscBLr9+66svvHjxwYcO9vc16SiOP//Zzz/z9DNbm1tGqbX1\n9X6na7XSiFVVjgeDufm5wH5leeUTv/HpyvtTZ88ZE01GozPLq4P9gW2riZdj6wt3B3s/+wsft4qe\nfuKp9cUlLVhMpsV0//bG1QfOnlfaIkWLaycnRXnl8qutyLdpqCNNAooZAN/k3cL7ks1+j+7+N77I\n2JheSNhwrYiohZu/pDTGadKd64kISLBGzfXaX/v614qiOHFi/eK5h69duZy1s05rmZlrCc+/9NpD\nFy4SUFuzr3x/aY6ZR5PcxPGVWzeNtcODg876+qK1VVW/du3qSlG+730fuHTp0vUr1xjoNz7+Gy8u\nzHd6nSRtISv0cPGBixs3b1mKzpx4oBW33rh09cSpUysrJ8o83x/sW0XWaBVFX33hhVbWytpda83J\nM2d2B0MWOL66Nh4dnD29fGd/XE5333hjcuzE8b/wE3/6p3/65774hS8cnDrzzJNPtZczV0Zpoqvc\nDe7u5YXfHkzSVjtNs92711pLkSY6co80AjjK9Z2hyN9DBu+cUYFvpvAQBAlAFDYR60hKK9FoVNbp\n7u+NkWy70wZkkJBPhmdOHM8ilefTubn+q19/cXVtbWluLla4tNAHrbSxNzc2Th5f39uf5pNJr9PK\nspZwuHHjWlEW+X7Zm+utrC15LzIaDYb7+4P99WPrq8vLWRS99uqlDz77HaPx1HnXbbUvvO8DN25s\nvP7Sq7c37ly48OB8b26ut/idH/huJByOxnu7dwc7u3/0j77vK1++vLy0NMm/5gLf3Nj44le+/FBV\nP/auJ69t3Hzhpa+fXlzoTTqtdsrYH06n4939heXeD3//R3Y2t65duXrnzu0T62tAan5hqa7qB86f\nv7tz8NWXXp9cv7G01N/fHXRMgt1OF9ADHvZ4UzQBkPkwQOi+TI9/4wyY6Zs3vURChCAEogSAIAgK\nalaxp0gofuq97//xP/ef1JV78OGLc7326GC0u725trqyv7+XF2WatW7f3ry7u92fX+gsrczNzWnk\n8f7gwtkTVuPwYDKZTjyzF2x1O4y4N9i9u7udJNmJE6cGe/uj4chXfmlpqSprX/uFheWT60uey43t\nsatKDnB3Z4cZWq1OlKSd3ty4yL/60tefefbZ3Z2da5cuPfboxUcvrL74ysZ4eDAej0+dOTUeTzZu\n3V
k/eerW5vaXnv/ihTOnUqOeefqZS2+8obS+cP78cDhGAmPslctXFxcXQwjeV/lk0m13ji2vxEl3\nZzD63Bd+t9NJH3/43Gjrda01AWqYeQ9hFiEAqJjerIG+KQG8w4uzDCskEGoICiCbZDXYuvI2jk6f\ne0ApWl1bbsVRMRqGcrqy0G/HavHk+sFwfzIpL54+cfbEmnO+qMvp5o2LZ07P99LN117upsncXL8d\noQt4e3t7MtppdbtPnDhWH+vXPhgtvbls9ZFziFRMp5O8roLs7e3XZWYjWptPJ2M+c2r5yrXMsbt6\nbaMsx7e3bp48e5aU//TvfOrpp54+e/7cjY2t6bQAEFCqv9C/cXNjcWllZX017qS4C2mv/cRzT/z8\nz/zs1FUf+uB3+7I+2N7pdTrLK73PP/9CPh3W3XR3f/+NK1fTOPuuD3xgd38oPE7ava2tLaTF0fCA\niLQ2dNi7cuSYlCYR8WgKfEMV9E4CeJNXWQBQCFGYkGgWnUWudmBNu9XWaba6tnrs2Mpcr/3ySy8s\nzc+tH1sspmOLSWaxd2zZeRyOJ7/6iU9efuNyJ4vbSfSpn/vp4ytL1WQsrg6+urlxE0GJVvujkU2S\nc2dOLSz0V9eOnzx9ptXtXr17Z2l1zcTx+dX5g8Bzvbis6v29g+kkP3PmzHAwNDCdm2sfX3/3pRu3\ntnb3BsOtAFXNk1/4pX82ly1eOH/+i1/5aitNz5w9OdebP3fxAR/Cxu195+u5uV7abp2+uP7kc+95\n5YWXyqrsx1lvybz6ymvt6Px73/3Ya1dvHOTDiw9fIBNtb+1qG3suijzvzi08+cQTL730wrUI5hPX\npDJgkMOg5cNJQHTYfbOsxXdwjR352e+bH/hmFdQgTyDU2LiIERiQVGSS5M7d/Yunzjz99LsPBvtf\nff53Yq1HO3dMODMdDn73yhvXrlzxzufT6c2NW1qbYpLrUB1fXqwm450Xx4bEV1U+nSwuL5s48ZUk\n5bSe7L22eY1QpmVdh9Dq902atebnReuF9fWs14vS7D/+8R/vtaJObMrxsK6dUaRQh7pMLT3+8NmD\nvFpZW94dDbY39/MRfOHzn2+1sijSeVnc2doqqkl/fvHRx1a/8KUr//KXP6EjtTsSitNOv3/52pXH\nzz/01a88n5qo20ltEq3M90yE7Sx5z7PP3Li+efnKVWLZ3dnfH04ffPjhg+G+yJS0wZWVpcbtCXAE\nRPEwbE2OsBAzvz0u6PeYFkexEVphCF4EjI1L5wUVKENRsn8wvfjIIz/51//Lsvbzvd58pD/7W7+5\neef2lddethSK8WC4u7043ysnI67L+V5/bWW5n2Vto5S4XhZZRcraW9t7KkpAWWYZjg98VcZpCqgP\nRuPhdKrixIHaGQ4nlfOIKklbvf7xEycXltafePrZc+cvRDa2NorTbDgpFpf6Nlbbu+WtO1smMnWg\n0idXr19Ls3hnd8sDe19n7a6QmUxHjz720HAUfvO3Px9UePjhCzubd+rx6MMf/M7f/LWPhyL/sR/5\no+1WqlCPJ1MgunJnW5TVRF/5yosXLj48mroHH330Vz/+r7ZuXnr8/AoeO7b8lh5sENFhRD/82wrg\n6LTmgBCZvTACaUYEFZGJtvcPnnvfd/03f+Nvbm7d3dkfZFZ/9hO/du2113Z3tjdvXTt+bHGx32lH\nKo3IQDDIVT7J4mRtaakdGaimy/MdReRYrt/ZiTt9itLpeDrYu8t1FbXbJs4mebFxZ2t3ONo7GI+r\nKqCqWArnKw7MOCk8Jd1HHnn04oWLz73v/Y8++hgqPZnmjz/xsDHxrTv71mJR095kgtpcvnZ9/eTx\nS5cv/9Kv/Eq712t15s6cu/iFL734E3/2Ty8fm//pn/8V5vriA6dDnZeTg4Od2+h8v9X62A9+JI0S\nX5WK8Mr2+DNf/ML66vq//OVf+cm/+p/vj8r+3MJXXvjSKy98br2n9P07Wb0TqsGjEf1NCuDogkez\nh0gLYghsbKSMORhPvv97vucjH/2jl197bXnlmJ6fh8DvfeY9P/qRjxRlsXd3c3fz9s3rV+7eurG1\nszvc22klURKbvEYdFWXGMWFfJdao2vuaLEIsQY3qsDetfVlhISZlbaP+8vr5x48dP3m6t7SsswyV\nZgAPqE2kTVK6cPvO3d2d/TovX3np60qb7bt3y7I4d+5UK+0qJcbqJE294Csv7d2+Xj771JObW5sf\n/+RvbNz+/A/9sfTCuRM/+3M/88f/9J9+7rmnyiK/vXHzofNnpgfJ9TdezYyJ43h/MIQ2dLNE6XDu\n3PrvfBGD84898sgnP/nr7c78k888myXx8GBfTSpcX1+FN2NNaZLQgY8EcBjc/E3J4Oi05sB7TwRK\nGRbR2uZFtbp26u/+vb/fn188GE3Lsrx7d9eQ0RKK6VQ4PHDmZCuLLAlJqPKRIdjavPPrn/z1vZ3t\nODZJpCKp11cWkzgSUXe2D8TEdQBr7Mri/ANnTi2uHvcmYh98VXkE1JqR6sC190EAtUIkpZJuf6nT\n6QBR8F4AbGw7nfZoOp5O8sX53mCcp1lSh0qbqPLywsuXV46fjFu9n/5ffuH6nVtxt18HX9bF4srq\now+/K59Oz5w4DuzOnV74+lde/Xv/7f/72PzC+vKxH/noR5fne9dubC6cWP/6pSv5eBIn2cd//ZPj\nqfuJv/AXNm/f+rn/6R+0aIrr66tH/X6vASNBE8F79NpRwYrfu8msVEaTMowhOKUJUGmtQWg0mn7/\nhz/yX//f/puf/pmf++mf/rnbd+4MBwc2iuuq0EqLMAgjhOWFufPnzr7rXY+cPLH+9JPvfurdT6pI\n5+XYGIDJgY40hICMZdBJe67Kqxs3b770wlc3Nm5eu75x7dadW7dv39m6M5rk7sjL8dYWxXESp8n8\n/Nwjjzx8/sELjz/+rueeey6KDSlljE5iw1KXlfdghxN+5cqmStuOcPH43N2D6T/6pz+btZPJZHLn\n5vYT73rivc8+Pt9fOHuym0+CBfXFz32Rgjs2v7Q8v1CU9X5ZRu30Nz7xCW3T937H+3/5X//aM+99\njn39q//i5zZe/xKurq7gLC8e72UQNP5ekCMV1NRiIbx/h68jRHT4VACwyRM+csqjgKCGunJZknkf\nnAt//s/9pQ9974c/9rGPDUf7sepGcVyWFWlb+0orRQBRZMqyKN2wuW47a33X+z/wF//Tv7iwPJdE\nhHWRGRTvAAzr7KXXLv9PP/1zX/jdL2zt7sAhICNQRMQiCKCNan5FYN/8PE2xoihJ4jiJhqODIs9R\nSb/f+//+o3/4zNNPR5EFFGvRu7x2FMVzgezN7dGnPvv1tTPnOIoG+e7iiXUn9XQ6ufr61U98/BMv\nv/LSe599+vs//EeOr505d2JZi/utT33RslhtojheWFvPeq00iX71Vz/14MOPDCf557/4/CMPXbzy\n8lf++T/+76jJ3ACAJo1ttunjYemi2TMBRGSWwOy9d855771
vjv3smefmXe9DCMwiIXAI3nnnA2hj\n87LQWgcfUBiZ59odQqLgpa4Sbb3zIMgBQoDaMSmbxP047nXaC+Np9cu/+q//n3/7703KmmzsACel\nCxTlQUZl/X/6r/7vv/grv7y1u5vFrcymsW0rnZFJGLUAMVBV+7JytZfQxIYJMrOr3WQyHh4MjNKL\nCwuL8/PDwf5Xv/yl4Mrp5EATxsZEOsqUJY9ccoK2l3Q//eu/5Sal1Ob21dvTg9xN3fkzZ/7aT/75\nn/izf+7KlRv/8H/4mdt37tzZzesAzzzzBEdZjXTl5sbf+tt/d3AwmRR+7fjq9Zs3Ot1ukqaf+cxn\n7u7sjie5fgf9I9J09ze0fhuPODXiOaouNYvxAwB1WLFMAJuEjNmqwgEFIhPBYXa8JvSuBGR/nyOO\nHROhDw5AkNDaxAf84vPP59Mi9LIyL9qR3t3d7fYXUOjWnU1jexxCUXuFQAoZQEK4L1GwwQKMiEpp\nQBRBIGGWqvI+OOdLpZAUbm9tVmVpDOaTYaz7vgyhpijFcVH95H/6X/7Ol1648K5nnv/yV7/jez74\nxHsev71xd2/v7p3bt06dPvGuhx89d/bcr//Gb/zNv/V33/ve9370B37wxPrKw08+MdjZmzjGKP76\ny69dfOji+QunX39j45/+/M+vnzh56vSZ8fZtEdJHJYfksKbNoQDe6o98yxY20AR5AkBDNs+sMUIE\nZiRBmbkjSZi9sCZd1zUQdntdo7UyGgGUImGopQYwhiwS1L6UIEoIQJidD8gSdGSn4wNxJQW/NNev\np8Pl+blp5TyLhCBNCQwnqDBw3cSDyawwiMxuTGaRr0eBq7P6TiEE9tZoV4etze2yLCVoFHSVU6Lj\ndnuau9u3Nl95+YW11YXnf/1v/+/+63/25c9//ul3v+tD3/E4IOwNi2s3bufT4uKjD60cP33m4u9+\n9rOf+wf/+J9cuHD+2MLi/FzvoXe/J2f6lY//63FefMd3PDeeVl958UXH8J3f8VxG1O/P6/szx446\n/R0X28M0vEYCwozwNtB5eIKEw6Jh0mRHsghJcJ5Im0ibSNtIBxAnHhUpQRRSRgMwBhLxTb0CYHZV\noQ1FRmdxp5NEYTr51Od+e/vWzUcffvj0+QfTbG5lcf7yzduKlIfATQ4eHjrgAAA4MrFR2vnaB26y\n7QUP03gafzXMwsUm44nRVhE5xz6gUWY6zaOs3Z9rfeA7n/7kJ379x/7jvwxm7tO/9svldPxnfuJP\nrqytz60st9N099bBFz7/1aTT+67v+c6nv+N9/93f//+89MbVpDu/0ulIBN35xVOnz/3mb392NJkc\nW1v7ng99+O7d3TvbO/Npunb81L0ZcNS53yTef/uZR5DpSBKzmjgNmmJmEYVYu1pbY6MIABx7FCRl\nFShuMizxXp4qADd1O/J88siDFxKC4mDwK7/wC1zlL3/1yz/6x//Uc9/9A+995qk3rt/QSgfkWe4d\n3IsuAwCrVRRZqATAB2AQ8TgrxgBESIhNNitACKBU1ATTV5WjRAMJi+v0or/39//G3v7o669cvXL9\n2pkza0srxy+//MaNK7e+68Pf++CF1d7c0mee/8pLl75yeWPrkXc//Gf/0p/6Z//8N3/3xa/tDocv\nvX71+7/z2Qcv/skvfP6FT33q19dPnzl3/mJRvTTNq5SodjwTwBHaOWp8r+TDbCoT0Sx35t6cAObQ\n4P1DXmjW9c263RyjoNY6ONfAWGbJWq1Or2d1pJCCZwAgRO8dNxkpKCweARtnBIIEJw8/dN4SXL12\ndbHT2rkzaOnOr/3yLz348OPf88H3/+Of/TmjiJUGXyNCkDcFFbu6DIfDH0SIUKEOh+5vnq0YAEiM\nUNWOORit6tpFaWKSSMRrzRKmc/34/c89+tHve7YGuLNZf+n5Fz73pa/+/M/808X1E4vH1zvdXoDb\nn3/+iy++fml59dipM2eeeOLx2xs3L1+78WmVvOfxx55+z+NJGl+/cfOxJx4fjvNyWipfbdzZ1Pfp\njbeoEYbDumyNJp3lVCHArFgUzcQzC6kDgMPkJgRgacobigAjaI/kUQuI5+d/5ws/8sM/2un2sqxd\n5mVsTO3ZQS6zeiFCINbEzldaE6BvQvIfOHfWaLh95XJ5d+d4K2trvH7ryitf+vT5J589ub6ytTPV\npieEdT1S6Bsk3VQidkEwBEABEmZgFkDXlIADIEGB2doWUCnSGHyofB1K2dkoJqMxobQSy8GFuiak\nu1HiqtDr9Fb15NkHFj/72s2dzVsmba899NCT73ku6i/fuX0LyjJ1vJZ2H33m/ftnD37jk5+MVfTQ\nQ2cvPHRx7yDfvDs4eeqBz33m09VcK88n+gj2fAM1I9/guBHD206/50mD2YKNiELCTIwYOAR/9fIV\nZl5aXgal4ijVDBwqhyKA1sYSoKor7zmOkrKaxIkykakrt7q+PBkON65cWZ3rP3b2JPuiFeOv/Yuf\n/+vvfvePfewH/ubf+R9b7YWyZq26PgwFQGCWAWEUkcIQHDf53Ag0C5QRBgZSwqCN8sFn7QwQrDWa\nDFLEQFrXrdhwXU73BpZEIX/2k7/auJGtVnmgBLp394b7aXv59APHV5bXVpdfe+3yK19/8dLLL0Fe\nyYkzK6tLH/nBH3jx6y9+4hO/0et1llaOTfP6zLkL21ube7evMrtvWDm3YdK+0buNp1HuLzF7OKNn\n2kxQZtWzAA/NOhYUga27uy+9/Op7n3vfP//nv2i0nYxGSml2gSE4Eq0NaAm+rpw3USriRgfThx46\nudTtv/K1rykNH/noD7RB8tFgbmHevXHli7/9mSceeqTbbtehbqWdygdXNUkPIIggAUkDIos7gmyo\nSNAEH4AIQAEEdh6Fz546Q4CT0SRLY2tNy6ionaSRIi993e+301/9pX/5D//O/+vE8ePj4chV1eLq\nySI5dsDJlUuX797de//3/9DCiZXHH37Aov/KV776wuXX9kO4mOj1hd6DF89NxkMgzFq9gxs3P//5\nLywsLg+3NrLegm5qLL4T6Hyn6XD/OkGNF/nwldnPu28dPnqHgyhNREiiFQ5Gwy88/6Xv/fCHtY3T\nKNWkh8NhZBMWRm19I7MmwpYCAonAo+curvYWv/jyv3jysUcevnB+7/rVwvvVucWnHkx+64tf+fCf\neuKj3/uhf/Xrn9URFJ61tcF5YQEJQOSYQUSQgABmNWOajS0BtdbaIoIEX+V1u9XJJ1Mt2LJxmecK\nVC+2rhi3I91a7A137lz72pe/733PrC4vF9PpZDyalJysrBS6+7Xrm9e/9Pl2HD/0zHuOnTr+xMMX\nAfkTn/38S7eu7mN47uGHYq63drf6vbkwGq6vr77wwtfA1zs7e4M7d4929v6m2ptk0OR9vbkxv0VN\nAYiwcAgszECE2tYuvPLqa3HWeuqZ9wyGo6qq6lBDCIAYWZu2WrbV0sYoowTBOZcgPPvYu2++fgmC\nW1tenA4PiKVlIu
3l4VMPtJC+9Onf/vEf+xENbnfvTppGSZqYyGittdaKSAEoJGMiEyUmToyNUGkh\nYsRQltVkvxzvV/mo11146Ny5xV7PIrlpoVy12DILMenpYPvKK7/7yX/1C//4v5+L4LlHL8xp7ip3\nrGUeWOmud+3xvvmhDz7z3EOnr33181/79Cf2rr3hhnuPPXzh6WefnLj81etXPvP8F/MgTObl1y55\nZlT64YcePLF+PIvjUBYzO+Dta8AM0b1pThyaOI0vrMkxfNs5MjtBZtAIBRB88MCkGstBqxdfenmS\n5//FT/0Xv3jq1O6drVdfeOnWzY39ereqdwEyIAIeH13zz/yxH3n/00//03/0359YWWBfKoQ4MhJH\nhjTU9fnV9Y9/4fMf+OD3/Z3/x//1//K3/ttXLl2yNqnrKYB/hyl8b/qa+cWFhfmFfre/MDe3MNd/\nz1NPPX7xop9MurFe6s/fvX39c7/2y3dvbVx+/eX9rVtL893zJ0/GifYH23OxScSEUpySvXpvYWn+\n3GNnT5872/nKK9e3t177wqerfHjyXe96+uELQdHnXnhp4/bWZ6vw3Hue2tobvPT66+979jlfu4PJ\n6GDrDpQFJkl01Ln3Y6G3oNL73z1q8rYcpvsLBx2GpwfBgAGRkUAzEGg9KYo/9if+xF/7K//Z6WPr\nB1s7e3c2p6PRcDqtCaNupwbZ3d+zRvcTw6PBY6dP3nzt1V/4mf/5g+97/JnHH17pdsdb22GSh8od\njPOpqF/57OfLpP1n/7f/eW3iS9dvTaZlUZTjyaiuKpGARFEURZHRWqVpgqSN0Vm7Fduk3Wr3e71j\nyyudLC0n4/3tzclgb/PWjdde+vrVV1/cvX1FS1hfXbpw+tT68sJ8JzPMKJxPp5PJJI2isZeDTvf4\nI0/F7ZVsbn1ja7i1P37hlddv7g7OPvnEQ899F/bnX7h84wtfemVre68VmR/5oR+4cfWKFmxn6cHO\n5sd/9n/efPl5TJJI7qOdj7r4fpPqLQKgw5LhbxcAwFutM8EAyMhEolAUA4lWAQRALOGFEyc/8j3f\ne2yuv9TtrKytmVY2ZS5CKIqcXV2OhpuXL13++guvffmFhU70U3/tLz549jQKu/EYqupgd+/Oxp1a\n9NWdnd964WvU76+eO3/m4qPduaV2q51lKRE2WUrGGEIIga21SFiWVVmXxaS4c+f2lStXbt/a2N3a\n2t3Zb2lpWXLlNFTFQi994uLpxfnescX5hV6n385STVzXaRIH7/NpWZXlVPgq4Jkn3tuZW0s7yzv7\nRVnJ/nD8v/ziL+0X/sQj73r8/R889sCjr+7kL1y69trXX1xdnPvQH/nujZs3bt/a6KTxL/+Tf3Tj\n0x/XRyTEmwUAAOrNbPNhPqQcdfTs79smypufEkBTRFSERAQkeKes1prG+/uj/SjR4Xd/+9e+9vzn\ntDWTyg1zV3moAQAgBliIqWUUOvnej373+ZOnfVkDSNbtapS03ToYDmVanDq2+K7R8S+/cem3Xv/a\nx//lL9R+VqilGSjee1f7qoZylhsFCiCKgIgUGRAkDFmku4nhSY6xObe2fHL9oblOMt+2/U5rvtdt\nxRGKc6VDgXzqvPMgFCdxYFGurqoislSW4147LVTlJnVb8tFwf+OL48nGzXd/54fOvvePzD/52CPH\n5r74+c9+9bO//fhTT+fjyXAyXH/ksWk5uSeAozEuM+cXv6Vz70dKzRR5axDcfRPl3lMQUUizWlGN\nWQYS6qIouqnRKJkKp1f78sDKcDSa5HJyoQ1kvAcENCJWwfhg7/u//7s/9tEfaJmoYglKSmENnC7N\nn37wwtVXXuVpfmKpN84XFBYTj2WNCECKFCkAqF0dfABAUiqyNoojYSmLMoRQV8EHr1EZ5VIFp84d\nP7W+ahW3ErO63Ot1k8V+12pd5hN2LjIWAPK8yPNS6ThJElAmFc1FLZWLktjGqs7rO9dfg3yvxZOt\n27evvvj8+M7NxwaDB55877tXF5e/48nrtzfryUBbrZLWhWfef/zBR/VhV76V1Xn70P79NZkx2DSr\ny4vCHGKlrTWurGKCboLBSjeVhXYveJxOyrIIJk69c2mcIMmxdvz9H/qedppU4yLN0kLxuJ5ajWk7\n6104d3wyvnn16nwWXVhbdPXBYOwm5MqyAkZDRmmFWotoY2wcx828UKT0XNsYq3VTwAaiyBgSJX61\nH/daiSHst6N+P/N1DoGSSAfCsiiYBUBFSapsorQJjn3usRQr1Gl1huPpjcuvXn39a+2Ib+/e2Lm6\n2Upb+cbrv/2z/3D/+mvvee65Rx993Pp8Y3LQ6y1sloMbdwd57fV9K+ps5DYQeZaN+2apvFlUiIgg\nb89kaiyCw5MEWAKLkBy6BkC8KzGAC7C22E0II+IzJ5brIi+mjipQNacGTdwCxL2D/WeeearfTatq\ngjWXqMZYleApjusih9EEkAm8K8edxCy1W4nxB9Nyb7cMHFJr49gQUmBJkjhJ0gYNR8bEkY1sbGMb\nxVErS9LIJpHKx4O9na3V/snzZ0/5UE3yKQgTkxdGBK1V7VhrHYJ4DsFTCKBq6FCEZQh5cbCzdfP6\n5cjwfnlQDPdUYONryAcL3ey1T/+ry7/7qQ98/w+99yMf254MJt4vrK5dHvPlzVtaKSAiEBBgPqzd\ngNAU7HyTQsFZcPM9NSUihLOqVUc7b2GjdnhGayKxYsRDhk4Qm7JHGiGL8Or1zZdeffX0YhenUaiL\nlolXTi+L4HQyBcb9wWh9ef6hB874YsoltVTqq2lVDL04cWk5HtajkUitEj0ti9qHyERT52KLy4s9\n7wMEAeZOO4vjhBCiyBKRIozjxJpIRNI0brUzRVBVU8X23U88OjpYuXr59Z2deG6up4CV0oTUlHMj\n0sZwXTsO4nwZRbFBi1z5cqL91NZR7KfGTU8fWzq4cysfl9bgZFr3+s7Ww1NJQhRe/vVfqSp38jt/\nYBL8ar/zR7/ryfWvtbVW6lD1IyMKStN5gHQUGn14wtHCeyiDo/F+L7XpMJ5UEGYV2RCaXRNEBJiJ\nGEBZ7X0gDdv740lRJNFqYLvcO5akaa/XE5G9vb2qqgjD8vJKFFGV55Ex41D5ui4mY0JP3DZpWhWT\naZ7nVami2BiRoqzquvalUdroSLzENl6Ym4+jSLwzkU6SOI4tCGkdx3FUuxLZpXFybPFYlkbjg91O\nOzt58uTd7c12O8mStCgqBjakvA9VVRORdy6ykRImritXjYbDg9cmccQaCpkcnF7oxXE6PnHyS8mL\nZVlHkanKIoH6WNY6vrR2/MEnPrux97uf/s2TH/j+RFMrgo+975xWWsv9jjBkAELEhrfB+zYfeAfC\nDpEPq5YdDf9Gn91XkAYAgHCWqUEKPBJh0Io1QhJjpFC8s4raWWrjBEWM1nP9/vDgIFpZydqdMi92\n6u0oirI4cXVdl7nVVFaVCOdFwSFobbIsNVGSJFnhy9HmOGDotdIojdpJu9f
tGqUVibUmjmySRIEh\njrN+vzcaDUYHAxROo+jY8vL21u3xcNjrdPPpeGPj9trqKiJppVkYAK21IQSllLXWGFPXVZFPXZmP\n9vcHO0udLIGqOn/2TCtrt9L0K1/60q1bW9omg/2DUIxUO0ZfPPX4o25l+Df+9W9tpUvnnurs7BRJ\nt6u1UkLU+OWPoBvAYWrdfZDm7QJARD7cVQJmmQBICHJY16oxy5rXsdmhUYlRwuytllRhm8RgQK5b\nSdZKU0BydeXqmgNrrRVpAsynOYsopUKrFdgLOwAznozHIiHUi8vL3fmFqr6cl5W1+vjx9UmZb29t\nTXES95Mmfd4qo0gUAnrmqrY2UQjFeNzJWqtLC86Vk/Ho8qU31tZWFMruzs7K0tL169cHg0G32zex\nOfR2wHSaizRbVCIAcPCxoWyh18uixEAWJ0k7dq7qt7NuGk/aCYCuI4JyBFXSiVU+3gu1W19beenq\nZe6ujSd08PpVre6poDe5ZZraAo1r/m1M3awxEQBxs6/LTC+FwIyAhEgImghJqKnhCUREoFEZIFKp\ngYVItzl0Y63FtZMoUtpxIBBm4MANdqzrmlwQAAYZjw6AObYmjq1WaBTayCpt5hcXzxMNh6PXXr80\n15974IEHpqPxeDTJkjaH4JzXqCwpjWCQLGmrlTE6jeNWK0vTOHhTTieDwfg2+6XF/vLS8p07G/Pz\n82VZYlN6EDEEBoA4to2CDcFzYARZme92en2LXE0GvX6fp0ME7YrxXDuJ9Oo0rxS7fGdQpvutCKvp\nUInutqIumcggtKIoSWb7iDVieJMAGp9i4/Rmbio0vHUGNJWlhOGeAASZtaIo0pG1VittlVGIwAqV\nJoUKTKQjQ91UzydRXBVzncQSSuAin/jAylgAKItiNJlkrTYRTF1eFrWI5MWIxcU20lpZpebnum3V\nmeRFF2BxdXVh/bhzbm86XlpYXF9b/9reK8PRqNvqWG2s0qAIBRWgIRVrkyRJZExVFPlkzKEmxE67\nPRzud7rZ8vLSaHyQF9M4jp13RVFoPduwoqpqEVFKNT3Wbjljkzi2IA5cGcoctbZxduvqpU5qT6yv\nEdmXX/76y9v7i/OLWRpnseXJFOp8ob1047WvDnerVm9hJgCaxe7fi6tt6k2LMCIwkwiH8KY1mRCh\nSb0jEvbCQZEQiVaUJraVJGkSp3FsYqWINZJGVJoUoY4oNqqVmIUkznynFWsNrBgZJAQXmAXIOx9C\n0FoDUZ6X3gUX3N7BQCSQUggQR7YMftXYrCiivX1rI0Qw1vqDWhNlWaqIxpPJ/sGBNZYQkW0rThgx\neOe9y6fjEtF7JxxIQZxEvV6Xpd7e2kLEs2cfuHb92ng0JFLOuRCCtVZrg0jGqKaviKjf73vvrQIk\noOBdMfYAN25c/+qXv8iglheWTKRjax995MQTTz2zsnQ8z6dVMU20XH3lK19+/hWoLRir3xLK2QiA\niIIwCR+GPrAIEXETxXXEORMIohBCgIAYYq0SY7Ik6rSydhZnsU3T2EYaKcTaGCIkNEbpSCWRiTQt\nZmlbMEaE3CEapZSAFMU0CNa1398fRGmmjalqp60dT8bTqtJGKxallAPcPhipOPWgdgfjNIljo7TW\nEmQ8GU1G4263MxpNt3a2jbWklFGoldIUlVUtRC1LSiknLi/yKLJFOWHuWBsVZVGH+mA06nV7VZkr\navSzhBCIVJomzFLXTegZR1FktVJG+xCaCJppUW3cvNHK0tW1E/MLS+NxsbpybGHhkYsXH54eFJvb\n28fWzzwwz+npB7Zvb9167TpMPJ44vvKO+v1oN7KjuJWmYmEIfla2EAWZNYLRRAiRUa3YdFtpr530\nWmk7SyKjk9hoQ6QhMcpqbbUxkdaRTZIotaYbx3Edqv2DajDOdOKdz4uy9r4o64PReOPO1tLKctbq\nDYYjpQ0pdTDeCyCCwAE4MLO0W1k7ja1WkTG9TquVpfl0vLO7PRyPbZztD4ZbW9tpmp5YW5/rtBXz\nUq+nERCg3cmiyITgvXeoMC+m/X631c5c8KQgjqNuu+2qcrB/4JxP09QYK7MxKA04bPokjiLnvTYU\nmIuqqgPf2druzi08+NCjSasXGIu8fPXSG4lKrl261l09mZ578E7a9otrVzf2fuYf/I8H23d0s8i8\nBWWKzCIL5HAjOGbRGkTEe2IODRBQ7CLFibVJpNtp3OukvVbay+JuK2klUWxUZBVpEA2p0bGJYmN0\nHJlI2ziOjGmZKBPcyotaTUChBAAFwlCHMK3L4XRy62sHS0tzlQ9l5RaWlrpzbSGVF+WtjVs3b+5X\nBbBAJ4PFvu2304W5frfd5jrXCue6PZu2ojRDpff2dm9tbVaunO+0p3XZylKFPJ4OiTrGmjiLWEJR\nTR37AKKtFuA6BAZQSmmtDyvLzRC5CCillVLM7JyrnWeQyNi6rB1Dpz83v7Kapu203Q6oUOt8ZzA4\nGL2ycWmps7h+4tSBjZzgzdub/bljf/2//Knf+Fe/+CYB0CwvSThwk6jNLFrTfTNAlDqElwQxUMuq\nTivttJJeJ53vtHtZ3E5tO7GpVZFW1ipjCS0m1iQ6jozWcYRWmyhSSiVo+zYbbe1W1oYQgnDtfV4W\no3w8ySeCand/cvKB9qMPXLh0+erNjZuvXOV2L+n22lWQggEMIIAT2BvWw6HbG+RZhCvz7fNnTnfn\n5waTPIri/uJ84evJaLRzsB8nscqnonG+nRkgG1ttTBSZvJxM86mKlKkiE5kkiQSgds4iJElCSIFD\nXddaawC4t4UbIpLyjNpaIVO5ovaiTLx44iQ4do4FvA/+tTeubG5ut9utdz3++JiZjNVRe3Cw313r\ndqK5P/Fn/zczAcyCnw+PYbabgiDSbDd2kIa0YCalBBFBpB3bxZbtd9r9bqffbc11km4Wt6zKrLYG\nrcI0sjbVZCkyOlLGKKWjSIwmG2llLOhO2iVlWMBqYiERyfN8MhmPJtO9vfHKsezkmTMr6+ul97fv\nbipfTcs8HLBjaHfjLEtJIDI6QiT23VayONddnWsn1kAQYWaFytqs2xHC4cHezt4Ozs9rqyKN3diy\nBG2iKIkLXw6n46Ck0++R0XGWEhFwEBYkUlojk1IaEcuyimwEAM1SqEiR1tpG3oe8qAbD4TgvlI1a\n7X6UtUS0QXP+4sVef25lbjGNO8NhkbS7cdyJC7yxPZykhmqvj8p7zuL5G5tVzaLTGYRRiYA0Ed+A\nzB4hGEVG01xMS71kod+b67Xnu61eGrVTm0XWKrRaaa2SONJWgWWrjdFGKa1MjNooa63SGhXaNCgK\nIBoVoBCR96Eo67LyDHDx4YvzC4tlVS4szj/77Hu2D3a393bvbt8VkvmF7OyZB4BFA8TWaISFfjfS\najFLuSjyqjLKonA7iiBrSV1VSo9GB50srdJoOPIJdZlFa51lSZTYazez21uby6vHBIGjNIpMqD2g\nSGAAUYQN+KnqkiVAaJSFQiJAYH
aB0CTRYro8mZTlxLUzBWB97VUEx1bXFucXi2l5Z3tH9+ZBgc8n\nPaP3C1baTKqgXQjNfokKibGJU0JgwtAkapMANeV8g4gIKxCFklnqd7Kltj0231qe7/c72VwrbUUm\n1hTbSBFqY7W2ShlUjCpoG5GNiSyZWJnIKmMVaSTQRqeJWO2mJQGWRe6dL6t6MsmXjs3155eUUe12\nS2s9v7zQ3m+bW6p0k6rIV1fmTh1f6HY6RhkOrJRKk0QCaxcCU6wtqCovcnYhIdU2NllavrN5a2tr\nU0BUt1s5DyzeOVdUUctkWTyajPb391pR2sE4smAVBWJrDCGUZeFDrYy2VjsfmCGyqdYGQAjZiXeM\nZO2Jk2d2Nndjm2qvYOIVITK4UNd1uLW9PQk+MWx1PU92UnLlHbucFWoOTSgPgTAJ4WxbF1Yw22hm\nhlABIQRQrAlTE3XbyVy3vTSXriy0FnqdbivrpVEWWwOoCO7bmrcJ0GFmUQygiEhpIq2UMUYBgjWI\nVFW19pxG8cFw5FmCiGdeWF5R1gQQHcVZlnpwJ1rrw+mgP9fO0tWlhSVUtLq+nmbZZDhZXFwcDAau\ncsud3taNG8Rcw7iVJdPxCAmyfr+sinjSGg3Hm7t77VZr6usy+J42zCFUkJo4Vno4GCQLhtmPDg76\n/V7gYKyxNhLhsi6dq422iECEzfZbAIIalNLFeAomjlrtdjeACyzCZQlGoRLRxACXb27YTrZ+fA0I\nNepjS3POMiethGItIkFAHdpYcC9XgFkabk4BCAkIskKJrMpi02slC73WfC/tt9N2lqSxiSJrjbYI\nugnIBQRpiHMHsw1MUYA0aSCFShQiEAFpUMoFrlnQhzqEKoThJG/PzWXdDmhS1rT63TRLnCul4jiJ\nFxYXTp445RwPhwcmTeaXlsfTIu12u/OLGxsbu3lO7W4rS/evX1tbXZ1MJ2WRK407d++CTXpF8cbl\n12/t7RoN8TjudjuRVqDpzMlTr77y6mQ4cu1e7Z0imJZTG1lg1FYxG0EJwsEzNqXf5YinV9qYwfAO\n6BIQeysrkjtkqIpKA3jng+Dm3f3NuzvH2onjQByCiFYmiWhY1QUH7b0noiYfWw7j/GdbPAgLKsHA\nQI3kI41ZZLpZ1MuibhJ1UttKTBpTbNAqMDQbHQDSbBHKzAKsWFiwqUvOqBkoAIrSSAJx42Egm8aT\nyYRRDSejg0lx9sTJ3uLC0vrqiVMnl46tDA8OhsOJl1IgnL/wwNUr15986pl4J7m+cfP46TMS2a9d\nev3cAxdWT5/+3c9+TpHqnzguO3fjhYVscfHqtSuLyytzx45fu34NQUyrMxnt7te1Ho2yu7tmfsEA\nLq0spFH79uaNuU530urM9fvTqmQEIfCshAMiaVKuLpVCBGIRCIKELEKA+3uDaR2Ob9w+tnYCrYG8\nYleVTkrxOftL164V7EySgDKoNNdSloWhdhbHvhLtXSASxSDUONChsfwUAgEAeQFq8lwsQqx1O9bd\nLOpnUTexqRaLtQELjMAkgsDUeHY4hOBZQgjQyFJAQDF60BgEQjCoFCmTpgCEyoCNJtVBHsQp9cDD\nF9/19DMnT59eWFnqdNvKqsJVYNRkMK7rSildOQcA8wuLN27evLu7259feO3SFQb11NPPPPLup8aT\nSdLrx73e2LlzD5x95doViaK11dVXr18/cfzEw08+9fLXv7J56zID3h2Ou0maRVGZV6dPrL32+kvD\n0ajTGZvYRqTrulZGAQILc1PFkwE1YVMOnEUheR+qvByOp5O8fPmVV4a7g7lWv85L52omrMHvjEeX\nb2wMax5V1TGjTRRxFbxzVV3GnR5Yq70PiMIsDQxtNL7wbI8PaGquckBgBRgp24pMLzGdSLUiSJRY\nYgvBoNIoCrjZsQ9YIAT0LCGAhOA8V450zdYjCzFrhFBVorTxAVEHwYqlJtVbWl6/8OCJM2dXT55M\n2y0yqqyLfJyXrur2OoO9LUK1c3fnXY++6/qNm6dOnV47fmL/4KDXX5hfXGx12l64v7zsUbGipNut\nA1cicacXdzomSVlpMHZxde3Y5KC32Nu8dKme5GiSwoVJXp594Jz+rc/mRT0tSxwcLPXnEERYlFLI\nGELNjUNqFuDRbIeD3nMgrp0va7e3s0teynbhi7oop8pqMHQwmRyMx9vT6nhR2jRjQeeCstrVdchz\niJUOwSNiCM3m6aSUQmw2H7kX98ChthpTG3cS02/FvdRmESYUYkLNDL5UGtkpD6BJoMmF8wLMwNzs\n61f7ghlNVILzXDsEREYbpVA6qw0jeNL9lWNPnb+4furUtCqH43EdQnAVII8nk7ou7wz3iryMo4yI\n6trd3bw7N7ewun5SBC+9fqnf6506eXI8GnW6XSB/9fpNrTUjTKfF/MLi7v5Brz93+uwDg/1BHfjs\nxUcQvBF959VLBSjtJGGV2ihK4vEkL/KSPSc6akVJVddxEjdUr1LGu8o5RwoAtDAECZXztpUQ6f39\nwe7u8IbdikSWF/pG6/nFOQkg4gajYTDp8dOne/25a3f2giSu9grMYHDAsdPONR5n0+xl1fgZG/oH\ngWm2LzZrVFls2mnUiXU70pmhTEtMYEBIAoQA4IE0swALMSlUiEhIgM4JKwCQwGU19aGYTl1VZ915\nEUrKCgXjrNNbXJmbX1xaPjat3TivoqyVZImrq7s7W9PRiJC372z62s31eiKQT4pjS6uhCvlwXFVV\nK4o6WVqMRlVdQ1lN9geT/f2zZ8+ORkNXT7PIGoWtND12bFkbdemNN5bX1pQ17/sjH/70NFy78saD\np4/v5nnPKDZakQmA4gFBRVGklWYW710IjALOOaXNLH1KCBAC82RSmighpTdu3ZkOb7UNhDMnHrx4\nUWsFGtDxeDw+9/RjS8sru3v7VVVXgbx1PlBZeudBzzy496DPzO5tlmIiIQBNEBnTSuNOGrUSm1pK\nNVotBpiCgFNeatCEorWi2T7soDQJIEsQhZoJPfrAIMFz4BInLCoEiNO2AMZJ1l9a6fTngtbDyVQQ\n5zq9ohiDSF1UVZ7HGg/u7s7NL44Pisl00mm3I202rl7bvrXpPffmut0sHZVVHMWDrc0UYLnbhrIY\n7+12s1YSWfFy89oVIBVrrQBbnb4oBTp76n3f+ZI2EzfNtB56zzZGDhIIgKABEYGrqgohCDI2W8XN\nODIM7L2XOGntjyciyAJK4+Jienb9+GK3125nzldEVNfVyTOnT5w8c3AwCrqMW/PTXNj7wCYErrjS\n9wciyr049cYbwCIEBErrVpa20iSLbRbbNCKrWEMgYWQA50WUgAcSJFTKaNIGFQkAsyB6H5i81RpY\nAkMADADFZOwYolanqmsTJZ2FJda68MExOOe2d3YSq9k7X7tiNBmW073tnStvbEynVWT13Fx/887d\nra2doihQYH4hvXjxgquq06dOGauroiidy1qtl1959djacZslvf78/NJSnCRFWV64eLEuq+U
T55Bo\n/uz5x4y5/cYrGn2opmysy/Pai3YuH+VxZJAwMAMKKkRgIkVEiMSBvfdVFarSTcvCGpOl7VY66mRt\nawwRGq2ypCWGdiajfm9Oa313eyfp9TxYwYwDE6CNItbmHhnXdP09TnSWZiqIFBnTbmWdVtbK4naa\nZAkkWEcaLYECQUIAIVRGG2tirSKjlCGFIuCDkBgVZoWQWQJSEApIZcCaeTKeiEh/fkFH8bSsyrLM\nq3p3e3t/f9cV+dU3Lg13t8WXiVXltNzeGReVz+Lo9s1bzoV2q7PQSzQRoezcus3sfT6NI5vGcVFW\nt6oCWTauvrGxubmwvLKyttbudOeXFi+ePxdFcV7U3YUFLqdJ1l4788D+/lY/XsAolrKKojRFhQLB\nhRAJBCElCBhCCEGUACkKHpwLzgcdp36ajye5MdH8wmK/3Wlp2+12vXdGFIeQJMl4c6z3BwLSAzzW\nWxoMc4wzJ857cXJvq7aZIrr3FGC2DxdCpCmJdCs2rdgmkYktxISxEi1ELAga0RqKDFlLRiltiDSq\nhk9FMdq2RLw0G2sSMhCSqdGMSh+YTZx2+osURaEsiqIoi7yYjHZu3/zdz3321rW9Y0vJ6RMriTIm\n0enanGdIrRmOx1kWIQejVBRrCMzBG6OyNLVaIfJCtx0A9g4OCleZ1WOVq6+9+lLW6Vn72Gd+57fe\n/70/pOqiLnNDyoOKu/MwGalIk45rtx9FUay0UkJaGdPsQ8nM4AN77wOjZg4iPoQgzBy89yy+18li\nq7RwmkXziz2Foqw5GA/zyVSCKob5eDrsdvv9dvugGO8WRaHjnWk9DqCbfMhDACowK9IkAs2WgGzJ\ndhLTNtCJdTexlkAhxDYxmqHyCKLAaIpjirQoJWSaYC0VEARICBWyEtSgWJFoQkPG6Dh3IsITFZlW\nVyVZ6bwgAXvyuRvvbl19lUf7F46nJ9aOtzvdoqgjrcRhXdYUcLW/ABCocYgGJlTa6DiOUhulsVaa\ngsKaHUhSFJACF4VoF5Wu2Lh5+Wzrwf3dOxcWjoW60HESt7v7uzutzqIvJk+9+7kv/OavVqEch9Ba\nnFOWGg9w8FJWNQMrY7wHJPECpQ9eAIIT4lZsfV0ZiwppdaVPhrWNmyIPKCY18d7Wbqdt57IkVAVp\nPS5lKLTpQhW0fkvI7cwvjwISGEFrjLRqJVG33UojGxmKrY4NWasMBWRQhETWaBtpa7TVpFRTI+4w\nfwBJaWUCBFFMiFqRQk0qjhRFBhxGEqWiDQsbra1R47oYDXbz0cHxlaWVpaV+bw6YrInZsy8Zuan+\nyiCiNIpnRWC0slonkYkiE1mtFbFBg4oURUb3W+3JtEjSYutg4KqalPLBj8bDJDCSquugTORc3en1\nlDYnTpzSXJBwWRcAwCKxRLO0Y6HgwQcOdR0EgjA0XD0IAZBAZM1cv68NKUVKNX5jo7Udl9Wd3QmZ\nRRtn3mPtgwt6Ulfj3KF662aehwFYgghNjUOKrE2TOE0jq0kjGIWKmogsAObGY4YIzb7qwshAgkgC\nokA4COqgdWh2n1IYUCFpIgPKKq00aa+jgAo4KEJC9JWrq0oRLs/Pr68ei6OYPQTB0WhcQgDkWc0T\nZiTUVmlFkdWR0VFkbWS1Jq2JrGKF1trUWgTSNE6z1tQHH9v19eO9/nwdAtYuBinqspVmxXRUOn9s\nfe1g+0axM84SS9YopQkVIzCDZwksSisGDrX3gYMAaoPMVVH42lmr263WXL9fh5qQOEgIbIzNWqrl\nyVS6vbASteenDl3QdenHxdgVghHpJl3tyAZuqpUhAhISgNYURdZGkVakNRpDRpFGQWaWIMELK2p2\nxGUW7wRRgJo9q0UEQRiDcy6QR0WMRpp1XRC0VhgRKVSGyChFrqjrqszzsa+r2NpOO0ut0Qp1ZBUZ\nVxaEqDV679g778FqDeyRgsz2qxZyYsRoMKJQEQGissZVDlmM0lrrKEkX5ha6vZ4DU3nnhYu61kZH\nSTza35lbXprkUxsZUFgUZZRGzYDygSsXAotFJYC193XtaLZVpsrzKXJoz813Wu3Ga6aUroNnhlar\nnfbTy1JlPs6W18YOyprHUylKDjVq0rX3WmtCuj8grnE3ogAohCgycZzEUTMeyKjma5kDe18HxwpQ\nKKAPSA4AGEQpdRhwPUvcBqlZCSgiDsARKEQxRCwqIGoCbNIpCudcWfq6QpA0irI01goNIQbvnEfh\n4CvmABxCqHxdkXiFwMISHBOBWBTxgkqEQHEgQUZEV1VGq9wFELBRTKiJFDAOx2ObtqZl4YNPtUaF\nLsjC0vLmlVciNEprZazSpkkkFuQgfjjKdWTqygXvrDHIHEQUQKvdWpibs1ojgtbaGBNqYOHYxrrb\ny29tcpyyze4clEm3fWdvZ5BjTQkSkWGtNAHAjL2/D4YyIxJoa+I4jiNjjAaQwOyDVEE8O/be16IA\nA/vAtQfUiom8OkxPaRZz5KCYRSvRpIIX7VkF1qgYvGFmAU0SVBD2zgFAEiWtNJVOZokUIrHkRVEU\npatcCE6YEYGQEYOwCCEdVqNpohZA2AdQTgiAUSQwsm8lWeVza2271VUmNjYindK0REITWQA0ceRy\nOBjsL62sbN98g1FZG5O2QNoLVj7kpS/rqiyrlGPvamFvjVEoPoQsTef6vXaWcfBEVDs/UweBGdC5\nUNW1inqe7JR1msztTjZ3hpU3zDHYONJHXU90uAVzsxiAIpQoipIkRqLgPbMua69FDPpQl945dqLA\n2AhiS5EH0qSItFIaCAkFEFgw1Aodac0KhXRk40g7ozxZz7rmKCYlqKQCJUBxnMzPL+zf3shJpUlq\nFNVlCcFHykzdNI6t1lop0kqjsHeenWtlGYBURWmtIULFAAxIaK2ufT2ajNI0Uwp98EmSdhcWojR1\nAZTFfr+f5yWSKosiM8ZEMbZae7cKY+OqnnDSwBiqa1+UofKh8myiWGk7nU5JglIYgldIa8eWkzgW\nZqO0967dapVV1VDAi4uLN3cO9vd20lPHHKOwvnT77uZgMqoFAkU2UgRaa3009o8IURARVISgSRGB\nsLBwVXslQTwBu6qcVmUlnhRYa0Ma+8hqZcigNkarBgOxcBCQSkkpiIEQyMRxkto6idjGokxgDqTR\naG2zniQSyhyV0sbENjbWomBsY19N67panJ/vzs/FSRQZy8Kuqtl79iGOYw6hLkujNQDkkykI28S6\nUNZ1aa1OkhhIWRtlMbXaHZNkoLQE1Nog6qbuFCBp0uO8GOwPjNLaxq72HHEQmZb1YDIJ3ASrqUle\nIKnERBBCYN/tzaVRpBQRQ7MvqK8ds7Ra7Y6O4zip67tW0CBMpsVoQoXmWwcjUUajkrrsKdLGHAXH\n3ZsBDQ9KRFrPUuld7SskAgxMzrnppM7LKpSiwFlbNxDQWIpIGaWbUOjgxbsgUiFUzgcHQjpK0yw2\nZUyTLGtn3a7KsnYcR9o43xh0WhubtTtYV1EUG8
RQOWujLGvPzS9oS1EUW22cq70yihQ7b7QWH7xN\nrDXCYpCCBNDo8kqYrTUAUFWV5wDGojF1kCBilUJBZg+CWum6qlNEV7v9vd0oOEuAgCZKfBDnPYNS\nVhPRdDp1Zd5KLBIEHyJrkigyWouIACNqo3VTaAwRoyjKy3o0mqRaTXb36ha1Tx4vvd0tK9KQ6ppC\n6X09E8Bb7QAgIq0QtSYADMGXtSBDXYNCLMvyYJxP87LMHQWKrE4inaTGGp1GNraWQBGAd+wqV7mq\n9NPCuTqINjaOJplJemknjV028TrLl8iq/rJOtQi7IIDW2JS0ZSBjo2JSRHF68uQppTSI10R1Xbui\nQgFtVKvTMdrWZc0hRJEtyyq0uHBF6UpAsbGlJkGBABSCUqCVkNI2Mlqza6rYgAIup9M4tQpVWTrx\nlTHS7qSBeTSZ5mVJZBioyKvJtIAQEit1VSurWq02CHDwAIJN7IJCayKsPJJSWnvve53O8QXYKIST\nxBO9dv3a9mg0t7iowKfCZTHVDWg5EoDMqiwFpZRCVIQo4nwowROjRwrMk2mxMxgOR1NfBhIVa52k\nOq2iODKJ1dZagwpFBe/r0pXeTXw9qeo6BDLeko+xatkiTZJut2zP9dLlVZt2yEZVCDULAZFS3rME\nEECtTZa2Op1u8CFSGHxdFwUwEyqD2pLVqIKQIrQq8uK0NghOHJAxRmyzk3ZgHzhUrp6WVSWBiAiV\nJlBEwqCVLlzQpBSRJqWRfF0apQKzMKBSRLpyfpTnwbPVJAKkyEaWEJp9X2axnAiIVFeVADUR1FVV\ndTudx7prsjG4yR4IdJLWhFP2CoghCPO94Nw3zwDEpuo8KAEMITiUPDCI1D7sH4y37g52B1MIFBlt\ntUkrk9WcRj6JTBKJIqWAnJO6ckXwIxemlasCI4nGoKVMVBnpvDOpepXrrq3lVWhF2jkJHowxUZxq\nbZS2wtBpd1tZJixJFFfFJJ9MirzgwNZoRKyKogLwVYgjawmDMUVdGUWRMSA1I0VGVY4ZnAC4yrmy\nRs8KNTBFxrpIPHmDWGpKtCLh1FqjOOROUFeeycRaoKh8mRd1WcTWWgVEEsc2SWxdlzaySilochAR\nRbCs6yjN4jhBorqslE36Fs8fX4TSXBM2RgUOtXMhqOB9bK3WSs0iEu9LhhEhYNFWKVLBs0dxzHVw\nAlDUbudgvDucjith4WZr78KH3Ll2TK00qqqZG80Frj3X3pe1d6JD8CxQIwvABDmJZfP2nVOGitqB\nkKuxqlnrmARVlJg4BoUIGFsTEfrpJGm3CBqGnLUma5XRKMFVea5QGUX5dL+uHaGPSZRRKjSlYtGh\nBJHaee9DPRyqvKQaddYD8OBy7b0rJ8ZP7t6+W40HaRKXo7ozt+IwAbQ11KXjyWRS5XkEPlXKKDDA\nvmRvoJW1guMaPJICRSKICsVEAZSxUV1WEYJiF8q7y1lvALgxHcbAliUKHAUxAq6q9aH9BUh0X9G+\npiQlh8Deew8AGMB7xIaGZR+ABTwLs2fHHpGd00IKam8ZgRggSKi8Y4bAwk0l0VntDqyDA4I4joaj\n0cbGRjGdelFWm1qpfDR1tUMkINLGGGNc5fPhYDQYjvORgBApQnBVZY0ySHVVGaUqr43WOlIMIF5I\nk22lZYUHw9F0WjKD1trl5Xh/4McTi+DrAsWlBhjCQTHYuPpaKMbTg/3RwSDSipSuHAOI81w5Vzvn\nXKGYKTYaIYnidpbZSHPwhiJhIY2kLRAxEEuz5ysG7yUEpUhRGE/2jcBy1u4Mpn2jirwwnW4St/Py\nUABwiEQPQ4OwKYkSOHgvtUgQh+wRwNVOggMQJUAIEJgDMwCix6AgANTKAzKAB2aele3hEJiFUQDI\nGK20KiZTSQz7YvPWxp3bNy8+9FgxGRfTYawx7rYnd6GYTuM0c8q4qp4Mx+LZYe187Z1D4chGWRJH\nxgTnlUIhypKEiJTVGsE7aar0lmU9nkwrJAEtwRV5MR4OR/ubddhM4mh/b+v2xtVE48H+VjE8qKeT\nEGqbtpXGuqyn07yuqyIfV3mBAtpoJNRa28hqrUGk9l5TEBHUSgMEZu+csSZJE2MMAAARkCKtLUgm\nsqB5NYY544cckTJ7k7yVoj4MR0EAEOYj9+SsNg1LYPHAHIKCACIhBAKINIFl9sABCCHW0ElNLzNx\nRAgoiA6k9Mwg4pgEVJNP0Gx2zoEBYqs0h1iZG9eu/JN/9D+89zveF0It1fTYXGdtvgfCVVWPg4TS\ncR0mk8IHz+DH06GrqizNUBmq3d5gwD4YpYqy7HZ61hoo0XFd5dNpPqnrWgCyVqYc+qpWAtPh8JWv\nv7ixcX04Gc3N9YYHu3u72+dOn0QOxXiqRAgkn+Z1WQmQC1gURT4t2NdpbJPEkkJrIwCcTCZaY5qk\nAUACA4IINPx0ZOPI2MDig2+C3IrC2ziLytrWd8902ufmki/fGlZFvrh+enVl9U1F++SwagczE1Kg\nEAL6AMiBQuCm75wnxNgaTRJKL8KpUd12stTvdrPYGgOoHPupcwd5Xk9y4UCioAmqIQIkAiFFCOjK\nQqdWoWzfvvkrv/TPlhbmFIQ3iI8vzy/1usvdfp7nBZcQcDLKp3lOhmtXEoAJsncwHA2HiJBYq5Qq\nfKgBjbFxEoGE8Xg6nky890ZrtNaH4JyPjPEQBnd38smQuahG27G1S52WlGVdVeBZGyuEde2r4Ehp\nJ+xc5X3NwYcA3qskjrUmEPTBK9JGG0RAIq00g3AIUZJoUsG5woe6qo02gmBsbOI4KQbRdLyUwgN9\n89XX9sqoKyJV7TUcpVYDHNWOZmYGRgSvyTGCsBZuVAn7oAQsklYBLBlN3Va6stg/trDQSawxGglz\nVw+meWA3nYbANYlp6tRCQCQSQWGIkzhKIiXeBzfXbQk4JZUlMIjldDwl5FZnkucQSKOZ5PXe/lC0\nJyVamzKMXVVNxuPY2ioRABgV1aisECGNIwCuykJrba31oMSDD4Co08TYBAGNilCTqevKKpNEUTWt\ngCV4qYNXqP5/dZ3LjyTZdd7P896IyKzq6q7umZ4eaihqaMqmTMIQRYpDyjYhgAJsGPB/6qX/BsNe\neqOFFzYgQRJ7ht1dlZkR997z8CKy5iGb29oUkCfu65zv+32ExSDMfEQjhuU4MUBVrix7lx8Q5mme\nahEpwwIJMrP31vuY55kR3SwcMrNOlVX6wH7ZJm+fzLnN/Reff/w//vZ//32s59+/nYllN2B+HZCx\nG0KYyDLco7tJD6SAzIgQTCZSIQiCAAJ6Ns+fvLz/9KMXr1/e3UwVMRPx0rsqhtu2nn+/bQQRBIIE\nSMPdPUREos+zKjMqCHqdCtgGmFg0BpwePnw43hYua+vnbQxIA1ovDRmKAiCM1lmmx24P7cRMReQ8\nsig9nttoG0Acj4caWJWFCwtImCEe6pRI23bu3lkYDNrZMikdRo8gZGb3CEdkSsypig+4nqUMADls\nCKFOV
UR34xggnc4nAFJRH7auay1TQrLqHlegqm//6R8126cfPf/K+mfL4T/98qf/5b//r+3hd2df\nv1MARLz6vyA9AzB9YM9A3uMggQAEoBCLBAQVpec3x/tnh5e3h1fHZanKFCTafKlFIeH88NhPZxuY\nkcAARJOSZ6iIChemKsikiMDp+zpDS8ewjNPD+5ubO3M/9+5OUBRiWtvW3IoWS+7ndm28Ajqi9+hJ\nk0KiMFOHEobJIo4RCChEKKQeCQ4RQM6eHEH7vHd0gBhAFp4iXIWKlkjbtkuYCy1Uaq2aw1vvFAGZ\ntVYS3okYTFJKqaKl1OEeAcv1KMbzw6MQHOeZw7ltiviz778Jqv/1v/3P2+PTCnC/AnB3Sx5ReloA\nUAamUCBRFkREZGLGzCSAmFQOUznM9TDV26Ueq2AGMi9ACNhaf7x71s7rw+PFAgCQBYEYCAlJGZSB\nIQiQ0gmSMBVRCYQQEi7rmbVYULPRurce5tSdOYm1JHMycCnEmJlB1JEiaGwhTFVrS/EkSJZgRCRW\nBsxEH24OFmyRmZ5hNgIBPSLCmZEJSXWeS4K/ffulD3t1/+owl/VywYwqUkWIyT1a694GsTBzFRRm\nY56RHteLlDIfDgCwbdvp9OE41WWZt7ZC2FHct6/+8nv3N7/+s3r/ieyQgutd/5sCUKTvCBTBDCpA\npHvwAQU6BWSi7aMsQSxEE9NBhcIDoZmL2Qx0W6dny2Kjm4VZJMSuwyZI8IhEwnQMgBBhYSrKRbiI\nKEsibK2JTPNxweaWrSyHOfdlGq1t02EWEYC0iADPp9BKkYo6JTOIJlfLpB3xYjGst61vvY/94Avw\ngDSPCA8TpbkUZETpzc36+dXLl6P3YW1rXkRaa/rUMHa3iABRIpQropGF+fR4auta6sSIl2378PBB\nOIpQN2+OyhNDEK70+Hc/uTv40WWMsRfguyvgOk5yhxAmImatk87ClZxdUyVHVxYtSoDpI90oeCIC\nIust+4Bu4lGZj/M0unUJBwKAACDYR54BkAyxk8QUSYWrFBUWRiDyMMSclmm5qbfPad04glU4MiOG\nlOI2+hgWFm67jIkxp6JEBIggakA++v6EHr2Bm9tYvXWMDIydrxsBEMxIClqS2IPNE81ta+FmafbY\nNjR/df88M7dta+FFpZQpMyWv90j3sD4+PJ7rPKnI+XK5rOu2rS+OVQQ286SCBBoDtw8c4Wr/5+/f\ny+gdniJM6FspJsBJQJmJAMRYlFVYlRcVTQVjcKUAJYD03nvb2kaQhIzsfaAHRqC5ZMy1EJHGrpv+\nWnwXBEQUhCyIc9WqUqvUwkxPYy4I683j8XjL9y8+Rjg+ni0ziMjd+rBSpxJubu4WEBhJDAw5+nCz\nQAprfWvMmNa3dd0NX81tw74j7DERIVlYlRxseEAYjCjCmLBevLWmTIdpQsQP7x+e3dxIrYhkFgCD\nkI1NnUeM8HC3tm3PX9yXWj6cTm0MIBIVAMBSlLRtG7pPglPC+8uH23on9oTrwycsaAKEOwE4OgEh\nyMx4LHiUvCmwcDKmFkkH701ii5Gt4cNZkXASpfSt90vfbFyq2M0i0FmJbIyInalOQAEOCUGYTMgE\ntVrVnCrMAsJJiJHRfWy5jfGw9e0m+fnzmdV+/9XDCD49boASCcPMrGc40h4/7XsHxd2pIRMBZFuH\njY6ZSLS1nhQs2q1nBDIiAiNEhBBiYjv1w1xulsPaWzCpwmjt5JfnN3cxbNuMUmqthdgjyJMT18ul\naJnn+e3vP0gpWPjUt3ePD9PhppSaXJtZRDB6UZbpJuDm3aX9w9jq/Rv5Zs/5djMOIBIyIyAwXTgn\nxaXQofJMIETKGAMDXQGFMCG2Pvi8DXUEMBtj9EwXhmUqyeChZprhhPtMv2dGphMgETNHmWJSnAsv\nqipEiB55Hj1GjwCIS8aZ6MJi3R5az7U58xwJvY8xhvsW0SPcw3aFX3ogAotUUQL0K34nHDIC2AOv\nAYJJhJGeSYz67st3lfnu5V2Y2XAECI9I6N0fT+fb+QDIkWiWrMiobevb2o63h4D86t27Yf76/vk2\nmo8uUtZ1vXt2NxyFKgxLT52mAfTlZcTho9ff//j5Zz/6/5Nzr9po2C3DKCy11nnWZSqzgFIKYQxJ\nYU1kRAIaY5zDB4syRwZiailIgB5omb7Ti/fXeWRopgMYAhIRc04TVJWlTHOVfazfbBjkZpZuYe5u\nQCAqzHlZz2PAurZSZkggijF829Y2utnI3Plrvk/0eqlTKczcPWwMRgowd5NSEiAimEhZRm/vL6eJ\nmAhb7yKYHnWq7bJCpLCMMYaaVAnI3jtE1Fo96dLWA9yaw4eHh9vbGxVulxOwJCAlKUEFWR/W+eZY\nDocvT+chOr949erzH939y38FXK8hPvtd6NsFwB1IpkAERbko16q16lyoMDJCMAeieBIiJUakpe17\nPEAioaoWlRKpEe7hNjLYwyEjgzARkgFhL0AtNKnOtZQigpi5x20Q7aE74RkWMaZ5qZNiGkRsa0NI\nYkF0QIscmcNiPMVhOAcCJowkzIJl+OjWmQghE4J2290Y4KgIfVsh/M2bT9PH1tYKBREzoo+ODnUS\nBDR3dzdE8MgMZOYy1QnPW0sbRCRE59PjNM/W1se13b/8CDPHNkYDkFwJoj5//ubT+z/9sdzfu5RB\n9J1e0HdqkICQBMTMxMgEKlQKT1UKkVAaoofvtBQM5ATKvT2dSMB7O5+gFFGiyHRXMPPwCAuzAME9\nfYOIGaZCRXUXIO3fAwOKcClaAbXMVQtiLMs0z6UUcA8ms37mUpEwYkRYQASGY0JC7GJgRyQcYeho\nYeZmkXvKXGweEemGAmt0Rrx7dudpRVmJz6eHgLRBYU6AHrHUiZF2jiEhAsIwcw4ief/4GKPf3S4J\n2dtWa7ms57Acfbtssj3ms7s3jxa25Q9/8eeHP/lhSDVQgzRg+eeq9K8LECmUQixPYiwiUCFRKsRM\niBkwhPdWKgKYZyYjqsh+USFEYgaChBBEUAKRTAZgd9l3Y8QUQiIoLKWUWgpRehAickRJnRFBvUzz\nzfGoRae5EicRCMOy1NGcyRIBdkAypQojQrjvv1RSBLqBkadHBLibYSJCtAiELMLhKVJKUTcrchjb\nlox1mj98eMfMtVaIhIhaVEh23SCpMFEmbrtg2kFEEpJEgPB3b39nkXf3r4D48bzidP93j5fnn3zv\n85//8vDmM5tuPMOQiZkx5WsZxD8rAJNA+k4RJXAVFEUpUItUFsLEYBRFiDRHSBZJ3020SMzMOzk+\nIAEYCHI/7yAdEoWYAXc8IwMgYi217PNpQsC0hMDUWpZSNLHOc51KrVWElmVSAqritkqlMleH3Prq\nMYhwhLs7IiEDJRIBy968CsRAjAjf7UXhBhmKUMs01UkF0L1vW2/bQDgclqlUAJimaZ6mdHeP8F5E\nVURLyZ0XkbCNrUBMpQLg6L0cD1+9f7j/6OPTZnSUU3Oo8uJPfv
iDf/Oz5XvfD6w9EEpJALcshH8w\nQQNg9wZcuSDCVCrXKrVQYaLEZEPxDNhluphA+BQ0k5CxZ2cjUeyWebga5xGJhACRn8bOgUCFmJkI\nMRESMyDqMpNqkDoCa5VpKrVqKYdlIYH0oHQiUkYmliu2IXY/MwIyMyIgojIT4Z7p5+HEGJ579tBe\nbiJAcAycSsFMVdlpAYdl2TvDbdtqqar88P794f5VqbW1lhmlTo/nCwMeD0stgmmXbWvuz+5fvbuM\njz795O3JaD7++Oe/+uynP4e7F2PrhhzMHkAICsgOf3ALikwG5B20x6hKy1SWqZbKAoQBLAQiGZ7m\nCemR4EYJCAgE4R6ahXgvxxXwQkCAhMCMiPsfr0UWJCYEwhFm4Ul0vD3INCep7R0onanWaZ7maWLm\nbo0SEDzdmEutZZqmtQ3BkN3aj7v9PJgAw32MMMsIIkpIjygshbmoKBEHMCJCqkoE7rcuQbw93kTE\n6Xy6XM5Fy/2Lexa2DFWN8DGMAKaqWiTC3MeyLMHld+9Pz169+eD8j+8vf/3F3zz77PP3hnUdA5kK\nC0EapAEmUMAfWgGIgcTJzIW1qExzmScthVWIAxFSiPaPPJEifX/d+r4sMffurWGKojBdOTsEiEi7\n9wZh/0Jhl2EzR3gi2i65LjIdFtSaxJjIqmU6pE4ipagykptBRnjY6CpaapnnxWKNGJAOQCJMgO6Q\nO3jmqslOcMsAZqlFqkgR0r2Pg5geu2mXmYvIMh+sj61tMbzUcnO4EZHz6YQAh2XxiPPlfP/8Hnel\n4HZhwnk5BNDty9cXLF++Pf3Zz77445/84m//4Sve9KZDPcwTsxNighDuXMo/vAUhCImoShEtMk+1\nqLCgXE3EKExBlATMTAoQiYzJkR6IybxzP5kFRflqQt5R9phC+JTsA/t7KCK7jQzs3kf6oc5lnkA1\nkRmIS53mw8CCgIikIkIcGAHpHuzOKKqVuRNcAVdy7ZrlaD0iIIN222EEAZEwyvWkIgBBuopLiEpR\nFhLmbd1iDMhc5mVeZhZ+OD1WVQw4XS4qcn9/f5iqjZbIPNVh43S+rNmevbn3qD/797/+i9/8zdrw\no08+HwGQ6a11MC3KwnuEjSPK/7v57JcguKrjWJiEsdYiqkJMvPOaMjRUMzwoAVgYYO+SMxAxMDMS\nIQNSshDx7kLY48gSIfYXGcKOpUgL33oHoe7DGbiUABAmqlMiM4uoJNb9PVtKqbWmpe+k3iemskcw\nopB4uLJcFTvMHtktbFhmMlMSI8ZV4wIegACqLB6gIiLqMc5bwxHTNE3TRMxu1lpTEqb9eo3TNB2X\n5TDXD+8ukLHMSzdbHbQcQeq/++1//qMv/nq8b/XZXEB7j0yzWKN3ACMugBQQASpPiSP7vgNP50EC\nGiuVklqoTvtJhoAYmZjJRKRECuBODJQoAAgigNcLEDMRJQEmEMRu997jcYF2P4fvpHCPfeZvLaKd\nbXjW2wVpcWACZARWYSpIggLretKChZHSi0izMMDWmpSJLMoIVmRGVEakiPDI4WEWwxPyesFCCiaQ\nGIgJiQkSaQh6mGdFOb17IITb29uggQTntkZkYZ11WsrU1zU8SilTmQG4r+thPj6u2zk06+3bh/Wz\nf/Gvf/rr3/zRr37TLx51MiTKkAJIygFm7DF8sx08R0zyBJr/mv0MmYiUhM5kxLFLqPMp3DABASIx\nE8OyRRozQQISC6AwK8uOPCCmnUfK135feIJF5q5lyYgMgDSz7mNYM4fzxTxFbqqF7ilfu14jSYK5\nRR/Ry1RKEWGiwt2bR45hpUwlsOyuzUwijszRR+9j23rvBpmCzCCEYLbGWHlSKRNrrUXmeUKAh/cf\nbpbDocwAmW20vs23y1IXYEJHcozNKhauTLVomUqpknK+rMf7N28f2z89th/85Itf/vY/vvzRj73R\nZWtYOBEQXAmu7yIsGNJaG+kiqIrf5Al/6/yFiJR9crVfITMi0sP3V+Qem229b2OgGZAQ8fVJ+7Sv\nkjAxIyJlIu0r3vfjIa6zffewjGhmw1sbWxuxtgCqfQwza4ZpQBkAewBErJtBMO5dABIfuUfEIBEz\nImeAhQUCQEBktt62re/kVQgADBYE4FqqlsIIROju4X65XAT5/sWLqZSH9x8i4vbmeFhmSBpjgFFh\nZZZ0r9O8HA8yzYNgpHXk+dXr95uZTn/5b//qL377H+jF67G2zRug7I8cetpXvrYBiMiei+3u3xQA\nn9B9EQF4HRHvevWIDDcbbIpB6Blhtm7baI0cKEGQYH+zfS0z/Rqp/pSOah7DbZib7xnc3dyGW++9\ne1/t0puvDbhE83Gx5psHJ5oqpadhSjfEIGvmztN0hGiwrQCOCESZaaNfAIlAMswje7feh5lFJAQw\nM+xnzvXn+EYLyyS1lHVdz4+Pbd2Ox2NV7d1UqGoJohjerb24uVumeT4coWhg9FBdDu/XAYfbL37+\n6x/84lcDNNfeHVCnZZq7pXVLzqBv0qciQlV38OV3CvB1Db4dnLG7bdzNzIbR6Nkyyd2tr+s6WlPY\nWXOAmRCZIkDJzIFASUCYgPu/6mM062OYjbb2zc2H92E2xtisrXHZetggBTz3VbdTJzVyLGpYyAiM\nz2scphtEsCCWqjV11UjMdORMGGs7MxUlvl53IgCwteFmpRTVqZQiIAAdwGR/04rs+uLee7ucj8vy\n+uPXRLheLkJ4Pp3gwoeb4/2zu6lMthkQOwAwYSnE+vZsz1//8Z//8q9eff6nPZGcUicSBpI+og9/\nKvP1pRXfmr6UUjLz/wJKendXs2iAxQAAAABJRU5ErkJggg==\n", 169 | "text/plain": [ 170 | "" 171 | ] 172 | }, 173 | "execution_count": 6, 174 | "metadata": {}, 175 | "output_type": "execute_result" 176 | } 177 | ], 178 | "source": [ 179 | "for x_train, x_target in tr_stream.get_epoch_iterator():\n", 180 | " break\n", 181 | "print \"EXAMPLE TARGET IMAGE:\"\n", 182 | "\n", 183 | "Image.fromarray(x_target[3].astype(np.uint8))" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 7, 189 | "metadata": { 190 | "collapsed": false 191 | }, 192 | "outputs": [ 193 | { 194 | "name": "stdout", 195 | "output_type": "stream", 196 | "text": [ 197 | "EXAMPLE INPUT IMAGE:\n" 198 | ] 199 | }, 200 | { 201 | 
"data": { 202 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAACAAAAAoCAIAAAAQS2/OAAAK+klEQVR4nIVWWYxl11Xd+5xzpzfc\nN481V/VQXd1V7VZ1222MQwLGAtsKUawAQviDCJAwKB/hMyA+g4SEEJL/IwI//BARhBwnUoxx27Fj\nbLe67Z6ruqprHt58xzNsPl533IEY9sfV1bo6e62lpb3PReavmMEDAMPAIZAwLtQIzIDFilNQnzC1\nBpTazFhGhUKGdTmsxJt5uW3RyHZM3rd838tm3EzWzWZcBuBZtmPbnUHwYPeQASD8H0UAQID46B2I\ngOjhISIAIoIxNFYGCEBEiIiAAMA+r+/4M4FBAjSEBASEAAhIiAjA6H9I+0zEZwDi5xD87ADQY13o\nMRzw//H+sNiYBwAQEfGxIzh+ID0mi8YQAgEQ0OeRPN7mFzr4zDw95nmMaQQCNs4GwSARexgFEdFn\nUmgsBsT/pn8k9XMKARCRM+AMCZHhLzbxCB0TIEMkGifLH/V/yPRIFSIi4wwMi9I0SmJPpwxSpskA\ncMFsi3PBGCIZo7UZWyciQUoiIAEHRCALkQMYQDBkgCTKGLVETQyIAclBhBYtnpp7cmH1ZCNr5NBQ\nGPR2w+Ag6h+TCsAo5B4gGtCcjOvaApREIvMwFz4WTgYANKIkFaJMOXcUoZbBS89cePk3n1w+1Sxn\ncowxxsFoUBoGw8Oof7B//97OnXeOdj+wkAnIMZFkPQ/RnmNpV6MA4AgckREpIgVMAsVglVnjtKkv\nFCbq3/yDr37tVy4KnQbRCAgAHBIAaFQiiYE0HIhygt+/+8HVt7/DeltElkwTRNFmakjoAjAABIYA\nBjlDSI2OQFRNYWLu0uW/+tafrc5PdIOh0cCYxRjYCMA4Y5wblSpFwun2o0Ect1oTItp971//tvPg\nHc8uCDJggOF4JQCRIURkhAYMAAJy2xGvfv135lrVjYMDY9DoRBvLzxUkM4a0ASQAAeiiLBZsyenW\nvTtTtdrql//8p98P9e4dgSQJJIAeMyAREWlixAyQpmSwsnx25dTs+samlfGBaBRExazbHY1iFbuC\nu0JYti2N7gcm0aro56ealY2tnUyxPv/Uqzfe+LYASBAkgfn5yUIgxhGUMUvLpw47R5V8NlKJZm6x\nXAUtGVI5m0ljPZIS4pgJIMTDTvDR7bWFmfb8bGN/72hAFffE84IwZUSAPzdaRASgjAEnX5yZP7nd\n6Stgi7ONJFaJBo52LOXazoNRknLL9R2v5ha4SWtFr1xe+OjmjcFodHai3RtJp3RK5LIcgAGhMeZn\nIw4AyBkoZZeys3MzrWohVmptr9vMudFomDCyLLvhlxfLpaPD7uEgPqAwzymQKk2GXzy/snvc29jZ\ny9i5JJIin/MAEIAMmbH48dhyxkaxevn3ftvT/U9++Pre+i3VOaq7MEy1cB2nXBeN9vzZ1eXl1X7L\n//j29lq3b2XsocFg7c7KRCPm/p2NXpKmODHRNkYR6Ic7jgAAOOdRHF289OyFi1+4cfVjE/Znmv5z\nz65OVf033/vk1u17+4MgYcgdf2r2xB/+8ddPnj7x3qdbP7h2VzmiknPmsl6tWL22dXj7xiciSWKt\nNZEBRKDxXYKcQ5imxXzuu6/9/U5nHyDDhFr90q9O1ef/5rt/vb+78SgsAaCmmv7Fv/jLzu17+ig4\n/8sXb66vryHbGhwCQhTFLEkSpZTSSiuljTbGGDLaaJ0YUrpcrwJwAGk7eP/9t978zmsz7RoAWMgF\nY4wDAAyGQX8YPvHE/OWa3Llyhbm1T3eGCfcT8AJtiYe5IhEQAo53uwYjCNbubqysXj7qBNoS043c\n+anmnAp3o8zmfpTEXcaNJaxcJru8dG7/5vWjm1ejt1/XHYVRsnTh6bu9QTvnj/pDdByLiBAZETHG\nEPnDBc50mrKvvPD8UtW/d9h1g/43Xn5+vuz/87//6N1OSMAI0LYdI2VZ6Flfn5+t1l134Ff52V/7\np+/9ODzxTH727MfvviUYQ6PN+DrSWnMGCGiItAEpo5sfXnn2lxai7a2Tp87MN+qGyxMV8V8/vWLI\nrpbKfj5fKuYn696puVbW8wb9MO2Hteio/8H3t7/3jy9+89vRl14U+ZwnpVKapJREQAy0SgXHjG1r\nz26XraX27LnWjAL2nz/+QavknVleunS8v799dPnscq1a9gtZMv0g6DuWY9uWOdwt9Nefe+rcleM3\nsrvXrh17OD3VJABtdJokcZqi0RPl7BMnJyYa+UrFX6yWip3k/oO9g37Ui4ZJqleXz1gs/fDajbOz\n88vz08C0l7cyGUGKhHAYYwr0QLPhcTyqLf7d9Z6wHZsxZrSybDur04sL1d946uzJdrlczVZabUvx\n66+/dWPtXmP+jGJmq7cn1h6cn6rNzc58dPfTSsGenqp3w2GjNjsa9gAMcic1/PSZ86N+dItyXz73\nkohTaQuBBgDx/ELra7++sjzdAkUE6FlFO+vtdXpOqfzcV19a3765GscrK0+OOkHQ7TitibWDzTmq\n93b3A79ie5bUoLRSzJaBur25kc4uli1HDLoj13UQqVlyn14o1UQgwxFXPDo43u6Piq2F5srFy7+7\nHMT9fm+gDV/b3G5OTjGLXzi9qJUMb13vjm5t7x1MTze1okSbf/nR6yfrHw/ymfbsshwMhdIqTgBB\nnZ6aOdMuunFqKbuUK6FtKZWEveHs6lPvv33l+pU340ESR3Epd3VmqnEw6KWW9co3vtV8afnG2t00\nlGGYKIO9KM1lvFIxE5HoHXZ3/AFjCNpozxFnpqs1P5O1fZ/bFmo3W2i05xrTiynB1rWfzBZzk5Vy\nu5Bt5ty6Yy1P1J1weHS0I63s3LkVZakw1sf9oDvoLi1MTU63jMgPU/vW3kBk8x4AVoq+61jrux0V\nqHZgXGHLdJjzK/U515TLc9NTzmhEFRupLAxxACmHCwunCvVWLwzAchSIvaHs9/qtcrZer7m2k60U\n/mNPr4c7ouDnNFEu620fdN/d3NrZ6S3ONAtubqtzHKfmmUv933/1T/zKZDU/albrMgyLTvYo6Gzs\n3jmW2i80HOYkg5FG57gfmCQuZKuCcWZ7i2XvH+4E6/2BsIQNWmtldvaO13cOD/aiouXuWqPjQRwp\nPXrnJ5WpdgsDO0421z9kIGu+X674qEzv4OijN/5NYNh7cFcl0O0czlUzWmtJTDDRtukZP7p6IPDE\nwoyUyrFYK4Mw7CxUKl95dvXa5tYP37mKwsllLAvCpZlWIzd5/8E+UOo4olLIlfwcutle0EcCBi7n\n0mbxdLlQKRRy+VKhVEXk3SR+7X5G9LoDbbRlsblifaJZnyvl5woZqpTu1wqx0hY35Vw2x3XCgsnT\nS2FEURL2QabSYWE6SlSkZM6Nyj6Pkmg4ErawhHAtN7YzTtGCP50NRRAFhiCLfLqWW27mcBh8em+T\nIa0uzkRRoNOgUnK
reW+jd1yZmLh9exjF0VFvnwzlXE+plAmV4bYM7SSMRszjLE3kEVkso0q59unG\npWeE1kYbgyQqZXeyXQl3hRyGnmdPVwsMMhxL+YyrmeKlglMvbdzbYRjGOuCMRUqFQVR0LDvvkNFZ\nz2MWHyQpMb21vdO6uDL5wiui5IvxD3rGcZqNQrHgsUGCAK5tZTKWsFzBtExicp3TZ05J5n/y/o3Q\ndqoFH43u9wYWGt9zPCFc23KEBcBLOSdOourTLy781h9JYUEYCUNAhrwMcMESnRiR1JsFP5fnYIaj\nXmc42Do8KLYny5xJwzwvQ70jW+puFEahLHoWZwY1CA225zT8Ohby0xe+sPTCKymzUKdpqv4br5fq\nmM8oV74AAAAASUVORK5CYII=\n", 203 | "text/plain": [ 204 | "" 205 | ] 206 | }, 207 | "execution_count": 7, 208 | "metadata": {}, 209 | "output_type": "execute_result" 210 | } 211 | ], 212 | "source": [ 213 | "print \"EXAMPLE INPUT IMAGE:\"\n", 214 | "Image.fromarray(x_train[3].astype(np.uint8))" 215 | ] 216 | }, 217 | { 218 | "cell_type": "markdown", 219 | "metadata": {}, 220 | "source": [ 221 | "# Setup Neural Network" 222 | ] 223 | }, 224 | { 225 | "cell_type": "code", 226 | "execution_count": null, 227 | "metadata": { 228 | "collapsed": true 229 | }, 230 | "outputs": [], 231 | "source": [ 232 | "def target_transform(X):\n", 233 | " return floatX(X).transpose(0, 3, 1, 2)/127.5 - 1.\n", 234 | "\n", 235 | "def input_transform(X):\n", 236 | " return target_transform(X)" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": 8, 242 | "metadata": { 243 | "collapsed": false 244 | }, 245 | "outputs": [], 246 | "source": [ 247 | "l2 = 1e-5 # l2 weight decay\n", 248 | "nvis = 196 # # of samples to visualize during training\n", 249 | "b1 = 0.5 # momentum term of adam\n", 250 | "nc = 3 # # of channels in image\n", 251 | "nbatch = 128 # # of examples in batch\n", 252 | "npx = 64 # # of pixels width/height of images\n", 253 | "\n", 254 | "nx = npx*npx*nc # # of dimensions in X\n", 255 | "niter = 1000 # # of iter at starting learning rate\n", 256 | "niter_decay = 30 # # of iter to linearly decay learning rate to zero\n", 257 | "lr = 0.0002 # initial learning rate for adam\n", 258 | "ntrain = 25000 # # of examples to train on\n", 259 | "\n", 260 | "relu = activations.Rectify()\n", 261 | "sigmoid = activations.Sigmoid()\n", 262 | "lrelu = activations.LeakyRectify()\n", 263 | "tanh = activations.Tanh()\n", 264 | "bce = T.nnet.binary_crossentropy\n", 265 | "\n", 266 | "def mse(x,y):\n", 267 | " return T.sum(T.pow(x-y,2), axis = 1)\n", 268 | "\n", 269 | "gifn = inits.Normal(scale=0.02)\n", 270 | "difn = inits.Normal(scale=0.02)\n", 271 | "sigma_ifn = inits.Normal(loc = -100., scale=0.02)\n", 272 | "gain_ifn = inits.Normal(loc=1., scale=0.02)\n", 273 | "bias_ifn = inits.Constant(c=0.)" 274 | ] 275 | }, 276 | { 277 | "cell_type": "markdown", 278 | "metadata": {}, 279 | "source": [ 280 | "\n", 281 | "The following methods are to help adjust the sizes of the convolutional layers in the generator and discriminator, which is very fiddly to do otherwise. The (overloaded) method make_conv_set can be used to create both the conv \n", 282 | "and deconv sets of layers. Note that the 'size' of the images is the size of the shortest side (32 in the input set, 128 in the target set). Only use powers of 2 here." 
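[Editor's aside, not part of the notebook] The sizing rule described above can be sanity-checked with a few lines of plain Python. This is a hypothetical helper (`stride_plan` does not exist in the repo); it simply mirrors the rule in the `make_conv_layer` cell that follows, where `is_deconv = output_size >= input_size` and the stride is the ratio of the two sizes, which is why only powers of 2 work cleanly.

```
def stride_plan(layer_sizes):
    # For each consecutive pair of sizes, decide conv (downsample) vs deconv
    # (upsample) and the stride, mirroring the `sub = ratio` rule in make_conv_layer.
    plan = []
    for cur, nxt in zip(layer_sizes[:-1], layer_sizes[1:]):
        mode = 'deconv' if nxt >= cur else 'conv'
        plan.append((cur, nxt, mode, max(cur, nxt) // min(cur, nxt)))
    return plan

print(stride_plan([32, 16, 8]))           # encoder: two stride-2 convolutions
print(stride_plan([8, 16, 32, 64, 128]))  # generator: four stride-2 deconvolutions
```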
283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": 11, 288 | "metadata": { 289 | "collapsed": false 290 | }, 291 | "outputs": [], 292 | "source": [ 293 | "def make_conv_layer(X, input_size, output_size, input_filters, \n", 294 | " output_filters, name, index,\n", 295 | " weights = None, filter_sz = 5):\n", 296 | " \n", 297 | " is_deconv = output_size >= input_size\n", 298 | "\n", 299 | " w_size = (input_filters, output_filters, filter_sz, filter_sz) \\\n", 300 | " if is_deconv else (output_filters, input_filters, filter_sz, filter_sz)\n", 301 | " \n", 302 | " if weights is None:\n", 303 | " w = gifn(w_size, '%sw%i' %(name, index))\n", 304 | " g = gain_ifn((output_filters), '%sg%i' %(name, index))\n", 305 | " b = bias_ifn((output_filters), '%sb%i' %(name, index))\n", 306 | " else:\n", 307 | " w,g,b = weights\n", 308 | " \n", 309 | " conv_method = deconv if is_deconv else dnn_conv\n", 310 | " activation = relu if is_deconv else lrelu\n", 311 | " \n", 312 | " sub = output_size / input_size if is_deconv else input_size / output_size\n", 313 | " \n", 314 | " if filter_sz == 3:\n", 315 | " bm = 1\n", 316 | " else:\n", 317 | " bm = 2\n", 318 | " \n", 319 | " layer = activation(batchnorm(conv_method(X, w, subsample=(sub, sub), border_mode=(bm, bm)), g=g, b=b))\n", 320 | " \n", 321 | " return layer, [w,g,b]\n", 322 | "\n", 323 | "def make_conv_set(input, layer_sizes, num_filters, name, weights = None, filter_szs = None):\n", 324 | " assert(len(layer_sizes) == len(num_filters))\n", 325 | " \n", 326 | " vars_ = []\n", 327 | " layers_ = []\n", 328 | " current_layer = input\n", 329 | " \n", 330 | " for i in range(len(layer_sizes) - 1):\n", 331 | " input_size = layer_sizes[i]\n", 332 | " output_size = layer_sizes[i + 1]\n", 333 | " input_filters = num_filters[i]\n", 334 | " output_filters = num_filters[i + 1]\n", 335 | " \n", 336 | " if weights is not None:\n", 337 | " this_wts = weights[i * 3 : i * 3 + 3]\n", 338 | " else:\n", 339 | " this_wts = None\n", 340 | " \n", 341 | " if filter_szs != None:\n", 342 | " filter_sz = filter_szs[i]\n", 343 | " else:\n", 344 | " filter_sz = 5\n", 345 | " \n", 346 | " layer, new_vars = make_conv_layer(current_layer, input_size, output_size, \n", 347 | " input_filters, output_filters, name, i, \n", 348 | " weights = this_wts, filter_sz = filter_sz)\n", 349 | " \n", 350 | " vars_ += new_vars\n", 351 | " layers_ += [layer]\n", 352 | " current_layer = layer\n", 353 | " \n", 354 | " return current_layer, vars_, layers_" 355 | ] 356 | }, 357 | { 358 | "cell_type": "code", 359 | "execution_count": null, 360 | "metadata": { 361 | "collapsed": true 362 | }, 363 | "outputs": [], 364 | "source": [ 365 | "# inputs\n", 366 | "X = T.tensor4()\n", 367 | "\n", 368 | "## encode layer\n", 369 | "e_layer_sizes = [32, 16, 8]\n", 370 | "e_filter_sizes = [3, 256, 1024]\n", 371 | "\n", 372 | "eX, e_params, e_layers = make_conv_set(X, e_layer_sizes, e_filter_sizes, \"e\")\n", 373 | "\n", 374 | "## generative layer\n", 375 | "g_layer_sizes = [8, 16, 32, 64, 128]\n", 376 | "g_num_filters = [1024, 512, 256, 256, 128]\n", 377 | "\n", 378 | "g_out, g_params, g_layers = make_conv_set(eX, g_layer_sizes, g_num_filters, \"g\")\n", 379 | "gwx = gifn((128, nc, 5, 5), 'gwx')\n", 380 | "g_params += [gwx]\n", 381 | "gX = tanh(deconv(g_out, gwx, subsample=(1, 1), border_mode=(2, 2)))\n", 382 | "\n", 383 | "## discrim layer(s)\n", 384 | "\n", 385 | "df1 = 128\n", 386 | "d_layer_sizes = [128, 64, 32, 16, 8]\n", 387 | "d_filter_sizes = [3, df1, 2 * df1, 4 * df1, 8 * df1]\n", 388 | 
"\n", 389 | "dwy = difn((df1 * 8 * 10 * 8, 1), 'dwy')\n", 390 | "\n", 391 | "def discrim(input, name, weights=None):\n", 392 | " d_out, disc_params, d_layers = make_conv_set(input, d_layer_sizes, d_filter_sizes, name, weights = weights)\n", 393 | " d_flat = T.flatten(d_out, 2)\n", 394 | " \n", 395 | " disc_params += [dwy]\n", 396 | " y = sigmoid(T.dot(d_flat, dwy))\n", 397 | " \n", 398 | " return y, disc_params, d_layers\n", 399 | "\n", 400 | "# target outputs\n", 401 | "target = T.tensor4()\n", 402 | "\n", 403 | "p_real, d_params, d_layers = discrim(target, \"d\")\n", 404 | "#we need to make sure the p_gen params are the same as the p_real params\n", 405 | "p_gen , d_params2, d_layers = discrim(gX, \"d\", weights=d_params)" 406 | ] 407 | }, 408 | { 409 | "cell_type": "code", 410 | "execution_count": 21, 411 | "metadata": { 412 | "collapsed": false 413 | }, 414 | "outputs": [ 415 | { 416 | "data": { 417 | "text/plain": [ 418 | "(128, 1)" 419 | ] 420 | }, 421 | "execution_count": 21, 422 | "metadata": {}, 423 | "output_type": "execute_result" 424 | } 425 | ], 426 | "source": [ 427 | "# test everything working so far (errors are most likely size mismatches)\n", 428 | "f = theano.function([X], p_gen)\n", 429 | "f(input_transform(x_train)).shape" 430 | ] 431 | }, 432 | { 433 | "cell_type": "markdown", 434 | "metadata": {}, 435 | "source": [ 436 | "Next we set up the various cost functions we need" 437 | ] 438 | }, 439 | { 440 | "cell_type": "code", 441 | "execution_count": 23, 442 | "metadata": { 443 | "collapsed": false 444 | }, 445 | "outputs": [], 446 | "source": [ 447 | "from theano.tensor.signal.downsample import max_pool_2d\n", 448 | "\n", 449 | "## GAN costs\n", 450 | "d_cost_real = bce(p_real, T.ones(p_real.shape)).mean()\n", 451 | "d_cost_gen = bce(p_gen, T.zeros(p_gen.shape)).mean()\n", 452 | "g_cost_d = bce(p_gen, T.ones(p_gen.shape)).mean()\n", 453 | "\n", 454 | "## MSE encoding cost is done on an (averaged) downscaling of the image\n", 455 | "target_pool = max_pool_2d(target, (4,4), mode=\"average_exc_pad\",ignore_border=True)\n", 456 | "target_flat = T.flatten(target_pool, 2)\n", 457 | "gX_pool = max_pool_2d(gX, (4,4), mode=\"average_exc_pad\",ignore_border=True)\n", 458 | "gX_flat = T.flatten(gX_pool,2)\n", 459 | "enc_cost = mse(gX_flat, target_flat).mean() \n", 460 | "\n", 461 | "## generator cost is a linear combination of the discrim cost plus the MSE enocding cost\n", 462 | "d_cost = d_cost_real + d_cost_gen\n", 463 | "g_cost = g_cost_d + enc_cost / 100 ## if the enc_cost is weighted too highly it will take a long time to train\n", 464 | "\n", 465 | "## N.B. 
e_cost and e_updates will only try and minimise MSE loss on the autoencoder (for debugging)\n", 466 | "e_cost = enc_cost\n", 467 | "\n", 468 | "cost = [g_cost_d, d_cost_real, enc_cost]\n", 469 | "\n", 470 | "elrt = sharedX(0.002)\n", 471 | "lrt = sharedX(lr)\n", 472 | "d_updater = updates.Adam(lr=lrt, b1=b1, regularizer=updates.Regularizer(l2=l2))\n", 473 | "g_updater = updates.Adam(lr=lrt, b1=b1, regularizer=updates.Regularizer(l2=l2))\n", 474 | "e_updater = updates.Adam(lr=elrt, b1=b1, regularizer=updates.Regularizer(l2=l2))\n", 475 | "\n", 476 | "d_updates = d_updater(d_params, d_cost)\n", 477 | "g_updates = g_updater(e_params + g_params, g_cost)\n", 478 | "e_updates = e_updater(e_params, e_cost)" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": 24, 484 | "metadata": { 485 | "collapsed": false 486 | }, 487 | "outputs": [ 488 | { 489 | "name": "stdout", 490 | "output_type": "stream", 491 | "text": [ 492 | "COMPILING\n", 493 | "30.91 seconds to compile theano functions\n" 494 | ] 495 | } 496 | ], 497 | "source": [ 498 | "print 'COMPILING'\n", 499 | "t = time()\n", 500 | "_train_g = theano.function([X, target], cost, updates=g_updates)\n", 501 | "_train_d = theano.function([X, target], cost, updates=d_updates)\n", 502 | "_train_e = theano.function([X, target], cost, updates=e_updates)\n", 503 | "_get_cost = theano.function([X, target], cost)\n", 504 | "print '%.2f seconds to compile theano functions'%(time()-t)" 505 | ] 506 | }, 507 | { 508 | "cell_type": "markdown", 509 | "metadata": {}, 510 | "source": [ 511 | "# Training code\n", 512 | "\n", 513 | "Code for generating the images every 100 batches or so." 514 | ] 515 | }, 516 | { 517 | "cell_type": "code", 518 | "execution_count": 25, 519 | "metadata": { 520 | "collapsed": false 521 | }, 522 | "outputs": [], 523 | "source": [ 524 | "img_dir = \"gen_images/\"\n", 525 | "\n", 526 | "if not os.path.exists(img_dir):\n", 527 | " os.makedirs(img_dir)\n", 528 | "\n", 529 | "ae_encode = theano.function([X, target], [gX, target])\n", 530 | "\n", 531 | "def inverse(X):\n", 532 | " X_pred = (X.transpose(0, 2, 3, 1) + 1) * 127.5\n", 533 | " X_pred = np.rint(X_pred).astype(int)\n", 534 | " X_pred = np.clip(X_pred, a_min = 0, a_max = 255)\n", 535 | " return X_pred.astype('uint8')\n", 536 | "\n", 537 | "\n", 538 | "def save_sample_pictures():\n", 539 | " for te_train, te_target in test_stream.get_epoch_iterator():\n", 540 | " break\n", 541 | " te_out, te_ta = ae_encode(input_transform(te_train), target_transform(te_target))\n", 542 | " te_reshape = inverse(te_out)\n", 543 | " te_target_reshape = inverse(te_ta)\n", 544 | "\n", 545 | " new_size = (128 * 6, 160 * 12)\n", 546 | " new_im = Image.new('RGB', new_size)\n", 547 | " r = np.random.choice(128, 24, replace=False).reshape(2,12)\n", 548 | " for i in range(2):\n", 549 | " for j in range(12):\n", 550 | " index = r[i][j]\n", 551 | " \n", 552 | " target_im = Image.fromarray(te_target_reshape[index])\n", 553 | " train_im = Image.fromarray(te_train[index].repeat(axis=0,repeats=4).repeat(axis=1,repeats=4).astype(np.uint8))\n", 554 | " im = Image.fromarray(te_reshape[index])\n", 555 | " \n", 556 | " new_im.paste(target_im, (128 * i * 3, 160 * j))\n", 557 | " new_im.paste(train_im, (128 * (i * 3 + 1), 160 * j))\n", 558 | " new_im.paste(im, (128 * (i * 3 + 2), 160 * j))\n", 559 | " img_loc = \"gen_images/%i.png\" %int(time()) \n", 560 | " print \"saving images to %s\" %img_loc\n", 561 | " new_im.save(img_loc)\n", 562 | " \n", 563 | "save_sample_pictures()" 564 | ] 565 | }, 566 | { 
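[Editor's aside, not part of the notebook] The `enc_cost` term set up a few cells above compares 4x4 average-pooled versions of the generated and target images, so the generator only has to match a blurred target. The following NumPy-only sketch (hypothetical helper names, not the Theano graph used above) shows the same computation on plain arrays.

```
import numpy as np

def avg_pool_4x4(x):
    # x has shape (batch, channels, height, width); height and width divisible by 4
    n, c, h, w = x.shape
    return x.reshape(n, c, h // 4, 4, w // 4, 4).mean(axis=(3, 5))

def pooled_mse(generated, target):
    # sum of squared errors per sample on the pooled images, then batch mean,
    # matching mse(gX_flat, target_flat).mean() above
    g = avg_pool_4x4(generated).reshape(len(generated), -1)
    t = avg_pool_4x4(target).reshape(len(target), -1)
    return np.sum((g - t) ** 2, axis=1).mean()

fake = np.random.uniform(-1, 1, (2, 3, 160, 128))
real = np.random.uniform(-1, 1, (2, 3, 160, 128))
print(pooled_mse(fake, real))
```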
567 | "cell_type": "code", 568 | "execution_count": 26, 569 | "metadata": { 570 | "collapsed": false 571 | }, 572 | "outputs": [], 573 | "source": [ 574 | "def mn(l):\n", 575 | " if sum(l) == 0:\n", 576 | " return 0\n", 577 | " return sum(l) / len(l)\n", 578 | "\n", 579 | "## TODO : nicer way of coding these means?\n", 580 | "\n", 581 | "def get_test_errors():\n", 582 | " print \"getting test error\"\n", 583 | " g_costs = []\n", 584 | " d_costs = []\n", 585 | " e_costs = []\n", 586 | " k_costs = []\n", 587 | " for i in range(20):\n", 588 | " try:\n", 589 | " x_train, x_target = te_iterator.next()\n", 590 | " except:\n", 591 | " te_iterator = val_stream.get_epoch_iterator()\n", 592 | " x_train, x_target = te_iterator.next()\n", 593 | " x = input_transform(x_train)\n", 594 | " t = target_transform(x_target)\n", 595 | " cost = _get_cost(x,t)\n", 596 | " g_cost, d_cost, enc_cost = cost\n", 597 | " g_costs.append(g_cost)\n", 598 | " d_costs.append(d_cost)\n", 599 | " e_costs.append(enc_cost)\n", 600 | " \n", 601 | " s= \" ,\".join([\"test errors :\", str(mn(g_costs)), str(mn(d_costs)), str(mn(e_costs))])\n", 602 | " return s\n" 603 | ] 604 | }, 605 | { 606 | "cell_type": "markdown", 607 | "metadata": {}, 608 | "source": [ 609 | "# Train Model\n", 610 | "\n", 611 | "Finally, we come to the actual training of the model. This code can be keyboard interrupted, and the weights will be stored in memory, allowing us to stop, adjust and restart the training (this is how I got the model to train). For advice on training see the blog post at (#TODO)" 612 | ] 613 | }, 614 | { 615 | "cell_type": "code", 616 | "execution_count": null, 617 | "metadata": { 618 | "collapsed": true 619 | }, 620 | "outputs": [], 621 | "source": [ 622 | "iterator = tr_stream.get_epoch_iterator()\n", 623 | "\n", 624 | "# you may wish to reset the learning rate to something of your choosing if you feel it is too high/low\n", 625 | "lrt = sharedX(lr)" 626 | ] 627 | }, 628 | { 629 | "cell_type": "code", 630 | "execution_count": 33, 631 | "metadata": { 632 | "collapsed": false, 633 | "scrolled": false 634 | }, 635 | "outputs": [ 636 | { 637 | "name": "stderr", 638 | "output_type": "stream", 639 | "text": [ 640 | "\r", 641 | " 0%| | 0/72 [00:00\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 32\u001b[0m \u001b[0mcost\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0m_train_g\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mt\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 34\u001b[1;33m \u001b[0mcost\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0m_train_d\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mt\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 35\u001b[0m \u001b[1;31m#cost = _train_e(x,t)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 36\u001b[0m \u001b[0mg_cost\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0md_cost\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0menc_cost\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcost\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 773 | "\u001b[1;32m/home/mike/envs/lasagne/lib/python2.7/site-packages/theano/compile/function_module.pyc\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 857\u001b[0m \u001b[0mt0_fn\u001b[0m \u001b[1;33m=\u001b[0m 
\u001b[0mtime\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtime\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 858\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 859\u001b[1;33m \u001b[0moutputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 860\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 861\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfn\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'position_of_error'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 774 | "\u001b[1;31mKeyboardInterrupt\u001b[0m: " 775 | ] 776 | } 777 | ], 778 | "source": [ 779 | "from time import time\n", 780 | "\n", 781 | "n_updates = 0\n", 782 | "t = time()\n", 783 | "\n", 784 | "n_epochs = 200\n", 785 | "\n", 786 | "print \"STARTING\"\n", 787 | "\n", 788 | "\n", 789 | "\n", 790 | "for epoch in range(n_epochs):\n", 791 | " \n", 792 | " \n", 793 | " tm = time()\n", 794 | "\n", 795 | " g_costs = []\n", 796 | " d_costs = []\n", 797 | " e_costs = []\n", 798 | " \n", 799 | " ## TODO : produces pretty ugly output, redo this?\n", 800 | " for i in tqdm(range(num_samples/128)):\n", 801 | " \n", 802 | " try:\n", 803 | " x_train, x_target = iterator.next()\n", 804 | " except:\n", 805 | " iterator = tr_stream.get_epoch_iterator()\n", 806 | " x_train, x_target = iterator.next()\n", 807 | " x = input_transform(x_train)\n", 808 | " t = target_transform(x_target)\n", 809 | "\n", 810 | " ## optional - change the criteria for how often we train the generator or discriminator\n", 811 | " if n_updates % 2 == 1:\n", 812 | " cost = _train_g(x,t) \n", 813 | " else:\n", 814 | " cost = _train_d(x,t)\n", 815 | " \n", 816 | " # optional - only train the generator on MSE cost\n", 817 | " #cost = _train_e(x,t)\n", 818 | " g_cost, d_cost, enc_cost = cost\n", 819 | " g_costs.append(g_cost)\n", 820 | " d_costs.append(d_cost)\n", 821 | " e_costs.append(enc_cost)\n", 822 | "\n", 823 | " if n_updates % 100 == 0:\n", 824 | " s= \" ,\".join([\"training errors :\", str(mn(g_costs)), str(mn(d_costs)), str(mn(e_costs))])\n", 825 | " g_costs = []\n", 826 | " d_costs = []\n", 827 | " e_costs = []\n", 828 | " print get_test_errors()\n", 829 | " print s\n", 830 | " sys.stdout.flush()\n", 831 | " save_sample_pictures()\n", 832 | " n_updates += 1 \n", 833 | "\n", 834 | " print \"epoch %i of %i took %.2f seconds\" %(epoch, n_epochs, time() - tm)\n", 835 | " \n", 836 | " ## optional - reduce the learning rate as you go\n", 837 | " #lrt.set_value(floatX(lrt.get_value() * 0.95))\n", 838 | " #print lrt.get_value()\n", 839 | " \n", 840 | " \n", 841 | " sys.stdout.flush()\n", 842 | " \n", 843 | " \n", 844 | " " 845 | ] 846 | }, 847 | { 848 | "cell_type": "markdown", 849 | "metadata": {}, 850 | "source": [ 851 | "# Save weights if wanted\n", 852 | "You can reuse them by using the weights in the make_conv_set method #TODO - actually try this!" 
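[Editor's aside, not part of the notebook] One plausible way to act on the TODO above, sketched but untested: load the pickled parameter lists and feed them back through the `weights` argument of `make_conv_set`, the same way `discrim()` re-uses `d_params` earlier in the notebook. This assumes the earlier cells (defining `make_conv_set`, the size lists, `tanh` and `deconv`) have already been run, and note that `g_params` has the extra output filter `gwx` appended at the end.

```
import pickle

e_params, g_params, d_params = pickle.load(open("faces_dcgan.pkl"))

X = T.tensor4()
eX, _, _ = make_conv_set(X, e_layer_sizes, e_filter_sizes, "e", weights=e_params)
g_out, _, _ = make_conv_set(eX, g_layer_sizes, g_num_filters, "g", weights=g_params[:-1])
gX = tanh(deconv(g_out, g_params[-1], subsample=(1, 1), border_mode=(2, 2)))
```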
853 | ] 854 | }, 855 | { 856 | "cell_type": "code", 857 | "execution_count": null, 858 | "metadata": { 859 | "collapsed": false 860 | }, 861 | "outputs": [], 862 | "source": [ 863 | "import pickle\n", 864 | "\n", 865 | "all_params = [e_params, g_params, d_params]\n", 866 | "\n", 867 | "pickle.dump(all_params, open(\"faces_dcgan.pkl\", 'w'))" 868 | ] 869 | }, 870 | { 871 | "cell_type": "code", 872 | "execution_count": null, 873 | "metadata": { 874 | "collapsed": true 875 | }, 876 | "outputs": [], 877 | "source": [] 878 | } 879 | ], 880 | "metadata": { 881 | "kernelspec": { 882 | "display_name": "Python 2", 883 | "language": "python", 884 | "name": "python2" 885 | }, 886 | "language_info": { 887 | "codemirror_mode": { 888 | "name": "ipython", 889 | "version": 2 890 | }, 891 | "file_extension": ".py", 892 | "mimetype": "text/x-python", 893 | "name": "python", 894 | "nbconvert_exporter": "python", 895 | "pygments_lexer": "ipython2", 896 | "version": "2.7.6" 897 | } 898 | }, 899 | "nbformat": 4, 900 | "nbformat_minor": 0 901 | } 902 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | blocks==0.0.1 2 | fuel==0.1.1 3 | h5py==2.5.0 4 | ipython==4.0.0 5 | ipython-genutils==0.1.0 6 | numpy==1.10.2 7 | Pillow==3.0.0 8 | Theano==0.7.0 9 | tqdm==3.4.0 10 | -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mikesj-public/dcgan-autoencoder/772815cb009349abd47b7f31c1ba5f41bef24b51/utils/__init__.py -------------------------------------------------------------------------------- /utils/activations.py: -------------------------------------------------------------------------------- 1 | import theano 2 | import theano.tensor as T 3 | 4 | class Softmax(object): 5 | 6 | def __init__(self): 7 | pass 8 | 9 | def __call__(self, x): 10 | e_x = T.exp(x - x.max(axis=1).dimshuffle(0, 'x')) 11 | return e_x / e_x.sum(axis=1).dimshuffle(0, 'x') 12 | 13 | class ConvSoftmax(object): 14 | 15 | def __init__(self): 16 | pass 17 | 18 | def __call__(self, x): 19 | e_x = T.exp(x - x.max(axis=1, keepdims=True)) 20 | return e_x / e_x.sum(axis=1, keepdims=True) 21 | 22 | class Maxout(object): 23 | 24 | def __init__(self, n_pool=2): 25 | self.n_pool = n_pool 26 | 27 | def __call__(self, x): 28 | if x.ndim == 2: 29 | x = T.max([x[:, n::self.n_pool] for n in range(self.n_pool)], axis=0) 30 | elif x.ndim == 4: 31 | x = T.max([x[:, n::self.n_pool, :, :] for n in range(self.n_pool)], axis=0) 32 | else: 33 | raise NotImplementedError 34 | return x 35 | 36 | class Rectify(object): 37 | 38 | def __init__(self): 39 | pass 40 | 41 | def __call__(self, x): 42 | return (x + abs(x)) / 2.0 43 | 44 | class ClippedRectify(object): 45 | 46 | def __init__(self, clip=10.): 47 | self.clip = clip 48 | 49 | def __call__(self, x): 50 | return T.clip((x + abs(x)) / 2.0, 0., self.clip) 51 | 52 | class LeakyRectify(object): 53 | 54 | def __init__(self, leak=0.2): 55 | self.leak = leak 56 | 57 | def __call__(self, x): 58 | f1 = 0.5 * (1 + self.leak) 59 | f2 = 0.5 * (1 - self.leak) 60 | return f1 * x + f2 * abs(x) 61 | 62 | class Prelu(object): 63 | 64 | def __init__(self): 65 | pass 66 | 67 | def __call__(self, x, leak): 68 | if x.ndim == 4: 69 | leak = leak.dimshuffle('x', 0, 'x', 'x') 70 | f1 = 0.5 * (1 + leak) 71 | f2 = 0.5 * (1 - leak) 72 | return f1 * x + f2 
* abs(x) 73 | 74 | class Tanh(object): 75 | 76 | def __init__(self): 77 | pass 78 | 79 | def __call__(self, x): 80 | return T.tanh(x) 81 | 82 | class Sigmoid(object): 83 | 84 | def __init__(self): 85 | pass 86 | 87 | def __call__(self, x): 88 | return T.nnet.sigmoid(x) 89 | 90 | class Linear(object): 91 | 92 | def __init__(self): 93 | pass 94 | 95 | def __call__(self, x): 96 | return x 97 | 98 | class HardSigmoid(object): 99 | 100 | def __init__(self): 101 | pass 102 | 103 | def __call__(self, X): 104 | return T.clip(X + 0.5, 0., 1.) 105 | 106 | class TRec(object): 107 | 108 | def __init__(self, t=1): 109 | self.t = t 110 | 111 | def __call__(self, X): 112 | return X*(X > self.t) 113 | 114 | class HardTanh(object): 115 | 116 | def __init__(self): 117 | pass 118 | 119 | def __call__(self, X): 120 | return T.clip(X, -1., 1.) -------------------------------------------------------------------------------- /utils/config.py: -------------------------------------------------------------------------------- 1 | data_dir = '/home/mike/Documents/convolutional_variational_autoencoder/dcgan/mnist/' 2 | -------------------------------------------------------------------------------- /utils/costs.py: -------------------------------------------------------------------------------- 1 | import theano 2 | import theano.tensor as T 3 | 4 | def CategoricalCrossEntropy(y_true, y_pred): 5 | return T.nnet.categorical_crossentropy(y_pred, y_true).mean() 6 | 7 | def BinaryCrossEntropy(y_true, y_pred): 8 | return T.nnet.binary_crossentropy(y_pred, y_true).mean() 9 | 10 | def MeanSquaredError(y_true, y_pred): 11 | return T.sqr(y_pred - y_true).mean() 12 | 13 | def MeanAbsoluteError(y_true, y_pred): 14 | return T.abs_(y_pred - y_true).mean() 15 | 16 | def SquaredHinge(y_true, y_pred): 17 | return T.sqr(T.maximum(1. - y_true * y_pred, 0.)).mean() 18 | 19 | def Hinge(y_true, y_pred): 20 | return T.maximum(1. - y_true * y_pred, 0.).mean() 21 | 22 | cce = CCE = CategoricalCrossEntropy 23 | bce = BCE = BinaryCrossEntropy 24 | mse = MSE = MeanSquaredError 25 | mae = MAE = MeanAbsoluteError 26 | -------------------------------------------------------------------------------- /utils/cv2_utils.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | 3 | def min_resize(x, size, interpolation=cv2.INTER_LINEAR): 4 | """ 5 | Resize an image so that it is size along the minimum spatial dimension. 
6 | """ 7 | w, h = map(float, x.shape[:2]) 8 | if min([w, h]) != size: 9 | if w <= h: 10 | x = cv2.resize(x, (int(round((h/w)*size)), int(size)), interpolation=interpolation) 11 | else: 12 | x = cv2.resize(x, (int(size), int(round((w/h)*size))), interpolation=interpolation) 13 | return x -------------------------------------------------------------------------------- /utils/data_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from sklearn import utils as skutils 3 | 4 | from rng import np_rng, py_rng 5 | 6 | def center_crop(x, ph, pw=None): 7 | if pw is None: 8 | pw = ph 9 | h, w = x.shape[:2] 10 | j = int(round((h - ph)/2.)) 11 | i = int(round((w - pw)/2.)) 12 | return x[j:j+ph, i:i+pw] 13 | 14 | def patch(x, ph, pw=None): 15 | if pw is None: 16 | pw = ph 17 | h, w = x.shape[:2] 18 | j = py_rng.randint(0, h-ph) 19 | i = py_rng.randint(0, w-pw) 20 | x = x[j:j+ph, i:i+pw] 21 | return x 22 | 23 | def list_shuffle(*data): 24 | idxs = np_rng.permutation(np.arange(len(data[0]))) 25 | if len(data) == 1: 26 | return [data[0][idx] for idx in idxs] 27 | else: 28 | return [[d[idx] for idx in idxs] for d in data] 29 | 30 | def shuffle(*arrays, **options): 31 | if isinstance(arrays[0][0], basestring): 32 | return list_shuffle(*arrays) 33 | else: 34 | return skutils.shuffle(*arrays, random_state=np_rng) 35 | 36 | def OneHot(X, n=None, negative_class=0.): 37 | X = np.asarray(X).flatten() 38 | if n is None: 39 | n = np.max(X) + 1 40 | Xoh = np.ones((len(X), n)) * negative_class 41 | Xoh[np.arange(len(X)), X] = 1. 42 | return Xoh 43 | 44 | def iter_data(*data, **kwargs): 45 | size = kwargs.get('size', 128) 46 | try: 47 | n = len(data[0]) 48 | except: 49 | n = data[0].shape[0] 50 | batches = n / size 51 | if n % size != 0: 52 | batches += 1 53 | 54 | for b in range(batches): 55 | start = b * size 56 | end = (b + 1) * size 57 | if end > n: 58 | end = n 59 | if len(data) == 1: 60 | yield data[0][start:end] 61 | else: 62 | yield tuple([d[start:end] for d in data]) -------------------------------------------------------------------------------- /utils/inits.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | 4 | import theano 5 | import theano.tensor as T 6 | 7 | from theano_utils import sharedX, floatX, intX 8 | from rng import np_rng 9 | 10 | class Uniform(object): 11 | def __init__(self, scale=0.05): 12 | self.scale = 0.05 13 | 14 | def __call__(self, shape, name=None): 15 | return sharedX(np_rng.uniform(low=-self.scale, high=self.scale, size=shape), name=name) 16 | 17 | class Normal(object): 18 | def __init__(self, loc=0., scale=0.05): 19 | self.scale = scale 20 | self.loc = loc 21 | 22 | def __call__(self, shape, name=None): 23 | return sharedX(np_rng.normal(loc=self.loc, scale=self.scale, size=shape), name=name) 24 | 25 | class Orthogonal(object): 26 | """ benanne lasagne ortho init (faster than qr approach)""" 27 | def __init__(self, scale=1.1): 28 | self.scale = scale 29 | 30 | def __call__(self, shape, name=None): 31 | print 'called orthogonal init with shape', shape 32 | flat_shape = (shape[0], np.prod(shape[1:])) 33 | a = np_rng.normal(0.0, 1.0, flat_shape) 34 | u, _, v = np.linalg.svd(a, full_matrices=False) 35 | q = u if u.shape == flat_shape else v # pick the one with the correct shape 36 | q = q.reshape(shape) 37 | return sharedX(self.scale * q[:shape[0], :shape[1]], name=name) 38 | 39 | class Frob(object): 40 | 41 | def __init__(self): 42 | pass 43 | 44 
| def __call__(self, shape, name=None): 45 | r = np_rng.normal(loc=0, scale=0.01, size=shape) 46 | r = r/np.sqrt(np.sum(r**2))*np.sqrt(shape[1]) 47 | return sharedX(r, name=name) 48 | 49 | class Constant(object): 50 | 51 | def __init__(self, c=0.): 52 | self.c = c 53 | 54 | def __call__(self, shape, name=None): 55 | return sharedX(np.ones(shape) * self.c, name=name) 56 | 57 | class ConvIdentity(object): 58 | 59 | def __init__(self, scale=1.): 60 | self.scale = scale 61 | 62 | def __call__(self, shape, name=None): 63 | w = np.zeros(shape) 64 | ycenter = shape[2]//2 65 | xcenter = shape[3]//2 66 | 67 | if shape[0] == shape[1]: 68 | o_idxs = np.arange(shape[0]) 69 | i_idxs = np.arange(shape[1]) 70 | elif shape[1] < shape[0]: 71 | o_idxs = np.arange(shape[0]) 72 | i_idxs = np.random.permutation(np.tile(np.arange(shape[1]), shape[0]/shape[1]+1))[:shape[0]] 73 | w[o_idxs, i_idxs, ycenter, xcenter] = self.scale 74 | return sharedX(w, name=name) 75 | 76 | class Identity(object): 77 | 78 | def __init__(self, scale=0.25): 79 | self.scale = scale 80 | 81 | def __call__(self, shape, name=None): 82 | if shape[0] != shape[1]: 83 | w = np.zeros(shape) 84 | o_idxs = np.arange(shape[0]) 85 | i_idxs = np.random.permutation(np.tile(np.arange(shape[1]), shape[0]/shape[1]+1))[:shape[0]] 86 | w[o_idxs, i_idxs] = self.scale 87 | else: 88 | w = np.identity(shape[0]) * self.scale 89 | return sharedX(w, name=name) 90 | 91 | class ReluInit(object): 92 | 93 | def __init__(self): 94 | pass 95 | 96 | def __call__(self, shape, name=None): 97 | if len(shape) == 2: 98 | scale = np.sqrt(2./shape[0]) 99 | elif len(shape) == 4: 100 | scale = np.sqrt(2./np.prod(shape[1:])) 101 | else: 102 | raise NotImplementedError 103 | return sharedX(np_rng.normal(size=shape, scale=scale), name=name) -------------------------------------------------------------------------------- /utils/metrics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import theano 4 | import theano.tensor as T 5 | import gc 6 | import time 7 | 8 | from theano_utils import floatX 9 | from ops import euclidean, cosine 10 | 11 | from sklearn import metrics 12 | from sklearn.linear_model import LogisticRegression as LR 13 | 14 | def cv_reg_lr(trX, trY, vaX, vaY, Cs=[0.01, 0.05, 0.1, 0.5, 1., 5., 10., 50., 100.]): 15 | tr_accs = [] 16 | va_accs = [] 17 | models = [] 18 | for C in Cs: 19 | model = LR(C=C) 20 | model.fit(trX, trY) 21 | tr_pred = model.predict(trX) 22 | va_pred = model.predict(vaX) 23 | tr_acc = metrics.accuracy_score(trY, tr_pred) 24 | va_acc = metrics.accuracy_score(vaY, va_pred) 25 | print '%.4f %.4f %.4f'%(C, tr_acc, va_acc) 26 | tr_accs.append(tr_acc) 27 | va_accs.append(va_acc) 28 | models.append(model) 29 | best = np.argmax(va_accs) 30 | print 'best model C: %.4f tr_acc: %.4f va_acc: %.4f'%(Cs[best], tr_accs[best], va_accs[best]) 31 | return models[best] 32 | 33 | def gpu_nnc_predict(trX, trY, teX, metric='cosine', batch_size=4096): 34 | if metric == 'cosine': 35 | metric_fn = cosine_dist 36 | else: 37 | metric_fn = euclid_dist 38 | idxs = [] 39 | for i in range(0, len(teX), batch_size): 40 | mb_dists = [] 41 | mb_idxs = [] 42 | for j in range(0, len(trX), batch_size): 43 | dist = metric_fn(floatX(teX[i:i+batch_size]), floatX(trX[j:j+batch_size])) 44 | if metric == 'cosine': 45 | mb_dists.append(np.max(dist, axis=1)) 46 | mb_idxs.append(j+np.argmax(dist, axis=1)) 47 | else: 48 | mb_dists.append(np.min(dist, axis=1)) 49 | mb_idxs.append(j+np.argmin(dist, axis=1)) 
50 | mb_idxs = np.asarray(mb_idxs) 51 | mb_dists = np.asarray(mb_dists) 52 | if metric == 'cosine': 53 | i = mb_idxs[np.argmax(mb_dists, axis=0), np.arange(mb_idxs.shape[1])] 54 | else: 55 | i = mb_idxs[np.argmin(mb_dists, axis=0), np.arange(mb_idxs.shape[1])] 56 | idxs.append(i) 57 | idxs = np.concatenate(idxs, axis=0) 58 | nearest = trY[idxs] 59 | return nearest 60 | 61 | def gpu_nnd_score(trX, teX, metric='cosine', batch_size=4096): 62 | if metric == 'cosine': 63 | metric_fn = cosine_dist 64 | else: 65 | metric_fn = euclid_dist 66 | dists = [] 67 | for i in range(0, len(teX), batch_size): 68 | mb_dists = [] 69 | for j in range(0, len(trX), batch_size): 70 | dist = metric_fn(floatX(teX[i:i+batch_size]), floatX(trX[j:j+batch_size])) 71 | if metric == 'cosine': 72 | mb_dists.append(np.max(dist, axis=1)) 73 | else: 74 | mb_dists.append(np.min(dist, axis=1)) 75 | mb_dists = np.asarray(mb_dists) 76 | if metric == 'cosine': 77 | d = np.max(mb_dists, axis=0) 78 | else: 79 | d = np.min(mb_dists, axis=0) 80 | dists.append(d) 81 | dists = np.concatenate(dists, axis=0) 82 | return float(np.mean(dists)) 83 | 84 | A = T.matrix() 85 | B = T.matrix() 86 | 87 | ed = euclidean(A, B) 88 | cd = cosine(A, B) 89 | 90 | cosine_dist = theano.function([A, B], cd) 91 | euclid_dist = theano.function([A, B], ed) 92 | 93 | def nnc_score(trX, trY, teX, teY, metric='euclidean'): 94 | pred = gpu_nnc_predict(trX, trY, teX, metric=metric) 95 | acc = metrics.accuracy_score(teY, pred) 96 | return acc*100. 97 | 98 | def nnd_score(trX, teX, metric='euclidean'): 99 | return gpu_nnd_score(trX, teX, metric=metric) 100 | -------------------------------------------------------------------------------- /utils/ops.py: -------------------------------------------------------------------------------- 1 | import theano 2 | import theano.tensor as T 3 | from theano.sandbox.cuda.basic_ops import (as_cuda_ndarray_variable, 4 | host_from_gpu, 5 | gpu_contiguous, HostFromGpu, 6 | gpu_alloc_empty) 7 | from theano.sandbox.cuda.dnn import GpuDnnConvDesc, GpuDnnConv, GpuDnnConvGradI, dnn_conv, dnn_pool 8 | from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams 9 | 10 | from rng import t_rng 11 | 12 | t_rng = RandomStreams() 13 | 14 | def l2normalize(x, axis=1, e=1e-8, keepdims=True): 15 | return x/l2norm(x, axis=axis, e=e, keepdims=keepdims) 16 | 17 | def l2norm(x, axis=1, e=1e-8, keepdims=True): 18 | return T.sqrt(T.sum(T.sqr(x), axis=axis, keepdims=keepdims) + e) 19 | 20 | def cosine(x, y): 21 | d = T.dot(x, y.T) 22 | d /= l2norm(x).dimshuffle(0, 'x') 23 | d /= l2norm(y).dimshuffle('x', 0) 24 | return d 25 | 26 | def euclidean(x, y, e=1e-8): 27 | xx = T.sqr(T.sqrt((x*x).sum(axis=1) + e)) 28 | yy = T.sqr(T.sqrt((y*y).sum(axis=1) + e)) 29 | dist = T.dot(x, y.T) 30 | dist *= -2 31 | dist += xx.dimshuffle(0, 'x') 32 | dist += yy.dimshuffle('x', 0) 33 | dist = T.sqrt(dist) 34 | return dist 35 | 36 | def dropout(X, p=0.): 37 | """ 38 | dropout using activation scaling to avoid test time weight rescaling 39 | """ 40 | if p > 0: 41 | retain_prob = 1 - p 42 | X *= t_rng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX) 43 | X /= retain_prob 44 | return X 45 | 46 | def conv_cond_concat(x, y): 47 | """ 48 | concatenate conditioning vector on feature map axis 49 | """ 50 | return T.concatenate([x, y*T.ones((x.shape[0], y.shape[1], x.shape[2], x.shape[3]))], axis=1) 51 | 52 | def batchnorm(X, g=None, b=None, u=None, s=None, a=1., e=1e-8): 53 | """ 54 | batchnorm with support for not using scale and shift parameters 55 | as well 
as inference values (u and s) and partial batchnorm (via a) 56 | will detect and use convolutional or fully connected version 57 | """ 58 | if X.ndim == 4: 59 | if u is not None and s is not None: 60 | b_u = u.dimshuffle('x', 0, 'x', 'x') 61 | b_s = s.dimshuffle('x', 0, 'x', 'x') 62 | else: 63 | b_u = T.mean(X, axis=[0, 2, 3]).dimshuffle('x', 0, 'x', 'x') 64 | b_s = T.mean(T.sqr(X - b_u), axis=[0, 2, 3]).dimshuffle('x', 0, 'x', 'x') 65 | if a != 1: 66 | b_u = (1. - a)*0. + a*b_u 67 | b_s = (1. - a)*1. + a*b_s 68 | X = (X - b_u) / T.sqrt(b_s + e) 69 | if g is not None and b is not None: 70 | X = X*g.dimshuffle('x', 0, 'x', 'x') + b.dimshuffle('x', 0, 'x', 'x') 71 | elif X.ndim == 2: 72 | if u is None and s is None: 73 | u = T.mean(X, axis=0) 74 | s = T.mean(T.sqr(X - u), axis=0) 75 | if a != 1: 76 | u = (1. - a)*0. + a*u 77 | s = (1. - a)*1. + a*s 78 | X = (X - u) / T.sqrt(s + e) 79 | if g is not None and b is not None: 80 | X = X*g + b 81 | else: 82 | raise NotImplementedError 83 | return X 84 | 85 | def deconv(X, w, subsample=(1, 1), border_mode=(0, 0), conv_mode='conv'): 86 | """ 87 | sets up dummy convolutional forward pass and uses its grad as deconv 88 | currently only tested/working with same padding 89 | """ 90 | img = gpu_contiguous(X) 91 | kerns = gpu_contiguous(w) 92 | desc = GpuDnnConvDesc(border_mode=border_mode, subsample=subsample, 93 | conv_mode=conv_mode)(gpu_alloc_empty(img.shape[0], kerns.shape[1], img.shape[2]*subsample[0], img.shape[3]*subsample[1]).shape, kerns.shape) 94 | out = gpu_alloc_empty(img.shape[0], kerns.shape[1], img.shape[2]*subsample[0], img.shape[3]*subsample[1]) 95 | d_img = GpuDnnConvGradI()(kerns, img, out, desc) 96 | return d_img -------------------------------------------------------------------------------- /utils/rng.py: -------------------------------------------------------------------------------- 1 | from numpy.random import RandomState 2 | from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams 3 | from random import Random 4 | 5 | seed = 42 6 | 7 | py_rng = Random(seed) 8 | np_rng = RandomState(seed) 9 | t_rng = RandomStreams(seed) 10 | 11 | def set_seed(n): 12 | global seed, py_rng, np_rng, t_rng 13 | 14 | seed = n 15 | py_rng = Random(seed) 16 | np_rng = RandomState(seed) 17 | t_rng = RandomStreams(seed) 18 | -------------------------------------------------------------------------------- /utils/theano_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import theano 3 | 4 | def intX(X): 5 | return np.asarray(X, dtype=np.int32) 6 | 7 | def floatX(X): 8 | return np.asarray(X, dtype=theano.config.floatX) 9 | 10 | def sharedX(X, dtype=theano.config.floatX, name=None): 11 | return theano.shared(np.asarray(X, dtype=dtype), name=name) 12 | 13 | def shared0s(shape, dtype=theano.config.floatX, name=None): 14 | return sharedX(np.zeros(shape), dtype=dtype, name=name) 15 | 16 | def sharedNs(shape, n, dtype=theano.config.floatX, name=None): 17 | return sharedX(np.ones(shape)*n, dtype=dtype, name=name) -------------------------------------------------------------------------------- /utils/updates.py: -------------------------------------------------------------------------------- 1 | import theano 2 | import theano.tensor as T 3 | import numpy as np 4 | 5 | from theano_utils import shared0s, floatX, sharedX 6 | from ops import l2norm 7 | 8 | def clip_norm(g, c, n): 9 | if c > 0: 10 | g = T.switch(T.ge(n, c), g*c/n, g) 11 | return g 12 | 13 | def clip_norms(gs, c): 14 | norm 
= T.sqrt(sum([T.sum(g**2) for g in gs])) 15 | return [clip_norm(g, c, norm) for g in gs] 16 | 17 | class Regularizer(object): 18 | 19 | def __init__(self, l1=0., l2=0., maxnorm=0., l2norm=False, frobnorm=False): 20 | self.__dict__.update(locals()) 21 | 22 | def max_norm(self, p, maxnorm): 23 | if maxnorm > 0: 24 | norms = T.sqrt(T.sum(T.sqr(p), axis=0)) 25 | desired = T.clip(norms, 0, maxnorm) 26 | p = p * (desired/ (1e-7 + norms)) 27 | return p 28 | 29 | def l2_norm(self, p): 30 | return p/l2norm(p, axis=0) 31 | 32 | def frob_norm(self, p, nrows): 33 | return (p/T.sqrt(T.sum(T.sqr(p))))*T.sqrt(nrows) 34 | 35 | def gradient_regularize(self, p, g): 36 | g += p * self.l2 37 | g += T.sgn(p) * self.l1 38 | return g 39 | 40 | def weight_regularize(self, p): 41 | p = self.max_norm(p, self.maxnorm) 42 | if self.l2norm: 43 | p = self.l2_norm(p) 44 | if self.frobnorm > 0: 45 | p = self.frob_norm(p, self.frobnorm) 46 | return p 47 | 48 | 49 | class Update(object): 50 | 51 | def __init__(self, regularizer=Regularizer(), clipnorm=0.): 52 | self.__dict__.update(locals()) 53 | 54 | def __call__(self, params, grads): 55 | raise NotImplementedError 56 | 57 | class SGD(Update): 58 | 59 | def __init__(self, lr=0.01, *args, **kwargs): 60 | Update.__init__(self, *args, **kwargs) 61 | self.__dict__.update(locals()) 62 | 63 | def __call__(self, params, cost): 64 | updates = [] 65 | grads = T.grad(cost, params) 66 | grads = clip_norms(grads, self.clipnorm) 67 | for p,g in zip(params,grads): 68 | g = self.regularizer.gradient_regularize(p, g) 69 | updated_p = p - self.lr * g 70 | updated_p = self.regularizer.weight_regularize(updated_p) 71 | updates.append((p, updated_p)) 72 | return updates 73 | 74 | class Momentum(Update): 75 | 76 | def __init__(self, lr=0.01, momentum=0.9, *args, **kwargs): 77 | Update.__init__(self, *args, **kwargs) 78 | self.__dict__.update(locals()) 79 | 80 | def __call__(self, params, cost): 81 | updates = [] 82 | grads = T.grad(cost, params) 83 | grads = clip_norms(grads, self.clipnorm) 84 | for p,g in zip(params,grads): 85 | g = self.regularizer.gradient_regularize(p, g) 86 | m = theano.shared(p.get_value() * 0.) 87 | v = (self.momentum * m) - (self.lr * g) 88 | updates.append((m, v)) 89 | 90 | updated_p = p + v 91 | updated_p = self.regularizer.weight_regularize(updated_p) 92 | updates.append((p, updated_p)) 93 | return updates 94 | 95 | 96 | class NAG(Update): 97 | 98 | def __init__(self, lr=0.01, momentum=0.9, *args, **kwargs): 99 | Update.__init__(self, *args, **kwargs) 100 | self.__dict__.update(locals()) 101 | 102 | def __call__(self, params, cost): 103 | updates = [] 104 | grads = T.grad(cost, params) 105 | grads = clip_norms(grads, self.clipnorm) 106 | for p, g in zip(params, grads): 107 | g = self.regularizer.gradient_regularize(p, g) 108 | m = theano.shared(p.get_value() * 0.) 
109 | v = (self.momentum * m) - (self.lr * g) 110 | 111 | updated_p = p + self.momentum * v - self.lr * g 112 | updated_p = self.regularizer.weight_regularize(updated_p) 113 | updates.append((m,v)) 114 | updates.append((p, updated_p)) 115 | return updates 116 | 117 | 118 | class RMSprop(Update): 119 | 120 | def __init__(self, lr=0.001, rho=0.9, epsilon=1e-6, *args, **kwargs): 121 | Update.__init__(self, *args, **kwargs) 122 | self.__dict__.update(locals()) 123 | 124 | def __call__(self, params, cost): 125 | updates = [] 126 | grads = T.grad(cost, params) 127 | grads = clip_norms(grads, self.clipnorm) 128 | for p,g in zip(params,grads): 129 | g = self.regularizer.gradient_regularize(p, g) 130 | acc = theano.shared(p.get_value() * 0.) 131 | acc_new = self.rho * acc + (1 - self.rho) * g ** 2 132 | updates.append((acc, acc_new)) 133 | 134 | updated_p = p - self.lr * (g / T.sqrt(acc_new + self.epsilon)) 135 | updated_p = self.regularizer.weight_regularize(updated_p) 136 | updates.append((p, updated_p)) 137 | return updates 138 | 139 | 140 | class Adam(Update): 141 | 142 | def __init__(self, lr=0.001, b1=0.9, b2=0.999, e=1e-8, l=1-1e-8, *args, **kwargs): 143 | Update.__init__(self, *args, **kwargs) 144 | self.__dict__.update(locals()) 145 | 146 | def __call__(self, params, cost): 147 | updates = [] 148 | grads = T.grad(cost, params) 149 | grads = clip_norms(grads, self.clipnorm) 150 | t = theano.shared(floatX(1.)) 151 | b1_t = self.b1*self.l**(t-1) 152 | 153 | for p, g in zip(params, grads): 154 | g = self.regularizer.gradient_regularize(p, g) 155 | m = theano.shared(p.get_value() * 0.) 156 | v = theano.shared(p.get_value() * 0.) 157 | 158 | m_t = b1_t*m + (1 - b1_t)*g 159 | v_t = self.b2*v + (1 - self.b2)*g**2 160 | m_c = m_t / (1-self.b1**t) 161 | v_c = v_t / (1-self.b2**t) 162 | p_t = p - (self.lr * m_c) / (T.sqrt(v_c) + self.e) 163 | p_t = self.regularizer.weight_regularize(p_t) 164 | updates.append((m, m_t)) 165 | updates.append((v, v_t)) 166 | updates.append((p, p_t) ) 167 | updates.append((t, t + 1.)) 168 | return updates 169 | 170 | 171 | class Adagrad(Update): 172 | 173 | def __init__(self, lr=0.01, epsilon=1e-6, *args, **kwargs): 174 | Update.__init__(self, *args, **kwargs) 175 | self.__dict__.update(locals()) 176 | 177 | def __call__(self, params, cost): 178 | updates = [] 179 | grads = T.grad(cost, params) 180 | grads = clip_norms(grads, self.clipnorm) 181 | for p,g in zip(params,grads): 182 | g = self.regularizer.gradient_regularize(p, g) 183 | acc = theano.shared(p.get_value() * 0.) 184 | acc_t = acc + g ** 2 185 | updates.append((acc, acc_t)) 186 | 187 | p_t = p - (self.lr / T.sqrt(acc_t + self.epsilon)) * g 188 | p_t = self.regularizer.weight_regularize(p_t) 189 | updates.append((p, p_t)) 190 | return updates 191 | 192 | 193 | class Adadelta(Update): 194 | 195 | def __init__(self, lr=0.5, rho=0.95, epsilon=1e-6, *args, **kwargs): 196 | Update.__init__(self, *args, **kwargs) 197 | self.__dict__.update(locals()) 198 | 199 | def __call__(self, params, cost): 200 | updates = [] 201 | grads = T.grad(cost, params) 202 | grads = clip_norms(grads, self.clipnorm) 203 | for p,g in zip(params,grads): 204 | g = self.regularizer.gradient_regularize(p, g) 205 | 206 | acc = theano.shared(p.get_value() * 0.) 207 | acc_delta = theano.shared(p.get_value() * 0.) 
208 | acc_new = self.rho * acc + (1 - self.rho) * g ** 2 209 | updates.append((acc,acc_new)) 210 | 211 | update = g * T.sqrt(acc_delta + self.epsilon) / T.sqrt(acc_new + self.epsilon) 212 | updated_p = p - self.lr * update 213 | updated_p = self.regularizer.weight_regularize(updated_p) 214 | updates.append((p, updated_p)) 215 | 216 | acc_delta_new = self.rho * acc_delta + (1 - self.rho) * update ** 2 217 | updates.append((acc_delta,acc_delta_new)) 218 | return updates 219 | 220 | 221 | class NoUpdate(Update): 222 | 223 | def __init__(self, lr=0.01, momentum=0.9, *args, **kwargs): 224 | Update.__init__(self, *args, **kwargs) 225 | self.__dict__.update(locals()) 226 | 227 | def __call__(self, params, cost): 228 | updates = [] 229 | for p in params: 230 | updates.append((p, p)) 231 | return updates 232 | -------------------------------------------------------------------------------- /utils/vis.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy.misc import imsave 3 | 4 | def grayscale_grid_vis(X, (nh, nw), save_path=None): 5 | h, w = X[0].shape[:2] 6 | img = np.zeros((h*nh, w*nw)) 7 | for n, x in enumerate(X): 8 | j = n/nw 9 | i = n%nw 10 | img[j*h:j*h+h, i*w:i*w+w] = x 11 | if save_path is not None: 12 | imsave(save_path, img) 13 | return img 14 | 15 | def color_grid_vis(X, (nh, nw), save_path=None): 16 | h, w = X[0].shape[:2] 17 | img = np.zeros((h*nh, w*nw, 3)) 18 | for n, x in enumerate(X): 19 | j = n/nw 20 | i = n%nw 21 | img[j*h:j*h+h, i*w:i*w+w, :] = x 22 | if save_path is not None: 23 | imsave(save_path, img) 24 | return img 25 | 26 | def grayscale_weight_grid_vis(w, (nh, nw), save_path=None): 27 | w = (w+w.min())/(w.max()-w.min()) 28 | return grayscale_grid_vis(w, (nh, nw), save_path=save_path) --------------------------------------------------------------------------------
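[Editor's aside] A hypothetical usage sketch for utils/vis.py; the image sizes, grid shape and output file name are made up, and scipy (which vis.py imports for `imsave`) is assumed to be installed even though it is not pinned in requirements.txt.

```
import numpy as np
from utils.vis import color_grid_vis

# tile 24 HxWx3 uint8 images into a 4-row x 6-column grid
imgs = np.random.randint(0, 256, size=(24, 160, 128, 3)).astype(np.uint8)
grid = color_grid_vis(imgs, (4, 6))                   # returns a (4*160, 6*128, 3) array
color_grid_vis(imgs, (4, 6), save_path="grid.png")    # also writes the grid to disk
```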