├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── cifar10_deepncm.py ├── cifar10_download_and_extract.py ├── deepncm_do.sh ├── deepncm_experiments.txt ├── figs ├── c100dict.npy ├── c10dict.npy ├── cifar10_rmd_eval.npz ├── cifar10_rmd_train.npz ├── deepncm_overview.ipynb ├── deepncm_rmd.ipynb ├── exp_cifar10_rmd.pdf ├── exp_cifar_best.pdf ├── exp_cifar_overview.pdf └── exp_cifar_overview.png ├── imagenet_deepncm.py ├── resnet_deepncm_run_loop.py ├── resnet_deepx.py ├── resnet_model.py ├── resnet_ncm.py └── resnet_ncmequal.py /.gitignore: -------------------------------------------------------------------------------- 1 | # DeepNCM Experiment Log Files: 2 | logs/ 3 | 4 | # GitHub provided Python .gitignore 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | env/ 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | .hypothesis/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | 61 | # Flask stuff: 62 | instance/ 63 | .webassets-cache 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # Jupyter Notebook 75 | .ipynb_checkpoints 76 | 77 | # pyenv 78 | .python-version 79 | 80 | # celery beat schedule file 81 | celerybeat-schedule 82 | 83 | # SageMath parsed files 84 | *.sage.py 85 | 86 | # dotenv 87 | .env 88 | 89 | # virtualenv 90 | .venv 91 | venv/ 92 | ENV/ 93 | 94 | # Spyder project settings 95 | .spyderproject 96 | .spyproject 97 | 98 | # Rope project settings 99 | .ropeproject 100 | 101 | # mkdocs documentation 102 | /site 103 | 104 | # mypy 105 | .mypy_cache/ 106 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "tf/models"] 2 | path = tf/models 3 | url = https://github.com/tensorflow/models.git 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 2-Clause License 2 | 3 | Copyright (c) 2018, Thomas Mensink 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 
11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DeepNCM: Deep Nearest Class Means 2 | This software provides DeepNCM models based on the TensorFlow Models Official ResNet implementation. 3 | 4 | ## Citation 5 | When using this code, or the ideas of DeepNCM, please cite the following paper ([openreview](https://openreview.net/forum?id=rkPLZ4JPM)) 6 | 7 | @INPROCEEDINGS{guerriero18openreview, 8 | author = {Samantha Guerriero and Barbara Caputo and Thomas Mensink}, 9 | title = {DeepNCM: Deep Nearest Class Mean Classifiers}, 10 | booktitle = {International Conference on Learning Representations - Workshop (ICLRw)}, 11 | year = {2018}, 12 | } 13 | 14 | ### Dependencies / Notes 15 | DeepNCM is written in python, and follows (as closely as possible) the Tensorflow official ResNet implementation. 
16 | - The code is developed with Python 3.6 and TensorFlow 1.6.0 (with GPU support) on Linux 17 | - Reported to work also with Python 2.7. For Python 2.7 change `resnet_ncm.py`, line 113 `ncm['method'].casefold()` to `ncm['method'].lower()` 18 | - Requires TensorFlow Models 19 | - Included as submodule, so to get the required version, after cloning/getting DeepNCM do 20 | `git submodule update --init` 21 | - For reasons of my convenience, `model_dir` and `data_dir` are required to be `model_dir = /tmp/deepncm/cifar10_resnet` `data_dir = /tmp/deepncm/cifar10_data` -- errors might pop up when other directories are used. 22 | - The experiments (deepncm_do.sh) use GNU Parallel for parallelisation (Tange, GNU Parallel - The Command-Line Power Tool, 2011) 23 | 24 | ## Experimental overview on Cifar10/Cifar100 25 | Below are the full experiments, using two learning rates, different condensation (omreset) and decay rates. 26 | ![DeepNCM Experimental Overview](https://github.com/tmensink/deepncm/blob/master/figs/exp_cifar_overview.png) 27 | Comparison of the following methods: Softmax (sof), Online Means (onl), Mean Condensation (con), Decay Mean (dec), in the legend the maximum Top-1 accuracy is reported. 28 | 29 | The code for the figures above can be found in `figs/deepncm_overview.ipynb` 30 | 31 | # Future research (ideas) 32 | - Current optimiser and learning-rate schedule are optimised for softmax learning. 33 | - Gradient clipping is now set to (-1.0,1.0), this is not tuned 34 | - Experiments on larger datasets, _e.g._, ImageNet 35 | - Class incremental / Open Set learning 36 | 37 | Please contact me when you're interested to collaborate on this! 38 | 39 | ### Copyright (2017-2018) 40 | Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 41 | Some preliminary source code is written by Samantha Guerriero and Thomas Mensink. 
42 | -------------------------------------------------------------------------------- /cifar10_deepncm.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file (cifar10_deepncm) is based on cifar10_main.py from the 11 | # TensorFlow Models Official ResNet library (release 1.8.0/1.7.0) 12 | # https://github.com/tensorflow/models/tree/master/official/resnet 13 | # With the following copyright notice: 14 | # 15 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 16 | # 17 | # Licensed under the Apache License, Version 2.0 (the "License"); 18 | # you may not use this file except in compliance with the License. 19 | # You may obtain a copy of the License at 20 | # 21 | # http://www.apache.org/licenses/LICENSE-2.0 22 | # 23 | # Unless required by applicable law or agreed to in writing, software 24 | # distributed under the License is distributed on an "AS IS" BASIS, 25 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 26 | # See the License for the specific language governing permissions and 27 | # limitations under the License. 
28 | # ============================================================================== 29 | """Runs a ResNet model on the CIFAR-10 dataset.""" 30 | 31 | # Changed to include as well CIFAR100 experiments 32 | # Based on the Research ResNet model: 33 | # https://github.com/tensorflow/models/blob/master/research/resnet/cifar_input.py 34 | 35 | from __future__ import absolute_import 36 | from __future__ import division 37 | from __future__ import print_function 38 | 39 | import os 40 | import sys 41 | sys.path.append("./tf/models/") 42 | 43 | import tensorflow as tf # pylint: disable=g-bad-import-order 44 | 45 | import resnet_ncm as resnet 46 | import resnet_deepncm_run_loop as rrl 47 | 48 | ALLOW_MULTIPLE_MODELS = True 49 | DS = None 50 | 51 | def set_DS_global(dataset="cifar10"): 52 | global DS 53 | DS = set_dataset(dataset=dataset) 54 | 55 | def set_dataset(dataset="cifar10"): 56 | print("set_dataset") 57 | s = type('', (), {})() 58 | 59 | s.NUM_IMAGES = { 60 | 'train': 50000, 61 | 'validation': 10000, 62 | } 63 | s.HEIGHT = 32 64 | s.WIDTH = 32 65 | s.NUM_CHANNELS = 3 66 | s.DEFAULT_IMAGE_BYTES = s.HEIGHT * s.WIDTH * s.NUM_CHANNELS 67 | if dataset == "cifar10": 68 | s.DATASET = 'cifar10' 69 | s.NUM_CLASSES = 10 70 | s.NUM_DATA_FILES = 5 71 | s.DEFAULT_MODEL_DIR = "/tmp/deepncm/cifar10_resnet/" 72 | s.DATA_DIR = '/tmp/deepncm/cifar10_data' 73 | s.DATA_PATH = 'cifar-10-batches-bin' 74 | s.DATA_LABEL_O = 0 75 | s.DATA_LABEL_B = 1 76 | else: 77 | s.DATASET = 'cifar100' 78 | s.DATA_LABEL_O = 1 79 | s.DATA_LABEL_B = 1 80 | s.NUM_CLASSES = 100 81 | s.NUM_DATA_FILES = 1 82 | s.DATA_DIR = '/tmp/deepncm/cifar100_data' 83 | s.DEFAULT_MODEL_DIR = "/tmp/deepncm/cifar100_resnet/" 84 | s.DATA_PATH = 'cifar-100-binary' 85 | 86 | s.RECORD_BYTES = s.DEFAULT_IMAGE_BYTES + s.DATA_LABEL_O + s.DATA_LABEL_B 87 | return s 88 | 89 | ############################################################################### 90 | # Data processing 91 | 
############################################################################### 92 | def get_filenames(is_training, data_dir): 93 | """Returns a list of filenames.""" 94 | data_dir = os.path.join(data_dir, DS.DATA_PATH) 95 | 96 | assert os.path.exists(data_dir), ( 97 | 'Run cifar10_download_and_extract.py first to download and extract the ' 98 | 'CIFAR-10/CIFAR-100 data.') 99 | 100 | if DS.DATASET == 'cifar10': 101 | if is_training: 102 | return [ 103 | os.path.join(data_dir, 'data_batch_%d.bin' % i) 104 | for i in range(1, DS.NUM_DATA_FILES + 1) 105 | ] 106 | else: 107 | return [os.path.join(data_dir, 'test_batch.bin')] 108 | 109 | else: 110 | if is_training: 111 | return [os.path.join(data_dir, 'train.bin')] 112 | else: 113 | return [os.path.join(data_dir, 'test.bin')] 114 | 115 | 116 | 117 | def parse_record(raw_record, is_training): 118 | """Parse CIFAR-10/100 image and label from a raw record.""" 119 | # Convert bytes to a vector of uint8 that is record_bytes long. 120 | record_vector = tf.decode_raw(raw_record, tf.uint8) 121 | 122 | # The first byte represents the label, which we convert from uint8 to int32 123 | # and then to one-hot. 124 | label = tf.cast(tf.slice(record_vector, [DS.DATA_LABEL_O], [DS.DATA_LABEL_B]), tf.int32) 125 | #label = tf.cast(record_vector[0],tf.int32) 126 | label = tf.one_hot(tf.squeeze(label), DS.NUM_CLASSES) 127 | 128 | # The remaining bytes after the label represent the image, which we reshape 129 | # from [depth * height * width] to [depth, height, width]. 130 | depth_major = tf.reshape(tf.slice(record_vector, [DS.DATA_LABEL_O + DS.DATA_LABEL_B], [DS.DEFAULT_IMAGE_BYTES]),[DS.NUM_CHANNELS, DS.HEIGHT, DS.WIDTH]) 131 | 132 | # Convert from [depth, height, width] to [height, width, depth], and cast as 133 | # float32. 
134 | image = tf.cast(tf.transpose(depth_major, [1, 2, 0]), tf.float32) 135 | 136 | image = preprocess_image(image, is_training) 137 | 138 | return image, label 139 | 140 | 141 | 142 | def preprocess_image(image, is_training): 143 | """Preprocess a single image of layout [height, width, depth].""" 144 | if is_training: 145 | # Resize the image to add four extra pixels on each side. 146 | image = tf.image.resize_image_with_crop_or_pad( 147 | image, DS.HEIGHT + 8, DS.WIDTH + 8) 148 | 149 | # Randomly crop a [_HEIGHT, _WIDTH] section of the image. 150 | image = tf.random_crop(image, [DS.HEIGHT, DS.WIDTH, DS.NUM_CHANNELS]) 151 | 152 | # Randomly flip the image horizontally. 153 | image = tf.image.random_flip_left_right(image) 154 | 155 | # Subtract off the mean and divide by the variance of the pixels. 156 | image = tf.image.per_image_standardization(image) 157 | return image 158 | 159 | 160 | def input_fn(is_training, data_dir, batch_size, num_epochs=1, 161 | num_parallel_calls=1, multi_gpu=False): 162 | """Input_fn using the tf.data input pipeline for CIFAR-10 dataset. 163 | 164 | Args: 165 | is_training: A boolean denoting whether the input is for training. 166 | data_dir: The directory containing the input data. 167 | batch_size: The number of samples per batch. 168 | num_epochs: The number of epochs to repeat the dataset. 169 | num_parallel_calls: The number of records that are processed in parallel. 170 | This can be optimized per data set but for generally homogeneous data 171 | sets, should be approximately the number of available CPU cores. 172 | multi_gpu: Whether this is run multi-GPU. Note that this is only required 173 | currently to handle the batch leftovers, and can be removed 174 | when that is handled directly by Estimator. 175 | 176 | Returns: 177 | A dataset that can be used for iteration. 
178 | """ 179 | filenames = get_filenames(is_training, data_dir) 180 | dataset = tf.data.FixedLengthRecordDataset(filenames, DS.RECORD_BYTES) 181 | 182 | num_images = is_training and DS.NUM_IMAGES['train'] or DS.NUM_IMAGES['validation'] 183 | 184 | return rrl.process_record_dataset(dataset, is_training, batch_size, DS.NUM_IMAGES['train'], 185 | parse_record, num_epochs, num_parallel_calls, 186 | examples_per_epoch=num_images, multi_gpu=multi_gpu) 187 | 188 | 189 | 190 | def get_synth_input_fn(): 191 | return rrl.get_synth_input_fn(DS.HEIGHT, DS.WIDTH, DS.NUM_CHANNELS, DS.NUM_CLASSES) 192 | 193 | ############################################################################### 194 | # Running the model 195 | ############################################################################### 196 | ############################################################################### 197 | # Running the model 198 | ############################################################################### 199 | class Cifar10Model(resnet.NCMResModel): 200 | """Model class with appropriate defaults for CIFAR-10 data.""" 201 | 202 | def __init__(self, resnet_size, data_format=None, num_classes=None,version=resnet.RESNET_DEFAULT_VERSION,ncm=resnet.NCM_DEFAULT): 203 | """These are the parameters that work for CIFAR-10 data. 204 | 205 | Args: 206 | resnet_size: The number of convolutional layers needed in the model. 207 | data_format: Either 'channels_first' or 'channels_last', specifying which 208 | data format to use when setting up the model. 209 | num_classes: The number of output classes needed from the model. This 210 | enables users to extend the same model to their own datasets. 211 | version: Integer representing which version of the ResNet network to use. 212 | See README for details. 
Valid values: [1, 2] 213 | 214 | Raises: 215 | ValueError: if invalid resnet_size is chosen 216 | """ 217 | if resnet_size % 6 != 2: 218 | raise ValueError('resnet_size must be 6n + 2:', resnet_size) 219 | 220 | num_blocks = (resnet_size - 2) // 6 221 | 222 | super(Cifar10Model, self).__init__(resnet_size=resnet_size,bottleneck=False,num_classes=num_classes,num_filters=16,kernel_size=3,conv_stride=1,first_pool_size=None,first_pool_stride=None,second_pool_size=8,second_pool_stride=1,block_sizes=[num_blocks] * 3,block_strides=[1, 2, 2],final_size=64,version=version,data_format=data_format,ncm=ncm) 223 | 224 | 225 | def cifar10_model_fn(features, labels, mode, params): 226 | """Model function for CIFAR-10.""" 227 | features = tf.reshape(features, [-1, DS.HEIGHT, DS.WIDTH, DS.NUM_CHANNELS]) 228 | 229 | learning_rate_fn = rrl.learning_rate_with_decay(batch_size=params['batch_size'], batch_denom=params['batch_size'],num_images=DS.NUM_IMAGES['train'], boundary_epochs=[100, 150, 200],decay_rates=[1, 0.1, 0.01, 0.001],initial_learning_scale=params['initial_learning_scale']) 230 | 231 | # We use a weight decay of 0.0002, which performs better 232 | # than the 0.0001 that was originally suggested. 233 | weight_decay = 2e-4 234 | 235 | # Empirical testing showed that including batch_normalization variables 236 | # in the calculation of regularized loss helped validation accuracy 237 | # for the CIFAR-10 dataset, perhaps because the regularization prevents 238 | # overfitting on the small data set. We therefore include all vars when 239 | # regularizing and computing loss during training. 
240 | def loss_filter_fn(_): 241 | return True 242 | 243 | ncm = {'method' : params['ncmmethod'],'param' : params['ncmparam']} 244 | 245 | return rrl.resnet_model_fn(features, labels, mode, 246 | Cifar10Model,resnet_size=params['resnet_size'], 247 | weight_decay=weight_decay,learning_rate_fn=learning_rate_fn, 248 | momentum=0.9,data_format=params['data_format'], 249 | version=params['version'],loss_filter_fn=loss_filter_fn, 250 | multi_gpu=params['multi_gpu'],ncm=ncm) 251 | 252 | def main(argv): 253 | global DS 254 | parser = rrl.ResnetArgParser() 255 | # Set defaults that are reasonable for this model. 256 | parser.set_defaults(resnet_size=32, 257 | train_epochs=250, 258 | epochs_between_evals=1, 259 | batch_size=128, 260 | ) 261 | 262 | flags = parser.parse_args(args=argv[1:]) 263 | 264 | #if not flags.dataset == DS.DATASET: 265 | DS = set_dataset(flags.dataset) 266 | 267 | 268 | flags.model_dir = DS.DEFAULT_MODEL_DIR 269 | flags.model_dir += flags.ncmmethod 270 | 271 | if flags.ncmmethod[-2:] == "eq": 272 | flags.ncmmethod= flags.ncmmethod[:-2] 273 | 274 | if flags.ncmmethod == "decaymean": 275 | flags.model_dir += "_d%02d" %(flags.ncmparam*100) 276 | elif flags.ncmmethod == "omreset": 277 | flags.model_dir += "_r%04d" %(flags.ncmparam) 278 | 279 | flags.model_dir += "_lr%5.0e" %(flags.initial_learning_scale) 280 | 281 | print(flags.model_dir) 282 | flags.data_dir = DS.DATA_DIR 283 | print(flags.data_dir) 284 | 285 | if flags.scratch > 0 and os.path.isdir(flags.model_dir): 286 | print ("Clear model_directory") 287 | import shutil 288 | shutil.rmtree(flags.model_dir) 289 | elif flags.continu > 0: 290 | assert os.path.isdir(flags.model_dir), "Model dir is empty, while continue is set" 291 | elif flags.continu == 0 and flags.scratch == 0: 292 | assert not os.path.isdir(flags.model_dir), "Model dir is not empty, nor continu or scratch is set" 293 | 294 | 295 | input_function = input_fn 296 | 297 | rrl.resnet_main(flags, cifar10_model_fn, 
input_function,shape=[DS.HEIGHT, DS.WIDTH, DS.NUM_CHANNELS]) 298 | 299 | if __name__ == '__main__': 300 | tf.logging.set_verbosity(tf.logging.INFO) 301 | main(argv=sys.argv) 302 | -------------------------------------------------------------------------------- /cifar10_download_and_extract.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file (cifar10cifar10_download_and_extract) is based on the 11 | # TensorFlow Models Official ResNet library (release 1.8.0/1.7.0) 12 | # https://github.com/tensorflow/models/tree/master/official/resnet 13 | # It is changed to include both CIFAR10 as well as CIFAR100 dataset 14 | 15 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved. 16 | # 17 | # Licensed under the Apache License, Version 2.0 (the "License"); 18 | # you may not use this file except in compliance with the License. 19 | # You may obtain a copy of the License at 20 | # 21 | # http://www.apache.org/licenses/LICENSE-2.0 22 | # 23 | # Unless required by applicable law or agreed to in writing, software 24 | # distributed under the License is distributed on an "AS IS" BASIS, 25 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 26 | # See the License for the specific language governing permissions and 27 | # limitations under the License. 
28 | # ============================================================================== 29 | 30 | """Downloads and extracts the binary version of the CIFAR-10/CIFAR-100 dataset.""" 31 | 32 | from __future__ import absolute_import 33 | from __future__ import division 34 | from __future__ import print_function 35 | 36 | import argparse 37 | import os 38 | import sys 39 | import tarfile 40 | 41 | from six.moves import urllib 42 | import tensorflow as tf 43 | 44 | C10_DIR = '/tmp/deepncm/cifar10_data' 45 | C100_DIR = '/tmp/deepncm/cifar100_data' 46 | 47 | C10_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz' 48 | C100_URL = 'https://www.cs.toronto.edu/~kriz/cifar-100-binary.tar.gz' 49 | 50 | parser = argparse.ArgumentParser() 51 | 52 | parser.add_argument( 53 | '--dataset', type=str, default='cifar10', 54 | help='Dataset to download Cifar10 or Cifar100') 55 | 56 | parser.add_argument( 57 | '--data_dir', type=str, default=C10_DIR, 58 | help='Directory to download data and extract the tarball') 59 | 60 | 61 | def main(_): 62 | """Download and extract the tarball from Alex's website.""" 63 | print(FLAGS.dataset) 64 | 65 | if FLAGS.dataset == 'cifar10': 66 | DATA_URL = C10_URL 67 | else: 68 | DATA_URL = C100_URL 69 | if FLAGS.data_dir == C10_DIR: 70 | FLAGS.data_dir = C100_DIR 71 | 72 | print(FLAGS.data_dir) 73 | print(DATA_URL) 74 | 75 | if not os.path.exists(FLAGS.data_dir): 76 | os.makedirs(FLAGS.data_dir) 77 | 78 | filename = DATA_URL.split('/')[-1] 79 | filepath = os.path.join(FLAGS.data_dir, filename) 80 | 81 | if not os.path.exists(filepath): 82 | def _progress(count, block_size, total_size): 83 | sys.stdout.write('\r>> Downloading %s %.1f%%' % ( 84 | filename, 100.0 * count * block_size / total_size)) 85 | sys.stdout.flush() 86 | 87 | filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress) 88 | print() 89 | statinfo = os.stat(filepath) 90 | print('Successfully downloaded', filename, statinfo.st_size, 'bytes.') 91 | 92 | 
tarfile.open(filepath, 'r:gz').extractall(FLAGS.data_dir) 93 | 94 | 95 | if __name__ == '__main__': 96 | FLAGS, unparsed = parser.parse_known_args() 97 | tf.app.run(argv=[sys.argv[0]] + unparsed) 98 | -------------------------------------------------------------------------------- /deepncm_do.sh: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file runs the experiments. Making uses of Parallel 11 | # Tange, GNU Parallel - The Command-Line Power Tool, 2011 12 | # 13 | # Define an experiment run 14 | doexp() { 15 | x=$1 16 | dataset=$(echo "$x" | cut -f 1 -d ";" | xargs) 17 | method=$(echo "$x" | cut -f 2 -d ";" | xargs) 18 | param=$(echo "$x" | cut -f 3 -d ";" | xargs) 19 | lr=$(echo "$x" | cut -f 4 -d ";" | xargs) 20 | logfile="logs/${dataset}_${method}_${param}_${lr}.log" 21 | cmd="python cifar10_deepncm.py --dataset ${dataset} --ncmmethod ${method} --ncmparam ${param} -l ${lr} >> ${logfile} 2>&1" 22 | echo ${cmd} 23 | rm ${logfile} 24 | eval ${cmd} 25 | } 26 | export -f doexp 27 | # parallel -P ## (4) inidcates number of parallel calls: 28 | cat deepncm_experiments.txt | parallel -P 4 doexp 29 | -------------------------------------------------------------------------------- /deepncm_experiments.txt: -------------------------------------------------------------------------------- 1 | cifar10 ; softmax ; 0 ; 0.1 2 | cifar10 ; softmax ; 0 ; 0.01 3 | cifar10 ; onlinemean ; 0 ; 0.1 4 | cifar10 ; onlinemean ; 0 ; 0.01 5 | cifar100 ; softmax ; 0 ; 0.1 6 | cifar100 ; softmax ; 0 ; 0.01 7 | cifar100 ; onlinemean ; 0 ; 0.1 8 | cifar100 ; onlinemean ; 0 ; 0.01 9 | cifar10 ; decaymean 
; 0.9 ; 0.1 10 | cifar10 ; decaymean ; 0.95 ; 0.1 11 | cifar10 ; decaymean ; 0.75 ; 0.1 12 | cifar10 ; decaymean ; 0.5 ; 0.1 13 | cifar100 ; decaymean ; 0.9 ; 0.1 14 | cifar100 ; decaymean ; 0.95 ; 0.1 15 | cifar100 ; decaymean ; 0.75 ; 0.1 16 | cifar100 ; decaymean ; 0.5 ; 0.1 17 | cifar10 ; omreset ; 100 ; 0.1 18 | cifar10 ; omreset ; 195 ; 0.1 19 | cifar10 ; omreset ; 390 ; 0.1 20 | cifar10 ; omreset ; 781 ; 0.1 21 | cifar100 ; omreset ; 100 ; 0.1 22 | cifar100 ; omreset ; 195 ; 0.1 23 | cifar100 ; omreset ; 390 ; 0.1 24 | cifar100 ; omreset ; 781 ; 0.1 25 | -------------------------------------------------------------------------------- /figs/c100dict.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/c100dict.npy -------------------------------------------------------------------------------- /figs/c10dict.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/c10dict.npy -------------------------------------------------------------------------------- /figs/cifar10_rmd_eval.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/cifar10_rmd_eval.npz -------------------------------------------------------------------------------- /figs/cifar10_rmd_train.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/cifar10_rmd_train.npz -------------------------------------------------------------------------------- /figs/deepncm_rmd.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": 
"markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Experimental Overview\n", 8 | "This jupyter notebook shows the code to generate the plots as used in\n", 9 | "\n", 10 | "**Samantha Guerriero and Barbara Caputo and Thomas Mensink**, \n", 11 | "*DeepNCM: Deep Nearest Class Mean Classifiers* \n", 12 | "ICLR-Workshop 2018" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "metadata": {}, 18 | "source": [ 19 | "#### code update:\n", 20 | "When running the \"RMD\" experiment below, some parts of the resnet_deepncm_run_loop has to be adjusted:\n", 21 | "\n", 22 | " # Resnet_deepncm_run_loop.py (line 217)\n", 23 | " # The following is only required for the Relative Mean Distance Experiment\n", 24 | " # Uncomment the following two lines:\n", 25 | " # metrics['batchmeans'] = tf.metrics.mean_tensor(tf.transpose(bm),weights=bmc)\n", 26 | " # metrics['deepmean'] = tf.metrics.mean_tensor(dm)" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 1, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "from __future__ import absolute_import\n", 36 | "from __future__ import division\n", 37 | "from __future__ import print_function\n", 38 | "\n", 39 | "import os\n", 40 | "import sys\n", 41 | "\n", 42 | "import tensorflow as tf # pylint: disable=g-bad-import-order\n", 43 | "import numpy as np\n", 44 | "\n", 45 | "sys.path.append(\"../tf/models/\")\n", 46 | "from official.utils.arg_parsers import parsers\n", 47 | "from official.utils.export import export\n", 48 | "from official.utils.logging import hooks_helper\n", 49 | "from official.utils.logging import logger\n", 50 | "\n", 51 | "sys.path.append(\"..\")\n", 52 | "import resnet_ncm as resnet\n", 53 | "import resnet_deepncm_run_loop as rrl\n", 54 | "import cifar10_deepncm as c10\n", 55 | "\n", 56 | "ALLOW_MULTIPLE_MODELS = True" 57 | ] 58 | }, 59 | { 60 | "cell_type": "markdown", 61 | "metadata": {}, 62 | "source": [ 63 | "# Code from cifar10_ncmnet.py" 64 | ] 65 | }, 66 | { 67 | "cell_type": 
"code", 68 | "execution_count": 3, 69 | "metadata": {}, 70 | "outputs": [ 71 | { 72 | "name": "stdout", 73 | "output_type": "stream", 74 | "text": [ 75 | "set_dataset\n", 76 | "set_dataset\n" 77 | ] 78 | } 79 | ], 80 | "source": [ 81 | "ds = 'cifar10'\n", 82 | "c10.set_DS_global(ds)\n", 83 | "DS = c10.set_dataset(ds)" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 4, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "parser = rrl.ResnetArgParser()\n", 93 | "parser.set_defaults(resnet_size=32,\n", 94 | " train_epochs=250,\n", 95 | " epochs_between_evals=1,\n", 96 | " batch_size=128,\n", 97 | " )\n", 98 | "flags = parser.parse_args([\"--dataset\",\"cifar10\",\"--ncmmethod\",\"onlinemean\",\"--ncmparam\",\"10\",\"-l\",\"0.1\"])" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 5, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "/tmp/deepncm/cifar10_resnet/onlinemean_lr1e-01\n", 111 | "/tmp/deepncm/cifar10_data\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "flags.model_dir = DS.DEFAULT_MODEL_DIR\n", 117 | "flags.model_dir += flags.ncmmethod\n", 118 | "\n", 119 | "if flags.ncmmethod == \"decaymean\":\n", 120 | " flags.model_dir += \"_d%02d\" %(flags.ncmparam*100)\n", 121 | "elif flags.ncmmethod == \"omreset\":\n", 122 | " flags.model_dir += \"_r%04d\" %(flags.ncmparam)\n", 123 | "\n", 124 | "flags.model_dir += \"_lr%5.0e\" %(flags.initial_learning_scale)\n", 125 | "flags.data_dir = DS.DATA_DIR\n", 126 | "\n", 127 | "print(flags.model_dir)\n", 128 | "print(flags.data_dir)" 129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": 6, 134 | "metadata": {}, 135 | "outputs": [], 136 | "source": [ 137 | "input_function = c10.input_fn\n", 138 | "model_function = c10.cifar10_model_fn\n", 139 | "shape=[DS.HEIGHT, DS.WIDTH, DS.NUM_CHANNELS]" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": 
{}, 145 | "source": [ 146 | "## Insert RRL ResNet Main code to see if mean reset works as expected" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 7, 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'\n", 156 | "session_config = tf.ConfigProto(\n", 157 | " inter_op_parallelism_threads=flags.inter_op_parallelism_threads,\n", 158 | " intra_op_parallelism_threads=flags.intra_op_parallelism_threads,\n", 159 | " allow_soft_placement=True)\n", 160 | "\n", 161 | "if ALLOW_MULTIPLE_MODELS:\n", 162 | " session_config.gpu_options.allow_growth = True\n", 163 | "\n", 164 | "run_config = tf.estimator.RunConfig().replace(\n", 165 | " save_checkpoints_secs = 5*60, # Save checkpoints every X minutes.\n", 166 | " keep_checkpoint_max = 1000, # Retain the 1000 most recent checkpoints.\n", 167 | " #tf_random_seed = 5739, # Set random seed for \"reproducible\" results\n", 168 | " save_summary_steps = 10, # Number of steps between summaries\n", 169 | " session_config=session_config)" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 8, 175 | "metadata": {}, 176 | "outputs": [ 177 | { 178 | "name": "stdout", 179 | "output_type": "stream", 180 | "text": [ 181 | "INFO:tensorflow:Using config: {'_model_dir': '/tmp/deepncm/cifar10_resnet/onlinemean_lr1e-01', '_tf_random_seed': None, '_save_summary_steps': 10, '_save_checkpoints_steps': None, '_save_checkpoints_secs': 300, '_session_config': gpu_options {\n", 182 | " allow_growth: true\n", 183 | "}\n", 184 | "allow_soft_placement: true\n", 185 | ", '_keep_checkpoint_max': 1000, '_keep_checkpoint_every_n_hours': 10000, '_log_step_count_steps': 100, '_service': None, '_cluster_spec': , '_task_type': 'worker', '_task_id': 0, '_global_id_in_cluster': 0, '_master': '', '_evaluation_master': '', '_is_chief': True, '_num_ps_replicas': 0, '_num_worker_replicas': 1}\n" 186 | ] 187 | } 188 | ], 189 | "source": [ 190 | "classifier = 
tf.estimator.Estimator(\n", 191 | " model_fn=model_function, \n", 192 | " model_dir=flags.model_dir, \n", 193 | " config=run_config,\n", 194 | " params={\n", 195 | " 'resnet_size': flags.resnet_size,\n", 196 | " 'data_format': flags.data_format,\n", 197 | " 'batch_size': flags.batch_size,\n", 198 | " 'multi_gpu': flags.multi_gpu,\n", 199 | " 'version': flags.version,\n", 200 | " 'ncmmethod': flags.ncmmethod,\n", 201 | " 'ncmparam' : flags.ncmparam,\n", 202 | " 'initial_learning_scale' : flags.initial_learning_scale\n", 203 | " })" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": 9, 209 | "metadata": {}, 210 | "outputs": [], 211 | "source": [ 212 | "def input_fn_eval():\n", 213 | " return input_function(False, flags.data_dir, flags.batch_size,1, flags.num_parallel_calls, flags.multi_gpu)\n", 214 | " \n", 215 | "def input_fn_evaltrain():\n", 216 | " return input_function(True, flags.data_dir, flags.batch_size,1, flags.num_parallel_calls, flags.multi_gpu)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 10, 222 | "metadata": {}, 223 | "outputs": [ 224 | { 225 | "data": { 226 | "text/plain": [ 227 | "True" 228 | ] 229 | }, 230 | "execution_count": 10, 231 | "metadata": {}, 232 | "output_type": "execute_result" 233 | } 234 | ], 235 | "source": [ 236 | "MFILENAME = \"cifar10_rmd_train.npz\"\n", 237 | "os.path.exists(MFILENAME)" 238 | ] 239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": 11, 243 | "metadata": {}, 244 | "outputs": [ 245 | { 246 | "name": "stdout", 247 | "output_type": "stream", 248 | "text": [ 249 | "Load cifar10_rmd_train.npz\n" 250 | ] 251 | } 252 | ], 253 | "source": [ 254 | "tf.logging.set_verbosity(tf.logging.FATAL)\n", 255 | "MFILENAME = \"cifar10_rmd_train.npz\"\n", 256 | "RUN = False\n", 257 | "\n", 258 | "if (not os.path.exists(MFILENAME)) or RUN:\n", 259 | " inx = [(i*391 + 1) for i in range(0,251)]\n", 260 | " inx[-1] = inx[-1]-1\n", 261 | " #inx = [(i*391 + 1) for i in 
range(0,10)]\n", 262 | " \n", 263 | " methods = [\"onlinemean_lr1e-01\",\"omreset_r0390_lr1e-01\",\"decaymean_d90_lr1e-01\"]\n", 264 | " M = np.zeros((len(inx),3))\n", 265 | " for m in range(len(methods)):\n", 266 | " for i in range(len(inx)): \n", 267 | " iinx = inx[i]\n", 268 | " chckptf = \"/tmp/deepncm/cifar10_resnet/%s/model.ckpt-%d\" %(methods[m],inx[i])\n", 269 | " eval_results = classifier.evaluate(input_fn=input_fn_evaltrain,checkpoint_path=chckptf)\n", 270 | " \n", 271 | " bmnorm = np.power((eval_results['batchmeans']),2).sum(axis=0)\n", 272 | " diffnorm = np.power((eval_results['batchmeans']-eval_results['deepmean']),2).sum(axis=0)\n", 273 | " rmd = (diffnorm/bmnorm).mean()\n", 274 | " M[i,m] = rmd\n", 275 | " print (\"%03d %d %30s %7d | %10.5f\" %(i,m,methods[m],inx[i],rmd))\n", 276 | " np.savez(MFILENAME,M=M,methods=methods,inx=inx)\n", 277 | "else:\n", 278 | " print(\"Load %s\" %(MFILENAME))\n", 279 | " npzfile = np.load(MFILENAME)\n", 280 | " methods = npzfile['methods']\n", 281 | " inx = npzfile['inx']\n", 282 | " M = npzfile['M']" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": 12, 288 | "metadata": {}, 289 | "outputs": [ 290 | { 291 | "name": "stdout", 292 | "output_type": "stream", 293 | "text": [ 294 | "['onlinemean_lr1e-01', 'condensation_r0390_lr1e-01', 'decaymean_d90_lr1e-01']\n" 295 | ] 296 | } 297 | ], 298 | "source": [ 299 | "methodsname = [m.replace(\"omreset\",\"condensation\") for m in methods]\n", 300 | "print(methodsname)" 301 | ] 302 | }, 303 | { 304 | "cell_type": "code", 305 | "execution_count": 13, 306 | "metadata": {}, 307 | "outputs": [], 308 | "source": [ 309 | "import matplotlib.pyplot as plt" 310 | ] 311 | }, 312 | { 313 | "cell_type": "markdown", 314 | "metadata": {}, 315 | "source": [ 316 | "# Workshop/Poster Figure" 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "execution_count": 23, 322 | "metadata": {}, 323 | "outputs": [ 324 | { 325 | "data": { 326 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAoYAAAFNCAYAAACdTa6TAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzs3Xd4FNX6wPHv2fRGQgqhhQAJvXcCiAEREbuo2FBsINj9Wa96xWu/XhsWFAsgKIiIggVESmih94SWBAJJgATSSG97fn/MBjfJpieEwPt5nn0Wpp6dmey+c8o7SmuNEEIIIYQQpoYugBBCCCGEuDBIYCiEEEIIIQAJDIUQQgghhIUEhkIIIYQQApDAUAghhBBCWEhgKIQQQgghAAkMhbgkKKVmK6W0UqptPe8nVikVW5/7EPVLKTXNcq2ENnRZhBDnnwSGQjQgyw+w9cuslEpTSoUrpR5TSjk0dBmtWQK/RpP81CrI0Uqp7ytYbqjVcqfOZxnrg43rKl8pdUYptdtyk3BjfV1bVjchofWxfSFE/bJv6AIIIQB4zfJuD7QFbgZCgFHADQ1Uppq4oqELUI5CYJxS6jGtdYqN+Q9ZlrnYvhOLrys7wBPoDNwG3AscUErdrbXeWWqdT4EFwPHzVkohxAXjYvsSFKJR0lpPs/6/UqoLsB24Xil1udZ6bYMUrJq01jENXYZy/A7cCEwAPraeoZTyBG4FfgNuOv9Fqz+lrysApVRT4E1gCvC3Umqw1jrKap0zwJnzVkghxAVFmpKFuABprQ8AxcHgAFvLKKX6KqUWKKVOWJoKTyql5iqlgqu6H6XUfUqpxUqpI0qpHKXUWaXURqXUPaWWa2tpQg60/N+6mTLMarkSfQyVUi9Ylvm/cvbvZdnvMaWUyWq6SSn1oKUs6UqpXKVUhFLqRaWUY1U/n5W/gWMYNYOl3QW4Al9VtAGl1Ail1FKl1GnL8T6mlPpcKdXcxrL9lFLTlVJ7lVKplvJHKaU+UEp521h+ouU4TVNK9VZK/WHpUpCtlFqnlBpag89sk9Y6VWs9Ffge8AbeKVUWm30MlVKhSqnflFJxSqk8pVSSUmq7Uup9pZSyLBOLURsJsMb6OrHaTkel1DuWdU9btnVMKfWVUqqNjWMTatnGbMt1uMDSLJ6rlNqhlLq+vM+qlBqnlPrLsnyepey/KqVG2Vi2yudXiIuZBIZCXPgKSk9QSt0FbAGuwwggPwI2AXcA25VSvau47c8xgr11lm38CLQD5iil3rJaLg2jWTLd8v/XrF6zK9j+d0ARMLGc+eMBZ+A7rbXZ8tnsgV8xAjVfYD7wBcZxeAv407JMdZiBb4BuSqnBpeY9BMQBf5W3slLqeWA1MBRYhnGsDmDUum1XSrW2sc3xwEHgW0v5E4GngHCllEc5u+oPhAOOwNcYNZ1DgVXKqEWuS9Ms79dXUB4AlFJjMT7/ZcAa4H3gZ4zr4nGMpmowjssey7/nUPI6KXYz8DDGMZ8PfIJxLB8Attk4lsUCga2W97kY12o34FelVJkuDEqpb4BFwECM4/i+5TN0B+4utWx1z68QFy+ttbzkJa8GegHa+DMsM70jkGmZ36/UvGAgF4gBWpWaF4rRV25nqemzLdtqW2p6kI19O2H8+BcArUvNi7VV3lLzY0tNW2brc1jmhVvmBVtNe9ky7VPAzmq6CZhpmfd4FY/vNMvyDwOtLMfmG6v5/S3zX7U6H6dKbWM4RmC5CfAqNW+CZZ3FpaYHWpfdavpky/IvlJo+sfhaAO4uZ50Ztb2ubCwXZ1k21MYxs5622DKtj41t+JRzrYWWs89WgJON6Vdj3ER8YeOaLj42L5ead5Vl+rJS0x+yTN8D+Jaap6yv65qcX3nJ62J+SY2hEBcAS/PdNKXUG0qpucAuwA14T2u9o9TiUzCCt6e01gnWM7TWYcBSoI9Sqltl+9U2+gRqrfMwgjJ7YGRNPk8psy3v91pPVEp1xBh
gs0FrHW2ZZgKeAJKAJ7XWRVblMgPPYvxQT6huISzHahkw3qqG7CGMoODbClZ9AiOYmKy1Tiu1zeJzdb1SqonV9GPWZbcyEzgLjC5nX+u11vNKTfsWI6C12aWglk5Y3ptVspzZ8p5deobWOrk6O9RaJ1iusdLTlwH7Kf/YxAJvl1rnL4wuAqWPzeOW98na6DNpvY7WWsdbTar2+RXiYiaDT4S4MLxqY9pLWuu3bEwv7m82XCnV18Z8f8t7ZyCyop1a+nQ9jzH6OQBwKbVIq4rWr6JfgVTgTqXUM1rrfMv04kBxttWyHTGaj2OAly1d10rLwfhsNfEVcK2lLN9jNL0v11rHVbDOUIzA7Gal1M025jthNKV2AHYAKCMVzGTgdozmziaU7LpT3nEtfROA1rpAKZUINK2gjLVVWQqiecA4YItSaiFGjfImrXVsdXdk6Y94F0YtaS+Mz2VntUi+jdUAdpcTbMdj3GAUb98do7k4RWu9uQpFqvb5FeJiJoGhEBcArXVx530XjD5RXwKvK6VitNY/llrcx/Juc0CHFfeKZiql2mP02WoKrMfoY5eO0ZzXFiNwc6r6p7BNa52nlFqAUdN5LbDYUjM4AaMGaqHV4sWfLQjbwXJt/QEkAA9ifE4PKhl0YimTfRXKY328f8QY4XwEIzA+BRTXkj1J+cc1vZzphZQMnupKS8v76YoW0lr/qpQaAzyDcV08BKCUigCmaa1/rsY+P8A4BicxrrkEjGAfjGAxsJz1Kjo21kG3p+U93sayttTk/Apx0ZLAUIgLiNY6B1hr+RGOBL5SSq3VWlsnXS7+gfTRtnPyVdXTGD+K92mtZ1vPUErdQamm31qajREYTsTorzYSo4ZyntY6w2q54s/2m9a63NGmNaW1LlJKzcLox9gcIzj5vZLV0gFHrXWVmhKVUv0xgsJVwNVa6wKreSbguZqUva4pY/R6a4zAqtKaMEuz7V9WNy9XA1OBn5RSI3QVUioppZphNPNGAENKnfvi6662ipuDq1rbXa3zK8TFTvoYCnEBsjTRvYtRo/V6qdmbLO+X1XI3xWltbNX2XF7OOkUASqlq1V5prbdi9B+72hIc2GpGBmMUbxowSNUsLU1VfIPRdNoamKW1Lqxk+U2Ah1KqVxW3X3xcl1gHhRYDKdtc31CmWd6XlA7QKqK1ztFar9Vav4BRg6gomYS9uLnX1jXSHuN3Z4WNoLC1ZX6taK2zgH2Aj1JqUBVWqe75FeKiJoGhEBeuDzESDd9nGahR7FOMfljvK6XK9LVTStmVzkFXjljL+4hS61+F0dRqS3FH/jL55qpgNkYrxWSMlCXHMfqqnWMJ0j7GGAzxmVLKtfRGlFK+1UjHU4Yl6B6NUav3URVW+cDyPtNW2hKllLNSapjVpFjLe2ip5ZoBn1WzuHVOGbkjP8fo55cKvFCFdUbZOhcYta5gjJIvVtE1Emt5H2Z9c2HpF/gVddeKNd3y/oWynTfSujaxuudXiIuaNCULcYHSWmcopd4B/odRazjeMv2QUmoiMAuIUEotBw5j1NAEYHSmdwK8KtnF58B9wEKl1M8Yfb26A2Mw+v2Nt7HOCoxar8VKqT8x+oYds4zerMxcjFGlrwAOWOUuLOUNoAdGcHqNUmoVRn8xP4y+h8MwAqwnq7BPm7TWK6ux7Bql1LMYNbhRls99BKPmrw1GupNYoDhY3QZsxBjMEA5swBgQdDVwiH9GAtc7pdQ0yz9N/PNIvMswyn4AIzVOdBU29T+gnTKSmcdiBII9MdLFJGOMti62AqO5/G2lVHeM4BOt9Rta61OW/qa3A7uVUiss5brSss3d/HMca0xr/bVSagjG9R2tlPoVo9tAc4zrZxOW3Jo1OL9CXNwaOl+OvOR1Kb+oJN8cxo9TAka6kD6l5nXFSIJ8FGNgQxrGj/1s4NpSy87Gdh7DIRiJfVOBDIwg5kb+yR03zUZ5pmPU9hVYlgmzmh9LqTyGpdb/vfgzY5W70MZyCmPE8Aq
MwCMf44d9M/AfoEMVj+80y74ersb5OFXOvMEYCZnjLeVJxmiy/BwYXmpZb8v0WP7JOfkWxhNWyhwj/sljOK2cfVd4XMu7rqxexeXdg5F4+ibAoZJjFmo17TbgB4wbkAzL6wBGbVuAjW08itGPMLf0NW45Bm8C0Zb5cRiBvg8QRqm/B6trcXY55S2zjtW8262u7zzLdbsYGFmb8ysveV3ML6V1ZVkKhBBCCCHEpUD6GAohhBBCCEACQyGEEEIIYSGBoRBCCCGEACQwFEIIIYQQFhIYCiGEEEII4BLOY+jl5aWDg4MrX1BckLKysnBzc2voYogakHPXuMn5a7zk3DVuO3bsOKO19qvv/VxygaFS6jrgupYtW7J9+/aGLo6oobCwMEJDQxu6GKIG5Nw1bnL+Gi85d42bUurY+djPJdeUrLX+TWs9yd3dvaGLIoQQQghxQbnkAkMhhBBCCGGbBIZCCCGEEAK4BANDpdR1SqmZmZmZDV0UIYQQQogLyiUXGEofQyGEEEII2y65wFAIIYQQQtgmgaEQQgghhAAkMBRCCCGEEBYSGAohhBBCCOASDAyLRyUXZiY3dFGEEEIIIS4ol1xgWDwq2VNlN3RRhBBCCCEuKJdcYFjMZM6HooKGLoYQQgghxAXjkg0MQUNyTEMXQgghhBDignEJB4ZA0v6GLoEQQgghxAXjEg4MFSQdaOhCCCGEEEJcMC7ZwNBscpAaQyGEEEIIK5dwYOgogaEQQgghhJVLNjAssnOElKOQL2lrhBBCCCHgEgwMixNcZ+drQMOZQw1dJCGEEEKIC8IlFxgWJ7h2dG1iTEiU5mQhhBBCCLgEA8NimWZ7sHOSfoZCCCGEEBaXbGCYkqPRfp0kZY0QQgghhMUlGxgWakjz6CCBoRBCCCGERaMODJVSDymlopVSeUqpPUqpMdVZf3deC8g4ATmp9VVEIYQQQohGo9EGhkqpccAM4H2gN/AnsEQp1bMq6zvbwbLEpsZ/pNZQCCGEEKLxBobAs8BcrfUMrfUBrfWLwF7giaqs7OagWJ/ezPiPDEARQgghhKi/wFApdYtS6hOl1Hql1FmllFZKLahkndZKqW+UUicszcOxSqmPlFJNSy3nCPQDVpTaxHJgaFXK5+agSDL5kGvnLjWGQgghhBCAfT1u+2WgF5AJxAOdK1pYKdUeCAf8gSXAQWAgRg3gGKXUUK11smVxX4yyJ5bazCmgRVUKZ1IwOMiXqIQAuidGoqr2mYQQQgghLlr12ZT8FNARaAJMqcLyMzCCwse11jdqrV/QWo8EPgQ6AW/aWEdXcZpNY3u0YG9+S4oS94Ou8mpCCCGEEBelegsMtdZrtNZRWlcecVlqC0cDR4HPSs1+FcgCJiil3CzTzgCFQPNSy/pj1BpWyVXdmhNFAPZ56ZBR5dWEEEIIIS5KF8rgk5GW9xVaa7P1DK11BrARcAUGW6blAzuAK0ttZ4xl2SrxdnPEoXk3Yz8yAEUIIYQQl7gLJTDsZHmPKmd+8fSOVtP+h1GLOFkp1Vkp9SZGn8aPq7Pjzr0GAZAYvbM6qwkhhBBCXHTqc/BJdXha3tPLmV883at4gtZ6kVLKG3gemA4cAm7UWu8tbydKqUnAJAA/Pz/CwsJwyNckaS9i92zgoPOA2n4OcZ5kZmYSFhbW0MUQNSDnrnGT89d4ybkTVXGhBIaVKR40XKK/otZ6JjCzqhuxXr5Dpw46NDQUgH072uGVF0+nyy9HKRmf3BiEhYVRfP5E4yLnrnGT89d4ybkTVXGhNCUX1wh6ljO/SanlakwpdZ1SamZCXgKf7f6MjPwM7Ft0I7DoOAdO1HrzQgghhBCN1oUSGB6yvHcsZ34Hy/vh2u5Ia/2b1nqSi50LX+z5gjE/j2GVbxHaVED4tu213bwQQgghRKN1oQSGayzvo5VSJcqklPLAeJpJDrC5rnboa+/LwmsX0qdZH75M3siYgJasjPuOgqKCutqFEEIIIUSjckEEhlrrGIzH27U
FHik1+zXADfhOa51V230VNyVnZmbSxacLn17xKd9f+TVd8vKJbLKLt8O/qO0uBMCpfbD0MTAXNXRJhBBCCFFF9fms5BuVUrOVUrOBFyyTBxVPs0y3NhVIAqYrpX5VSr2tlFqN8QSVw8BLdVGu4qZkd3f3c9N6thzE57mudMl25qeoeaw9HFcXu7q0Hf4Ldn4HWacbuiRCCCGEqKL6rDHsDdxreV1lmdbWatq91gtbag37AbOAQcD/AUEYqWhCrJ6TXCvWNYbW7Py78qJZg102k5dO56ftEhzWSk6q8Z4rA3qEEEKIxqI+H4k3TWutKnrZWCdea32/1rqF1tpRax2otX5Ca51Sh+UqU2MIgH9X+iQfZbB/CM6+G3j25228u/wgZrM8Q7lGJDAUQgghGp0Loo/hBaFZVzAX8li7sRSpTAb2OsCMsBimfL+D7PzChi5d45NtieUlMBRCCCEajUsuMCyvKZlmXQHomXWWoa2Gcoq/eH5sO/7en8i9325Fa6k5rBapMRRCCCEanUsuMCy3KblZF/AOgl3fM6XXFFLzUnHy3sy/xnZhW2wqUUmZtjcobDsXGKY1bDmEEEIIUWWXXGBYLqWg/30Qt5leZgeGtBzCnMg5jOrWFIAVkacauICNTI40JQshhBCNjQSG1nrdCXaOsGMWU3pNISU3hbATS+gd4MXf+xMbunSNh9bSlCyEEEI0QpdcYFhuH0MANx/oegPs+ZHeXh0IaRHCrMhZhHb2ZE98OqfSc89/gRujvAwwWwbsSGAohBBCNBqXXGBYbh/DYv3vh7x0iFjMlN5GrWGR+0YA/j4gtYZVUlxbCBIYCiGEEI3IJRcYVqpNCPh2gh2z6NOsD4NaDOLPYwtp6+sq/QyrSgJDIYQQolGSwLA0pYxaw4QdcHIPIwJGkJSTxPDOzmw+kszZ3IKGLuGFr3jgiYMb5MioZCGEEKKxkMDQll7jwd4Zts+ivWd7AIJbZVFQpAk7JM/+rVRxjWHTtlJjKIQQQjQil1xgWOHgk2IuTaHbzbDvJ4JdmwNgckrE191RRidXRfFTT7zbSWAohBBCNCKXXGBY6eCTYv3vh/xMfKPW0MSxCUfSY7iisz9hB5PILzSfn8I2VsXNx8U1hvLUGCGEEKJRuOQCwypr3R/8u6N2zCLYK4iYtBhGd/MnI6+QzUeSG7p0F7acVHB0BzdfMBdAQU5Dl0gIIYQQVSCBYXmKn4Ryai/tHTyJSY9hSJAPLg520pxcmZwUozne2cv4vzQnCyGEEI2CBIYV6XEbOLgRnBJHel46WUVpXN7Rj7/3J6KlebR8Oang4gXOnsb/JTAUQgghGgUJDCvi3AR63kpQ7DYAotOiubKrP6fO5rIvQYKdcmWngIu3BIZCCCFEI3PJBYZVGpVsbeS/CXZsCkBM0l5Gdm6GnUmxIlKak8uVkypNyUKIRkVrzZLdCWRIrlpxibvkAsMqj0ou5uaDzy2zaVJkJmb3HJq62DGgbVPpZ1iRnFRwlRpDIUTjsSsujScW7Gbu5mMNXRQhGtQlFxjWhGrdn2CPAGJykyDsHUZ3bc6hxAyOJWc1dNEuPFpb1RgWB4by9BMhRN1YH3Wa22duIq+wqE63u/pAEgAbo8/U6XYBXv99P+8uP1jn2xWiPkhgWEVBrYcQ7eKOXvdfrnPZC8A3G47KIJTS8s6CLrIEhk2MaVJjKCrx254TRCVmNHQxRCPwyepoNh9JYeexur3hXHXQCAy3x6aSW1B3QeeR05l8s+EoM8JiWLg9rs62K0R9kcCwioK8gjirCznTvDt+Kx7jsd4mvtt0jP9buEcSXlsrfuqJizfYO4G9iwSGokLxqdk8vmAXTy/cIzdaokLRSZlsPWp8x4TH1F3N3sn0HA6cPMvAdt7kFZrZeSy1zrY9JzwWRzsT/QKb8u8lERw8dbbOti1EfZDAsIqCvIIAiBn5HCjF06mv89zINizelcC9324lPaduOiybzbpO71bPu+LnJLsYA3Z
w9pTAUFRo4bY4tIZ9CenSd1dUaMHW49ibFO193QiPqbsHDay21Ba+eHVn7E2KDXXUnHw2t4BFO+K5tlcLZtzdFw9nB6Z+v5PMvMI62b4Q9UECwyoK9goGIKYoE8Z9jUrcz1SnZXw4vhfbj6Vwy4xw4lOza7WPgiIzd369mes/3UBhUSOthSwODF29jXcJDEUFCovM/Lg9jss6+NLO140P/j6M2Sy1hqKs3IIiFu2M56puzRnTvTl74tLqLMBafSCJAG8Xegd40TvAi411FHT+tD2erPwi7hvSjmYezky/vQ+xZ7J46Zd9UjsuLlgSGFaRj7MPnk6eRKdFQ4croWVvOLKWm/q0Zs79Azl1NpebPg9nb3zN+738d/lBNh9J4XBiJn/sO1mHpTdSMcSl1C5wrRKbNYYy+ETYtubQaRLP5jFhcCBPXNGBg6cyWB55qqGLJS5Af0WeIi27gDsGtmFIkC+FZs222JRabze3oIiNMWcY2akZSimGBPuyLz6t1q1ARWbNnPBY+gc2pUdrYyBeSJAPT1/ZkSW7T/DD1uO1LrsQ9eGSCwyrncfwn/UI8jSemQxAmyGQsB0K8xgS5MviKUNwtDNx+8zNxJ6p/mjlZftO8tX6o9w9uA0dmrkzIyymTu8of9mVwPD31hCdVL3PXW3SlCyq4Yctx2jm4cTIzs24rldLgpu58+HfhymSWkNRyg9bjhPo48qQIB/6t22Ko52JTXVQs7cpJpncAjMju/gDMDTIB7OGLUdqt+3VB5M4npLNfUPblZg+NTSY4R39eO23/UTIgxLEBeiSCwyrncfQSpCXERhqraHNYCjMhZN7AOjg78FPD4dgpxQv/Vq9ZoIjpzN5dtFeegV48cq1XXn48iAOnspgzaGkapexPAu2Gv24ijtu15tzg0+aGsegEQWGOflFbI9NYdGOeP731yEe/WEn132ygaHvrCbyROP4DI1JQloOYYdPM35AAPZ2JuxMiidHdSAqKZPf955o6OKJC0h0UiZbjqZw+4A2mEwKZwc7+gZ61UlqmVUHE3F1tGNQO6P7S582TXFxsKv1tmeHH6WFpzOju/mXmG4yKT68rRfero488sPOizahdmZeYa2D6/NNay1N/FyCgWFtBHkFcTb/LGdyzhiBIcDxTefmt/Ry4fmrO7MxOpnFOxOqtM3s/EKmzNuJg53i87v64mRvx/W9W9LKy4XP18TUSbmPJWex1dLksut43Y22syknFRw9+HhNLKH/CyPX3qNRBIa5BUXc8NkGbvliE8/8tIcZa2PYl5BOUzdH0rLz+WrdkYYu4kXnx21G6o7xAwLOTRvbvQWdm3vw8cqoxtvPVtS54kEnt/RrfW7akCBf9p88S2pWfo23q7Vm9YEkhgX74uxgB4CjvYmB7bxr1c/w0KkMNkYnMyEkEAe7sj+zPu5OTL+jD8eSs5m3+eJrUtZa89gPOxk/c3P9V0bUUmZeIcv2neSZn/bQ/42V9HtjJVO/38F3m2I5nJhxSQaK9g1dgMakeABKdFo0fi1DwCcYjm2CoU+cW+bOgW34ZVcCb/yxn9BOfvi4O5W7Pa01L/8SweGkDGbfN5BWXi4AONiZmDS8Pa8ujWTr0RQGWu5ka+rnnQkoBV1bNGFXXD3398tJJV158OHKwwDsbaoZmJtuJL5Wqn73XQvTV0VxODGTt27qQUiQD62bupz7Qp+2NJLvtxzjpWu64udR/vkUVVdYZGbhtjgu7+hH66au56abTIonR3Xk4Xk7WLL7BOOsAgHRsIrMmhd+3otHXgGh53G/uQVF/GwZdGL99zckyIcP/oYtR5MZ071FjbZ9KDGDE+m5PH5FhxLThwb78NafB0k8m4t/E+dqb3d2eCxO9ibuGNCm3GUGtvNmaLAPc8JjefCydjYDyMZq1sZY1hw6jb1JMSMsmoHtBtZoOx+vjGLpngSc7O1wtDfhZG/CycEOVwc7bu3fmiu6+Fe+ESCvsIj0nALSswtIzykgLbuAuNR
sVh9MYsuRFPKLzHi6OHB5Rz8c7ExsPpLMn/uMvs7ebo6EBPnwyjVdae5Z/WuhMZLAsBrOpaxJiyGkZYhRa3jwDzCbwWT8UZtMirdv7sE109fz5h8H+GB873K398PW4yzelcCTozpweUe/EvNu6x/A9FVRfF6LPyow0t8s3hnPsGBfBrb15v2/D5OeU4Cni0ONt1mRY/HxnM1x5Oa+rUjPLiD8WCEDdSEUZIOjW73ss7YiEtL5ct0Rbu3XmjsHlf0ivyckkNnhsSzYepzHSv2AiJpZc+g0p87m8toN3crMu6qbP91aNuHjVVFc37vlefvBzMkvIuJEOgPa1u5GrKp+2HKcvyJPMePuvrg6Xvhfxd9uOMpPO+IxKRhTBzesVfVX5ClSLYNOrPVs7YWrox3hMTUPDFdZnnYyonOzEtOHBvsCxlNQbu5bvZuTtOx8ftkVz019WtHUzbHCZe8f2o4H5mxnWcQpru/Vslr7uVBFnkjnnWUHGdWlGT1be/HB34c5cPIsXVo0qdZ25m4+xocrDzOwrTdNXBzIKywir9DM2ZwCohKNQWqjujTj1eu6EeDtWmb97PxCFmyN45sNR0lIy7G5j/Z+btw7JJAruvjTP7Ap9lbfNXEp2Ww+kszmIyksizhJfGoOCycPxsnernoHpBG68L+NLiDFI5Nj0q0GoOyaB2cOQ7PO55br6O/BlMuDmL46mpv6tuKyDiWDPq01P26L47Wl+xne0Y/HR5YNNlwc7bh/WDve++sQkSfS6dbSs8wyEQnpRJ5I57b+AahyauO2HE0hPjWHZ0Z3wtdSe7knLo3hpQLRuvDNhqP0OXMKV3cf3rulF7vj0lg00wkcMJqTqxkYHj2TRdLZXAa28y7389VWQZGZ5xbtxdvNkZev6WpzmfZ+7gzv6Me8Lcd4ODToorqzbyjztx4/N+ikNKUUT1/ZkQfmbGfxznjGV1DrUpde+y2SBdvimDA4kH9f17Vez/PRM1lM+y2S/EIz/14Syf9u7VVv+6qI1ppNMcn0aO2Jh3P5N4tHz2TxvxWHCO3kx4G4Mzz6w07+ePyy81KDbj3oxJqjvYkBbb1rlc9w9cEkerTyLFMr2KV5E7zdHNkYnVxuYJhoThZgAAAgAElEQVRXWIS9yegba23+1jhyC8xMHNq20v2P6NSMtj6ufLvh6EURGGbnF/L4/F14uTrw31t6YacUX66N4Yu1MXx8e58qb2d91GmmLY3kis7NmHlP/zLHOL/QzOzwo3y0MopRH6zlkRHBTBreHmcHO9JzCpi7KZZvN8aSkpXPoHbe3DEwAE9XRzxdHPByccDTxQE/DydaWlrpbAnwdiXA25Vb+wdwZVd/Hp63g1eXRPLOuJ41PTyNRqP+hVNKDVdKLVVKJSiltFJqYj3vr9TI5OJ+huFllp06Ipj2vm689EsEOfn/JKxOSMvhnm+38sLiffRp48XH43tjMtkOeu4eHIi7kz0zwkr2NdTaSINw8+fhPP/zvgqTAv+8Mx53J3uu6tacngGeKAW7jtd9c/LcTbG8/vt+Wjrl0rFtG+xMin6BTWnmZ1T1F2RVr2+j1pqH5+5g/MzNjJsRzrrDp+ulr8fMdUfYf/Isr9/QHU/X8n8Y7w0JJPFsHisiJQFzbSXnmAk7lMRt/QPKDb5Gdm5GrwAvpq+KPi9PFjqenM1PO+Jp7+fG3M3HuH/2tjpLWl+a1pp/Ld6Hk72Juwe3YdGOeBbtiK+XfVXm6/VHufPrLUz4ZitZ5eQENJs1zy/ai5O9iXfH9eTRPs6k5xTw+Pxd9T56vPSgk9KGBvsQnZRJ4tncam87JSufncdTy9QWgtHyE9Leh/CYMza/d5LO5jLs3TX0mPYXt34RzrSlkfy8I55DpzKYuymWkPY+dG5eeQ2ZyaS4b2g7dselsbOS/t8RCelk51eet7GwyMwHKw7x0/a4894/7vXfD3DkTBYfju+Nt5sjnq4O3DU4kN/2nOB4ctXSpUUnZTL1+50E+7n
z8R19ygSFYNwUTBoexKr/u5xRXf354O/DXPXROqYtjWToO6v534rD9A7wYtHDIfw4OYRHR3ZgwuBAru/VkuEd/egV4FVhUFjamO7NeWREEAu2xfHDlouvT2hpjTowBNyBCOAJwHZdcR0L9gomOi3a+IPzbg9uzeD45jLLOTvY8dbNPTieks3Hq6LQWvP9lmOM/mAtO46l8voN3Zj/0OAKmxo8XRy4e3Agf+47eS4FztncAh79YRevLo1kWAdfOvq789pv+0sEn8Wy8gr5c99JrunRAhdHO5o4O9ChmTu74up2AMqCrcd5ZUkko7o0w98+G5PrP01MI3obtaEbI6o3kGZbbCqHEjO4oXdLEs/mcc+3W7l5Rjhr6zBAjE7K4OOVUVzTowVjujevcNnQTs1o4+3KnPDYOtn3pWxdfCGakoNOSlNK8eQVHUhIy+HPWuT0PJtbUKX0UZ+sjsLepJj/0GD+O64nm48kc/PnG2uUeqoyP+9MYNORZJ4f05nXru/O4PbevPJrxHl/VvTSPSd4888D9Gnjxd74NB6et4O8wrLfI/O2HGNrbAqvXNsV/ybOBHiYeOPG7mw6ksyHfx+u1zLaGnRibUiQ0eRbk7Q1aw8noTVcYSMwBBgS7MPJ9FyOlLoGzGbN//20h4zcAsb1bY1ZGwOp/u+nPVz10TpOpOdyXxVqC4vd0q81Hs72fLvhaLnL/L0/kWs/2cC4GZs4lV5+EJxXWMSjP+xi+uponl20l8fm7+LseRr1vDziJPO3Hmfy8KBzTfEADwxrh73JxMz1lf8GpGbl8+CcbTjamfj63v64O1XcqNnC04XP7uzLvAcGYWdSzNkUS2gnP/54fBjfThxA/zrsFvL0lZ0Y3tGPV5dGVBrEN3aNOjDUWv+ptf6X1noRcF6GMAZ5BZGRn8HpnNPGYIrAEGMAig2D2/swvn8AX60/wq1fbOKlXyLo3caLv54czoSQtuXWFFq7f1hb7O1MfLkuhoiEdK77ZAPLI0/x4tWd+fqe/vznhu4kpOXweVh0mXWXR5wiO7+oRAf+PgFN2XU8rc6Cq4Xb4njxl30M7+jHZ3f2RuWm/fPUE6BXcCAAf++s3hMt5m4+hoezPe/c3JM1z4Ty1k09SDqbx72WAPFUVtVOd3RSps0v0iKz5rlFe3F1smPa9WX7uZVmZ1JMGBzI1tgU9p9oPM861dp4xOKZzDyOJWcRn5pNcmYe2fmF586H2WwkP19zMImZ62J4btEexs0I59mf9nC0jgOjwiIz6+ILGd7Bz2a/IGuXd/Sjva8bs2sYjJvNmnu+2crVH6/nyOny83fGnsli8a4E7hoUiH8TZ24bEMDcBwaRnJXPjZ9vZHMdptxIzszjjT/20y+wKXcONGrWP769D66Odjzyw06bN3j1YVNMMs8s3MPAtt7Mf2gw74zryfqoMzy9cE+JWsC4lGzeWXaQ4R39SgRnt/YPYHz/AD5dE82ag3WXVsvaqgOJ/Lg9rsygE2tdWjTB08Wh3Ocmh0efYcjbq/hkVVSZoHfVgSR83Z3o0apsNx2AYZbgJrxU2ppvNx5lfdQZXr6mK6/f2J2fpwwh4rWrWPHUcN6/tRfTruvKqCoOigBwc7LnjoFtWBZxymZfuBNpOTy7aA/tfd04npzFzZ9v5LCNm4ic/CIe+m4HyyNP8cq1XXluTCeWRZzimunr2V3Pgw5PpOXw/M/76NXak/8b3bHEPP8mzozr14qF2+NJyig/qM0vNDPl+x2cSMtl5j39Kv1+sDasgy9/PTmc3a+M5tM7+9rselVbdibF9Nt708LThSnzdlT4WRq7agWGSqlblFKfKKXWK6XOWppvF1SyTmul1DdKqRNKqTylVKxS6iOlVNPaFb1hWA9AAaBNCKQfh3TbTUEvju1MU1cHDp7K4K2bejDvgUHVuuCbeThzW//WLNoRz82fh5NXYObHSYOZfHkQJpNicHsfbuzdki/XHinzI/7zznjaeLsyoO0/h7pvoBf
pOQV18oO/cFsczy/ey7BgX2ZO6IdTYSZo8z/JrQHl4gVAZnoyq6r4A3I6I4/lESe5pV9rXByN0Wh3DmpzLkA8lpzNe9tySaqk+WjNwSTGfLSOwW+v4vpPNzB9VRQHTp5Fa813m2LZeTyNf19b9ZHGt/UPwNnBxHebYqu0fEOZv/U4l/13Nb3/s4IOLy2j8yvL6f/GSi5/L4xh766h3xsr6frvv2j/rz/p9PIyOv97OZf9dw33zd7GW38eZPXB05gU/Lb3BFe8H8ZTP+4mxkZgVVBkZvORZN5ZdpBZG8uv7bC2+mASqXm6zEACW0wmxT0hgeyOS2NPDX7YFm6PY3dcGoVmM08t3FNu+pvpq6NwsFM8HNr+3LTB7X1Y8shQfNwcufvrLSzZXbX0U5V5448DZOUV8vbNPc7dGPo3cebD8b2JSsrk1aUR1d6m2ayJTspg4fY4Xly8jzEfraPLK8t5cfE+mzdFh05lMGnudtr4uDLznn44O9hxW/8A/jW2M3/sPckrSyLO5XN7YfFeTMoYUFe6n+9rN3SjS4smPLVwd60fB2otKSOXR37YyQNzttPC07lMoGHNzqQY3N52P8PkzDye+HE3GbmFvP/3Ya76cB1hltywBUVm1h4+zcjOfuXeoLfxdqWVlwsbo//ZduSJdP67/BCjuvhzl9VANTuToqO/B+P6tWbi0HZVuum3dk9I4LnvJWuFRWYen7+LgkIzX9/bnx8nh1Bg1oybEV6ilvRsbgH3fLuFDVGn+e+4njwwrB1TQ4NZOHkwZjPcMiOcr9YdwVzHTcuZeYX8sfckk+Zup7DIzMe397HZPWTS8CAKiszM2hhbdiMYN7CvLo1g85EU3r2lB/0Cq1/T52BnqrA7UF3wcnXki7v7kZ5TwKPf76LgIk2pVd3BJy8DvYBMIB7oXNHCSqn2QDjgDywBDgIDMZp+xyilhmqtG1UGTJsjk8FoTu5xS5nlvVwd+e2xYdibTDXuqD15eBA/70hgYDvvc303rP1rbBdWHkji1aWRzLlvAEop4lOzCY9J5qlRHUt8ofdpYwRtu46n0d6v+km+ixUHhZd18GPmBOPHhYxSTz0BI8E1EOhWwIywaEZ1aVbpQJKF2+MoKNLcPTiwxPTiALFHK09umbGBibO28ePkwTY7zW8+kszD83bQuYUHV3dvwcoDiXy48jAf/H2YVl4upGTlE9rJj5v6tKryZ/Z0deCmPq34ZVcCL1zdGS/XikccgtEZ++edCcQkZfL06I40qaCDf13YHZfGK79G0K2VJyM7NcPNyR53Z3vcnexxdbTHbKlBzMkvIqfAeKGhra8bHZq5E9zM/dznOp2Rx1frjzB30zGW7E7gul4tuSekLTFJmaw5lMSGqDNkWPVLa+HpXOHo0PTsAl77bT9+LooruthuvittXL/WvPfXIeaEx1Y4wr+01Kx83l1+kIFtvZkQEshj83fx6ZponhxVMsg4cjqTX3cl8MAw41m21gJ93Fg8dSiTvtvOMz/toZmHMyGlBkBUx/qo0/yyK4HHRgbT0d+jxLzhHf14dEQwn6yOZnB7nyqNhI1LyeZ/Kw6x+mASGbnGeWjibE/vNk3p0qIJi3bEsXhnPPeEBDIlNBhvN0dOpucwcdZWXBzsmH3fgBLX8KThQaRmFzAjLAZvV0daNzWCojdv6n4ulZY1Zwc7ZtzVl+s+2cAj3+9k0ZQhtRqwUzwo760/D5BbaOaZ0R2ZNDwIR/uKtzkkyJe/IhOJS8k+d9NtNmue+WkP6TkFLHlkKKcz8pi2NJKJs7YxpltzRnfzJyO30Obgp2JKKYYG+7A84hRFZk1+oZknFuzG09WBd8eVDZRro3VTV8Z0b878Lcd54ooO50apf7Qyiu3HUvlofO9z39e/TB3CxFnbuPfbrbx3a08u6+DHvd9u5cDJs0y/ow/X9vxnEEu/QG/+fPwynvt5D2/+eYCuPiYOm2Jo4elMKy8XWnq50MzDqcRo3MokZeSycn8SK/a
fIjw6mfwiM95ujvzv1l609bU9wLCdrxtju7dg3qZjTAkNKvE9mHg2l5d+iWDlgUQeGRHETX0u7BRVXVs24d1xPXliwW6eXriHqaFBdG7uUWfXww9bjvP1hiNMv70P3cupza5v1Q0Mn8IICKOBy4E1lSw/AyMofFxr/UnxRKXUB5ZtvQk8bDV9GvBqJdu81dJ03CB8nH3wcvIynpkM4N8DHN2NRNc2AkMw+kHURoC3K9teHoWbo53Ni69ZE2eeurIjr/++n78iExnTvTm/WBJs39y3ZOAT7OeOh5M9u+JSa5wj7sdtx3n+Z6P5+FxQCFaPw7O623MyOmCPCHRiekQa22JTK0xzUWTW/LDlOEODfQgqJ3Dt0dqTR3o78fGuDKbM28m3EweU+PHYG5/Gg3O2E+Dtynf3D8LbzZFHRgSTlJHL6gNJrDyQyLHkbN68qfpf7veEtGX+1jh+2h7PQ8Pbl7vcibQcvtt0jPlbj58bxLA+6jRf3zuAduV8edZWRq4xIMC/iTPf3T+w1imJ/Dyc+NfYLkwa3t4qQDSeSNK8iTPX9mrB5R2bMbCdNxNnbeW5RXvp1tLTZo241ppnFu0h8WwuLw50qnIA4eHswC39WvPD1uO8OLZLlW+u3ltxiLO5hfznxm50bt6EVQcS+WR1NKGdmtE7wOvccp+sjsbJ3o7JlwfZ3I6niwMz7+nPuBnhPDxvB4unDin3uqxITn4RL/0SQTtfNx4ZEWxzmSeu6MDWoym89EsE3m6ODAv2tfmDnZFbwGdrYvh2w1HsTIoberekX2BT+rRpSntft3O1VU9f2ZGPVkbxzYajzN8axwPD2vFX5Ckycgv5cfLgEvkjiz13VSfSsvP5dE00jnYmQtr7VJiLr62vG2+P68GjP+yq0QjynPwijqdkc/RMFrM2HmXL0RQGtfPm7Zt7VPnGdWiwEaxvjD7D7Zaa6FnhRh69/1hqNbu0gGVPXsbX64/yyeoolkeewsFOMaxDxdkZhgb7snB7PJEn0vlpezzRSZnMfWBghflpa+r+oe34c98pft6ZwITBgWyIOsNnYdHc1r81N1rdwLZu6srPDw/hobnbeWLBbpo3cSY1O5+v7ulvcyCNp6sDX9zdj7mbj/Hen5G8s+xgifl2JkX3Vp5MGBzItT1b/PN9bqXIrFl9MInvNsWyIfoMWhs1qveEBDK6W3P6BTa1OUjE2pTQIP7Yd5J5m48xNTQYrTU/bY/n9T/2k19o5qWxXXhgWLsKt3GhuKF3K2KSMvl0TTS/7TlBez83runRgmt6tqCT5aYvPjWHXZbWjt1xaWTlFfL0lR0Z3c12f/b8QjPTfovkhy3HMSl4YsEufn/sMlwcz396HFXTvmZKqVCMwPBHrfXtNua3B2KAo0Cw1tpsNc8DOAkooJnWOssy3RfwLb2tUhK01mU6WCilMoFHtdazq1L+Tp066UOHDlVl0TImLp9IkbmIuWPnGhO+uxGyTsOUjTXaXl0oLDJz7ScbyMgt5O+nhzP24/U093RmwaSQMsve/fUWUrPz+ePxy6q9n+Kg8PKOfnxpHRQCRK2E78fB/SugzaB/pr/ZgoK+9zNoeyi9Wnsy677y8zKu3J/Ig99t54u7+1ZY+xQWFsZp9yCeXbSXm/q04oPbeqGU4nBiBrd9uQkPZ3t+mjykXhKS3vblJk6m5xD2zIgSX4ZFZs3O46nMCY9lWcQptNaM6d6c+4e2o6BIM/X7HRSZNZ/d1bdMCqO68NSPu1myO4GFk0PqtNN1seTMPMIOnaZbqyZ08i95h3wsOYtrpm+gg787CyeHlAn8vt1wlP/8vp+Xr+lCcNFxQkNDq7zfmNOZXPH+Wp6+smOZRMS27IlL48bPN3L/0Ha8cq2Rgig9p4CrP1qHk4Mdfzw+DFdHe6KTMhn94Voeuqw9L47tUuE241KyufGzjbg72/PL1KFlau0r8+7yg8wIi+GHhwadGzBhS+LZXK7/dAOJZ/PwcXNkdDd/xnRvwZA
gH0xKsXB7HO+vOMSZzHxu7tuK567qXOk1HpWYwfsrDrM88hT2JsWs+wZUeP0VmTWPL9jFukOn+f3xYQT6lLyRCQsLK3H+tNbc8NlG0rILWP1/l1dY+5STX8R7fx0i4kQ6x5KzSDybd25eE2d7XrqmS4Xpt2zRWjPwrVWEtPdh+h19iEhI56bPNxLaqRkzJ/Qrs62EtBzeXXaQZh5OvHyt7RRVxU5n5DHgzZUMDfZhY3QyDw5rV+k6NaW15sbPNpKRW8iCSYMZO30DXq4OLH10qM08l7kFRTy7aC9hB5OYeU//KtVmh4WF0T9kGCfTcjiRnsuJtBziU7NZEZlIVFImTV0dGD+gDXcNakOAtyupWfn8uD2OuZuOkZCWQwtPZ8YPCODq7i3o6O9e7RvrCd9s4cDJDBZMGsxrv0WyPuoMg9p58+64nuXWNl7IzmTm8VfkKf7Ye5LNR5Ixawj0cSUzt5BkyxN5nOxN9GjlSXpOAVFJmVzZ1Z/Xru9WYlR0UkYuU+ftZPuxVKaEBhHS3od7vt3KnYPa8NZNPc4tp5TaobXuX9+fqz7zGI60vK+wDgoBtNYZSqmNwGhgMLDKMv0MUPuHX9azIM8glsUuQ2tt/GEEDoE1b0FOGrh4Vb6BemBvZ+I/N3Tnti83MXnuDmKTs3nURn5EgD5tvPg8LIbs/MIqJ9bNyitk5rojfLwqitBOfnxxd7+yd5bFNYaupYISZ08cCs5y35C2vF9JstO5m4/h38SpSp23b+0fwKn0XN7/+zDNPZ25Y0Ab7v56C452Jr5/YHC9ZamfOKQtU7/fyZqDSbT1dWVjdDIbo8+w+UgyZ3ML8XC254Fh7bgnJLBErczSR4fx4JztTJy1jZev6cLEIW3rrPlh8c54ftmVwFOjOtZLUAjGY7zKq2UO9HHj7Zt78Nj8Xby/4jAvXP1PL5PdcWm8vewAo7r488CwdqxdW710D0HFeSQ3G81QFdU2Fpk1ryyJwNfdiSdH/XP9e7o48L/benHX11t4848DvHlTD6avisLZwY5JFdT8FgvwdmXmPf2546vNTJ67nXkPDqpyotv5W48z05JAvaKgEIz+hmueCSXs0GmWRZxi6e4TzN8ah6eLAz5ujhw5k0X/wKZ8c+8AegVU7bumg78HX0zoR0RCOnmFRZX237IzKT69ow/Z+UW4VTIqFIwm18dGduCh77ZX+rSaz9ZE8+3Gowxo25TLOvjR1seVQB83An1cCW7mXqNE30ophgQZgVtmXiGPzd+Fj5sT/x3X0+bfVysvF6bfUbWcen4eTnTy92BjdDJdWzTh2TGdql2+qlJKcf+wdjyxYDfjvggnI7eA7x8cVO4xcXaw45M7+pBfaK60ud2au5M9Hfw96GDVneGZ0Z3YdCSZ78KP8dX6I3y5LoZ+bZqyLyGdvEKzMXL+2i6M6uJfrWbn0qaGBnPHV5sZ/eFaXBzseP3G7tw10HYqosbA192JuwYFctegQM5k5rE84hSrDybh7eZIrwAv+gR40am5Bw52JgqKzHyz4SgfrTzMqA+MG92JQ9oSceIsD8/dQXpOAZ/e+U9XgEnD2zNz3REu7+jHVeXUMtaX+qwxfA94BnhGa/2+jfmfAo8AU7XWM2pYBneguF0mHHgHWAqkaK3L/PoopSYBkwD8/Pz6LVy4sCa7Ze3ZtSxKXcQbrd7A094Tr9S99N7zCnt7vEKKT9lg3jMtgkJ7d7Lc29Zof9Uxc28e4ScKcbKDj0e44mxf9g9ud1IhH+3M48WBznTyrvjHLb9IExZXyG9H8snIh0HN7XighxOOdja+cON/p0P0V2wc8h0Fjv/0jRiw9VGyXVuztdPz/F9YNq09TDzdzxlXh5LbSMo289y6HG4MduDG4IprZDIzM3F3dzdyOu7PJyyuEDdLy+m/BrrQyqP+BtwXmjXPrs0hPV9TPIDTz0XRxceOrj529Pazs3ncAXIKNV/tzWNnUhGXtbLn6nYOZBZoMvKNV2a+xqTgikA
HnGwcY1sSs8y8Gp5DmyYmXhjojKkBHz04KyKPtfGF/F8/J3r42ZNVoHk1PAet4bUhLrg7qnPnrjqKr9mHezkxuEX5wUNYXAGzI/OZ3NOJkJZll1twMI/lsYWM7+TIwkP5jG3nwK2dql77t+VkITP25BHS0o5JPZwqDOwLzJp5+/NZG19Idx87pvZ2KnPNVya/SBOZXMS2U0UkZpm5qp0DA/xtdyk5X2ydP601/w7PpaBI89ZlLjavwaRsM/9an8OAFnZM7lm3N21r4wuYFZFPZ28Th1LMPD/Qmc6VfLdV1cJD+aw8VsC0IS60dK/fRB6FZs0za3NIy9NM7OZIaEDd9kmuyt9eSq6ZNXGF7EgspFNTO65o40DrOvo+1Vrz6e48isxwd1dHfF0adWKUGjmdbWbegXz2nC6ipbsiKVvj5aR4vI8TbZr8c80WmjWvb84lOcfM60NdaOpsYsSIEY2+xrA4MkgvZ37x9NpUsfWnZD/H1yyvOcDE0gtrrWcCM8FoSq5Oc5Y115OuLFqxCL+ufgxpOQTyB8K+1+jpmQWltxm7Eb57FTxawGM7wb56TVDV1a1fHle8H8bV3VswZpTtDO09s/L5aOffaJ+2hJbTt6qwyMzinQl8tPIwJ9LzGRLkwzNXdaJvmwoGk4dthmgYesU1YGd1acW0xM3ekWuuHAH+J3liwS4+O2DPnPsHnnsaC8Dbfx7AznSUF28bXunzSa2bsy4bbmbq9zvZFJPMvAcHVbkmpTbe9DvJishTDG7vw9Bg32qNNL9qpOajlYeZvjqa9Qm2E9Ymm5ry5YR+ld6d5xeaufWLcJwcC5k1+TKbgwTOp0FDirjhsw3MPpjPn1cN5t9LIkjLy+Gnh0PODXwq3RRZFcPNml+OhbEtzYkX7hhic5mUrHyeXBfG4PbevHDHYJvB0+ChRdz42UZ+PJSBm6Mdb9wdWuljy6yFAq7Nonj/78MM7NKKJ0d1sLmfk+k5TJm3k93x2UwJDeKZ0Z0q7YNVntE1Wqv+lHf+XvI7ycPzdpLRtCM39C47qOvBOdtwcsjn44mhNKvB84crEpSSzayINRxMMfP4yGAeHl13NXuDhxaRll1w3p6T+0HLRA6czGBqaFCd3wBU9W/v5jrda0kjRtTjxhuJW67W/BV5itd+209IkDvTb+9j83uoXY9Mrp2+gUXxrsy9f5CNLdWPhnwkXvEVX+Px81rrMKvtnDfFI5MPpxw2AkNHV2jR2xiAYi09AX661xiZmx4Hu+dB//vrtWx+Hk6sfPryCh9v5e3mSFsfV3aVk6QzKSOXO7/aQnRSJr0CvHjv1l4lEpaWKycVnDxLBoVgfP5MI03ENT1b4Opkx5R5O7jti03MfXAQrbxcyC0o4sftcYzu6l/th9bb25n4ckK/Kjd91YWxPVowtkfNns9qMimeHt2JYR38OJmeg7ebI01dHfF2M14/7YjnlV8jeHHxPv57i+3msGIf/H2YPfHpfHF33wYPCsF4lONnd/bluk83nOsr9/I1Xc4FhTVlpK5py+u/7yciId3maL3/Lj9IZm4h/7mhe7nHzNnBjg/H9+amzzcyaXhQtYLCYo+ODOZochYfr4pi4fY4hgb7clkHX4YG++Lr7sSWI8nnchLOuKsvV9fwOmlsRndtTid/Dz5ZHc11PVuWaB5ccyiJlQeSePHqznUeFILR1B/czJ2mrg5V6odaHc4OdjT3PH8DAEZ29mdk56rnQRSNj1KKMd1bMLprc5Si3O+rID93/n1dV15cvI+vNxw5b+Wrz1/R4hrB8sZbNym13HmhlLoOuK5ly5o/l9Lb2Zsu3l34JuIbxrQbQ3O35kbamq0zoSAXHJyhMA8W3gMFOfDQaljyCKx7H3rfBfb1+3zRqnzx9mnT1DK6TJe5KN/4/QDHk7P54u5+XNXNv+p3rdkptvtYOnvCmahz/x3RqRlzHxjE/bO3ccuMcOY+MIg9cWmkZRcwoVSKmqpSSp23oLCulDc6e8LgQE5
n5DF9VRR+Hk48N6ZsVqjiTvzfbjzKHQPbVDhQ53zr4O/Ba9d34/mf9zGqS80V0ioAACAASURBVLM6G2l4a//WvL/iELPDY0s8WzgiIZ0v1x3htz0nmDS8fZlUMKV1adGErS+NwqOG14tSindu7smgdt6sPXyav/cnnnukXefmHkQnZdLG25X5Dw0u0Y/rYmcyKR4dGcxj83cZiZV7GtdkXmER//ltP+393LhvaP2NOl30cAjODna16gMnxPlUlb6Vtw8IIOxQEu/9VbPBsjVRn39BxZ+ivOykxbd19ftMpVK01r9prSdVt4+TNaUU7w5/l/yifJ5Z+wwF5gIj0XVRPpzYZSz057OQsB1unAF+nSD0RTgbD7vm1tEnqZ0+bbw4nZHHiVIJcNdHnWbpnhNMHRHEmO7Nq9eUkZNaduAJGIFhbsn4f0Bbb36cFEJBkebWL8L5PCya9n5utcoTdzF5alQH7hjYhs/DYsokj95xLIWx09fz7caj3BsSyKvX1c8oydq4rX8APzw0iOl39Kmz5rAmzg7c3LcVS/ec4ExmHmsOJXHHzM1c+8kG1hxMYtLw9jx9ZfnJkEtvqzblcrQ3MX5AGz6/qx87X7mSJY8M5dmrOuHt5sgNvVvx66NDL6mgsNjYHi1o7+fGJ6ujzj1Z55sNRzl6JotXr+tWrUES1eXl6mgz1YoQjVnxjaiPW/1WKFmrz8CwuO/faKVUif1Y0tUMxXi+cdkHDdcjpdR1SqmZmZnlPyKrKtp5tuO1oa+x5/QePtzxoVWi602wYzbsnAPDnoau1xvTg0ZCwCBY/4FRm9jA+gQUJ7r+pzk5t6CIfy+JpK2PKw+X0/ewQjkpJZNbFysODEsNdOrasgmLHg7BzcmemNNZTBgc2KCd6i8kSineuLE7V3Xz5z+/72fpnhPkFhTx9p8HuPWLTRQUmfnhoUG8dkP3C/LH0Bgp6lujUaYVuTekLfmFZq54fy33zdrG0TNZ/GtsZ8JfHMm/xnb5f/bOOyyKq+3D97D03hSlqAgIIiCKil1sicZeojGJMTExRX3TTUx9zfvFdI0xPcY0U4zGbjSxYi9YsSIgSAeR3svO98dhqbuwIIiJc18XFzpzZuYsuzvznKf8nlb5W6gMJLq72TJvqCe/zunLkmndW1zI/HZFZSDxn2GeXE7JZeelVFKyi/hsTxQjfZ0Y0qX5JZoUFO4E7CyM+ex+/Srpm4MWi73JshwtSdIORO70PODTarvfAiyArzUahrcKWZa3AFu8vb3n3Oy5RnUaxenU06y6uIoebXsw0rELnP0NMmPBYzgMe71qsCQJr+GqiXDqJ+hz05e/KXzaW2FiaMDpuKzK8viv9kUTk57Pqkf7NO0BW5gJdp3qbje1AbkcSvLBpKantpOjBX882Z8/TsYzvbdbE17JvxdNH92HvjvOC2vO4GpnTkx6PvcHd+DVe7o22GD+34iXkxUTAp2JSsvjv+N8GRvg3KJeKIXGMy7AmU92RfLpnkjcHS0pU8u8Meb282orKPyTaCkZMm006skiSdJEYGLFfzXCOsGSJP2gGSPL8sPVDpmLkJFZLknScOASEAwMRYSQX2vSrG8jXuz1IufSz/HmoTfxdulOh7NrhXE05VswqGVcdQ4Bt75wYAn0mClyEVsJI5UBAa42lR7DmPR8vtgbzfjuzk0XXy7MrNn1RENFWzyKsusYhgDtbEx1ai7e6ZgaqVjxUC/u++YoWQUl/DS7D4PvcM/LJ/fdupWzQuMxVBkwd6gnL/0RzvnEHJ4e5kkHB/2r9hUUFFqXxi61A4FZFT93V2zrVG3brOqDZVmOBoKA7xEG4QuAB7Ac6NcafZKbK5SswUhlxEdDPkJloOL5sjiKzB1g+s/ac+0kCYa+ArnJItTcyvToYMf5pByKy8p5Y+N5TAwNeH1s/R0gdKIurxD41hFKhjp5hgr6YWMmuh/sf2noHW8UKvwzmNTDBTd7M1xszXgqRHsLQAUFhduTRhmGsiw
vkmVZqu9HyzEJsizPlmW5vSzLxrIsd5Rl+RlZljOa72U06jXcdPFJbZwtnXl34LtEFCTz7oAHoZ2/7sHuQ6BDf5FrWFrYbHNoCj3cbCkpU/P+9ggORqWzYJQ3ba2a6MUsygZk3cUnlWMUmoKRykDv/sIKCq2NkcqAP57sz4Z5/Vul16uCgkLTUZ40zcQg10HM8Z/D+qgNbI/ZrnugxmuYlyKKVDSoy+F6BJxfB3G3ph5Hoy333aEYAlxteCC4aVIxQFU7PK0ewwoJG8UwVFC4Y3CyNm36QlNBQaHVuOOy15tDx1AXcwPncjzlOP935P/o3qY7zpY6ruE+GDoOFF7D1AuQeh7SLkFZhXSMsRU8fxFMtfcTbi7a2ZjS3saU1JwiFk/0b3JnBqABw1DxGCooKCgoKPwTuOM8hi0RStZgaGDIu4PeRY2aVw68Qrm6XPfgYa9DQTpEbAMTa+j9mNA8nPodlOTC6Z+bfX7amBviwetjfPF31aVDrieVhqG2ULLiMVRQUFBQUPgncMd5DFsaNys3Xgt+jVcPvsrK8yt5POBx7QM79oNXk8DQVISXq3PsGzj2FQQ/UbeyuZmZ2a9T85yooCJlVKvHsMLzqRiGCgoKCgoKtzV3nMfwVjC281hGu4/mizNfEH49XPdAI7O6RiFAv7mQdU14E/8paDyG2opPVEZgZAFFWbd2TgoKCgoKCgqN4o4zDJtbrkbHNXi97+s4mTvx8v6XyS9tpIa39xiw6QBHvmiZCbYEhRUeQ1MdIWlTG8UwVFBQUFBQuM254wzDlswxrI61sTXvDnqXpPwk3j32buMOVhmKMHLc4arey7c7hZnC+NMV+tbSL1lBQUFBQUHh9uKOMwxvJT2dejLHfw6bojfxd+zfjTx4JhhbwtEvW2ZyzY2uricaFMNQQaEGp1JPsSJ8RWtPQ0FBQaEGimHYwjzR/Qm87LxYeW5l4w40tYEeDwpdw5zklplcc1KQob3wRINiGCoo1GDFuRUsP72cnJKc1p6KgoKCQiV3nGF4K3IMq2NkYMQY9zFcyrhESn5K4w4OfkIIX4d92zKTa04KM7UXnmhQDEMFhUqKy4s5kXICgAvpF1p5NgoKCgpV3HGG4a3KMaxOiFsIAPsT9jfuQPvO4DMGTnzX6u3zGqRQ8RgqKOjLydSTFJULQfvz6edbeTYKCgoKVdxxhmFr0NmmM25WbuyN39v4g/s+JYyus6ubf2LNSWGmfoahLN+6OSko3KYcSjyEkYERzhbOnEs/19rTUVBQUKhEMQxvAZIkEeIWwvHk4xSUFjTu4I4DoF2AKEK5XY0qdbkw+hoqPpHVUHJrQvgKCrczhxIPEeQURJBTEOfSzyHfrt9tBQWFOw7FMLxFhLiGUKIu4UjSkcYdKEnQdy6kR8DGpyB8DWReu72MxMIKfcKGPIaghJMV7nhS8lOIzo5moMtA/Bz9SC9MJ7UgtbWnpaCgoAAoLfFuGT2cemBlbMXe+L0M7zi8cQf7TYGoXXBpK5z9TWyzcoYOwdB5KHS/DwxNmn/S+lJf1xMN1Q1DG9eWn5OCwm3KocRDAAxwHkBhmcgdPp9+nnYW7VpzWgoKCgrAHWgYSpI0Dhjn7Ox8S69rZGDEQJeBHEg8QLm6HFVjeiAbGsPUlSJkm3oB4o9B3FHxc2ED7P8Ihr4CAdObr7dydiJYO2tv2Vebwnr6JGtQPIYKCgAcSjpEW/O2eNh6UKouxdDAkPD0cEZ0HNHaU1NQUFC480LJrVGVrGGo21AyijKanmxuoIL2AdBnjjAUnzsPMzcIT93Gp+DL/sKreLNh5qhd8LEvXN6q33iNx1AxDBUU6qVMXcbRpKMMdBmIJEkYq4zxsfNRKpMVFBRuG+44w7A1GeAyAEPJsGnVydqQJPAYBo+Hwr0/groMfn8Avh0ByeFNO2dpIfz5gvj35T/1O0YxDBUU9OJc+jlyS3MZ4Dygcpufox8X0i9Qri5vxZkpKCgoCBTD8BZibWxNkFM
Q++L3Ne+JJQm6TYS5x2DccsiOh58mQHpk4891YAlkxoKjt/AcqtUNH1OgRyhZs08xDBXuYA4mHkQlqejr3Ldym38bfwrKCojJjmnFmSkoKCgIFMPwFhPiFkJ0djRxOXHNf3KVIQTNgke2i7DzqsmNa6d3/QocXAYB98HA5yD/OqScbfi4wkxAAlNb3WNMrCvGZuk/HwWFfxmHEg/h7+iPtbF15TY/Rz8ARc9QQUHhtkAxDG8xQ9yGABAaH9pyF3HwgAfWiqKQn6foZ4zJMvz5PBibw11vg2dFInzkroaPLcwAM1swqOfjpDIEY0vFY6hwx5JRlMHFGxcZ4DKgxvZO1p2wNLJU8gwVFBRuCxTD8BbjZuWGp60n+xKaOZxcG+ceMH0VpF+B1fdDaVH948PXQOwBGLEILNuIH+ceELWz4Ws11PVEg9IWT+EO5kjSEWRkBroMrLHdQDKgm2M3xWOooKBwW6AYhq1AiFsIJ1NPkl3cwkaSxzCY9BVcOwTrHxNyN9oozIQdr4FLL+j5cNV2z5GQEFaVQ6iLwsz6u55oMLWBotsglFxeBmmXWnsWCg0QlhLGmPVjSCtIa9LxsixzPv08Z9LONPPMmsahxEPYmtjS1b5rnX3+jv5EZkZSVNbAAq6ZKVOX8UHYB1zNunpLr6ugoHD7ohiGrcAQ1yGUy+UcTDzY8hfznwp3vwuXtsDWZyE7oa6cze7/QcENGPtxzXCw10jRxu5qA1XUBRn/LI/h+T/gi74Qd6x5z6tWw8GPIUMpImgOfrrwE3G5cWyI3NCo4wpKC/jjyh9M3zqdGX/O4JG/H+FC+oUWmqUgpySHbVe3sfDAQj4M+7BO60u1rOZw0mH6OffTqmHq5+hHmVzG5YzLLTrP2hxPOc6qi6tYHXGb92JXUFC4ZSgC162Av6M/9qb27Ivfx5jOY1r+gv3mQl4qHFoGp34CSyfhHXTpCRZt4MT30PcpoZFYHZcgYfBF7hLdV3RRmAltvBueh6kN5CTd3GtpDlIqQnYHPhK5mM3F5S2wa5H4e4z8X/Od9w4kNT+V/Yn7kZDYELWBOQFzMJDqX8dGZUaxOmI1W69uJb80Hy87Lxb2WcgPF35gwf4FrBm7Bkvj5tMvjc+JZ2/8XvYl7ONU6inK5DJsTWzJLs5mf8J+3hv8Ht0cugEQkRHBjaIbdcLIGvwd/QHRASWwbWCzzbEhdsTuAGh8q04FBYV/LXecx7A1Ba41qAxUDHEdwsHEg5SWl96ai45YJPQOR38o2uilR8Ce/4MtT4NVexj6at1jDFQiHN2QbM0/LcdQI+MTuQOS9ai61ge1GkLfF/9OPNU857yD2RS9CbWsZl7gPBLzEjmafLTe8Wevn2XqlqlsiNzAMLdhrBq9inXj1vFA1wd4b9B7JOYlsvjY4mabX1hKGGM3juXDEx+SUZTBrG6zWDV6FaHTQll590oKygp4cNuD/HD+B9SymkNJog1ef+f+Ws/X1rwtbc3bEp7eRP3RJlCmLmNP3B5MVabE5sSSmJd4y66toKBw+3LHGYa3CyFuIeSW5rInfs+tuaAkiWKS4Mdh8tfwn5PwcqzonPLwVjCx0n6c50jIT4MUHQ+srHgozgFzx4bncNsYhlfAYziY2Ih2gs3B5S2QdgFsO0DSad35nAoNopbVrI9cT3C7YB72exgbExvWR66v95gvz36JtbE1O6bu4J1B7xDYNhCpop1jkFMQT3Z/kq1Xt7I5erPW40vVpSw7uYwHtj1QJwysjV8v/YqtiS3bJm9jw4QNPBv0LIFtA1EZqOjdrjfrx68nxDWEJSeX8MTOJ9gRuwMfex8czXR/TwIcA3RXJstylZB8MxGWEkZmcSZzAuYAitdQoYmoy+H3mRDxV2vPRKGZUAzDVmKQ6yA8bT1ZcmIJhWWFrTMJMzvhEXTw0D3Gc7j4ras6ecfrYGgG3e9r+HqmNsKI1Ec0u6UoK4asayJM3meOyL28HnFz59R4Cx28YMhCKMkTxqe
C/uRdr6ycP5Z8jMS8RCZ7TcZEZcK4zuPYHbebjCLtRVDnrp/jUOIhZnWbhYOZg9Yxj/s/TpBTEG8ffZvY7Nga+1LyU5j912xWnl9J+PVwtsVsq3eqGUUZhMaHMrbzWNys3LSOsTGxYWnIUv7b77+cvX6WSxmXanQ70Yafox/xufFkaSvQCl8DS3wgr2mFONrYcW0HZoZmzPSdiZO5E4eTDjfbuRVamSt/w5lfIfNay18rajdc2izyqxX+FSiGYSthZGDEa8GvkZyfzIrwFa09Hd1YtoX2gRCpxTC8ug8uboRBz4Ot9gdkDUxtRDFLSZ5+147aLR6IzUnGVTEHxy7Qdy4YmcGBpTd3To23cMjL4NZHbEs8efNzvVMoL4Uv+8FeEepdF7kOGxMbhncUi5IpXlMoU5exJXqL1sO/Cf8GGxMb7vPRvThRGah4b9B7GKuMeWn/S5SUlwCwP2E/U7dM5UrmFT4Y/AFd7Lqw+vJq5Hr6jW+N3kqZXMYkz0n1vixJkpjaZSqrx65mvMd4pnSpJ0+XanmGN7R4DS9vgbKiZvtclanL2H1tN0Nch2BmaEZ/5/4cTT6qtOX7N1BeBmsfho1PwScB8LE/bHgSTq3CrCBJ7G9OTv4gfscfVQrv/iX8ow1DSZIWSpJ0XJKkbEmSbkiStEOSpN6tPS996dWuF2M7j+WHCz/U8WLcVnhpka0pL4PtL4NtR+j/tH7naUy/5LJi2PAErJ8DV3Y0fs660HjyHL3AwgGCHoFza5t+Q1OrIfQ94S30mwz2HiJErccD/D97/sPPF39u2nX/TSSeFF12Yg+QWZTJ7rjdjOs8DhOVCQCedp50b9OddZHr6hhsl25cIjQhlJldZ2JhZFHvZdpZtON//f/HpYxLLD25lKUnlzJv9zzambdjzbg1jHYfzX0+9xGRGcGZ69olbmRZZkPUBgIcA/C089Tr5XW26czigYt1ehc1+Dr4IiHV1TMsL4OY/eLfSaf1umZDaMLId3e6GxC5j7kluVy40bLV2wq3gLSLUFoAw16H0R+Ac6DIp948n+DjT8HbbWFZAPw4Djb/R6TTJDVR0iknGa78VVWceO6P5nsdCq3GP9owBIYBXwODKn6SgV2SJHVs1Vk1ghd6vYCJyoR3j79br5eiVfG6q65sTdi3cP0S3P0OGJnqd57GGIbn1wtjwaKtMBCzExo/b21oDEOHiod6//+IIptDy5p2vkubxY14yMtgoKIcGVx6NGgYFpYVsi8+lF2Xf2/adRuJLMtsjt5MSn7KLbleo4iuyLNNOcfmK+soU5cx2WtyjSFTvKYQkx3D6bSahtHX4V9jZWTF/V3v1+tSwzoMY4bPDH659Avfn/+eaV2m8cuYX+hoLW4ZY9zHYGlkyerL2uVbzqefJyorioleExv5IhvG0tiSzjad6+YZJp2u+s409QFeC00YWVMlHdw+GAlJCSf/G0gIE7/974XgJ0SjgxejYO5RLnvPhwHPgGsvKCmAiO2iCPHbEXC2CfeiMz+DXA5DX4OOAyF8dV05NIV/HP9ow1CW5btkWV4py3K4LMsXgdmABNzVylPTG0czR+YFzuNw0mF2x+1u7elop7psDYh8sL3viPxEn0bI7ehrGMoyHPsSHL3hkW1QXgJ/zBYhx5slPQqsXcCkoirduj30eFDk42iR0rlReIPH/n5Mu76cWg373q/0FsZkxxCyJoRtdm0g9QKU6s4djU05jQxE5MTekgXBTxd/4rWDr/HJqU9a/FqNJnovGBghq8tYH7GGgDYBeNl51Rhyd6e7sTCyYF3kusptERkR7I7bzYO+D2JlrKN4Sgsv9HqBKV5T+HDIh7zR741KzySAuZE5EzwnsOPaDtIL0+scuyFqA6YqU0Z1GtWEF9owfo5+nE8/X+MzoY7ew1FTU1Z59KY86fRNP3irh5FNDcWizs7UDl8HX6UApbmRZeFViz3UuL71N0PCCSFDZlvNP2JgAG27ktJ+JIz4L0z9DubshgV
RsOAqdOgLGx6HfR/o//lSq4X8mftgkaceMA1uREGSosrwT6dRhqEkSVMlSfpUkqQDkiTlSJIkS5JUrzKqJEmukiStlCQpSZKkYkmSYiVJWiZJkh76Jo3GHDAC6t7Rb2Pu87kPLzsv3g97X6+KyFtObdma3W9BaT6Mel9UO+uLvoZh3FEhIxP8hAj5jvsE4o/Bnreb/ho0pF8R56zOgGdEZd3hT+sMD0sJ41jKMZ7d+2zdTjXVvIVF6lJe2PcCWcVZhBnKoC6r0kvUQnSkKHDIkyDh2v6bfln1cSTpCEtPLsXYwJjQ+NDK/LrbgsIsSDwBgfdzxsSYqwXJTPGqm4tnbmTOPe73sCN2BzklOQCsOLcCCyMLHuj6QKMuaaIyYVH/RTqNu+ne0ylTl9WphC4sK2R7zHbu6nSXMERLmv+76u/oT0ZRBkn5SWQWZfLD+R8YF7uaOe3b8oE6lY1SPuTenIFxIvUEmcWZ3NWp5vq5v3N/zl4/S56+OcB3CgknYc9i/YrmCjLE2DUPwZcD4R0XWOoDP9wD34+C4txbMN8wcO2t/73ZwgEeXA8B94k8303z9VuEX90LWXHQc5b4v+8EUJk0f164wi2nsR7D14H5QCDQoOiVJEmdgRMIT95x4GPgKvAMcESSJO0lhE3nIyAJ+EfVzRsaGPJa8Guk5Kew4lzLFKKUq8s5nnycxLzEpnmoNLI1p36E0z9D8JPQpkvjzlFpGDbQFu/Yl2KsptLZfyoEPSzCvTeTbyjLQsPQsda87TqJ1e6J74U3tBoRmRGoJBWpBam8fvB11HLFw0HjLXTsAn6TeT/sfSIzI3EwdSCyrOLBmnBC51SuJodV/vvyhWbuOlGYWVlpHZ8bz4L9Cyrz3PJK8xrUBLylxOwXaQrd72OdQzvMMdBpsE3xmkJReRHbr24nOiuaHbE7uN/nfmxMbJp1Su427vRt35c1EWsoU1cl6u+6tou80jwmek6Ea0fgPTfhCWpG/Nr4AbBg3wKGrx3OkpNLcCzO51273vSw8WS5vS15cTfn1dsRWzOMrKGfcz/K5XKOpxy/qfP/69j5Buz/AA434G1Xl8PaWUI4P+W8iEb0fAju+UgsbrPiRF52S1KYCTciRZSnMRgai/apQxaK8PAvUxtewJ/8QbRC7TpO/N/MFrxHiTzDJkR3NAs+hdansYbhc0AXwBp4So/xXwJOwNOyLE+UZXmhLMvDEAaiN1BDcVaSpEUVXsj6fqZqu5AkSf8FpgATZVluJf2XphPkFMR4j/H8cOEHYrL1K4SQZbnKUGmAXy79wqM7HmXUulH0/60/s7bPYvHRxfxx5Q/9etFqZGv+fEFUKg9pwg3O1Fb8ru+GkxUPl7aKVahxtWKCUe+Bk9/N5RvmpUJJbl3DEGDg86Lqc91sOPKFMFgKMojIiMDdxp0Xe71IaEIo351YBrEHIfTdSm/h9ms7+OPKH8z2m82IjiOIyr2GbO2iO89QXU5UbhwukgkqGS4nHm3evJxN8+GbEApyEnlm7zPIsszyocsZ1mEYlkaW7Lq2q/mudbNc3QvGluS29eFvEwNGF5VjbmSudaivgy8+9j6si1zHN+HfYGpoykzfmS0yrfu87yO1IJV9Cfsqt22I2oCblRu9nHqJvEh1Gfz9SrPKL3Wx7YK1sTWx2bFM857GhoDn+TE5lbF+s3i57+tkGhjwzZWmLyTK1GXsjtvNYNfBmKlMRZX++XVQlENgm0DMDM1u/zzDCxuFNy7/FgSGMmNFr3kzOxGxqC93+MBScd8Y9wk8fUp0VRr9npDFCnoYBr0IZ34R+dMthWZ+rk2owZQkGPoKTPxS3OO+G6W7U1VeGkRsg8D7wbAqFYOA6VCQLtJDGkH49XAGrR7U4q0rFfSjUYahLMt7ZVmOlPVwOVV4C+8CYoDPa+3+L5APzJQkqXop4WdA1wZ+/tZyrcXAf4Dhsizrjt/d5jwX9BymKlPeD3tfr/E/XfyJ4WuH1w1x1qK4vJgfLvx
AYJtA3uj7BmM6j0Etq9kcvZm3jrzFjD9ncKPwRv0X08jWyOUw4i0wtdb3ZVVhUnFMfYZh2ApAFjfT6hiZwb0/VOYbSuomSC7ULjypTpsuELIQUi+Kh/2P4+ADdyLi9uOdmcz9u5cxOr+ITy98x/HVk4UHwbU319x6sujwIgLbBDK/x3y62HUhvzSfZGc/3Q+RpNNcVcn42njgburA5fJcYWTWR14a5OpROJIeBZf/RC4t4I2/nyA6K5oPB3+Im7UbxipjhrgNYU/8HkrVt6jjTkNE7wH3wWyP20URaqbeSNX5MJIkiclek7mUcYltMdu4z+c+7ExbIiMFhrgNoZ1Fu8oilPiceMJSwpjkOUkIZyeEibBZ8lkIb74CIiOVEZsnbmb3tN0s7LMQz+RLQie0Q1+6tQtifJkRP+dGEJ8T36Tzn4zaRkZRBnelxsLHfrC8h8jfPbwcI5URfdr1ub3zDEuL4O9XIfUc7P+w5a93djUgwaytYNkO1j2mPRx87TCEvgN+U6GHjsXKkJdEK9Ktz4oFcEuQcELM16Vn088ReD88uE7M8cfx2rUzz/wiFkaaMLIGz5HCiG7kd+Jg4sEaHYIUWpeWLD4ZVvF7hyzXdGvJspwLHELkBPattj1dluXLDfzU+FZKkrQEeAJhFDZTf7PWwdHMkTkBcziUeEh3B4QKCkoL+Pbct6QXprPy/Mp6x26K2sT1wuvM6zGPad7TeL3v66y6ZxVH7j/Cj6N+JLs4m5f2v1QjbKaVfvMg8AGxKmwKKkMwttJtGJbkw8kfwWes6CBSm2r5hp1if2v89SulanSEwEMWwkvR8GIkzNxA1rDXSFNJeKslJMcuLOo8hY4m9izo4EHa47spnrWZFw+8jJHKiA+HfIiRgVFl0USknQtkxtSU+Kmg+MpfxBsa4uHcGx+nHlw2Nq7fi6AuFzfo70ZBWQP5gUc+A5UxKzv4sqPgGs8Fzqe/S1UbtpEdRpJdnM2JFN1h7ltGgExg4wAAIABJREFUxlXhkfEYxrrIdXSxdKNbSUm9IfgxncdgqjLFVGXKLN9ZOsfdLIYGhtzb5V6OJh8lJjuGjdEbMZAMGOcxTngIE09C4Axw7gm7/yc+u82Eg5kDZoZm4j/Re6DTgEqvzDP2QRip1Xx0ogkde3a8zo6/n8ZMrWZQ7ClwDRJhTtfecFF0hOnn3I+43Djic1vIcLlZTqyEnERhYIWtbFndPFmGs7+B+yBo5wdTVojPa+1wcEGGMBhtO8LYj3Xn9qmMxDnU5UJXsCU0IxPCoK2v7k5W+tI5RHg8cxLhpwmQX81xoCk66TigbjqRoTF0mwyX/2xUPuXJVLGIPpWqFK7cDrSkYehd8TtSx37N9kYmqlUhSdJnwOPA/UCqJEntKn5arxHyTTLdezrWxtYNil5viNpAVnEWvg6+/HrpV50yJGXqMr47/x3+jv4Etwuusc9AMqCnU09e7/s6x1OO8+npusUXNQiYBhO/EBVuTcXURkhuaEvcD/9d5B/2rSdLwX8qBD5Ih7h1YpXeGNKjwMgCrJ3rH2fZFjyGEeE1BADvu5fAfb9gPuo9Ph79PYVyOQvOfsr7Jz7icsZlFg9YTDuLdgB42gpvZKRphSNcS9/k2Ks7UEsSHo5++LQNJM1QxY2L63WHky9sENJAmTFw4jvd8867Dmd+5XC3u1muymd0Xj6zSoxqDOnv0h8zQ7PbI5xcIVMT4ejOxRsXmew9HUllXCW3oQVrY2ue7/U8C/ss1NnlpLmY7DUZQwNDfrv8G5uiNtHfub94n9MjRAcft2AY9S7kJsHhz5p/AlnxIl+s89DKTW1cg5mTlc2e+D0cSz6m/7myEyk/+hW7rO0Y7DwAsxejYNpPwjMfMF28prTLlb2cb0uvYXEuHFgiCuGm/wwGhkJqpaWIOyoMwe4VUkgd+1eFgzV6fbIshKTz0uDe7xuOpNh3hns+hGsHdUtkFWTUyXXWC1kWiyrXXo0/Vhsd+8GM1WIBt2q
iKBQDiD0gttX2FmoImA5lhSIlSA9Ky0s5e/0sEhJnrp9RRNZvAwxb8NyajHBdcUPNdtubuMa8it+1w8tvAYtqD5Yk6XGEIUmbNm0IDQ29iUu3HAPMBrA9fju/7fiN9sbt6+wvl8v5OvFrOpt0ZprpNP6v/P948683ud+hrpZbWF4YiXmJjDEbw759++rsB7DFlgGWA/ju/HcYpBrQ3bx7s78mDa5t7sIj+nsKlvXhou8C8i0rJBVkmd5hS1Fbdubk1WKICdV5DpXlGHqa7ET160Oc6LWMMiP91gEBV45iZNKOkzr+DrXZkyMMl/TL6YRGVs1nmu00fkz7kVNppxhmPQw5WiY0umq/ncqOw6nXeBSJ2EN/cC2x6mtmVJJNdmYktHUkIzKDYnUxABEFSRhv/Z48q8415iCpy+kd9gZqi46UGtlguettjua5UW5YV8y5U8yvdCwv4YviPGxVtiwoNKBgz4eE5XYAqcqY9zH2YXvUdgYUDcBAasm1oXby8vIIDQ2l2/m1WJm05fMzm1GhwibVgWwLd+QLuzhjPFzn8e0R34nQpNAWn2ugaaDohILMWPOxhIaG0i55Jz7AsSQ1hVlF+LYZgMP+JRwr9qTEpPmMVc11wjKsyK+4V1lnq5mZk8OvDi68ufdNXmr/EipJ1eC5PKK+J8nYkAzKcCnrSuj+qkp442IH+iERu+0TYjtOw05lx+bwzbRNbqv1XJr371awO3s3KaUplMglGOXHorIyIN2gALO/F7DI+R48z6/jpHE/cq29Gj5ZI+kS8TlOBqYcTrelvOL1SvQl0Nobi43/4URCOY7px/CM/otIz8dIvJINV0IbPrHcHt82A3Dcs5jTmdbkWnthWpiKY/oxHNOPYZN9kRJjO44Ff4m6mpRSQ5gVJBJclMXlfCtSdLw/TXnv7Lu+jN/5xeR9MZKz3d/CO+Jz7AwtOZJui1rbuWSZYFMnCvd9SXhW3edXba4WXaW4vJhA80DOFJzh152/4maiRycthRajJQ3DhtD425ucdS/LciO0UkCW5W+AbwC8vb3lkJCQpl66RQksCiR0XShnzc4yY9CMOvu3RG8hMy6Tt0PeZrDrYCKPR/Lb5d9YOGIhnW2qDAu1rOaTzZ/gaevJvNHz6jUC+peLgpTfsn5jwoAJdLLp1BIvDQiB6PFYrH+c3mdeEh6XoEdEEcK+eJj4JSGBQxs8y8mCeIJOv8LAnI0w5Vv9Ln36BrgHo+/7vvPgThyLHBk/fHytVxCC6WlTrmRe4aMhH2GkqumV89vlR0pBClIbb9yNMnCvfr3wNXx20QgDJKYOn0phWSGfrv6US8YmPGp6DUJm15zE2dVQmCQ8JLYd4OvBDOIEhPy35riSAjj2CGVdRhMtxzLaYzRtArrC+scIcS4R1YIVFMUUsWD/Aqy7WtOrnf7ehd1xu1l2chnLhi7Dw7ae/toNEBoaSsiggXDkIqW+EzhbdJahHYYyduhYKD0EJ74nZNAAEXprZWzSbHho+0PYmdgxb9Q88V5vXgemtgSPfkCEDQM6wud96F+4G+7+ovkuvvZHsGxH7zEPVYUnS4PhzCsstAvghYwj3HC+wTTvafWfpzCTvCO7+D9XD8wMSnny7ierQtUaEr7GvfAc7kO/ZN/hfeyI3cHAwQMxNKj7iAgNDdX7O3QzJOcl8591/8HWxBYbI0tMytIwtXDAxNaBs+lniR28GM/1oQRlbIJxWxonndUQpYVwZCb4T2LQiNE19/VYA18Nom/MMqFy4D0Gr/s+wqsx1+8bCF8OJCj6E1GUl1qRGt+2GwQ9hMnJHxhscbX+6Eltzoj0Gp/hD+LTtqvWIU1770LAtwvWax5iUOzHkHEGes1m8PC7dR8iz8Js/4eE9PQW1dn1EHUuClLh1WGvMm3rNCQ3iRDfBuZYmCn+bs35nitU0pLuAo1HUJeWhHWtcbcESZLGSZL0TV7e7avVZWtqy7Qu09ges71Oro9aVvPd+e/wtPVkkMsgAOb
4z8FUZcqnp2qGgvfG7yUqK4rH/B9r0DNkrDJmachSjAyMeC70uZbVU/QYCk8dEqGZrc8JiYeDy4Qoq1/9/WQ15Fp7Q8grop2dPrpZJQWQHac7v1ALVzKv4G3nrXXf/B7zWT5seR2jEMDLzouY7BhKnSs6oFQPEUfuJNrMgg7WHTBWGWNjYoOzhTMR9i4iZFx9bHmZaLfXLkDkXbbvLsI0R7+A7FpqUWd+gcIMLgWMJ680jz7t+0C3iWDtWkefcZDrIExUJuyK0z+cvDtuNy+GvkhsTiw/X6rVxi83VTwgG0PiSSjO4ZCjKxlFGUzwnCC2u/YSYajU26M6MbBNIEPdhjKr26yq9zrhRE2dOHt3Id905tdm60yCWg1XQ0WuV/WHn5EZtO3KyKx0gpyC+Oz0Zw3KfFw4+AHT2lhxXM7juaDn6hqFIDToUs/BjWj6OfcjtzS3wTznBok5cFNyPpouN1+P/Jqtlj1Zl5jCL6N/YtXoVbQ1b8v62L+EOkLsAaGx2hjKS0Uhiy4itol0ge5a+m/bdRK5hGkXwdIJJnzWeAPFzA4mfyPCxiaWcNfb8PRpmHtY5FF3GiTuifXNsTYJYaLAz1H7Peum6DpW5EcmnhAFgLrCyBoCpgMynG+4Rd7J1JN42HjQ1aErzhbOnEprIM8wOwGW+Ih7/z+d3f8Hm5+Gq/taJue0ibSkYRhR8VvXk1jj+7/SgnOogyzLW2RZftzS8vZOQ5zVbRYqScX357+vsX1/wn6isqJ41P9RUR2JSFZ/uNvD7IrbRfj1cEBI2Xwb/i2ulq6V/VAbor1le94f/D7RWdEsOrKI6wXXyS7OpqC0oOHClMZi2RYeWCcqnC//CTH7oNfsmtIHDTHoeejQT0joZF6rf2xGtPhdW9xaB6XqUqKzouli3/gUWE9bT8rUZVxzdBfSDVlxYodaDdG7iTazpLNNlcfNx96HSybGkHWtZteA8NUir3Doq1UPnmGvC92/ve9UjVOXi6IT194cU4siiN5OvYXHrd9ckc9UrULawsiC/s792Xltp15yRxqj0NfRl7s63sWfV/+sKYK88Sn4vA/sekv0uNaHq3sBiU2F8dib2jPAZYDYrpHZqCfPsMVQq+sURkmSxPJhy3nU/1GxoSgb0i7VlQMZ/CKY28OO15tHeijlLBRmiHy62jgHIiWf4eVeL5FVnMVze59jX/y+OpXmsizzy/kfeDBxC6VGZnw/6gdm+NSNQABVWnSXNtO3fV8kpJvLM8xJhl+ni5+m5MshDEMzQzO6GFjC8RXQfQa08UZloGKCxwQOJR0itesYsHOHnW/q/2AtzIIVQ+GLvrolb878JhZVnQZr3+8/FSZ+Jap3ze2b9ProNABeS4LZf4nWnPbV0kiGvAx5KUI3Vl8SwkQ18s3kgNeH3xS490fR/s7Jt/6xjp5CS7GB6uRydTmn004T5CR0F3s69eRU6qn6tXbPrRXSYlH6dQorLCu8PfMW82/AwaXiPf5pPCztKgqb4sNava1gSxqGGiGjuySpprtKkiQrYABQCNxStd1/gscQoI15GyZ5TmJj1EZS81OBCmPv3Le4WLrUEQF+qNtD2Jvas+zUMmRZ5mjyUc7fOM9s/9law0G66O/cn/k95rM9ZjvD1g5j4OqBBP8aTI9VPej+U3eCfwlm+NrhTNw4kQe3PciTu57k5f0vN01/ysAABj4Lj/wlqp37PNHI41Uw6Wvx7/WPCw+bLiqlavQzDGOyYyhVl+r0GNZHFzthTEZaVDjFNUZZ0mlKC24QJxfXCMX62PtwrTiTAlW16uTyUiGg7dwDulR7r207iI4wZ36p8qpd2iKS5Ps/zfGU43jZeVUVZvR8CExs6hRHjOw4krSCNM6l16/uVN0o/GrEVzzi9wiFZYVsubpFDCjJFx4baxdxk1sxrN6OL5VE7yHTJZDQ5MOM7TwWI4MKb5yNm/DC1FOZ3GKc/B6WdtNaSV5J4ilABrdahqGpjTDgYw/A6VXCq1G
S3/QbvEYHrnNI3X3tA6HgBl0NrVjQewGRmZHM3zOfEWtH8P7x97l44yLZxdk8s/cZ3ju5hIEFhaztt5gebXvovp6tm6iwvrgZGxMb/Bz9bk7PcPdboC4V3t+9ixser4XTaafp3qY7hgeXiMVQNe3UiZ4TUctqtlz7C4a/Kbx3Z/XQdyzJh1+nQdpl0UFm9f11vXK5qRC9WxTb1WdkBc6Atj5Nem0N4j5IVP0e/Fg/r2FJgbgfuDRT4YkufMcL2R19CLhP3AuSdYuFRGRGkF+aT1CbADi1ip6O3blRdIO43Djd59UU/sQ3bDqUqksZv3E8H4R9oN+cG4EsyzfnMInaJT7XD28TUmyuvUWThZUjxEK7iQuq5qDFDENZlqOBHUAnqopENLwFWAA/ybLcfDoP+s3rH+ExBHjY72HUspqfLv4EwKm0U5y9fpZZ3WbVMfYsjCx4POBxwlLCOJx0mG/PfUtbs7ZM8JjQ6Os+5v8Ynwz9hNeDX+el3i/xbM9nmRs4l0f9HmWy12T6O/fH3cYdU0NTsoqyOJR0iMd3Pk5UZlSD507OS64b+nLrLaqdLZqQuG/XEcYsETeJg0t1j0uPBCTR01MPIjKEw7sphqG7jTsqSUWkulho3WkMw6idXDMyohy5jmEoI3PFPViI98qyMPyy4sTqvHaYatALwhDZ+V8x9vBysO9MiddITqedrll9bmIFvR6GixuF8VjBELchGBoY1ludXNsotDK2ws/RD18HX9ZErBGr+tiDIrQ0fjnM+B3yr8M3Q4XGnA5DXVWWDwkn2Na2A2XqMsZ7VMvhlCTxcGsNj+HVUCGAfmmz7jGVOnFaOkv0fBjadIXN/4GPu8E7zvC2E3zkDV8OEO+XviH36D0i38zKqe4+5wqNuqTTzPSdye5pu/l02KcEOQXxe8TvTN86neFrh3Mg8QAvFRqw3NANW6/Rdc9TG98JwmOdFcdAl4GEp4fXDSdf3IRFXgPe+YQTQual33zo87jwiKQ0LiydV5JHZFYkPSw7wqlVIppgV9X7t4N1B3o59WJD5AZk34nib7J3cb39ySkrEa3qEsJEXvKkr0WrzU1zaxrw59ZUdOPR4V29VYQsFMbrqZ8aHpt8RmjMNkXYuqUIuFdocNajpKCRqemZEgWb5xN0Q6RO6ZStSb0IqeeFFm1mrDDi6+FY8jFS8lNYE7Gm2SWYlpxYwsRNE8kvbaIJc+UvsGgrol7dJsF9v8CCSBj7MXL6FbjQgkLoDdDYXskTJUn6QZKkH4CFFZuDNdsqtldnLpAGLJckaaMkSe9KkrQH0UHlCvDazU3/342blRuj3Uez9spasoqyWHluJfam9qIllxamdZmGi6ULbx5+k+Mpx5nVbRbGKuNGX9dAMmBYh2FM95nOTN+ZPOr/KE91f4qnez7Ny31e5v8G/B8fD/2Yb+/6ltVjV/P72N8xVZnyxK4nSM7T3cf1r9i/GLthLE/tfErvji16ETBNaGft/1C3tyc9UnjbjLTkV2nhSuYVjA2Mm1SEY6wypqN1RyJzoqF9QJVkTeROop2EoVndMOzqIBLFL7f3gZwE0Wlhf4W+nOeIuhcwsxOhy6idwquYeBL6zSP8xgWKyovo065PzfHBT4KkgqNfVm6yNramb/u+7Ly2U2vY5q/Yv+oYhRqmdZlGVFaUyAGL2l0hwNxfFLjMPSrCknvehpUj4XrdTBG7zHMgl7Op7AZd7bvibV/L+HbtJUL/9XnuWgKNl7I+TcmE49DGu6q9Y3VUhvDINlEoNG65SJPo+yR0uUvkzx7+FD7rBd+OFHqdRTpyA0sKhMHioaMIy6mbkGpJFvmMRgZGhLiFsDRkKXun7eWNvm9wd6e7+dn7UWamxCINfE6/HDjfCgP90hYe9H0QRzNHXjv4GsXlFekB0XthzUN0P/u6bv1AtVqEwyzbiVSPIS+Jv9Xfr+rvPS0rITxGpDkEXj0i0ksGv1hn2CSvScTlxnEy7RSM/J/Q3PvzBe1C8Op
yWD9HeGnGfSLyb7tNhBGLROeX0HfFOFkWYWSXoMa3/GxuOg0S36uDSxv2GmoWUs0lVdMcmNmJ8HP4Wp2f9ZOpJ3G1dKXdyVUAuJ/9AzsTu0qDsQ7n1oh72cgKmaL4+iWb/or5CwsjC1QGKr4882W9YxuDLMtsj93OtZxrTdMULS8VXmmvu2p6pU1tCO/Yi56dOrDo0g+kF96C7j5aaKzHMBCYVfGjSVzrVG1bjYzUCq9hEPA9EAy8AHgAy4F+siw30G6j+fmnhJI1POb/GIVlhbx15C0OJB7gga4PaE8eR3RNmBc4j7SCNGxNbJnaRWv3wGbHxdKFL0d+SWFpIY/vfJzMoswa+zUh8AX7FtDGvA3h6eFsiNzQvJMY9LzwXJ1fp31/+hW98wsBLmdcxsPWo1Fh+Op42XkRmVnRszT5jNA5SzzJVceOSEh0su5UOdbJ3AkbExshdK0yFuK32fE1cwtr03sO2HQQDzRzB+h+P8dTjmMgGRDUrpY3y9oZ/O8VnpdqxtbIjiNJzEvkUsalym0X0i/w2I7HWLBvgVajEGC0+2isjKz4PeJ38aDtNBCMTMVOc3uh5zb1O5Ef+fUg0WKwWts4u8wzRJjbcCkvrqropDqVeYa3MJycnSj0CK3ai3Cwtm4PsiwewPU9fM0rescGzRJpEiP/B+M/hYc2wvOXxAOtKBu2PA0fdYH1T4g2atXb6l07LD7LugxDI1No2xWSTtfZZWNiwzTvaSwe8DbdTq8Few9RuKQP9p3ByR8ubsba2Jr/9f8fV7Ov8vmZz8WDfdN8sHNHktUid1CbUP25NaJAYcQi4a02s4OQV0UOccR23dc+8R18PQQ+9IK323B6+zMYyDLdow/BwOdETnItRnQYgYWRBRuiNojQa/CTwlO5zF94bTXeWVkWRW4XN4oij54PVZ1kwLPQ40GxwDq7WoQ+0y60vrcQxHdf4zU8var+sQlhItfSwvGmLlmqLuVw4mEWHV7E+I3juXDjJovAes+G0nytuYayLHMy9SRBZu3EvaLLaKTrl+lh6aa9AEWthnPrRN6t53ARjanHMCwpL2FP3B6GdxjODJ8ZbL26leis6Ma/huJc4e2vdu+MyIwgrSANDxsP/rjyB4cSG1lkFX9MfH+61M3/35ewj3IJNpHL2PVjWHluZdXi7BbR2JZ4i2RZlur70XJMgizLs2VZbi/LsrEsyx1lWX5GluVb7A6onM8/JpQMwrM0vMNwdsXtwtzQnOne9XcdGdN5DCM6jODpnk/r7DnbEnSx68Knwz8lOT+ZebvnVVY1l5aX8t/D/+WTU59wj/s9bJq4iZ5te7Ls1LIGW/k1inb+4qF25te6+9RquBGld0WyLMuiIrm2J6sReNl6kZiXSH47PygtEJXEyESbmOJq5YqpoWnlWEmSRAFKdrRoKZUdL8ILnXUYBiCMg+FviH/3ngPG5hxLPoavvS/WxlpEdvvPFzfoamGpoW5DUUkqdl3bRWx2LC+EvsB9f97HlYwrvNz7Zb6/+/s6RiGAuZE54zzGsfPaDjKyYrR7Nf2mwNxjIkdO02KwIpRtn3Gaze07Y2hgyD3u99Q91rmH0F28leFkzbU0xT0XN9Udk3FVyGS49qm7Tx+snGDA0zDvGDy2W1S8RmwTf5vlgRD6vkgfiN4jFggd+us+V/tAUQGtywsXs08sSAY8LXJx9cV3vHho5SQzwGUAU7ym8OOFHznz5zxhOE/5lgvdFgqP7tqHa6YLFOeJB6hLUM3uSL0eEZWyO16r27lHlsUxWyu8mt6jIeRVTrv6423phsW8MJ05beZG5ozqNIqd13aKYqjR78P8E6IlXfga+Kw3rH4AtjwjwtmDXhRFHtWRJBjzsfDObZov5mhgpLc6QovjPljcCw4srb+w6yaErUvLS9mfsJ83Dr1ByO8hPLHrCaGIkRPPlugtTZx4BS5B4rMatrLOZ/Vq9lWyirMIuh4jCn2mfAvmDvTMuk58bjzXC2rl2MUfE8o
S/vcKL7Jzj3oNw8NJh8ktzWVUp1HM9puNuZG5WOQ0ltD3hBh5tSro/QlCB/SLEV/gYePBm4ffFClS5aXw8xSIbKBK/srf4nOmZfF3Ju0MXa06sT4xmd5mziw7tYwJGyfwd2ydbsAtxq1Xt1VoNHP8Rd/gad7TsDHRpf4jMJAM+Hjox9zb5d5bMbUaBDkF8cHgD7hw4wLPhz7PjcIbPLXrKTZEbeCp7k/x3qD3MFGZ8Grwq+SW5LL81PLmnUDgDJEjlXa55vbcJGGc6ekxTC9MJ6Moo0n5hRo87UQHlGirihX88RVgZk90cQYeNnXzHLvadyUqM4pS/wovr7bcwtr4TRXdKwY+S0FpAeHp4UKmRhtO3USnjmrSPnamdvRq14vfLv/GxE0TOZB4gCe7P8m2ydt40PfBetMQ7u1yL6XqMjZaWYjVuzasnETnhAmfiwT0LwdA6PsYFqWwVSogxDVEe69jE0uRX3erDUOVCfhPgzY+QjpI2xi4+TwuSRIP8XHL4IUImLxCSKCEvgvLAkTbtw79wLiehZ1zD1G1nKUjSf/gMlHEE6BFbqU+fCcAMlwWXSsW9F5AO2MbXs8Mo7DfPHDtRZadv5Brid5D2faX+PzM5wT/Ekzf3wcxxF7F3dZqxm+eyLQt0/jxwo+iOv7ud4RhHVato1N5qfCOH1oGvR4VxvL45ZQNfoHw4usEug1q8Ds72WsyhWWFVQ9NBw8YuxSePQ+DFyDHHiT97Cro/Zgw+rVhaAzTV4n3IGa/SIloaqVxc1PpNUzSnWuYnSi8ik34XOaU5PDAtgeYt3seu67tYojrED4Z+gn7pu8j2DmYg4kHb/IFAL0fFZ2b4mpWuWvCxUHx5yD4cfG97zmLnvEiReJkWq1w8rm1Im3FZ4z4f4dgsTjSkVf6V+xf2JjY0Ne5L3amdsz0ncnOazu5eKOBvvTVSbtUlYITUyUKvz9hP90cuuFs6cziQYu5UXiD949XpPVE7RIdeepLnbjyt6hKr9W6sFRdSvj1cHq49MfdvD2fFpmw4q4VWBhZ8OK+uukULcUdZxj+00LJAN0cu/HLPb8wv8f81p5KgwzrMIw3+77JoaRDjF4/mpNpJ3ln4DvMDZxbKa/jbe/NDJ8ZrL2ytmnVzLrwv1fkn5yt5TVsqEdyLSIyIyrn2VS62FZUJqvzRY5VSR6lHkOJzb1GZ9vOdcZ723tToi4hxrmbeKi5D2r4IgYG4kFuZMaZtDOUqcvqtD2sgf+9IkxWTSNwoudEisqLmO49nW2TtzEvcB6Wxg170z3tPAmSLFhrY4favu7rqUSSRKhu7mFhzIS+wyEzMzLKC7WHkTW49hI3WXUz5qLWR8IJcA6s6vV67bCQXKlO/HHR67tNM+rEGZuLHNlZm+HZcKHNae8hvF714Rwofidr0U2M3iPkgIKfrArx60sbb+Hdq/CYWpSV8L+061wzMmK5dTVDtedDxPd5lFmJW/nq7Ff0d+zOpOxshps608tlAF62XuSX5rPi3ApRuek1QniWQ98XMh3FeSIcHb5aGGxjllR6NiMyIygsK6y/iroCf0d/PGw8WB9VKy/Usg3lIQtZPOhhhnZwZXXnXvUvtMzs4P7fwa0v9H+6cX+zlsZ9iJjXwY+1ew2bmF9YUFrA/N3zicyK5J2B77Bv+j7eGfQOwzoMw9TQlEEug7iWc434nJss2vCbIpQRwlbW2Hwi9QRtJGPcJOOq8H7vR/EpKcNMUtUsQCkvFYs1n3uEAQnib6Iu1ZpSUVRWxN64vYzoMKJS8eAh34ewNrbms9N6tq+UZdi2QBhvPmNFiom6nMyiTMKvhzPYVUgZdXPoxmP+j7E5ejN7z1WE/JPP6F7YZsSIFpRedcPIERkRFJUXEejUQ1wzeg997f1YM3YNIa4h+s27GbjjDMN/WihZQ0CbAEwa0R6pNZnSZQovWJdqAAAgAElEQVTPBz2PrYktK0auYJzHuDpj5gbOxcHMgbePvt18hSiWbcFrpPC
KVdet0uQa6SlVo6lI1sjONAUXKxfMDM2IzIqqrGCNdwuiTF1W2U+5Ol3tKwpQMiOEdEgjOZZyDEMDQwLbBuoe1G2SMJw1cg/A2M5jOf7AcV4JfgVHs0bkJ5WVMP1GKgkqOJKsh+KUbQd4aDPc8xGr23rW1C7UhmtvITCcrqfMaXlZ00Viy0rEjVzjcfGbDMgiJ606CWHgGtS40GxjsO0AIS8LIzqgAY+/k58IRdV+KF6PgDUPQ1tfURHcFHzHiwKo/HTYvpDgzFTucxvBzxG/EZYShizLbIzayNTMQ8SYmvFh2g0+jrnIyzlFvDl2FYsHLmZJyBKeDXqW7OJszqRVGK93LYaSPPhrIfw4VlSBj/8UBi+oYbSdThWvSR/DUJIkJnlNIvx6eI38scKyQp4LfY7fo9bjaunKu2HvcSDhQP0nc/Dg+oyfUd+iAo5SdSnrrqxrOKVG4zXMSRQh99oeMo2328lf72uXlJfwXOhznL1+lvcHvc84j3F1IgQDXQYCcCCxgb9bQxhbiLSJi5sqJVhkWeZkShhBedlIgfcLwxzAxhUjnzEEFBVzKqVajnHUbuEh96/W5cetIjqiJZx8IPEABWUFNTR8rYytmO03mwOJB6o+k/VxYYMwBoe/Ie6dRdmQfJaDiQeRkSsNQ4AnAp7Ax96Ht64fJKttV2EIH/tK+3kjd4jfWvILNaLuPdr0EKLi5cUQtQuVgQpfhwa0I5uRO84wVLg1POL3CDum7tDZcs3K2IoXer3A+RvnWR/ZjGX53WeIsMrVvVXb0q+IL6qWBHZtRGRG0M6iXYNh+/owkAzwsPEQEj4d+oGBEVftRGsobR7DTtadMFWZcjnjcp19+nA8+TgBjgH155VaOIrE7XN/1AhzVGoINoaE4wzPzsDe0EIUoeiDgQGZAVM5apBXU7tQGxojLVGPApQb0fDd3UIk9mgTKg9TzwnBXI1B4OglHrLVq5NL8oWn9XaRAzE0qShAqfaAy0+HXyryr+7/vcqz0li6jhd5llueER69wS/y3KDFuFq68sahN/gu/TveOPQGvg6+rB+3nlGWnUXRxuAXarQ/6+/cHyMDI0LjQ8WGtj4irHhujUj3mPFbzUKQCk6nnaa9RXvaWbTTa7pjO4/FUDJkY5Qw5DOLMnlsx2OExofySp9XWDd+Hd523ry478XKRV9t1LKar85+xbC1w3hy55O3pBp0Z+xOFh1ZxKN/P0pGUQMp951DhEzVmV+EVmj1dJnq3m49KFOXsfDAQg4nHWZRv0Xc1ekureM6WnfEzcqtecLJvWYL794Z0TUpIS+BtMJ0ggoLhWe7OsFPEFSQx5WsyCpps3NrhfFYXfDdwlHI1sTVNQz/ivkLe1N7erer+X2d4TMDB1MHlp9eXr+IdnEe/P2a6DoV9IjIQQWI2c+BhAM4mDrUMNSMVEa83fsVsilnsaM99JwpDOHaUQcQYWQHT63SaafTTuNi6YKThZN4bpg7CJ1awNXKVfd8mxnFMFRoNca4jyHIKYhlp5aRVZTVPCf1Hi16aFb0DQWEx9DRS++2VVcydLfCawxedl5EZkUKPbcn9hNdKFbL7tbudcaqDFR0sevSJMMwpySHixkXCW5fTxhZg/+9IoG7AZmHBkO4UbswNjBkktck9iXsIyVfi0RI7VPKar4O/5pyymtqF2rDwVOE4OvLM5RlIQj71UC4ESlyAw99IuReGoOm+rm60ec3SUjTZFWE0ZJO3346cc49KuYlCzmT1fdDXqrI67Tt0PTztvMXFa6Xt4p/D3oRcyNz3h74Nkl5SYQXhPNsz2f59q5vaWfvAQ+sFdW+/WqmulgYWdCnfR/2xu+tegiHvCLE7B/eqtVjIssyZ9LO1O/5roWDmQND3IawOXozsdmxPLT9ISIyIlgaspT7u96PuZE5nw77FEtjS+btFqoN1ckvzeeF0Bf4/Mzn9Gvfj1Npp5iyeUrjK00bya64XVgZWxGbE8ujfz9avzEqSULI+8F1omL
+mxCRc1heKrzdegpbq2U1iw4vYue1nbzU+yUmeU2qd/xAl4GEpYTdfFVsWx/oOFB8X9VqTiUfB0Reep080o4D6GnaHhk4k3pGGGkR24TXrrbx69ZX3MuqGXkFpQXsT9jPyI4j66hKmBuZMydgDmEpYRxLqeceeOAjkdepSXGwcoI2PpRd3cfBpIMMdBlYp82sd1YKczOz+aswng/MJTKQ62o4FucJL2T1pgUVyLLM6bTTVZ99A5V4nkXugLIS3KwaH0lqKnecYfhPzDH8tyJJEq8Fv0ZeSR6fnP6keU5qaCJyWi5vrZLT0BiGelBcXkxsTuxNhZE1eNl5kVGUQXp5ATj5Ep0djYuli06vnre9N5czLte/ktXCyZSTqGV1Xf1CbfjcIxK46+szWl4G348SUiq65hK1G9yCmdr1AWRZZk1E/f2q80ryeGbvM/xy6Rf6W/ZvOH/TwKBC6FqHxzDvOvw2A7Y+Kzx9Tx2BMUshP61xbcRAGJ9W7UX3Fg3dJovfmiKU5io8aU6cA6EoS1R7b5onHpCTvhbh7ptBksR3yMAIJn5Z+TAOcgris+GfsaD9Ah71fxSVJqRu3V5U+2ppZznUdShxuXHE5FToHprbCzF7HeHapPwk0grT9AojV2eS5yQyijKYumUqGUUZrLhrBSM6VlXLO1k48fnwz8kpyWH+7vmVqglxOXE8uO1B9sTvYUGvBXw98mtWj1mNvak9T+56kiUnllBaXqrrsk2msKyQg4kHucf9Hj4f/jmJeYnM/nt2HaO1Dp4jRJ95tz5CkuenCTW93fUgyzIbMjewKXoTc7vPZaZvA3msCMOwqLyIEyl6eO4botcjou1n9G5ORmzAprwcj75a8jkliYCgORjKMqeitgijsLRALGpr49ZHhJhvVDVX2Jewj6LyIp2tYO/tci/tLNqx9MRStl3dxs5rOwmND+VQ4iHCUsIoSAkXnaICH6gKVwO4D+Zs6glyS3JrhJEruRrKI/mlTOg8jp+vbmZUR1c+uryK9Nxqfe1j9gkpKq+6XtqEvATSC9Pp2bZn1UafcSKlJma/Yhi2JP/UHMN/K17/z959x9d0/w8cf32yE9kEESOoERV7xwhq1ipKjdaoUdRX+X19qQ460KI1ql+0Vbp0fJWqVu3E3puaFWLWSEhSZH5+f5zcNNe9mRJJmvfz8cjjyjmfc87n3uPevO9nvD9elegb0Jcfz/xo5P3LQJJO4qdzPxGdGJ12oVp9jQ/LEz8ZOaiir5oFhvfi7xnjRKwEPefunCNRJz7SxBOTSl6VUs4JcP7OeSp4pD1Ro6p3VaLjorn619UsXWfv9b042TpRw6dGxoUd3YxvoSdWGq0N1hz8wggyjn5nfRWQ6D/h+lF4ojWl3UoTXCaYT499ytiQsVbzhIXdDaPvmr5su7yNiQ0m8px3JmfKlq5vLHX241CjW3Ptq7DpbWOd6AWNjUkW7abB86vAw8+Y5effzJiRm5llxExMuQlTtyh7lzda5EyrD1zeb0wKyS+zVcGoHxjLQR5fDq0nG0mbc0Lz8fCvg0aLYerNpZtT2iHzXVotyrQA+Ls7OQOmCQdZDQyD/ILwLeJLUaeifNXxK6vHV/WuyqwWszgdeZoJ2yaw7fI2nvv1OW7ev8nCpxbywpMvoJTiCa8n+Pbpb+lVuRdLTyzlhd9eePQJGA/ZcWUH9xPu81S5p2jo25AFTy3gz7/+ZNDaQRm3vruVhOdXQqs3IDx5fG8mvrCsvbCW0OhQ+gf056WaL2VYHqB+yfo42DjkTHdyQBcj0fu+xRy4dYw62gGbitYzGjjX7Ee1+EQOXd5mjBn3KGO0Dj6sbPK28L/HOa8NW0tx5+JGgKW1EUD/b5CRT/XSPhy0ZkydMZyKOMWEbRMYFzqO0ZtH89LGlxi8bjD91w0i3t7FyMWZWvnmbHGwwU7Z0LhUY8u6nA/BrlwT3m02jZ+6/cRTPvX4ysWO9is78d7e94ylbc+sBUd3o5v4IabxhWa
t5RWCwcEVTq3G2+nxffYUusBQ5D/DAodha2NrJKrNwI4rO3hjxxssubkk7UkrfnWNiSZHvv37m2SqGclv7XqLERtHWG3lOhNhTHbIia5k0ySTs5FnSUxKJOxumNmKJw9LmYByO2vdyXuu76F28dqZX+WmRi+4d9sY/P+wB3eNwKtsY2N8zZrxRu6+1P7YbDwmf6i/1+w9RtYaya5ru3hm1TO8tv01LkdfBoyAoO+vfbkbe5dP235Kv4B+KbPTM1StizHJ4vJeIznywa+MmZlb3jf+OA4LhcajzFcOaDEBYq5nvtUw5qbR4mYtN+GT3Y2u2ojzycFjPmotBGOCiY298frU6m8kgs4p9k6P1h2drGSRkgR4B2Q6MDx84zBF7ItQyTPzyegB7GzsWPb0MlZ2XZnul6/mpZszof4EQi+FMnLTSHyL+PLd099Z/KF3snPijcZv8GHwh1yMvki/Nf1ybrgLsOHiBjwdPalXwmjpq1uiLovaLCLiQQQD1w7kSsyV9E9gY2usBjN4rdHd6ZF+sJ6kk1h0ZBG+9r6Mrz8+0+9BZztn6pesnzOBoZ0D1H6eG3+sJ9wmibqlm6Y9vMfBhdpeVTiWGEPs+c3JLdhWwpWilYyxh8lDY6Ljotl2ZRtt/dsaLdqnfzO63MO2GvlUFz8F00vTafMcNnk3Z1XJjiwv053vKvThq8qDebVYY84Sx7KaHSzHpPs3ZZuLM3UcfCzzu969YoxlT85LWMGjAtM6LGb1X050iLfh+1Pf0/3n7tw9u94oY2U86KEbh3CzdzOfnGjvZEyoPLXGSCz/mEhgKPKcp5MnLcu05NfzvxKflH63zcpzK7GzseNs7Fm+PJFGXi+ljJyG4buMgb6QEhhuvLiRNWFr8HT0ZNb+WYTdNV/a63TkaZztnHOk2b6YczG8nbw5G3mWyzGXiUuKSzcwfMLrCWyUjdlKJBm5ff82ZyPPpp2/0JqKrY1xmNa6k7d9YASN7acbM0b/ugXr3zAv88cm45t/SaOF0sXehRE1R7C2+1oGPjmQdRfW0fmnzozcOJLRm0dT1r0s3z39ncVA8AyVeBJe2gZjjsC/z8Cky/BmBLx+A4ZvgxJWZun5N01eRmx25loNr1gZX2jyZPL4qx3zjLF7+Wm5MTC6bv2DjETonWZnegzt49ayTEsO3zic8QQL4NDNQ9T0qfl3N3UWFHMulqmk/n0D+jKq1ih6Vu7JVx2+SndQf5tybVjSbglRcVHMP5zJNCcZiEuMY+vlrbQs09JsDFyt4rX4tO2nRMVFMWz9MO4npLPus0mZBkaOxgzu/abwTfxx9w/aebSzGBuXkaZ+TbkQdSFn1hquO5CDTsaQg3o1B6VbtE6154hXiuP2dsaXWWtsbIwvdcmBYcilEOKT4o1u5KQk40uuV3kjX+i4U8ZylY1GgK0DPsd/psKuRVTZOocnN71PrXVT6LPve1ok2PJx5CGLlturifc552BP8/tWPldMX7JTL0qgFGUbjOCdS+dYWns8UXFRrFZ/WR1fCMaXoprFa1ren6qdjCEyjzGvqwSGIl/oWrErEQ8i2H457W+mEQ8iCLkUQp+qfajhXIO5h+amOcvQSOyrjLEiyha8yhPxIIJ3dr9DgHcAP3T6AUc7R17d9qpZMHo64jSVPCtl6w+TNZU8K3HuzrmULlZrya1NnO2cKe9ePu3nZMW+P40Pi3TzFz7MzsHocjz5izHb1iQizJjVW7OP0U1ZqpaxYsqhr4xUMGB82P6x2Zgd+NA3eE8nT8bVG8ea7mvo/kR3dl3dRZeKXfii/Rf4uvqSI5QyAqK0/hAqZaR8ycwyYmB82NrYgW9Ny32eZYw/OqbWxzJZCL4fl/4rof+KTM9IzQvBZYLR6JTVItISFRfFuchzWZp4kl0v1XyJyY0nZyqQrOJdheeqPsf/zvwv21kDUtt9bTcx8TFmYyBNqherzpzgOYRHh7PgSM6s7au15pOjn+Dv7k9tl6x10cPfaWtypNX
Qqxz7KzTGxcaBKsWtvOdSqVPBCKAOFi9vfElMS9mGRmvdvQjWhq3Ft4gvNX1qGuPM/zxmpPqxtTPGwgZ0NpapHLQGXg03vmi+etkIGkftQw3dzMQu35Ckk5i5b6bZZUz/f5tfO2s5we18qPFlufhDX1Zr9AZHD2qe2kCgQ1GWu7miK1re97uxdzl355z1IRSV2horIZ18xFVosqDQBYYy+SR/auLXhKJORVn1h5WlyJKt/mM1CUkJdH+iO32K9sHT0ZOJ2yZanzHn4QcVWkBctLGigZ0DU3dPJSouiqlNp+Lr6suUxlM4cfsEC48Y+aa01pyOPE1l70efeGLyhNcTnLtzLmWcobVUNalVLVo1Sy2Ge6/txdXelYCiAVmrWOCzxhJ5qdev3TjZCJJap2ohDH7VWEN39Rjjw/DaYaNF0doyeMmKuxTnjcZvsKvvLqY2nWq2/N9jkVFC4NQu7zO6q9NaZaR6dyN1i52zsRpLfmNjY72LLR+p6l2VEi4lMuxOPnLjCBqd5fGFj8OImiPwcPBg+p7pWZ4c9rANFzfgau9KI18rY+aABr4N6FGpB1+e+DJrq3SkYevlrZyKOMWQwCFZbi0EI21NadfS6QaGmX1N4hPj2c49apesn+E69J5OnlT0qMAevwzed8ljD++GhbLr6i7a+bdDaW2sJFS0krFCVFpsbIxx1+6+4FMZ/OpS2udJhgYOZf3F9ey8sjOl6NbLWynjVAz/uAdwKVXuVq2NwLBCsOV70dHVSF1z8md63LnDHw72HLlvOYb8yM0jQBpja53cjc80CQxzj0w+yZ/sbezpVKETWy5tsdrlpLVm5dmV1ChWgye8nsDV1pV3g97l3J1zzDkwx/pJa/Y1HotVZm3YWtZfXM+oWqNSJoU8Ve4pulbsymfHPuPwjcNc/+s60XHRVPWqmmPPq5JnJe4n3Gfr5a2ULFKSIvZF0i1f1asqf977M9N51PZe30u9EvUy/JC1ULaJMQvXlOz64k4j71bQGHAv9Xc5e2foPNdY5D50ujEbGdJfxznZYw8ITZQy1teNugKHvk67XFIiXDmY/tjBal0BBX51jFYHkWVKKYLLBLPz6k4eJKTdvX/oxiFslS01imViEtVj5uHowb/q/IuDNw7yW9hvGR+QhoSkBEIuhdCiTIt0xwSPrTsWLycvpuycYqwck01aaxYdXYSfqx8dK1hZlzwTlFI0K92Mvdf2Wv0SfjXmKl1+6pL20J5Uvj/9PVdirtA3oG+mrt2xwtPs+XMf/zuTThaFUrXBxo5fzq4gQSfQ3r+9kZz+xu9/txZm0aDqgyjnXo6pe6YSmxjL/YT77L2+l+ZlW6Fs7MyWx+PPE0ZXb4Vg6yerPwSSEulw4wIuys7qczl04xB2yo7qxapbP0dAZ2NG92NS6AJDkX91eaILCTrB6gfv0VtH+ePuH2Z5t4L8guhTtQ9fn/yaXVd3WRxDQGdwKcqtElV5d8+7BBYLZOCTA82KTGwwEd8ivry67VUO3jBmRObEjGQTUxB65OaRdLuRTRqXaoytsuXtXW9nuCLMkZtHuBh1Metj98D4Zlu9B5zbYCxRtvZVcCtlpB15WPnmRiLiXfPh0JfgWwtcfbJ+zcepYisj4Ns+21jZxJqbp4yVONILDN1LGUHmw0l4RZa0LNMy5Y9rWg7fPEwV7yqZ6t7NC8888QwB3gF8cOCDlHQ3WbX/z/3cjb1Lm7Jt0i3n4ejBqw1e5WTESb763fqQCK013576lpEbRxozXq3YdW0Xx24d48XAF7OXyD6ZKW3Ngevm6xf/Ff8XL29+mQtRF5hzcI7FmO3U7sbeZeHRhTTybUQzv0ws+Qm8WP1FgkoFMW3PtLRXK3FwYWepAGbdOULdEnWp5lUFQt8z8po+mX6exrQ42DowqcEkwqPDWXJ8SUoux+ZlWxufF6kDw5TxhcHWT+ZdHqp0wEVrOvo1Y/2F9X8n7k528M+DBBQNwNnO2fo5qnSEbLT2ZpcEhiL
fqOxVmWpFq7HqnGV38sqzK3G2cza+DaYytu5YKnhU4PXtr1suLeXggn55P29zm/vx93k36F2LljVXB1emN5vO1b+uMnXPVODvYC4npJ5sklE3MhhB6b/r/ZuQSyHpjjG6GHWR0ZtG4+fqZ3XJwUwJfBaSEmD5QKOL+KnJxvJV1rR5B4oUhzvh8IT1FBP5ilLQYiLcvWS5drZJZteYbTnJmCEtsq1+yfq42LkQcinE6v74pHiO3TyWL7uRTWxtbJnUcBI37t3gs2OfZescGy9uxNnOmSZ+TTIs26ZcG1qWacl/D//XIl3Og4QHvL7jdabtmcb2K9sZsHYA4VHhFudYdGQRJVxK0LViOuuSZ4IpbU3q5fESkxKZsHUC5++cTxky8s7ud9LsVv7k6CdExUbx73r/zvSsaFsbW95v/j4lXUoyLnQcN+/dtChz+MZhXrGPoUJ8PPOaf4A6sdJYizh44iMtX9nErwlty7Xls2Of8d2p73C2czZW8irf3MhWcD95lvr5EKPLOr2Z4U+9BcGv0rPmcB4kPuDX87+m7IpLjOPE7RPpj6119bGeOSGXSGAo8pWuFbtyMuKk2QSMe/H3+C3sN9r5t8PVwXwIgLOdM9ObTSciNoJ/bf4XC48s5IfTP7Dh4gb2X9/Psou/EXI5lNG1R6cZmNUuXpsXq79IdFw0ZdzKZNjdmxVF7Ivg52okTs5MiyFAv4B+dKnYhYVHFrLx4kaL/bfu3+KlDUYL1qI2i/By8spe5UoGQrEqxrffUrXN1yF9mLMndPrQGINYtVP2rve4PdHaSF0UMt360lSX94GztzGGUuQqB1sHgvyC2HJpi9WW8FO3T/Eg8UG+DgzBmDncuUJnlp5YmuXchkk6iU3hm2jq1zTtlqFUTAsA2NnY8dbut1ICrut/XWfA2gH8/MfPjKw5km86fsNf8X+lrPhisv/6fg7eOMig6oMyn8oqDdbS1sw5OIctl7cwocEEulTswti6Y9l3fV/K0oSpXYq6xLJTy+j2RLcs98h4OHowt9VcYuJjGBc6zizh+NnIs4zaNIpijh4suv4n7rfOwZb3jPHAAY8WDAOMrz8eG2XDtivbaOTbyHgdyzc3xh1f3GmMYb64MyVNTZp8KkPwRJ4s9iQB3gEsP7M85X7+fvt3YhNjzRNbW1Mq9ydlmUhgKPKVjuU7YmdjZzYJZd2FddxLuEf3St2tHlOtaDVebfAqZ++c5ePDH/PO7ncYFzqOQesG8d7e96jlUyvDLP8jao2gTvE6NCmV8Tf5rDK1QKaXqiY1pRRvNn6TwGKBTNo+ySzx9734e4zaNIrbD27zceuPKedeLvsVU+rvNBDtpmU8iaHq0zAx3BhvVxAoBZ3mGN3Fy541kp2ndnm/0S2UT9O8/NO0LNOSm/dvWp1QsfOqMcg/vweGAK/UfQV7G3tm7J+RpeMO3zjMrfu3eKps2hO3HlaiSAnG1h3Lnmt7WPXHKvZd30fvX3pzMeoi81rOY0StEQT6BPJFhy+ws7Fj0NpBHPjT6O5ddHQRRZ2K0qNSjyzVMy2mtDWXoy+z4uwKlp5YSu8qvelTtQ8APSr1oHbx2nxw4AOLceKzD87G3sael2u/bO3UGarsVZl3gt7h8M3DTN87HYBL0ZcYvmE4TrZOfBI8h2KJSbB2opG7tuWrOTIpq2SRkoyqNQrg79VOStcHOyfjC/WlvcaqLJkYc23Ss3JPzkSe4fit4wApXeQZzsYvlnOTIjNS6EZTK6U6A51LlSqVYVnx+KXOaTi27ljsbexZcXYF/u7+1PJJ+43Tq0ovelXpRXxiPJGxkUQ+iCTiQQTRcdHGuL0MuhTsbexZ2n5p5pMvZ0FV76psvbw1U13JJo62jswOns1zvz7Hvzb/i+86fYeLvQvjQsdxOuI081rNI9AnMOMTZaTJaCOBqrV0Ldak1dWcX/nWgGeXwrLesHwwPPetMRj9/h1jjGF6MxZFjmrm1wwbZUPIpZCUQfZnI88y9+B
ctlzeQmCxQIq7FM/gLHmvuEtxhtcczuwDsxm/xWhRik+KJz4xnrikOJztnBlWYxjVipqnLtlwcQP2NvbWl1NLR8/KPfn1/K9M3zOd2MRYyriVYW6ruWaJvCt4VOCrDl8xbMMwhm8YzpDAIey+tpv/q/t/OTYJrKlfU97f9z7zD89n3YV1NPZtzMQGE1P22ygbJjeeTM/VPZm1bxbTmk0DjIkVGy5uYGStkY90f9v5t+Pk7ZMsPr4Y3yK+rDi7grikOJa2W0ppryfAsxxcPWjkV83BXo1+Af0o5lyMNuWSx4XaORorroRtNT4Pla2RTzSTOpbvyKz9s/jx7I8E+gRy6MYhyrqVpZhzsfQP9Mm5SZEZKXQthjIrOf9LndPw/J3zHL55mO6VumcqaLO3tae4S3GqeFehcanGtPVva5mlPg25ERQCPF/teT5v9znuDu5ZOq5EkRLMDp7N9XvXGb9lPFN2TmHH1R280eiNLP9xSZOdY+aDwoKqUht4epaxGP1v4430EleNiUb5Lmn1P5inkye1i9cm9FIo12Ku8fr21+nxcw8O/nmQMXXGsLjd4ryuYqb1D+hPU7+mHLpxiGO3jnH+znmu37tOTFwMh24cos+vfXhv73vExBlp0bTWbArfRJNSTSyGw2TERtkwuclkNJpmfs1Y9vQyq6u7+Lr68kWHL6jgUYGPD3+Mp6MnvaqkMzwki0xpa349/ytl3MowK3iWxZjtip4VGVx9MKvPr2bX1V0p+QCLOxdnQLUBj1yH0bVHE1QqiHmH5nH7wW0WtF7AE17JK4WUSc7l2nJSjvYC2NnY8XSFp82748s3hxsnjCUzS9cDJ49Mn8/VwZX2/u1ZE7aGmLgYDt88nLncnbXGjrAAACAASURBVD45NykyI4WuxVDkf6lzGpZxK4Odssv+BIt8wN3Bnbol6mbr2FrFa/F6w9eZsmsKACNrjqRH5ZzpGipU6g2GyIuwY46R1zIhlpQ0NOKxaVmmJbP2z6LTSqNFZ8CTAxgSOAQPx8z/Yc0PHGwdWPCU9clhUXFRzDs4j2Unl7H+wnomNJiAn6sf1/66xshaI7N1vQoeFQjtFYqznXO6X2C9nbz5vN3nvLvnXYJKBeXoDG+lFG3KteGncz8xv9X8NL/oDqsxjHUX1vHO7ncYGjiUY7eO8U7QOzlSF9NklKl7pvJs5WfNe00aDjcmgKSxskiOKh8MvG0slxn4bJYP71G5ByvPrWThkYVEPIjI3BCKIhm0KOYgCQxFvmPKafjNyW9wdXCleenmGTez/4P1qNyDiAcRxCXF8VJNSZuSba0nG7nANrxp5HD0qZqlb/ri0bUp14alJ5YSVCqIUbVG5dyKOPmIu4M7rzd6na4Vu/LO7nf495Z/4+3kjZ2yo2WZzI9Fe1hmAytXB1fea/Zetq+TnjF1xvBSzZfSrYujrSNvNHqDIeuHMGXXFAK8A+hSMedm9Xs4ejCjuZXxnaXrPb4eAN+a4OgOsVFZGl9oUqNYDSp5VeLrk0ae1Qwnnjxmha4rWRQMppyGd2LvpDnppDAZWmMoo2qNyrXu7kLBxga6LTS6nKKuSDdyHijlWoqQXiG82/Tdf2RQmFqgTyDLnl7GhPoTeJDwgKZ+TQtcy+jDbG1sMxWgNvRtSJeKXUjSSfy73r+zteJKvmZrZ6zL7uCWrc8RpRQ9KvUgUSfi4eiBv4d/ztfxEUiLociXTDkNb927RZBf5gf2CpEueydjAsrK4dnqAhIiK+xs7OhfrT/dnuj2zwuOMvBm4zfpW7UvTxbLh0tJ5oR2U400WLbZSxzeqUInZh+YTW2f2vnu/4YEhiLf+qDFB8QmxmZ9uTch0lOkKPRfnte1EIVIViec/BM42jr+c4NCMPKfPkIOVA9HD+a3nk8JlxI5WKmcIX9xRb5V2i2dTPJCCCFEAdbIt1FeV8Gq/NV++RgopTorpT6JiYnJ66oIIYQQQuQrhS4wlDyGQgghhBDWFbrAUAghhBBCWCe
BoRBCCCGEACQwFEIIIYQQySQwFEIIIYQQQAFPV6OUehF4GSgP2ANngdla6y/ytGJCCFGIxMbGEhERQXR0NImJiXldHZEGDw8PTp48mdfVEICtrS1ubm54e3vj6OiY19UxU6ADQ+Aa8CZwBkgAOgOLlVIRWuvVeVozIYQoBGJjYwkPD8fLywt/f3/s7e1l6cZ8Kjo6Gjc3t7yuRqGntSY+Pp6oqCjCw8MpW7ZsvgoOC3RgqLVe89CmOUqpF4DmgASGQgiRyyIiIvDy8qJYsWJ5XRUhCgSlFA4ODinvmYiICHx988/a4VkaY6iU6qmU+kgptU0pFaWU0kqp7zI4prRSarFS6qpSKlYpdUEpNUcp5fVoVbe4jo1Sqg1QBQjNyXMLIYSwLjo6Gnd397yuhhAFkru7O9HR0XldDTNZbTF8HagJxACXgarpFVZKVQB2AiWAVcApoAEwBmivlArSWt/OaqUfukZZ4HfAEaM7eZTW+tdHOacQQojMSUxMxN7ePq+rIUSBZG9vn+/G5WZ1VvJYoDLgDozIRPkFGEHhv7TW3bTWE7XWrYDZGC17U1MXVkpNSW6FTO+n50PXuArUwgg4JwOzlVKts/i8hBBCZJOMKRQie/LjeydLLYZa6xDTvzN6MsmthW2BMODjh3ZPBoYBzyul/k9r/Vfy9vlAul3TwJWH6pQAnEv+9ZBSqjLwBrApg/MIIYQQQohUcnPySavkx/Va66TUO7TW0UqpHRiBYyOSgzit9S3g1iNe1wZwesRzCCGEEEIUOrkZGFZJfjybxv6zGIFhZbLZuqeUejf52AuAM9AReB5jDKO18sMwWirx8fEhNDQ0O5cV+UBMTIzcvwJK7l3B9vD98/DwyHeD54V1iYmJcq/yoQcPHuSrz8TcDAw9kh/vprHftN3zEa7hDSwGSmFMiDkNPK+1ttodrbX+BPgEoEqVKjo4OPgRLi3yUmhoKHL/Cia5dwXbw/fv5MmT/5jceP7+/gBcuHAhZdvSpUsZNGgQS5YsYeDAgXlSr5wieQzzJycnJ2rXrp3X1UiRl3kMTYMUdXZPoLUemeWLKtUZ6FyqVKnsXlYIIYQQ4h8pN9dKNrUIeqSx3/2hco+F1nq11nqYq6vr47ysEEKIAuiZZ57h5MmTPPPMM3ldFSEei9xsMTyd/Fg5jf2Vkh/P5GIdhBBCiGzz8PDAwyOt9g0h/nlys8XQlNqmrVLK7DpKKTcgCLgP7M7FOlhQSnVWSn0SExPzOC8rhBAiByxfvpzg4GA8PT1xcnIiICCAN954w2JSRXBwMEopLly4wKJFiwgMDMTJyYkSJUowdOhQ7ty5k6nrLV26FKUUS5cuNdvu7++PUoqEhASmTZtGpUqVcHR0pEyZMowfP57Y2Fir5zt37hxDhgyhXLlyODo64uPjwzPPPMPBgwctyk6ZMiXl2hs2bKBZs2a4urri4+PDoEGDUp7D/v376dixI15eXri6utKlSxezcZKp3b17lzfffJPq1avj4uKCm5sbTZs25X//+59F2bi4OObPn0+HDh1S6uvl5UXr1q359Vfr60hk93UR+UeuBYZa6z+A9YA/MOqh3W8BRYAvU+UwfCykK1kIIQqmSZMm8eyzz3Ls2DF69+7NmDFjcHZ25t133yUoKIioqCiLY/7zn/8wYcIEatasyahRo/Dz8+Ozzz6ja9euOVKnvn378tFHH9GsWTNGjBiBs7Mzs2bNYtiwYRZlN2/eTO3atfniiy+oU6cOY8aMoW3btqxdu5YmTZqwbt06q9f4+eef6dy5M76+vgwfPhw/Pz+WLl1K165d2blzJ82bN8fW1pYXX3yR2rVrs3r1ap5++mmSkswyxXH16lXq16/PO++8g5eXF8OHD6dPnz6EhYXRq1cvpkyZYlY+IiKCMWPGEBMTQ5s2bRg3bhxdu3bl4MGDdOrUiU8++SRHXheRv2SpK1kp1Q3olvxryeTHhkqppaYyWuuBqQ4ZibEk3rzk1UhOAg2Blhh
dyK9lq9ZCCCEKlV27djF9+nT8/PzYu3cvpgmE7733HgMHDuTLL7/k1Vdf5eOPzddT2Lt3L8ePH6d06dIAJCQk0KpVK7Zu3cqePXto2LDhI9UrLCyM33//HS8vLwCmTp1KzZo1+frrr3nvvffw9fUFjJa6Xr164eDgwJ49e6hWrVrKOU6ePEmDBg0YNGgQYWFhODo6ml3jl19+YevWrTRq1AgwWvLq1q3L1q1b6dixI998803KGEitNR06dGDdunWsXr3aLAB+6aWXOHfuHMuWLaNPnz4p26OioggODubtt9+mW7du1KpVCwAvLy8uXryY8tqZREZGEhQUxMSJE3n++edxdnbO9uvyqN5afYLfr1p+IchPqpVyZ3LnJ/O6GpmW1RbDWsCA5J92ydv8U20bkLpwcqthXWAJRkD4f0BFYB7Q+FHXSc4O6UoWQoiC5/PPPweMVsPUWSWUUsyYMQNnZ2e++OIL4uPjzY578803zQIbOzs7Bg8eDMC+ffseuV7vv/9+SvADUKRIEfr3709SUhIHDhxI2f7ll19y+/ZtJk+ebBYUAgQEBDB06FCuXbvGxo0bLa7Rr1+/lKAQwMHBgV69egFQp04ds4kxSin69esHwOHDh1O2Hzt2jNDQULp162YWFAK4u7szZcoUtNZ88803KdsdHR0tgkIwAsYXX3yRyMjINF/DzL4uIv/J6pJ4U4ApWTzmMjA4K8fkJq31amB1lSpVhuZ1XYQQQmSOaQxeq1atLPaVKFGCwMBA9u7dy+nTp6levXrKvrp161qUNwU7kZGRj1yvzJ5/x44dABw9etSiyxbg9GljvuapU6d4+umnzfZZy3FnanEzte5Z23f58mWL60dFRVm9/s2bN1Oun9qJEyeYOXMmW7du5erVqxZjBK9cMVulNkVuv+4mBaklrqDIyzyGQgghRKbcvWtkNitZsqTV/am7bFOzNqPYzs7405eYmPjI9crs+W/fNjrIFi9enO75rPVmubu7W2wzXSO9falbT03X37RpE5s2pb3YWOrr7969m1atWpGQkEDr1q3p0qUL7u7u2NjYcPjwYVatWpXmZJLcft1F7il0gaEkuBZCiILHFGhcv34dT0/LBbOuXbtmVi6/MdXrwIED1KlTJ8+u/8EHHzBu3LhMHfPuu+9y//59QkJCLFYrmj59OqtWrcrpaop8IDfT1eRLMitZCCEKHlMwFRISYrHv5s2bHD9+nCJFilClSpXHXbVMady4MQDbtm0rMNc/d+4c3t7eVpew3LJlS05VTeQzhS4wFEIIUfCYJoxMmzaN69evp2zXWvOf//yHe/fuMWDAAOzt7fOqiukaNGgQXl5evPPOO+zatctiv9aa7du3ExcXlyvXr1u3Lk2bNmXVqlV8+umnaG25Gu2ZM2cIDw9P+d3f35+IiAiOHj1qVm7x4sVpptYRBV+h60oWQghR8DRu3JhXX32V6dOnU716dZ599lk8PDzYsGEDBw8eJDAwkGnTpuV1NdPk7e3Njz/+SLdu3WjSpAmtWrXiySefxN7enkuXLrFnzx7Cw8OJjIzEwcEhV+qwePFiunbtyrBhw/joo49o1KgR3t7eXLlyhRMnTnDo0CFWrlxJ2bJlAXjllVdYt24dTZs2pVevXnh4eLB//362b99Oz549Wb58ea7UU+StQhcYyhhDIYQomKZNm0bt2rWZP38+33zzDbGxsZQvX57XXnuNCRMm4ObmltdVTFfLli05duwYH3zwAWvXrmXnzp3Y2dnh6+tLo0aNmD59utXJJDnF19eX/fv3M3/+fJYvX863335LfHw8JUuWpHLlysydO5cWLVqklG/fvj2rV6/m3Xff5fvvv8fW1pYGDRoQEhLC+fPnJTD8h1LWmpMLgypVqmhTegBR8ISGhlod9yLyP7l3BdvD9+/kyZMEBATkXYVEpkVHR+f74Lkwyux7SCl1QGtdL7frI2MMhRBCCCEEIIGhEEIIIYRIVugCQ1kSTwghhBDCukI
XGEoeQyGEEEII6wpdYCiEEEIIIayTwFAIIYQQQgASGAohhBBCiGQSGAohhBBCCKAQBoYyK1kIIYQQwrpCFxjKrGQhhBBCCOsKXWAohBBCCCGsk8BQCCGEEEIAEhgKIYQQQohkEhgKIYQQyaZMmYJSiqVLl+Z1VR6r4OBglFJ5XQ2RD0hgKIQQQvzDBQcH4+7uzoULF/K6KiKfs8vrCgghhBAib3355Zfcu3cvr6sh8oFCFxgqpToDnUuVKpXXVRFCCCHyhbJly+Z1FUQ+Uei6kiWPoRBC5I0DBw7Qt29fypQpg6OjIyVKlKB58+YsWLDArFxISAgdO3akaNGiODo6UqFCBcaMGcONGzcszjlw4ECUUoSGhrJ8+XIaNGiAi4sL3t7e9O7dm8uXL6dZl/bt2+Pm5oa7uztPPfUUO3fuTLf+169f55VXXqFSpUo4OTnh5eVF27Zt2bRpk0XZpUuXopRiypQpHD58mKeffhpPT09cXFxo3rw5O3bssDjm7t27vP3221SvXh13d3dcXV0pX7483bt3JzQ01KzsTz/9RL9+/ahUqRJFihTB1dWVOnXqMGfOHBITE83KKqXYsmULAOXLl0cphVIKf3//lDJpjTHUWvPJJ5/QsGFD3NzccHFxoVatWsyaNYu4uDiL8v7+/iilSEhIYNq0aVSqVAlHR0fKlCnD+PHjiY2NTfc1Fnmv0LUYCiGEePw+//xzhg8fDsDTTz9NQEAAERERHDlyhBkzZjBixAgAFi1axIgRI3B2dubZZ5/F19eXnTt3Mm/ePFauXMn27duttm7997//5eeff6Zr1660aNGCPXv28MMPP3D48GGOHj2Ko6NjStmdO3fy1FNPERsbS/fu3alUqRJHjx6lZcuWtGrVymr9jx07Rps2bbhx4wZt27ala9eu3L59m59++ok2bdrw2WefMXjwYIvj9u/fz4wZMwgKCmLIkCGEh4fz448/0rp1aw4dOkRAQABgBGDt27dn9+7dNGjQgBdffBEHBweuXLnCtm3b2LhxI8HBwSnnnThxIjY2NjRq1Ag/Pz/u3LnDpk2bGDt2LHv37mXZsmUpZSdPnszSpUu5ePEiY8aMwdPTEyDlMT39+/dn2bJl+Pn5MWjQIOzt7Vm9ejXjx49n7dq1rF27Fjs7y1Cib9++bNu2jQ4dOuDu7s6aNWuYNWsWN27c4IsvvsjwuiIPaa0L5U/lypW1KLhCQkLyugoim+TeFWwP37/ff/89w2NOnDih7ezstKurqz5w4IDF/vDwcK211hcuXNAODg66SJEi+vjx42ZlXn/9dQ3ojh07mm0fMGCABrS7u7s+ceKE2b4+ffpoQH/33Xcp25KSknSVKlU0oJcvX25Wfv78+RrQgF6yZEnK9oSEBF25cmXt6OioQ0NDzY65evWqLl26tHZyctLXr19P2b5kyZKUc3311VdmxyxcuFAD+qWXXkrZduTIEQ3oLl26WLw+SUlJ+tatW2bbzp07Z1EuMTFR9+vXTwN6165dZvtatGihAR0WFmZxXOr9qS1btkwDukaNGvru3bsp22NjY3WrVq00oGfMmGF2TLly5TSg69WrpyMiIlK2x8TE6IoVK2obGxt99epVq3UorDLzHtJaa2C/fgzxkbQYCiGEyFULFiwgISGB1157jTp16ljsL1OmDABff/01cXFxjBkzhieffNKszOuvv87nn3/OmjVruHLlCn5+fmb7x4wZQ7Vq1cy2DRs2jG+//ZZ9+/bRu3dvwGgtPH36NE2aNKFHjx5m5UeMGMGcOXM4d+6c2fY1a9Zw5swZxo4dS4sWLcz2+fr6Mn78eMaMGcPy5csZNWqU2f5mzZrRv39/s22DBw/m5ZdfZt++fSnbbGyMkV0uLi4Wr49SiqJFi5ptq1ixokU5Gxsbxo4dyzfffMP69etp1KiRRZms+PzzzwGYPn067u7uKdsdHByYPXs2NWvW5NNPP2X8+PEWx77//vt4eXml/F6kSBH69+/PW2+
9xYEDB+jUqdMj1S3FbxPh+rGcOVduKRkIHd7L61pkmgSGQgghctXu3bsBows5PQcPHgSw2p3r6OhI06ZN+eGHHzh06JBFYFi3bl2LY0qXLg1AZGSkxTUeDvDACKyaNm1qERiaxgOGh4czZcoUi+POnj0LwKlTpyz2WauXvb09JUqUMKtXQEAAdevW5bvvvuPChQt07dqVoKAg6tevj5OTk8U5bt++zcyZM1mzZg3nz5/nr7/+Mtt/5coVi2OyyvRatWzZ0mJfjRo1KF68OGfPniUmJoaHx+1n9n6I/OcfERgqpfoAy4B1Wuv2eV0fIYQQf7tz5w7wd2CQlrt37wJQsmRJq/t9fX3NyqXm4eFhsc009i31ZAzTsSVKlLB6DWvbb9++DcCPP/7Ijz/+mGb9Y2JiMlUvU91S18vW1paNGzcydepU/ve///Hqq68CRgti7969mTFjBsWKFQOM17N+/fqEhYXRoEEDXnjhBby9vbGzs+POnTvMnTs3RyZ53L17Fw8PD5ydna3u9/X15caNG9y9e9ciMMzs/XhkBaglrqAo8IGhUqoCMBPYltd1EUIIYck0yeHKlStm3YsPMwUT169ft7r/2rVrZuWyw3Tsn3/+aXW/te2mY3788Ue6d++e7WtnxNPTk5kzZzJz5kzCwsLYunUrixcvZsmSJVy8eDFl9vNnn31GWFgYkydPtmjB3LVrF3Pnzs2R+nh4eBAREcH9+/etBoc5cT9E/lOg09UopeyBb4HXgPN5XB0hhBBWNG7cGIBff/013XKm8YchISEW+2JjY1O6dK2NU8ws07Gm9C2pJSUlsX37dovtpvpv2/b42h/Kly/PgAED2Lx5M2XKlGHz5s0prZ2mru6Hx0iC9ecFRoskZK21zvRaPZwqB+D48ePcuHGDypUrW7QWioIt04GhUqqnUuojpdQ2pVSUUkorpb7L4JjSSqnFSqmrSqlYpdQFpdQcpVTaXxmzZipwQWstc9+FECKfGjFiBPb29kydOpUjR45Y7DflGuzfvz8ODg7897//tRivN336dK5cuULHjh15lAUKmjRpQpUqVdi5c6dFt/CCBQssxhcCdOnShSeeeIKFCxfy888/Wz3voUOHUrqcsyMsLIzjx49bbI+Ojuavv/7Czs4upSvWlH/w4QD60KFDTJ8+3er5Td3Q4eHhma7Tiy++CMCkSZPMusnj4+MZN24cAEOGDMn0+UTBkJWu5NeBmkAMcBmoml7h5C7enUAJYBVwCmgAjAHaK6WCtNbZfhcppdoCvYFa2T2HEEKI3BcQEMDChQsZNmwY9erVo1OnTgQEBHDnzh2OHj3KlStXCAsLo1y5csybN48RI0ZQr149evXqRcmSJdm5cydbtmyhdOnSFsmws0opxeLFi2nTpg29evUyy2O4YcMG2rdvz9q1a82Osbe3Z+XKlSn5Cxs2bEidOnVwdXXl0qVLHDp0iNOnT3Po0CGL2cOZdeTIEZ555hlq165NYGAgpUqVIjIykl9++YWIiAjGjRtHkSJFAHjhhReYOXMmY8eOJTQ0lEqVKnH27Fl++eUXunfvzvfff29x/rZt2/LDDz8wdOhQevbsiaurK56enrz88stp1um5555j9erVLFu2jGrVqvHMM8+k5DE8c+YMrVu35pVXXsnW8xX5WGbz2gAtgUqAAoIx8jN9l075dcllRj+0/cPk7Qsf2j4leXt6Pz2TyxYDrgLBqY5fCqzN7PORPIYFm+TCK7jk3hVs2cljaLJnzx797LPP6hIlSmh7e3tdokQJ3aJFC71o0SKzcps2bdLt27fXXl5e2t7eXvv7++vRo0eb5Qk0MeUxtPb/KiwsTAN6wIABFvv279+v27Vrp11dXbWrq6tu3bq13rlzp548ebJFHkOTmzdv6tdee00HBgZqFxcX7ezsrCtUqKA7d+6sP/30U33v3r2UsqY8hpMnT7b6WpQrV06XK1cu5fdLly7pSZMm6SZNmuiSJUtqBwcH7evrq1u
1aqV/+OEHi+NPnDihO3furH18fLSLi4uuU6eO/vTTT9N8zomJiXrixIm6YsWK2t7eXgNm17eWx9B03MKFC3X9+vW1i4uLdnJy0jVq1NAzZszQsbGxVp+XtfOkfk2svbaFWX7LY6iMa2WNUioYCAG+11o/Z2V/BeAPIAx4QmudlGqfG3AtOcAsrrX+K3l7seSALz1XtNbRqa6ferCEqVs8CaiptT6R3omqVKmiT58+ncHlRH4VGhpqtgqAKDjk3hVsD9+/kydPpqzeIfK36Oho3Nzc8roa4iGZfQ8ppQ5orevldn1ya1ayKQnV+tRBIUByYLcDaAs0AjYlb78F3Mrk+fcBgQ9texcjsHwJsBwkIoQQQggh0pVbgWGV5Mezaew/ixEYViY5MMyK5FZGs1G6Sqk7gJPW2nL0rhBCCCGEyFBuBYampEaWWUjNt2e8gncOUkoNA4YB+Pj4WJ2CLwqGmJgYuX8FlNy7gu3h++fh4UF0dHTeVUhkWmJiotyrfOjBgwf56jMxrxJcq+THrA9wTIPWemAmynwCfALGGEMZ51RwyTi1gkvuXcFmbYyhjFsrGGSMYf7k5ORE7dq187oaKXIrwbWpRTCtdOjuD5V7bJRSnZVSn1hbukgIIYQQojDLrcDQNN23chr7KyU/nsml66dJa71aaz1MMrULIYQQQpjLrcDQlI69rVLK7BrJ6WqCgPvA7ly6fpqkxVAIIYQQwrpcCQy11n8A6wF/YNRDu98CigBfmnIYPk7SYiiEEEIIYV2mJ58opboB3ZJ/LZn82FAptdRU5qEJICMxlsSbp5RqDZwEGmKsoHIGeC3btRZCCCGEEDkuK7OSawEDHtrmn/xjMtD0D631H0qpusDbQAegI8aKJ/OAt7TWEVmv7qNTSnUGOj/KIuxCCCGEEP9Eme5K1lpP0Vqr9H6sHHNZaz1Ya+2rtXbQWpfTWo/Jq6AwuU7SlSyEEEIIYUVuTT4RQgghhBAFTKELDGVWshBCCCGEdYUuMJSuZCGE+Gfy9/dHKYtRTUKILCh0gaEQQgghhLBOAkMhhBBCCAEUwsBQxhgKIYQQQlhX6AJDGWMohBAFl9aa+fPn8+STT+Lk5ISfnx+jRo3i7t27aR4TEhJCly5d8PHxwcHBgXLlyjFy5EiuX79utfydO3eYPHkyNWvWxNXVFTc3N6pVq8aYMWP4888/U8qdOXOGiRMnUq9ePXx8fHB0dKRcuXIMHTqU8PBws3P++uuvKKUYNGiQ1WsmJSVRrlw5XFxciIyMBGDp0qUopZgyZQr79++nffv2eHp64unpSY8ePbh06RIA586do1evXvj4+ODs7ExwcDBHjhyxep0HDx4wa9Ys6tati6urK0WKFKFevXosXLgQrbVF+SVLltC9e3cqVKiAs7Mz7u7uBAUF8eWXX1o9f3BwMEopLly4wKJFiwgMDMTJyYkSJUowdOhQ7ty5Y/U4kX8UusBQCCFEwfXKK68wevRoIiIiGDp0KH369GH9+vU89dRTxMXFWZR///33adWqFTt27KBDhw688sorBAQEsGDBAurVq8fly5fNyl+8eJE6derw9ttvo7Vm6NChDB8+nMqVK/PZZ59x8uTJlLIrVqxg4cKFlClThj59+jB69GgCAgJYvHgx9evXNzt3hw4dqFChAt9//73V4GjNmjWEh4fTu3dvvLy8zPbt27eP5s2bY29vz5AhQwgMDGTFihW0bt2a33//nQYNGnDr1i0GDBhAy5Yt2bJlC23atOHhnrHo6GhatGjB+PHj0VozcOBABg0axN27dxkxYoTVoHXkyJFcvHiRM9gCoQAAFB5JREFU5s2b88orr9C7d2/CwsIYMGAAkyZNSvM+/ec//2HChAnUrFmTUaNG4efnx2effUbXrl3TPEbkE1rrQvlTuXJlLQqukJCQvK6CyCa5dwXbw/fv999/f2zX3rFjhwa0v7+/vnnzZsr2Bw8e6KCgIA1o48+aYcuWLVoppRs1aqQjIyP
NzvXll19qQD/zzDNm25s0aaIB/dprr1lcPyoqyuw8ly9f1g8ePLAot2bNGm1jY6OHDx9utn3GjBka0HPmzLE4plOnThrQe/bsSdm2ZMmSlOe0fPnylO1JSUm6Xbt2GtAeHh569uzZZucaOnSo1eu88MILGtDvvfee2fYHDx7ojh07akCvWrXKbN+5c+cs6vrgwQMdHBys7ezs9KVLl8z2tWjRQgO6XLlyZvvi4+N1s2bNNKB3795tcc7CLLPvIWC/fgzxUVaWxBNCCCHyzJIlSwCYNGkSxYoVS9nu6OjItGnTaNGihVn5uXPnorVm0aJFeHp6mu17/vnnmT17Nj///DNRUVG4u7tz4MABdu7cSbVq1Xjrrbcsru/m5mb2u5+fn9V6dujQgWrVqrF+/Xqz7YMHD+bNN99k0aJFjBkzJmX7pUuX+O2336hduzYNGjSwOF9wcDA9evRI+V0pRb9+/Vi3bh3FihUzO5fpuX366accPnw4ZVtERATLli2jdu3aTJgwway8o6Mj06dPZ82aNXz11Vd06dIlZV/FihUt6uPo6MjLL79MaGgomzdv5oUXXrAo8+abb1K6dOmU3+3s7Bg8eDDbtm1j3759NGzY0OKY7Hh/7/ucijiVI+fKLVW9qzKhwYSMC+YThS4wlLWShRCiYDp48CCARQAIEBQUhJ2dHQkJCSnbduzYgZ2dHStWrGDFihUWx8TGxpKYmMjZs2epW7cuu3fvBqB9+/bY2tpmWB+tNd988w1Lly7lyJEjREZGkpiYmLLfwcHBrHzRokV57rnnWLp0KVu2bEl5Hp9++imJiYmMGDHC6nVq165tsc3X1xeAGjVqWORuNO1L3ZW9d+9eEhISsLGxYcqUKRbni4+PB+DUKfMgKzw8nPfff5+NGzdy6dIl7t+/b7b/ypUrVutct25di22mQNE0hlLkT4UuMNRarwZWV6lSZWhe10UIIUTmmSaYlChRwmKfra0tRYsWNZsccvv2bRISEqy2/qVmGotnGvuXuqUrPePGjWPOnDn4+vrSrl07/Pz8cHZ2BoyJIxcvXrQ4ZtSoUSxdupSFCxfSokULEhISWLx4Me7u7vTt29fqddzd3S222dnZZbjPFOyB8VoAHDhwgAMHDqT5nFKPSzx//jwNGjQgMjKSZs2a0a5dOzw8PLC1teXChQt88cUXxMbGWj2Ph4dHmvVKHTw/qoLUEldQFLrAUAghRMFkCjb+/PNPi8AjMTExJfhJXT4uLo6oqKhMnd/U3ZxWK1hqN27cYN68eVSvXp2dO3dadDN/++23Vo+rV68eDRo0YMWKFdy8eZPt27dz9epVRo4cSZEiRTJVz+wwvV6jR49m3rx5mTrmww8/5Pbt2yxZsoSBAwea7fv222/54osvcrqaIh+QWclCCCEKhDp16gCwZcsWi307duww60YGaNy4MdHR0WmmbnlY48aNAVi3bl2GrVrnz58nKSmJtm3bWgSFly9f5vz582keO2rUKOLi4vj8889ZuHAhAC+99FKm6phdDRs2xMbGhm3btmX6mHPnzgGYjW80sXYPxD+DBIZCCCEKBFOr1bRp08xaB2NjY62mThk3bhwAw4YNs0hLA0ZOv+3bt6f8XqdOHZo2bcrx48d5++23LcrHxMSkdGf7+/sDsH37drMgMiYmhqFDh1oEqan17t2bokWLMmfOHDZs2EBQUBCBgYHpPPNH5+Pjw3PPPcfhw4eZMmWK1fpdvnzZbIyh6TmGhISYlVu3bh2fffZZrtZX5B3pShZCCFEgBAUFMXr0aD766COqV69Oz549cXR0ZNWqVXh6euLr68u1a9dSyrds2ZKZM2cyYcIEKlWqRMeOHalQoQL3798nPDycrVu34u/vbzZ79+uvvyY4OJi3336bVatW0apVK2xtbQkLC2PdunWsXr2a4OBgSpYsyXPPPcd3331HrVq1aNu2LXfv3mXDhg04OTlRq1Yts/Om5ujoyIsvvsiMGTMA0px0ktNmzpz
JhQsXeOutt/jqq69o3rw5JUuW5Pr165w+fZrdu3fz4YcfUrVqVcDIYbhkyRJ69epFjx498PPz4/jx46xdu5ZevXrx/fffP5Z6i8er0LUYypJ4QghRcM2dO5ePPvoIT09PPvnkE5YtW0bbtm3ZuHGjxSxggH//+9/s2LGDbt26sWfPHubOncu3335LWFgYffv2tRhvV65cOQ4cOMCkSZOIi4tjwYIFLFq0iJMnTzJkyBCqVauWUnbx4sVMmjSJ+/fv8/HHH7Nu3To6derEzp07rU6+SG3w4MEAFCtWjJ49e+bAK5MxNzc3QkNDWbBgAaVKlWLlypV8+OGHbNy4ETs7O6ZNm8azzz6bUr5GjRqEhITQpEkT1qxZw4IFC4iKimLFihW53vUt8o7SVpbAKQyqVKmiT58+ndfVENkUGhpKcHBwXldDZIPcu4Lt4ft38uRJAgIC8q5CBdSyZcvo168f48ePT2k5zG3R0dEW4yFF3svse0gpdUBrXS+361PoWgyFEEKIvJSYmMjMmTOxtbV9bN3IQmSWjDEUQgghHoOtW7cSEhLC1q1bOXz4MMOHD6d8+fJ5XS0hzEhgKIQQQjwGmzdv5q233sLb25shQ4bw4Ycf5nWVhLAggaEQQgjxGEyZMsXqcnRC5CcyxlAIIYQQQgASGAohhBBCiGSFLjCUPIZCCCGEENYVusBQa71aaz3M1dU1r6sihBD/CIU1H64Qjyo/vncKXWAohBAi59ja2hIfH5/X1RCiQIqPj8fW1javq2FGAkMhhBDZ5ubmRlRUVF5XQ4gCKSoqKt+tRiOBoRBCiGzz9vYmMjKSW7duERcXly+7xoTIT7TWxMXFcevWLSIjI/H29s7rKpmRPIZCCCGyzdHRkbJlyxIREcGFCxdITEzM6yqJNDx48AAnJ6e8robAGILh5uZG2bJlcXR0zOvqmCnQgaFSagow+aHNiVrrAv28hBCiIHF0dMTX1xdfX9+8ropIR2hoKLVr187raoh87p8QQJ0DmqX6XfoxhBBCCCGy4Z8QGCZqra/ndSWEEEIIIQq6LE0+UUr1VEp9pJTappSKUkpppdR3GRxTWim1WCl1VSkVq5S6oJSao5TyerSqpyinlLqilLqolFqhlKqWQ+cVQgghhChUstpi+DpQE4gBLgNV0yuslKoA7ARKAKuAU0ADYAzQXikVpLW+ndVKp7IHGAicBIoB44HdSqlArfXFRzivEEIIIUShk9V0NWOByoA7MCIT5RdgBIX/0lp301pP1Fq3AmYDVYCpqQsrpaYkt0Km99PTVF5r/ZvW+nut9VGt9WagK3ATGJnF5yWEEEIIUehlqcVQax1i+rdSKt2yya2FbYEw4OOHdk8GhgHPK6X+T2v9V/L2+UC6XdPAlXTqF6eUOogRvAohhBBCiCzIzcknrZIf12utk1Lv0FpHK6V2YASOjYBNydtvAbeye0GllC0QCGzO7jmEEEIIIQqr3AwMqyQ/nk1j/1mMwLAyyYFhVimlPgBWAxcwxhj+BygHLEyj/DCMlkp8fHwIDQ3NzmVFPhATEyP3r4CSe1ewyf0ruOTeiczIzcDQI/nxbhr7Tds9H+EapYBvAB+Mlsb9QBOt9VFrhbXWnwCfACilolu2bHn6Ea4t8lYxHqF1WeQpuXcFm9y/gkvuXcFWJeMijy4v8xiaBilmOyG11rrPI1z/tNa63iMcL/KQUmq/3L+CSe5dwSb3r+CSe1ewKaX2P47rZHVWclaYWgQ90tjv/lA5IYQQQgiRh3IzMDR106Y1Q7hS8uOZXKyDEEIIIYTIpNwMDE2pbdoqpcyuo5RyA4KA+8DuXKxDej7Jo+uKnCH3r+CSe1ewyf0ruOTeFWyP5f4prbM3xE8pFYwR/H2vtX4ujTLrMGYe/0tr/VGq7R9iJMtepLV+KVsVEEIIIYQQOSpLgaFSqhvQLfnXkkA7jFQxW0xltNYDU5WviLEkXnGMJfFOAg2BlhhdyE0ecUk8IYQQQgiRQ7I
aGE7BWLUkTVprsyVRlFKlgbeBDkBR4BrwE/CW1joii/UVQgghhBC5JEtjDLXWU7TWKr0fK8dc1loP1lr7aq0dtNbltNZj8iIoVEqVVkotVkpdVUrFKqUuKKXmKKW8HnddhDmlVFGl1ItKqR+VUmeUUveUUlFKqT1KqTFKKfs0jgtQSv2glLqhlHqglDqtlHpLKeX8uJ+D+JtSqoVSKjF5ffP30ikn9y8fUUq1VkqtVEpdT/6MvKqUWqeU6milbGOl1K9KqYjk9+tRpdQryStQicdMKdUp+V6FJ7+XLiilViul2qRRXu7fY6SU6qmU+kgptS35b5tWSqW7BHB2Ph9z4r5me4xhQZO8dvNOoARGt/YpoAFGt/ZpIEi6tfOOUuolYAFwA2PsahhGC3NXjKEI24GntNaxqY5pgLH8oQOwHLiEsRRjPWAH0Dp1efF4KKU8gKOAN+AKvK+1nmilnNy/fEQpNQMYj/Ee/BWjd8cHqANs1lr/J1XZrsCPwAPgeyAC6IyRgHe51vrZx1v7wi3VvYvE6JH7E2MVsG6AM/Ca1npaqvJy/x4zpdRhoCbw/+2dW6gVVRjHf592ygq1OxZ2U4ou+tA9DeRoZUL3h6KCyIoIH7QLhPVWT2lQGIEIZUmFSRF2J4sye6joIbtppWYaUQ+aWlnipb4evjWccbNnn31O++yZmP8PPtY567KZmf+eNd9es9a3dgI/AafReo3GgPvHjunq7rUwYAURTHt2Q/5jKX9R2cdYZ0tf+KuB4Q35o4DVSaN7c/nDgbUp/6pc/rB0Ezlwf9nnVUcDngN+BR5IOsxrUkf6VciAO9I1fws4tEl5T+7vUYTzuBs4N5c/gvjx7cANZZ9TXYwY7Pg7aTKmoWxi0ukvYIT0K1WnqUSYPgN603VeVlB3wP1jJ3Ut/WJ1SZBx6aJsBIY1lI0kPPg/m3WIsvINuCnp93oub1rK+6CF3ptIo+Kyrml1Xbr2NwIzWziG0q8iBhyUHii/AYe3Uf+2pM+SFrquKvu86mLEgk4H3iwozxyMI6VfNawNx3DA/WMndR3KOIZVYlpK33H3f/IF7v4HMSx7CHBhtw9MtMWelO7L5WWavt1Y2d03EqveTyRuItEFzOw4YBHxyuKFfqpLv+pwKfHKeDmw08yuNLO5aW5vsz6xUDvgQ2J0arKZHTQ0hysaWE/0keeZ2Zh8gZlNAMYDq71vqpT0qz6D6R87pmtdHMNs4+n1BeVZftEuLaJcbk9p/gsvTSuEmRnwDLAXmNVGE+lXHc5L6TbgC+A1YB6wAPjYzFaa2dG5+oXaufs+Yn7wAcip7woeCznnAkcBa9ICy4fNbCnwKTEV5/pcE+lXfQbTP3ZM17o4htl+zUX7Mmf5h3XhWMQAMLO7gRnA58DTuSJpWi1mE8Hs73T3rW3Ul37V4ZiUzklpLzHFZiIxN7uXmNeUIe0qhrsvIOZoO/FK8X5iOsc24El335CrLv2qz2A06piudXEM+yMLs1OPJdr/E8zsZuBR4GfgWnffO5DmKZWmQ4yZnQ7MB55191c79bEplX5DTxbGwon7bJW773T3r4FriRWUU8xsUpufJ+26jJndR0wFWEoscDgEmEBEc3jKzBYO5ONSKv2qy2A0artNXRzDzFMeXVA+qqGeKBkzuxVYQoTM6HX3TQ1VpGl1eB7YSt+IUztIv+qwPaXfuvt3+QJ330WMGkKE9wJpVynMbCrwCPCGu89x9w3uvsvd1xAL9z4DZpnZmamJ9Ks+g9GoY7rWxTHMOrui+UqnpHRdF45F9IOZzQIWE3Gbprh7s3kW0rQ6nA2MBXakoK1uZk7MOQSYm/JeybWRftUh02JHQXnmOGZBdQu1M7MDgJOJhWIbO3WAoiVXpPS9xoK02DLbsvaslEq/6jOY/rFjutbFMVyZ0ulmtt85m9lI4CJgF/BJtw9M7I+Z3QssBL4nnMKiL/H7KZ3R5DPGETfHZtS5dYPFBfZhKv8y/f9uro30qw7vEa+
XTk0PkEYmpPSHlBZqB0whXmN+5ApO3i0OTOnRBeXZHNJMD+lXfQbTP3ZO17Lj+XQxbpACXFfc6AuIvBY4tp+6rQKAvoQCJJdutI5jKP0qZMRuUA480JA/HfiHGDUcnfJGAVtQgORKGLHi2IndTk5oKDuHGPTYTQp+Lf3KN/5bgOum/WMnda3TlnjjiYtzDNEJfkMEBp1KDMdOdm2JVxpmdgsxp9CJrfG2NKm2w2P1XdbmAuJXUg+xavJH4GK0pVolMLOZxOvkoi3xpF9FMLOxRP94PPGG5TPgJGJLteyB8nKufn7rrWXE6terSFtvAdd7XR4uJZPegq0ALiE2alhOLNgbR6xU7iGciPm5NtKvy5jZNcT9BDAGuIwIUp296sfdZ+bqD7h/7JiuZXvOXfbSxxIhT34hAoJuBh4Hjij72OpuwIPEA6iVbWrS7gzgRfp+Ka0DHgIOLvuc6m60GDGUftUz4lXkE6lf3EMsKFoOnF9QfxKxhd52YlTqK+AeGra1lHVFux7gLmI61O/EXLItSZ/LpV/51s4zrkmbAfePndC1NiOGQgghhBCiNXVZfCKEEEIIIfpBjqEQQgghhADkGAohhBBCiIQcQyGEEEIIAcgxFEIIIYQQCTmGQgghhBACkGMohBBCCCEScgyFEEIIIQQgx1AIIYQQQiTkGAohhBBCCAD+BQ1O0foeIxSIAAAAAElFTkSuQmCC\n", 327 | "text/plain": [ 328 | "
" 329 | ] 330 | }, 331 | "metadata": {}, 332 | "output_type": "display_data" 333 | } 334 | ], 335 | "source": [ 336 | "import matplotlib.pylab as pylab\n", 337 | "params = {'font.size' : 14,\n", 338 | " 'legend.fontsize': 'x-large',\n", 339 | " 'figure.figsize': (10, 5),\n", 340 | " 'axes.labelsize': 'x-large',\n", 341 | " 'axes.titlesize':'x-large',\n", 342 | " 'xtick.labelsize':'x-large',\n", 343 | " 'ytick.labelsize':'x-large'}\n", 344 | "pylab.rcParams.update(params)\n", 345 | "\n", 346 | "plt.plot(np.arange(0,251),M)\n", 347 | "plt.legend([m.split('_')[0] for m in methodsname],markerfirst=False,loc=\"lower right\")\n", 348 | "plt.xlim(0,100)\n", 349 | "plt.yscale(\"log\")\n", 350 | "plt.grid()\n", 351 | "plt.title(\"Relative Mean Distance\")\n", 352 | "plt.savefig(\"./exp_cifar10_rmd.pdf\",dpi=250,bbox_inches=\"tight\",pad_inches=0.1,transparent=False,facecolor='w')\n", 353 | "plt.show()" 354 | ] 355 | } 356 | ], 357 | "metadata": { 358 | "kernelspec": { 359 | "display_name": "Python 3", 360 | "language": "python", 361 | "name": "python3" 362 | }, 363 | "language_info": { 364 | "codemirror_mode": { 365 | "name": "ipython", 366 | "version": 3 367 | }, 368 | "file_extension": ".py", 369 | "mimetype": "text/x-python", 370 | "name": "python", 371 | "nbconvert_exporter": "python", 372 | "pygments_lexer": "ipython3", 373 | "version": "3.6.4" 374 | } 375 | }, 376 | "nbformat": 4, 377 | "nbformat_minor": 2 378 | } 379 | -------------------------------------------------------------------------------- /figs/exp_cifar10_rmd.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/exp_cifar10_rmd.pdf -------------------------------------------------------------------------------- /figs/exp_cifar_best.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/exp_cifar_best.pdf -------------------------------------------------------------------------------- /figs/exp_cifar_overview.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/exp_cifar_overview.pdf -------------------------------------------------------------------------------- /figs/exp_cifar_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmensink/deepncm/fe7cdd43eb7276f4374c9c51715bf6cf417f994b/figs/exp_cifar_overview.png -------------------------------------------------------------------------------- /imagenet_deepncm.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
def get_filenames(is_training, data_dir):
  """Return the list of TFRecord shard paths for the requested split.

  Args:
    is_training: True for the training shards, False for the validation
      shards.
    data_dir: Directory holding the ImageNet TFRecord files.

  Returns:
    A list of shard filenames, in shard order.
  """
  if is_training:
    pattern, shard_count = 'train-%05d-of-01024', _NUM_TRAIN_FILES
  else:
    pattern, shard_count = 'validation-%05d-of-00128', 128
  return [os.path.join(data_dir, pattern % shard)
          for shard in range(shard_count)]
def _parse_example_proto(example_serialized):
  """Parses an Example proto containing a training example of an image.

  The serialized Example is expected to carry the fields written by the
  build_image_data.py preprocessing script, e.g. `image/encoded` (JPEG
  bytes), `image/class/label`, `image/class/text`, and the variable-length
  `image/object/bbox/{xmin,ymin,xmax,ymax}` coordinate lists.

  Args:
    example_serialized: scalar Tensor tf.string containing a serialized
      Example protocol buffer.

  Returns:
    image_buffer: Tensor tf.string containing the contents of a JPEG file.
    label: Tensor tf.int32 containing the label.
    bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
      where each coordinate is [0, 1) and the coordinates are arranged as
      [ymin, xmin, ymax, xmax].
  """
  # Dense (fixed-length) features in the Example proto.
  feature_map = {
      'image/encoded': tf.FixedLenFeature([], dtype=tf.string,
                                          default_value=''),
      'image/class/label': tf.FixedLenFeature([1], dtype=tf.int64,
                                              default_value=-1),
      'image/class/text': tf.FixedLenFeature([], dtype=tf.string,
                                             default_value=''),
  }
  sparse_float32 = tf.VarLenFeature(dtype=tf.float32)
  # Sparse (variable-length) bounding-box features share one feature spec.
  feature_map.update(
      {k: sparse_float32 for k in ['image/object/bbox/xmin',
                                   'image/object/bbox/ymin',
                                   'image/object/bbox/xmax',
                                   'image/object/bbox/ymax']})

  features = tf.parse_single_example(example_serialized, feature_map)
  label = tf.cast(features['image/class/label'], dtype=tf.int32)

  # Each `.values` is a 1-D tensor of per-box coordinates; add a leading
  # axis so the four coordinate rows can be concatenated below.
  xmin = tf.expand_dims(features['image/object/bbox/xmin'].values, 0)
  ymin = tf.expand_dims(features['image/object/bbox/ymin'].values, 0)
  xmax = tf.expand_dims(features['image/object/bbox/xmax'].values, 0)
  ymax = tf.expand_dims(features['image/object/bbox/ymax'].values, 0)

  # Impose the (y, x) coordinate ordering expected by downstream
  # preprocessing: [ymin, xmin, ymax, xmax].
  bbox = tf.concat([ymin, xmin, ymax, xmax], 0)

  # Force the variable number of bounding boxes into the shape
  # [1, num_boxes, coords].
  bbox = tf.expand_dims(bbox, 0)
  bbox = tf.transpose(bbox, [0, 2, 1])

  return features['image/encoded'], label, bbox
def input_fn(is_training, data_dir, batch_size, num_epochs=1,
             num_parallel_calls=1, multi_gpu=False):
  """Input function which provides batches for train or eval.

  Args:
    is_training: A boolean denoting whether the input is for training.
    data_dir: The directory containing the input data.
    batch_size: The number of samples per batch.
    num_epochs: The number of epochs to repeat the dataset.
    num_parallel_calls: The number of records that are processed in parallel.
      This can be optimized per data set but for generally homogeneous data
      sets, should be approximately the number of available CPU cores.
    multi_gpu: Whether this is run multi-GPU. Note that this is only required
      currently to handle the batch leftovers, and can be removed
      when that is handled directly by Estimator.

  Returns:
    A dataset that can be used for iteration.
  """
  filenames = get_filenames(is_training, data_dir)
  dataset = tf.data.Dataset.from_tensor_slices(filenames)

  if is_training:
    # Shuffle the input files so successive epochs visit shards in a
    # different order.
    dataset = dataset.shuffle(buffer_size=_NUM_TRAIN_FILES)

  # Fixed: replaced the fragile `cond and a or b` idiom with a conditional
  # expression. The old form silently yields `b` whenever `a` is falsy,
  # which makes it an accident waiting to happen.
  num_images = (_NUM_IMAGES['train'] if is_training
                else _NUM_IMAGES['validation'])

  # Convert the dataset of shard filenames to individual records.
  dataset = dataset.flat_map(tf.data.TFRecordDataset)

  return rrl.process_record_dataset(
      dataset, is_training, batch_size, _SHUFFLE_BUFFER, parse_record,
      num_epochs, num_parallel_calls, examples_per_epoch=num_images,
      multi_gpu=multi_gpu)
Valid values: [1, 2] 222 | """ 223 | 224 | # For bigger models, we want to use "bottleneck" layers 225 | if resnet_size < 50: 226 | bottleneck = False 227 | final_size = 512 228 | else: 229 | bottleneck = True 230 | final_size = 2048 231 | 232 | super(ImagenetModel, self).__init__( 233 | resnet_size=resnet_size, 234 | bottleneck=bottleneck, 235 | num_classes=num_classes, 236 | num_filters=64, 237 | kernel_size=7, 238 | conv_stride=2, 239 | first_pool_size=3, 240 | first_pool_stride=2, 241 | second_pool_size=7, 242 | second_pool_stride=1, 243 | block_sizes=_get_block_sizes(resnet_size), 244 | block_strides=[1, 2, 2, 2], 245 | final_size=final_size, 246 | version=version, 247 | data_format=data_format) 248 | 249 | 250 | def _get_block_sizes(resnet_size): 251 | """Retrieve the size of each block_layer in the ResNet model. 252 | 253 | The number of block layers used for the Resnet model varies according 254 | to the size of the model. This helper grabs the layer set we want, throwing 255 | an error if a non-standard size has been selected. 256 | 257 | Args: 258 | resnet_size: The number of convolutional layers needed in the model. 259 | 260 | Returns: 261 | A list of block sizes to use in building the model. 262 | 263 | Raises: 264 | KeyError: if invalid resnet_size is received. 
265 | """ 266 | choices = { 267 | 18: [2, 2, 2, 2], 268 | 34: [3, 4, 6, 3], 269 | 50: [3, 4, 6, 3], 270 | 101: [3, 4, 23, 3], 271 | 152: [3, 8, 36, 3], 272 | 200: [3, 24, 36, 3] 273 | } 274 | 275 | try: 276 | return choices[resnet_size] 277 | except KeyError: 278 | err = ('Could not find layers for selected Resnet size.\n' 279 | 'Size received: {}; sizes allowed: {}.'.format( 280 | resnet_size, choices.keys())) 281 | raise ValueError(err) 282 | 283 | 284 | def imagenet_model_fn(features, labels, mode, params): 285 | """Our model_fn for ResNet to be used with our Estimator.""" 286 | learning_rate_fn = rrl.learning_rate_with_decay( 287 | batch_size=params['batch_size'], batch_denom=256, 288 | num_images=_NUM_IMAGES['train'], boundary_epochs=[30, 60, 80, 90], 289 | decay_rates=[1, 0.1, 0.01, 0.001, 1e-4]) 290 | 291 | return rrl.resnet_model_fn(features, labels, mode, ImagenetModel, 292 | resnet_size=params['resnet_size'], 293 | weight_decay=1e-4, 294 | learning_rate_fn=learning_rate_fn, 295 | momentum=0.9, 296 | data_format=params['data_format'], 297 | version=params['version'], 298 | loss_filter_fn=None, 299 | multi_gpu=params['multi_gpu']) 300 | 301 | 302 | def main(argv): 303 | parser = rrl.ResnetArgParser( 304 | resnet_size_choices=[18, 34, 50, 101, 152, 200]) 305 | 306 | parser.set_defaults( 307 | resnet_size=50, 308 | train_epochs=100, 309 | data_dir= "/tmp/deepncm/data/imagenet/tf/", 310 | model_dir="/tmp/deepncm/exp/imagenet/" 311 | ) 312 | 313 | flags = parser.parse_args(args=argv[1:]) 314 | 315 | flags.model_dir += "resnet-%d/%s" %(flags.resnet_size,flags.ncmmethod) 316 | 317 | if flags.ncmmethod == "decaymean": 318 | flags.model_dir += "_d%02d" %(flags.ncmparam*100) 319 | elif flags.ncmmethod == "omreset": 320 | flags.model_dir += "_r%04d" %(flags.ncmparam) 321 | 322 | flags.model_dir += "_lr%5.0e" %(flags.initial_learning_scale) 323 | 324 | print(flags.model_dir) 325 | 326 | 327 | input_function = flags.use_synthetic_data and get_synth_input_fn() or 
input_fn 328 | 329 | rrl.resnet_main( 330 | flags, imagenet_model_fn, input_function, 331 | shape=[_DEFAULT_IMAGE_SIZE, _DEFAULT_IMAGE_SIZE, _NUM_CHANNELS]) 332 | 333 | 334 | if __name__ == '__main__': 335 | tf.logging.set_verbosity(tf.logging.INFO) 336 | main(argv=sys.argv) 337 | -------------------------------------------------------------------------------- /resnet_deepncm_run_loop.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file (resnet_deepncm_run_loop) is based on resnet_run_loop from the 11 | # TensorFlow Models Official ResNet library (release 1.8.0/1.7.0) 12 | # https://github.com/tensorflow/models/tree/master/official/resnet 13 | # 14 | # It contains code to support the ResNet DeepNCM models 15 | # Modifications are made to 16 | # - resnet_model_fn call, to incorporate the NCM update ops 17 | # - ResnetArgParser, to include different command line arguments 18 | # - Main, to allow multiple models at the same GPU 19 | """Contains utility and supporting functions for ResNet. 20 | 21 | This module contains ResNet code which does not directly build layers. This 22 | includes dataset management, hyperparameter and optimizer code, and argument 23 | parsing. Code for defining the ResNet layers can be found in resnet_ncm.py. 
def get_synth_input_fn(height, width, num_channels, num_classes):
  """Return a synthetic-data input_fn for the given image geometry.

  Thin forwarding wrapper around the official ResNet run-loop helper.

  Args:
    height: Image height in pixels.
    width: Image width in pixels.
    num_channels: Number of image channels.
    num_classes: Number of label classes.

  Returns:
    An input_fn producing synthetic images and labels (see official.resnet).
  """
  # Fixed: the forwarded argument was misspelled `num_channles`, which
  # raised a NameError whenever synthetic data was requested.
  return rrl.get_synth_input_fn(height, width, num_channels, num_classes)
def learning_rate_with_decay(
    batch_size, batch_denom, num_images, boundary_epochs, decay_rates,
    initial_learning_scale=0.1):
  """Get a learning rate that decays step-wise as training progresses.

  Args:
    batch_size: the number of examples processed in each training batch.
    batch_denom: this value will be used to scale the base learning rate.
      `initial_learning_scale * batch size` is divided by this number, such
      that when batch_denom == batch_size, the initial learning rate will be
      initial_learning_scale.
    num_images: total number of images that will be used for training.
    boundary_epochs: list of ints representing the epochs at which we
      decay the learning rate.
    decay_rates: list of floats representing the decay rates to be used
      for scaling the learning rate. It should have one more element
      than `boundary_epochs`, and all elements should have the same type.
    initial_learning_scale: base learning-rate scale before the
      `batch_size / batch_denom` adjustment. Defaults to 0.1.

  Returns:
    Returns a function that takes a single argument - the number of batches
    trained so far (global_step)- and returns the learning rate to be used
    for training the next batch.
  """
  initial_learning_rate = initial_learning_scale * batch_size / batch_denom
  batches_per_epoch = num_images / batch_size

  # Scale the learning rate by the configured decay rate at each boundary
  # epoch. (An earlier comment incorrectly claimed a fixed 100/150/200-epoch
  # schedule; the boundaries are fully determined by `boundary_epochs`.)
  boundaries = [int(batches_per_epoch * epoch) for epoch in boundary_epochs]
  vals = [initial_learning_rate * decay for decay in decay_rates]

  def learning_rate_fn(global_step):
    global_step = tf.cast(global_step, tf.int32)
    return tf.train.piecewise_constant(global_step, boundaries, vals)

  return learning_rate_fn
For training, this means building losses, 108 | the optimizer, and the train op that get passed into the EstimatorSpec. 109 | For evaluation and prediction, the EstimatorSpec is returned without 110 | a train op, but with the necessary parameters for the given mode. 111 | 112 | Args: 113 | features: tensor representing input images 114 | labels: tensor representing class labels for all input images 115 | mode: current estimator mode; should be one of 116 | `tf.estimator.ModeKeys.TRAIN`, `EVALUATE`, `PREDICT` 117 | model_class: a class representing a TensorFlow model that has a __call__ 118 | function. We assume here that this is a subclass of ResnetModel. 119 | resnet_size: A single integer for the size of the ResNet model. 120 | weight_decay: weight decay loss rate used to regularize learned variables. 121 | learning_rate_fn: function that returns the current learning rate given 122 | the current global_step 123 | momentum: momentum term used for optimization 124 | data_format: Input format ('channels_last', 'channels_first', or None). 125 | If set to None, the format is dependent on whether a GPU is available. 126 | version: Integer representing which version of the ResNet network to use. 127 | See README for details. Valid values: [1, 2] 128 | loss_filter_fn: function that takes a string variable name and returns 129 | True if the var should be included in loss calculation, and False 130 | otherwise. If None, batch_normalization variables will be excluded 131 | from the loss. 132 | multi_gpu: If True, wrap the optimizer in a TowerOptimizer suitable for 133 | data-parallel distribution across multiple GPUs. 134 | 135 | Returns: 136 | EstimatorSpec parameterized according to the input params and the 137 | current mode. 
138 | """ 139 | # Generate a summary node for the images 140 | tf.summary.image('images', features, max_outputs=6) 141 | 142 | model = model_class(resnet_size, data_format=data_format, num_classes=labels.shape[1].value, version=version,ncm=ncm) 143 | logits, deep_x, deepmean = model(features, mode == tf.estimator.ModeKeys.TRAIN) 144 | 145 | predictions = { 146 | 'classes': tf.argmax(logits, axis=1), 147 | 'probabilities': tf.nn.softmax(logits, name='softmax_tensor'), 148 | } 149 | 150 | 151 | dm = tf.identity(deepmean,"DM") 152 | if not (model.ncmmethod == "softmax"): 153 | rdist,mmsk = model.get_relative_mean_distance(deep_x=deep_x,labels=labels) 154 | mcmd = tf.metrics.mean(rdist,weights=mmsk) 155 | rmd = tf.identity(mcmd[1],name="rmd") 156 | rmd = tf.summary.scalar('rmd', rmd) 157 | predictions['rmd'] = tf.identity(rmd,name="rmd") 158 | 159 | if mode == tf.estimator.ModeKeys.PREDICT: 160 | # Return the predictions and the specification for serving a SavedModel 161 | return tf.estimator.EstimatorSpec( 162 | mode=mode, 163 | predictions=predictions, 164 | export_outputs={ 165 | 'predict': tf.estimator.export.PredictOutput(predictions) 166 | } 167 | ) 168 | 169 | # Calculate loss, which includes softmax cross entropy and L2 regularization. 170 | cross_entropy = tf.losses.softmax_cross_entropy(logits=logits, onehot_labels=labels) 171 | 172 | # Create a tensor named cross_entropy for logging purposes. 173 | tf.identity(cross_entropy, name='cross_entropy') 174 | tf.summary.scalar('cross_entropy', cross_entropy) 175 | 176 | # If no loss_filter_fn is passed, assume we want the default behavior, 177 | # which is that batch_normalization variables are excluded from loss. 178 | def exclude_batch_norm(name): 179 | return 'batch_normalization' not in name 180 | loss_filter_fn = loss_filter_fn or exclude_batch_norm 181 | 182 | # Add weight decay to the loss. 
183 | l2_loss = weight_decay * tf.add_n( 184 | [tf.nn.l2_loss(v) for v in tf.trainable_variables() 185 | if loss_filter_fn(v.name)]) 186 | tf.summary.scalar('l2_loss', l2_loss) 187 | loss = cross_entropy + l2_loss 188 | 189 | if mode == tf.estimator.ModeKeys.TRAIN: 190 | ncm_ops = model.get_ncm_ops(deep_x=deep_x,labels=labels) 191 | 192 | # Create a tensor named learning_rate for logging purposes 193 | global_step = tf.train.get_or_create_global_step() 194 | learning_rate = learning_rate_fn(global_step) 195 | tf.identity(learning_rate, name='learning_rate') 196 | tf.summary.scalar('learning_rate', learning_rate) 197 | 198 | # Create loss_op using Gradient clipping 199 | optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate,momentum=momentum) 200 | gavs = optimizer.compute_gradients(loss) 201 | gavsc= [(tf.clip_by_value(grad, -1., 1.), var) for grad, var in gavs] 202 | loss_op = optimizer.apply_gradients(gavsc,global_step=global_step) 203 | 204 | # Update ops from Graph 205 | update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) 206 | 207 | train_op = tf.group(loss_op, update_ops, ncm_ops) 208 | else: 209 | train_op = None 210 | 211 | accuracy = tf.metrics.accuracy(tf.argmax(labels, axis=1), predictions['classes']) 212 | 213 | dm,bm,bmc = model.get_mean_and_batch_mean(deep_x=deep_x,labels=labels) 214 | 215 | metrics = {'accuracy': accuracy} 216 | if not (model.ncmmethod == "softmax"): 217 | metrics['mcmdistance'] = mcmd 218 | # The following is only required for the Relative Mean Distance Experiment 219 | #metrics['batchmeans'] = tf.metrics.mean_tensor(tf.transpose(bm),weights=bmc) 220 | #metrics['deepmean'] = tf.metrics.mean_tensor(dm) 221 | 222 | # Create a tensor named train_accuracy for logging purposes 223 | tf.identity(accuracy[1], name='train_accuracy') 224 | tf.summary.scalar('train_accuracy', accuracy[1]) 225 | 226 | return tf.estimator.EstimatorSpec( 227 | mode=mode, 228 | predictions=predictions, 229 | loss=loss, 230 | train_op=train_op, 
231 | eval_metric_ops=metrics) 232 | 233 | 234 | def resnet_main(flags, model_function, input_function, shape=None): 235 | """Shared main loop for ResNet Models. 236 | 237 | Args: 238 | flags: FLAGS object that contains the params for running. See 239 | ResnetArgParser for created flags. 240 | model_function: the function that instantiates the Model and builds the 241 | ops for train/eval. This will be passed directly into the estimator. 242 | input_function: the function that processes the dataset and returns a 243 | dataset that the estimator can train on. This will be wrapped with 244 | all the relevant flags for running and passed to estimator. 245 | shape: list of ints representing the shape of the images used for training. 246 | This is only used if flags.export_dir is passed. 247 | """ 248 | 249 | # Using the Winograd non-fused algorithms provides a small performance boost. 250 | os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1' 251 | 252 | # Create session config based on values of inter_op_parallelism_threads and 253 | # intra_op_parallelism_threads. Note that we default to having 254 | # allow_soft_placement = True, which is required for multi-GPU and not 255 | # harmful for other modes. 256 | session_config = tf.ConfigProto( 257 | inter_op_parallelism_threads=flags.inter_op_parallelism_threads, 258 | intra_op_parallelism_threads=flags.intra_op_parallelism_threads, 259 | allow_soft_placement=True) 260 | 261 | if ALLOW_MULTIPLE_MODELS: 262 | session_config.gpu_options.allow_growth = True 263 | 264 | # Set up a RunConfig to save checkpoint and set session config. 265 | run_config = tf.estimator.RunConfig().replace( 266 | save_checkpoints_secs = 5*60, # Save checkpoints every X minutes. 267 | keep_checkpoint_max = 1000, # Retain the 1000 most recent checkpoints. 
268 | #tf_random_seed = 5739, # Set random seed for "reproducible" results 269 | save_summary_steps = 10000, # Number of steps between summaries 270 | session_config=session_config) 271 | 272 | classifier = tf.estimator.Estimator( 273 | model_fn=model_function, model_dir=flags.model_dir, config=run_config, 274 | params={ 275 | 'resnet_size': flags.resnet_size, 276 | 'data_format': flags.data_format, 277 | 'batch_size': flags.batch_size, 278 | 'multi_gpu': flags.multi_gpu, 279 | 'version': flags.version, 280 | 'ncmmethod': flags.ncmmethod, 281 | 'ncmparam' : flags.ncmparam, 282 | 'initial_learning_scale' : flags.initial_learning_scale 283 | }) 284 | 285 | if flags.benchmark_log_dir is not None: 286 | benchmark_logger = logger.BenchmarkLogger(flags.benchmark_log_dir) 287 | benchmark_logger.log_run_info("resnet") 288 | else: 289 | benchmark_logger = None 290 | 291 | for _ in range(flags.train_epochs // flags.epochs_between_evals): 292 | train_hooks = hooks_helper.get_train_hooks( 293 | flags.hooks, 294 | batch_size=flags.batch_size, 295 | benchmark_log_dir=flags.benchmark_log_dir) 296 | #tensors_to_log = {"iter": "m_iter","deep-cnt": "m_cnt", "deep-sum": "m_sum"} 297 | #logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=1) 298 | 299 | 300 | print('Starting a training cycle.') 301 | 302 | def input_fn_train(): 303 | return input_function(True, flags.data_dir, flags.batch_size, 304 | flags.epochs_between_evals, 305 | flags.num_parallel_calls, flags.multi_gpu) 306 | 307 | classifier.train(input_fn=input_fn_train, hooks=train_hooks,max_steps=flags.max_train_steps) 308 | 309 | print('Starting to evaluate.') 310 | # Evaluate the model and print results 311 | def input_fn_eval(): 312 | return input_function(False, flags.data_dir, flags.batch_size, 313 | 1, flags.num_parallel_calls, flags.multi_gpu) 314 | 315 | # flags.max_train_steps is generally associated with testing and profiling. 
316 | # As a result it is frequently called with synthetic data, which will 317 | # iterate forever. Passing steps=flags.max_train_steps allows the eval 318 | # (which is generally unimportant in those circumstances) to terminate. 319 | # Note that eval will run for max_train_steps each loop, regardless of the 320 | # global_step count. 321 | eval_results = classifier.evaluate(input_fn=input_fn_eval, 322 | steps=flags.max_train_steps) 323 | print(eval_results) 324 | 325 | if benchmark_logger: 326 | benchmark_logger.log_estimator_evaluation_result(eval_results) 327 | 328 | if flags.export_dir is not None: 329 | # Exports a saved model for the given classifier. 330 | input_receiver_fn = export.build_tensor_serving_input_receiver_fn( 331 | shape, batch_size=flags.batch_size) 332 | classifier.export_savedmodel(flags.export_dir, input_receiver_fn) 333 | 334 | 335 | class ResnetArgParser(argparse.ArgumentParser): 336 | """Arguments for configuring and running a Resnet Model.""" 337 | 338 | def __init__(self, resnet_size_choices=None): 339 | super(ResnetArgParser, self).__init__(parents=[ 340 | parsers.BaseParser(), 341 | parsers.PerformanceParser(), 342 | parsers.ImageModelParser(), 343 | parsers.ExportParser(), 344 | parsers.BenchmarkParser(), 345 | ]) 346 | 347 | self.add_argument('--dataset','-d',default="cifar10", 348 | help='Which dataset to use (currently cifar10/cifar100)' 349 | ) 350 | 351 | self.add_argument( 352 | '--version', '-v', type=int, choices=[1, 2], 353 | default=rncm.RESNET_DEFAULT_VERSION, 354 | help='Version of ResNet. (1 or 2) See README.md for details.' 
355 | ) 356 | 357 | self.add_argument( 358 | '--resnet_size', '-rs', type=int, default=50, 359 | choices=resnet_size_choices, 360 | help='[default: %(default)s] The size of the ResNet model to use.', 361 | metavar='' if resnet_size_choices is None else None 362 | ) 363 | 364 | self.add_argument( 365 | '--continu',type=int,default=0, 366 | help='Continue with an existing model, or start from scratch' 367 | ) 368 | 369 | self.add_argument( 370 | '--scratch',type=int,default=0, 371 | help='Start from scratch even if model exist' 372 | ) 373 | 374 | self.add_argument( 375 | '--ncmmethod', default=rncm.NCM_DEFAULT_METHOD, 376 | help='[default: %(default)s] Which NCM method to use', 377 | ) 378 | 379 | self.add_argument( 380 | '--ncmparam', default=rncm.NCM_DEFAULT_PARAMETER, type=float, 381 | help='[default: %(default)s] additional NCM parameter to use', 382 | ) 383 | 384 | self.add_argument( 385 | '--initial_learning_scale', '-l', default=0.1, type=float, 386 | help='Intial Learning Scale (default: %(default)s)', 387 | ) 388 | -------------------------------------------------------------------------------- /resnet_deepx.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file (resnet_deepx) is based on resnet_model from the 11 | # TensorFlow Models Official ResNet library (release 1.8.0/1.7.0) 12 | # https://github.com/tensorflow/models/tree/master/official/resnet 13 | # 14 | # It contains the ResNet code to create a deepnetwork, without a final layer. 15 | # 16 | # resnet_model.py has the following copyright notice: 17 | # Copyright 2017 The TensorFlow Authors. 
All Rights Reserved. 18 | # 19 | # Licensed under the Apache License, Version 2.0 (the "License"); 20 | # you may not use this file except in compliance with the License. 21 | # You may obtain a copy of the License at 22 | # 23 | # http://www.apache.org/licenses/LICENSE-2.0 24 | # 25 | # Unless required by applicable law or agreed to in writing, software 26 | # distributed under the License is distributed on an "AS IS" BASIS, 27 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 28 | # See the License for the specific language governing permissions and 29 | # limitations under the License. 30 | # ============================================================================== 31 | """Contains definitions for Residual Networks. 32 | 33 | Residual networks ('v1' ResNets) were originally proposed in: 34 | [1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun 35 | Deep Residual Learning for Image Recognition. arXiv:1512.03385 36 | 37 | The full preactivation 'v2' ResNet variant was introduced by: 38 | [2] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun 39 | Identity Mappings in Deep Residual Networks. arXiv: 1603.05027 40 | 41 | The key difference of the full preactivation 'v2' variant compared to the 42 | 'v1' variant in [1] is the use of batch normalization before every weight layer 43 | rather than after. 44 | """ 45 | 46 | from __future__ import absolute_import 47 | from __future__ import division 48 | from __future__ import print_function 49 | 50 | import tensorflow as tf 51 | 52 | _BATCH_NORM_DECAY = 0.997 53 | _BATCH_NORM_EPSILON = 1e-5 54 | DEFAULT_VERSION = 2 55 | 56 | 57 | ################################################################################ 58 | # Convenience functions for building the ResNet model. 
################################################################################
def batch_norm(inputs, training, data_format):
  """Performs a batch normalization using a standard set of parameters."""
  # We set fused=True for a significant performance boost. See
  # https://www.tensorflow.org/performance/performance_guide#common_fused_ops
  return tf.layers.batch_normalization(
      inputs=inputs, axis=1 if data_format == 'channels_first' else 3,
      momentum=_BATCH_NORM_DECAY, epsilon=_BATCH_NORM_EPSILON, center=True,
      scale=True, training=training, fused=True)


def fixed_padding(inputs, kernel_size, data_format):
  """Pads the input along the spatial dimensions independently of input size.

  Args:
    inputs: A tensor of size [batch, channels, height_in, width_in] or
      [batch, height_in, width_in, channels] depending on data_format.
    kernel_size: The kernel to be used in the conv2d or max_pool2d operation.
      Should be a positive integer.
    data_format: The input format ('channels_last' or 'channels_first').

  Returns:
    A tensor with the same format as the input with the data either intact
    (if kernel_size == 1) or padded (if kernel_size > 1).
  """
  pad_total = kernel_size - 1
  pad_beg = pad_total // 2
  pad_end = pad_total - pad_beg

  # Pad only the two spatial dimensions; batch and channel axes are untouched.
  if data_format == 'channels_first':
    padded_inputs = tf.pad(inputs, [[0, 0], [0, 0],
                                    [pad_beg, pad_end], [pad_beg, pad_end]])
  else:
    padded_inputs = tf.pad(inputs, [[0, 0], [pad_beg, pad_end],
                                    [pad_beg, pad_end], [0, 0]])
  return padded_inputs


def conv2d_fixed_padding(inputs, filters, kernel_size, strides, data_format):
  """Strided 2-D convolution with explicit padding."""
  # The padding is consistent and is based only on `kernel_size`, not on the
  # dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone).
  if strides > 1:
    inputs = fixed_padding(inputs, kernel_size, data_format)

  return tf.layers.conv2d(
      inputs=inputs, filters=filters, kernel_size=kernel_size, strides=strides,
      padding=('SAME' if strides == 1 else 'VALID'), use_bias=False,
      kernel_initializer=tf.variance_scaling_initializer(),
      data_format=data_format)


################################################################################
# ResNet block definitions.
################################################################################
def _building_block_v1(inputs, filters, training, projection_shortcut, strides,
                       data_format):
  """A single block for ResNet v1, without a bottleneck.

  Convolution then batch normalization then ReLU as described by:
    Deep Residual Learning for Image Recognition
    https://arxiv.org/pdf/1512.03385.pdf
    by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015.

  Args:
    inputs: A tensor of size [batch, channels, height_in, width_in] or
      [batch, height_in, width_in, channels] depending on data_format.
    filters: The number of filters for the convolutions.
    training: A Boolean for whether the model is in training or inference
      mode. Needed for batch normalization.
    projection_shortcut: The function to use for projection shortcuts
      (typically a 1x1 convolution when downsampling the input).
    strides: The block's stride. If greater than 1, this block will ultimately
      downsample the input.
    data_format: The input format ('channels_last' or 'channels_first').

  Returns:
    The output tensor of the block; shape should match inputs.
  """
  shortcut = inputs

  if projection_shortcut is not None:
    shortcut = projection_shortcut(inputs)
    shortcut = batch_norm(inputs=shortcut, training=training,
                          data_format=data_format)

  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=3, strides=strides,
      data_format=data_format)
  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)

  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=3, strides=1,
      data_format=data_format)
  inputs = batch_norm(inputs, training, data_format)
  inputs += shortcut
  inputs = tf.nn.relu(inputs)

  return inputs


def _building_block_v2(inputs, filters, training, projection_shortcut, strides,
                       data_format):
  """A single block for ResNet v2, without a bottleneck.

  Batch normalization then ReLu then convolution as described by:
    Identity Mappings in Deep Residual Networks
    https://arxiv.org/pdf/1603.05027.pdf
    by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Jul 2016.

  Args:
    inputs: A tensor of size [batch, channels, height_in, width_in] or
      [batch, height_in, width_in, channels] depending on data_format.
    filters: The number of filters for the convolutions.
    training: A Boolean for whether the model is in training or inference
      mode. Needed for batch normalization.
    projection_shortcut: The function to use for projection shortcuts
      (typically a 1x1 convolution when downsampling the input).
    strides: The block's stride. If greater than 1, this block will ultimately
      downsample the input.
    data_format: The input format ('channels_last' or 'channels_first').

  Returns:
    The output tensor of the block; shape should match inputs.
  """
  shortcut = inputs
  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)

  # The projection shortcut should come after the first batch norm and ReLU
  # since it performs a 1x1 convolution.
  if projection_shortcut is not None:
    shortcut = projection_shortcut(inputs)

  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=3, strides=strides,
      data_format=data_format)

  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)
  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=3, strides=1,
      data_format=data_format)

  return inputs + shortcut


def _bottleneck_block_v1(inputs, filters, training, projection_shortcut,
                         strides, data_format):
  """A single block for ResNet v1, with a bottleneck.

  Similar to _building_block_v1(), except using the "bottleneck" blocks
  described in:
    Convolution then batch normalization then ReLU as described by:
      Deep Residual Learning for Image Recognition
      https://arxiv.org/pdf/1512.03385.pdf
      by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015.

  Args:
    inputs: A tensor of size [batch, channels, height_in, width_in] or
      [batch, height_in, width_in, channels] depending on data_format.
    filters: The number of filters for the convolutions.
    training: A Boolean for whether the model is in training or inference
      mode. Needed for batch normalization.
    projection_shortcut: The function to use for projection shortcuts
      (typically a 1x1 convolution when downsampling the input).
    strides: The block's stride. If greater than 1, this block will ultimately
      downsample the input.
    data_format: The input format ('channels_last' or 'channels_first').

  Returns:
    The output tensor of the block; shape should match inputs.
  """
  shortcut = inputs

  if projection_shortcut is not None:
    shortcut = projection_shortcut(inputs)
    shortcut = batch_norm(inputs=shortcut, training=training,
                          data_format=data_format)

  # 1x1 reduce -> 3x3 (strided) -> 1x1 expand to 4*filters.
  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=1, strides=1,
      data_format=data_format)
  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)

  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=3, strides=strides,
      data_format=data_format)
  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)

  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=4 * filters, kernel_size=1, strides=1,
      data_format=data_format)
  inputs = batch_norm(inputs, training, data_format)
  inputs += shortcut
  inputs = tf.nn.relu(inputs)

  return inputs


def _bottleneck_block_v2(inputs, filters, training, projection_shortcut,
                         strides, data_format):
  """A single block for ResNet v2, with a bottleneck.

  Similar to _building_block_v2(), except using the "bottleneck" blocks
  described in:
    Convolution then batch normalization then ReLU as described by:
      Deep Residual Learning for Image Recognition
      https://arxiv.org/pdf/1512.03385.pdf
      by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Dec 2015.

  Adapted to the ordering conventions of:
    Batch normalization then ReLu then convolution as described by:
      Identity Mappings in Deep Residual Networks
      https://arxiv.org/pdf/1603.05027.pdf
      by Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun, Jul 2016.

  Args:
    inputs: A tensor of size [batch, channels, height_in, width_in] or
      [batch, height_in, width_in, channels] depending on data_format.
    filters: The number of filters for the convolutions.
    training: A Boolean for whether the model is in training or inference
      mode. Needed for batch normalization.
    projection_shortcut: The function to use for projection shortcuts
      (typically a 1x1 convolution when downsampling the input).
    strides: The block's stride. If greater than 1, this block will ultimately
      downsample the input.
    data_format: The input format ('channels_last' or 'channels_first').

  Returns:
    The output tensor of the block; shape should match inputs.
  """
  shortcut = inputs
  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)

  # The projection shortcut should come after the first batch norm and ReLU
  # since it performs a 1x1 convolution.
  if projection_shortcut is not None:
    shortcut = projection_shortcut(inputs)

  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=1, strides=1,
      data_format=data_format)

  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)
  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=filters, kernel_size=3, strides=strides,
      data_format=data_format)

  inputs = batch_norm(inputs, training, data_format)
  inputs = tf.nn.relu(inputs)
  inputs = conv2d_fixed_padding(
      inputs=inputs, filters=4 * filters, kernel_size=1, strides=1,
      data_format=data_format)

  return inputs + shortcut


def block_layer(inputs, filters, bottleneck, block_fn, blocks, strides,
                training, name, data_format):
  """Creates one layer of blocks for the ResNet model.

  Args:
    inputs: A tensor of size [batch, channels, height_in, width_in] or
      [batch, height_in, width_in, channels] depending on data_format.
    filters: The number of filters for the first convolution of the layer.
    bottleneck: Is the block created a bottleneck block.
    block_fn: The block to use within the model, either `building_block` or
      `bottleneck_block`.
    blocks: The number of blocks contained in the layer.
    strides: The stride to use for the first convolution of the layer. If
      greater than 1, this layer will ultimately downsample the input.
    training: Either True or False, whether we are currently training the
      model. Needed for batch norm.
    name: A string name for the tensor output of the block layer.
    data_format: The input format ('channels_last' or 'channels_first').

  Returns:
    The output tensor of the block layer.
  """

  # Bottleneck blocks end with 4x the number of filters as they start with
  filters_out = filters * 4 if bottleneck else filters

  def projection_shortcut(inputs):
    return conv2d_fixed_padding(
        inputs=inputs, filters=filters_out, kernel_size=1, strides=strides,
        data_format=data_format)

  # Only the first block per block_layer uses projection_shortcut and strides
  inputs = block_fn(inputs, filters, training, projection_shortcut, strides,
                    data_format)

  for _ in range(1, blocks):
    inputs = block_fn(inputs, filters, training, None, 1, data_format)

  return tf.identity(inputs, name)


class ResNetX(object):
  """Base class for building the Resnet Model.

  Unlike the original TF-models ResNet, this class produces deep features
  (the final dense/logits layer is intentionally left out; see the
  commented-out lines at the end of __call__).
  """

  def __init__(self, resnet_size, bottleneck, num_classes, num_filters,
               kernel_size,
               conv_stride, first_pool_size, first_pool_stride,
               second_pool_size, second_pool_stride, block_sizes, block_strides,
               final_size, version=DEFAULT_VERSION, data_format=None):
    """Creates a model for classifying an image.

    Args:
      resnet_size: A single integer for the size of the ResNet model.
      bottleneck: Use regular blocks or bottleneck blocks.
      num_classes: The number of classes used as labels.
      num_filters: The number of filters to use for the first block layer
        of the model. This number is then doubled for each subsequent block
        layer.
      kernel_size: The kernel size to use for convolution.
      conv_stride: stride size for the initial convolutional layer
      first_pool_size: Pool size to be used for the first pooling layer.
        If none, the first pooling layer is skipped.
      first_pool_stride: stride size for the first pooling layer. Not used
        if first_pool_size is None.
      second_pool_size: Pool size to be used for the second pooling layer.
        NOTE(review): stored but not referenced in __call__ (a reduce_mean
        is used instead) — kept for interface compatibility.
      second_pool_stride: stride size for the final pooling layer
      block_sizes: A list containing n values, where n is the number of sets of
        block layers desired. Each value should be the number of blocks in the
        i-th set.
      block_strides: List of integers representing the desired stride size for
        each of the sets of block layers. Should be same length as block_sizes.
      final_size: The expected size of the model after the second pooling.
      version: Integer representing which version of the ResNet network to use.
        See README for details. Valid values: [1, 2]
      data_format: Input format ('channels_last', 'channels_first', or None).
        If set to None, the format is dependent on whether a GPU is available.

    Raises:
      ValueError: if invalid version is selected.
    """
    self.resnet_size = resnet_size

    if not data_format:
      data_format = (
          'channels_first' if tf.test.is_built_with_cuda() else 'channels_last')

    self.resnet_version = version
    if version not in (1, 2):
      raise ValueError(
          'Resnet version should be 1 or 2. See README for citations.')

    self.bottleneck = bottleneck
    if bottleneck:
      if version == 1:
        self.block_fn = _bottleneck_block_v1
      else:
        self.block_fn = _bottleneck_block_v2
    else:
      if version == 1:
        self.block_fn = _building_block_v1
      else:
        self.block_fn = _building_block_v2

    self.data_format = data_format
    self.num_classes = num_classes
    self.num_filters = num_filters
    self.kernel_size = kernel_size
    self.conv_stride = conv_stride
    self.first_pool_size = first_pool_size
    self.first_pool_stride = first_pool_stride
    self.second_pool_size = second_pool_size
    self.second_pool_stride = second_pool_stride
    self.block_sizes = block_sizes
    self.block_strides = block_strides
    self.final_size = final_size

  def __call__(self, inputs, training):
    """Add operations to compute deep features for a batch of input images.

    Args:
      inputs: A Tensor representing a batch of input images.
      training: A boolean. Set to True to add operations required only when
        training the classifier.

    Returns:
      A Tensor of deep features with shape [batch_size, self.final_size].
      Note: no final dense layer is applied here (it is commented out below);
      subclasses such as ResNetModel add the classification head.
    """

    if self.data_format == 'channels_first':
      # Convert the inputs from channels_last (NHWC) to channels_first (NCHW).
      # This provides a large performance boost on GPU. See
      # https://www.tensorflow.org/performance/performance_guide#data_formats
      inputs = tf.transpose(inputs, [0, 3, 1, 2])

    inputs = conv2d_fixed_padding(
        inputs=inputs, filters=self.num_filters, kernel_size=self.kernel_size,
        strides=self.conv_stride, data_format=self.data_format)
    inputs = tf.identity(inputs, 'initial_conv')

    if self.first_pool_size:
      inputs = tf.layers.max_pooling2d(
          inputs=inputs, pool_size=self.first_pool_size,
          strides=self.first_pool_stride, padding='SAME',
          data_format=self.data_format)
      inputs = tf.identity(inputs, 'initial_max_pool')

    for i, num_blocks in enumerate(self.block_sizes):
      # The filter count doubles for each successive set of block layers.
      num_filters = self.num_filters * (2**i)
      inputs = block_layer(
          inputs=inputs, filters=num_filters, bottleneck=self.bottleneck,
          block_fn=self.block_fn, blocks=num_blocks,
          strides=self.block_strides[i], training=training,
          name='block_layer{}'.format(i + 1), data_format=self.data_format)

    inputs = batch_norm(inputs, training, self.data_format)
    inputs = tf.nn.relu(inputs)

    # The current top layer has shape
    # `batch_size x pool_size x pool_size x final_size`.
    # ResNet does an Average Pooling layer over pool_size,
    # but that is the same as doing a reduce_mean. We do a reduce_mean
    # here because it performs better than AveragePooling2D.
    axes = [2, 3] if self.data_format == 'channels_first' else [1, 2]
    inputs = tf.reduce_mean(inputs, axes, keepdims=True)
    inputs = tf.identity(inputs, 'final_reduce_mean')

    inputs = tf.reshape(inputs, [-1, self.final_size])
    #inputs = tf.layers.dense(inputs=inputs, units=self.num_classes)
    #inputs = tf.identity(inputs, 'final_dense')

    return inputs
--------------------------------------------------------------------------------
/resnet_model.py:
--------------------------------------------------------------------------------
# Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl
#
# Belonging to the DeepNCM repository
# DeepNCM is proposed in
#   Samantha Guerriero, Barbara Caputo, and Thomas Mensink
#   DeepNCM: Deep Nearest Class Mean Classifiers
#   ICLR Workshop 2018
#   https://openreview.net/forum?id=rkPLZ4JPM
#
# This file (resnet_model) is based on resnet_model from the
# TensorFlow Models Official ResNet library (release 1.8.0/1.7.0)
# https://github.com/tensorflow/models/tree/master/official/resnet
#
# It contains the ResNet code to mimic the resnet_model making use of resnet_deepx.
15 | # ============================================================================== 16 | """Contains definitions for Residual Networks based on resnet_deepx 17 | """ 18 | 19 | from __future__ import absolute_import 20 | from __future__ import division 21 | from __future__ import print_function 22 | 23 | import tensorflow as tf 24 | import resnet_deepx as rn 25 | 26 | DEFAULT_VERSION = rn.DEFAULT_VERSION 27 | 28 | class ResNetModel(rn.ResNetX): 29 | """Base class for building the Resnet Model.""" 30 | 31 | def __init__(self, resnet_size, bottleneck, num_classes, num_filters, 32 | kernel_size, 33 | conv_stride, first_pool_size, first_pool_stride, 34 | second_pool_size, second_pool_stride, block_sizes, block_strides, 35 | final_size, version=DEFAULT_VERSION, data_format=None): 36 | super(ResNetModel,self).__init__(resnet_size, bottleneck, num_classes, num_filters, 37 | kernel_size, 38 | conv_stride, first_pool_size, first_pool_stride, 39 | second_pool_size, second_pool_stride, block_sizes, block_strides, 40 | final_size, version, data_format) 41 | 42 | def __call__(self, inputs, training): 43 | inputs = super(ResNetModel,self).__call__(inputs, training) 44 | inputs = tf.layers.dense(inputs=inputs, units=self.num_classes) 45 | inputs = tf.identity(inputs, 'final_dense') 46 | 47 | return inputs 48 | -------------------------------------------------------------------------------- /resnet_ncm.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file (resnet_ncm) has the code for different DeepNCM variantsis 11 | # including: 12 | # softmax (as baseline) 13 | # online means 
(onlinemean) 14 | # mean condensation (omreset) 15 | # decay mean (decaymean) 16 | # 17 | """Contains definitions for DeepNCM Residual Networks. 18 | 19 | DeepNCM is proposed in: 20 | [1] Samantha Guerriero, Barbara Caputo, and Thomas Mensink 21 | DeepNCM: Deep Nearest Class Mean Classifiers 22 | ICLR Workshop 2018 23 | https://openreview.net/forum?id=rkPLZ4JPM 24 | """ 25 | 26 | from __future__ import absolute_import 27 | from __future__ import division 28 | from __future__ import print_function 29 | 30 | import tensorflow as tf 31 | import resnet_deepx as rn 32 | 33 | RESNET_DEFAULT_VERSION = rn.DEFAULT_VERSION 34 | NCM_DEFAULT_METHOD = "omreset" 35 | NCM_DEFAULT_PARAMETER = 100 36 | NCM_DEFAULT = { 37 | 'method' : NCM_DEFAULT_METHOD, 38 | 'param' : NCM_DEFAULT_PARAMETER, 39 | } 40 | 41 | ############################################################## 42 | ### Code to compute batch counts and means 43 | ############################################################## 44 | def ncm_batch_counts(batch_x,batch_y,oneHot=True): 45 | if oneHot: 46 | by = tf.identity(batch_y) 47 | else: 48 | by = tf.one_hot(batch_y,depth=_TRAININGCLASSES,dtype=tf.float32) 49 | 50 | lBMn = tf.reduce_sum(by,axis=0,keepdims=True) 51 | lBM = tf.matmul(by,batch_x,transpose_a=True) 52 | lBM = tf.transpose(lBM) 53 | return lBM, lBMn 54 | 55 | def ncm_batch_means(batch_x,batch_y,oneHot=True): 56 | lBC, lBMn = ncm_batch_counts(batch_x,batch_y,oneHot=oneHot) 57 | lBMz = lBMn + tf.cast(tf.equal(lBMn,0),dtype=tf.float32) 58 | lBM = tf.transpose(lBC/lBMz) 59 | return lBM, lBMn 60 | 61 | def ncm_sq_dist_bt_norm(a,b): 62 | anorm = tf.reshape(tf.reduce_sum(tf.square(a), 1),[-1, 1]) 63 | bnorm = tf.reshape(tf.reduce_sum(tf.square(b), 0),[1, -1]) 64 | d = -2*tf.matmul(a,b,transpose_b=False)+anorm + bnorm 65 | return d, anorm 66 | 67 | def ncm_sq_dist_bt(a,b): 68 | d, bnorm = ncm_sq_dist_bt_norm(a,b) 69 | return d 70 | 71 | from tensorflow.python.framework import ops 72 | from tensorflow.python.ops import 
array_ops 73 | from tensorflow.python.ops import math_ops 74 | from tensorflow.python.ops import state_ops 75 | 76 | def save_batch_mean(batch_mean,batch_counts,decay_mean): 77 | condition = tf.cast(math_ops.greater(batch_counts,0),dtype=tf.float32) 78 | sbm1 = tf.multiply(batch_mean,condition) 79 | sbm2 = tf.multiply(tf.transpose(decay_mean),1-condition) 80 | return sbm1 + sbm2 81 | 82 | def _safe_div(numerator, denominator, name): 83 | """Divides two tensors element-wise, returning 0 if the denominator is <= 0. 84 | Args: 85 | numerator: A real `Tensor`. 86 | denominator: A real `Tensor`, with dtype matching `numerator`. 87 | name: Name for the returned op. 88 | Returns: 89 | 0 if `denominator` <= 0, else `numerator` / `denominator` 90 | 91 | Copied from TensorFlow Metrics 92 | """ 93 | t = math_ops.truediv(numerator, denominator) 94 | zero = array_ops.zeros_like(t, dtype=denominator.dtype) 95 | condition = math_ops.greater(denominator, zero) 96 | zero = math_ops.cast(zero, t.dtype) 97 | return array_ops.where(condition, t, zero, name=name) 98 | 99 | 100 | class NCMResModel(rn.ResNetX): 101 | """NCM ResNet class for building the DeepNCM Resnet Model.""" 102 | 103 | def __init__(self, resnet_size, bottleneck, num_classes, num_filters, 104 | kernel_size, 105 | conv_stride, first_pool_size, first_pool_stride, 106 | second_pool_size, second_pool_stride, block_sizes, block_strides, 107 | final_size, version=RESNET_DEFAULT_VERSION, ncm=NCM_DEFAULT, data_format=None): 108 | super(NCMResModel,self).__init__(resnet_size, bottleneck, num_classes, num_filters, 109 | kernel_size, 110 | conv_stride, first_pool_size, first_pool_stride, 111 | second_pool_size, second_pool_stride, block_sizes, block_strides, 112 | final_size, version, data_format) 113 | self.ncmmethod = ncm['method'].casefold() 114 | self.ncmparam = ncm['param'] 115 | 116 | if self.ncmmethod == "decaymean": 117 | assert 0 <= self.ncmparam < 1, "Decay means requires ncmparam between 0 and 1" 118 | 119 | self.iter 
= tf.get_variable("iter", [],dtype=tf.float32,trainable=False, initializer=tf.initializers.constant(0)) 120 | self.total = tf.get_variable("total",[final_size,num_classes],dtype=tf.float32,trainable=False, initializer=tf.initializers.constant(0)) 121 | self.count = tf.get_variable("count",[1,num_classes],dtype=tf.float32,trainable=False, initializer=tf.initializers.constant(0)) 122 | 123 | 124 | def get_mean_and_batch_mean(self,deep_x=None,labels=None): 125 | bmean,bcounts = ncm_batch_means(deep_x,labels) 126 | return _safe_div(self.total,self.count,name="deepmean"), bmean, bcounts 127 | 128 | def get_relative_mean_distance(self,deep_x=None,labels=None): 129 | bmean,bcounts = ncm_batch_means(deep_x,labels) 130 | dm,dmnorm = ncm_sq_dist_bt_norm(bmean,_safe_div(self.total, self.count,name='deepmean')) 131 | rdist = _safe_div(tf.diag_part(dm),dmnorm,name='relativedist') 132 | 133 | return rdist, tf.cast(tf.greater(bcounts,0),rdist.dtype) 134 | 135 | 136 | def get_reset_op(self,update_op): 137 | reset_total_op = state_ops.assign(self.total,update_op,use_locking=True) 138 | with ops.control_dependencies([update_op]): 139 | reset_count_op = state_ops.assign(self.count,array_ops.ones_like(self.count),use_locking=True) 140 | 141 | reset_op = _safe_div(reset_total_op,reset_count_op, 'reset_op') 142 | return reset_op 143 | 144 | def get_ncm_ops(self,deep_x=None,labels=None): 145 | iter_op = state_ops.assign_add(self.iter,tf.ones([])) 146 | 147 | if self.ncmmethod == "onlinemean" or self.ncmmethod == "omreset": 148 | batchsums,batchcnts = ncm_batch_counts(deep_x,labels) 149 | update_total_op = state_ops.assign_add(self.total, batchsums,use_locking=True) 150 | with ops.control_dependencies([batchsums]): 151 | update_count_op = state_ops.assign_add(self.count, batchcnts,use_locking=True) 152 | 153 | update_op = _safe_div(update_total_op, update_count_op, 'update_op') 154 | 155 | if self.ncmmethod == "decaymean": 156 | batchmeans,batchcnts = ncm_batch_means(deep_x,labels) 157 | 
batchcnts = tf.transpose(batchcnts) 158 | sbm = save_batch_mean(batchmeans,batchcnts,self.total) 159 | sbm = tf.transpose(sbm) 160 | ndm = self.ncmparam * self.total + (1-self.ncmparam) * sbm 161 | update_total_op = state_ops.assign(self.total,ndm, use_locking=True) 162 | with ops.control_dependencies([ndm]): 163 | update_count_op = state_ops.assign(self.count, array_ops.ones_like(self.count),use_locking=True) 164 | 165 | update_op = _safe_div(update_total_op, update_count_op, 'update_op') 166 | 167 | if self.ncmmethod == "onlinemean" or self.ncmmethod == "decaymean": 168 | ncm_op = tf.group(iter_op,update_op) 169 | elif self.ncmmethod == "omreset": 170 | ncm_op = tf.cond(tf.equal(tf.mod(self.iter,self.ncmparam),0), false_fn=lambda: tf.group(iter_op,update_op),true_fn=lambda: tf.group(iter_op,self.get_reset_op(update_op))) 171 | else: #SOFTMAX case 172 | ncm_op = iter_op 173 | 174 | return ncm_op 175 | 176 | def __call__(self, inputs, training): 177 | deepx = super(NCMResModel,self).__call__(inputs, training) 178 | deepx = tf.identity(deepx, 'deep-representation') 179 | 180 | deepmean = _safe_div(self.total, self.count, 'deepmean') 181 | deepmean = tf.identity(deepmean,name="DeepMeanValue") 182 | 183 | if self.ncmmethod == "softmax": 184 | logits = tf.layers.dense(inputs=deepx, units=self.num_classes) 185 | 186 | elif self.ncmmethod == "onlinemean" or self.ncmmethod == "omreset" or self.ncmmethod == "decaymean": 187 | logits = -ncm_sq_dist_bt(deepx,deepmean) 188 | 189 | logits = tf.identity(logits, 'logits') 190 | return logits, deepx, deepmean 191 | -------------------------------------------------------------------------------- /resnet_ncmequal.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Thomas Mensink, University of Amsterdam, thomas.mensink@uva.nl 2 | # 3 | # Beloning to the DeepNCM repository 4 | # DeepNCM is proposed in 5 | # Samantha Guerriero, Barbara Caputo, and Thomas Mensink 6 | # DeepNCM: 
Deep Nearest Class Mean Classifiers 7 | # ICLR Workshop 2018 8 | # https://openreview.net/forum?id=rkPLZ4JPM 9 | # 10 | # This file (resnet_ncm) has the code for different DeepNCM variantsis 11 | # including: 12 | # softmax (as baseline) 13 | # online means (onlinemean) 14 | # mean condensation (omreset) 15 | # decay mean (decaymean) 16 | # 17 | """Contains definitions for DeepNCM Residual Networks. 18 | 19 | DeepNCM is proposed in: 20 | [1] Samantha Guerriero, Barbara Caputo, and Thomas Mensink 21 | DeepNCM: Deep Nearest Class Mean Classifiers 22 | ICLR Workshop 2018 23 | https://openreview.net/forum?id=rkPLZ4JPM 24 | """ 25 | 26 | from __future__ import absolute_import 27 | from __future__ import division 28 | from __future__ import print_function 29 | 30 | import tensorflow as tf 31 | import resnet_deepx as rn 32 | 33 | RESNET_DEFAULT_VERSION = rn.DEFAULT_VERSION 34 | NCM_DEFAULT_METHOD = "omreset" 35 | NCM_DEFAULT_PARAMETER = 100 36 | NCM_DEFAULT = { 37 | 'method' : NCM_DEFAULT_METHOD, 38 | 'param' : NCM_DEFAULT_PARAMETER, 39 | } 40 | 41 | ############################################################## 42 | ### Code to compute batch counts and means 43 | ############################################################## 44 | def ncm_batch_counts(batch_x,batch_y,oneHot=True): 45 | if oneHot: 46 | by = tf.identity(batch_y) 47 | else: 48 | by = tf.one_hot(batch_y,depth=_TRAININGCLASSES,dtype=tf.float32) 49 | 50 | lBMn = tf.reduce_sum(by,axis=0,keepdims=True) 51 | lBM = tf.matmul(by,batch_x,transpose_a=True) 52 | lBM = tf.transpose(lBM) 53 | return lBM, lBMn 54 | 55 | def ncm_batch_means(batch_x,batch_y,oneHot=True): 56 | lBC, lBMn = ncm_batch_counts(batch_x,batch_y,oneHot=oneHot) 57 | lBMz = lBMn + tf.cast(tf.equal(lBMn,0),dtype=tf.float32) 58 | lBM = tf.transpose(lBC/lBMz) 59 | return lBM, lBMn 60 | 61 | def ncm_sq_dist_bt_norm(a,b): 62 | anorm = tf.reshape(tf.reduce_sum(tf.square(a), 1),[-1, 1]) 63 | bnorm = tf.reshape(tf.reduce_sum(tf.square(b), 0),[1, -1]) 64 | d = 
-2*tf.matmul(a,b,transpose_b=False)+anorm + bnorm 65 | return d, anorm 66 | 67 | def ncm_sq_dist_bt(a,b): 68 | d, bnorm = ncm_sq_dist_bt_norm(a,b) 69 | return d 70 | 71 | from tensorflow.python.framework import ops 72 | from tensorflow.python.ops import array_ops 73 | from tensorflow.python.ops import math_ops 74 | from tensorflow.python.ops import state_ops 75 | 76 | def save_batch_mean(batch_mean,batch_counts,decay_mean): 77 | condition = tf.cast(math_ops.greater(batch_counts,0),dtype=tf.float32) 78 | sbm1 = tf.multiply(batch_mean,condition) 79 | sbm2 = tf.multiply(tf.transpose(decay_mean),1-condition) 80 | return sbm1 + sbm2 81 | 82 | def _safe_div(numerator, denominator, name): 83 | """Divides two tensors element-wise, returning 0 if the denominator is <= 0. 84 | Args: 85 | numerator: A real `Tensor`. 86 | denominator: A real `Tensor`, with dtype matching `numerator`. 87 | name: Name for the returned op. 88 | Returns: 89 | 0 if `denominator` <= 0, else `numerator` / `denominator` 90 | 91 | Copied from TensorFlow Metrics 92 | """ 93 | t = math_ops.truediv(numerator, denominator) 94 | zero = array_ops.zeros_like(t, dtype=denominator.dtype) 95 | condition = math_ops.greater(denominator, zero) 96 | zero = math_ops.cast(zero, t.dtype) 97 | return array_ops.where(condition, t, zero, name=name) 98 | 99 | 100 | class NCMResModel(rn.ResNetX): 101 | """NCM ResNet class for building the DeepNCM Resnet Model.""" 102 | 103 | def __init__(self, resnet_size, bottleneck, num_classes, num_filters, 104 | kernel_size, 105 | conv_stride, first_pool_size, first_pool_stride, 106 | second_pool_size, second_pool_stride, block_sizes, block_strides, 107 | final_size, version=RESNET_DEFAULT_VERSION, ncm=NCM_DEFAULT, data_format=None): 108 | super(NCMResModel,self).__init__(resnet_size, bottleneck, num_classes, num_filters, 109 | kernel_size, 110 | conv_stride, first_pool_size, first_pool_stride, 111 | second_pool_size, second_pool_stride, block_sizes, block_strides, 112 | final_size, 
version, data_format) 113 | self.ncmmethod = ncm['method'].casefold() 114 | self.ncmparam = ncm['param'] 115 | 116 | if self.ncmmethod == "decaymean": 117 | assert 0 <= self.ncmparam < 1, "Decay means requires ncmparam between 0 and 1" 118 | 119 | self.iter = tf.get_variable("iter", [],dtype=tf.float32,trainable=False, initializer=tf.initializers.constant(0)) 120 | self.total = tf.get_variable("total",[num_classes,num_classes],dtype=tf.float32,trainable=False, initializer=tf.initializers.constant(0)) 121 | self.count = tf.get_variable("count",[1,num_classes],dtype=tf.float32,trainable=False, initializer=tf.initializers.constant(0)) 122 | 123 | 124 | def get_mean_and_batch_mean(self,deep_x=None,labels=None): 125 | bmean,bcounts = ncm_batch_means(deep_x,labels) 126 | return _safe_div(self.total,self.count,name="deepmean"), bmean, bcounts 127 | 128 | def get_relative_mean_distance(self,deep_x=None,labels=None): 129 | bmean,bcounts = ncm_batch_means(deep_x,labels) 130 | dm,dmnorm = ncm_sq_dist_bt_norm(bmean,_safe_div(self.total, self.count,name='deepmean')) 131 | rdist = _safe_div(tf.diag_part(dm),dmnorm,name='relativedist') 132 | 133 | return rdist, tf.cast(tf.greater(bcounts,0),rdist.dtype) 134 | 135 | 136 | def get_reset_op(self,update_op): 137 | reset_total_op = state_ops.assign(self.total,update_op,use_locking=True) 138 | with ops.control_dependencies([update_op]): 139 | reset_count_op = state_ops.assign(self.count,array_ops.ones_like(self.count),use_locking=True) 140 | 141 | reset_op = _safe_div(reset_total_op,reset_count_op, 'reset_op') 142 | return reset_op 143 | 144 | def get_ncm_ops(self,deep_x=None,labels=None): 145 | iter_op = state_ops.assign_add(self.iter,tf.ones([])) 146 | 147 | if self.ncmmethod == "onlinemean" or self.ncmmethod == "omreset": 148 | batchsums,batchcnts = ncm_batch_counts(deep_x,labels) 149 | update_total_op = state_ops.assign_add(self.total, batchsums,use_locking=True) 150 | with ops.control_dependencies([batchsums]): 151 | 
update_count_op = state_ops.assign_add(self.count, batchcnts,use_locking=True) 152 | 153 | update_op = _safe_div(update_total_op, update_count_op, 'update_op') 154 | 155 | if self.ncmmethod == "decaymean": 156 | batchmeans,batchcnts = ncm_batch_means(deep_x,labels) 157 | batchcnts = tf.transpose(batchcnts) 158 | sbm = save_batch_mean(batchmeans,batchcnts,self.total) 159 | sbm = tf.transpose(sbm) 160 | ndm = self.ncmparam * self.total + (1-self.ncmparam) * sbm 161 | update_total_op = state_ops.assign(self.total,ndm, use_locking=True) 162 | with ops.control_dependencies([ndm]): 163 | update_count_op = state_ops.assign(self.count, array_ops.ones_like(self.count),use_locking=True) 164 | 165 | update_op = _safe_div(update_total_op, update_count_op, 'update_op') 166 | 167 | if self.ncmmethod == "onlinemean" or self.ncmmethod == "decaymean": 168 | ncm_op = tf.group(iter_op,update_op) 169 | elif self.ncmmethod == "omreset": 170 | ncm_op = tf.cond(tf.equal(tf.mod(self.iter,self.ncmparam),0), false_fn=lambda: tf.group(iter_op,update_op),true_fn=lambda: tf.group(iter_op,self.get_reset_op(update_op))) 171 | else: #SOFTMAX case 172 | ncm_op = iter_op 173 | 174 | return ncm_op 175 | 176 | def __call__(self, inputs, training): 177 | deepx = super(NCMResModel,self).__call__(inputs, training) 178 | deepx = tf.identity(deepx, 'deep-representation') 179 | 180 | deepmean = _safe_div(self.total, self.count, 'deepmean') 181 | deepmean = tf.identity(deepmean,name="DeepMeanValue") 182 | 183 | if self.ncmmethod == "softmax": 184 | logits = tf.layers.dense(inputs=deepx, units=self.num_classes) 185 | 186 | elif self.ncmmethod == "onlinemean" or self.ncmmethod == "omreset" or self.ncmmethod == "decaymean": 187 | deepx = tf.layers.dense(inputs=deepx, units=self.num_classes) 188 | logits = -ncm_sq_dist_bt(deepx,deepmean) 189 | 190 | logits = tf.identity(logits, 'logits') 191 | return logits, deepx, deepmean 192 | --------------------------------------------------------------------------------