├── .gitignore ├── logo.png ├── 0_quick_start └── 0_logging_device_placement.py ├── 777_workarounds └── 777_1_tf2_cuda10.py ├── 1_keras_api ├── 2_sequential_model.py ├── 1_numbers_classification.ipynb └── 4_text_classification.ipynb ├── LICENSE ├── 2_estimators ├── 2_1_linear_model.ipynb └── .ipynb_checkpoints │ └── 2_1_linear_model-checkpoint.ipynb ├── 19_lingvo └── 19_1_task_config.py ├── README.md └── 20_tf2 ├── 20_2_a2c.py └── 20_1_actor_critic_agent.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | -------------------------------------------------------------------------------- /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0101011/bootstrap-ml/master/logo.png -------------------------------------------------------------------------------- /0_quick_start/0_logging_device_placement.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | # Creates a graph 4 | a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a') 5 | b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b') 6 | c = tf.matmul(a, b) 7 | 8 | # Creates a session with log_device_placement set to True 9 | sess = tf.Session(config=tf.ConfigProto(log_device_placement=True)) 10 | 11 | # Runs the op 12 | print(sess.run(c)) 13 | -------------------------------------------------------------------------------- /777_workarounds/777_1_tf2_cuda10.py: -------------------------------------------------------------------------------- 1 | !pip install tf-nightly-gpu-2.0-preview 2 | 3 | !wget https://developer.nvidia.com/compute/cuda/10.0/Prod/local_installers/cuda-repo-ubuntu1604-10-0-local-10.0.130-410.48_1.0-1_amd64 -O cuda-repo-ubuntu1604-10-0-local-10.0.130-410.48_1.0-1_amd64.deb 4 | !dpkg -i cuda-repo-ubuntu1604-10-0-local-10.0.130-410.48_1.0-1_amd64.deb 5 | !apt-key add /var/cuda-repo-10-0-local-10.0.130-410.48/7fa2af80.pub 6 | !apt-get update 7 | !apt-get install cuda 8 | !pip install tf-nightly-gpu-2.0-preview 9 | 10 | import tensorflow as tf 11 | 12 | print(tf.__version__) 13 | -------------------------------------------------------------------------------- /1_keras_api/2_sequential_model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Running the tiny sample model is faster on the CPU: Batch loading 3 | from RAM to GPU is slower at the start of each operation. Forward/backward 4 | computations are very quick in tiny networks so it's rational to use CPU. 5 | You can also try using model.fit_generator instead of plain fit, so that 6 | CPU thread which loads minibatches works in parallel. 7 | 8 | At the time there is no way I am aware of to preload the whole dataset 9 | on GPU with Keras. 
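A rough sketch of the same idea with the tf.data input pipeline (an alternative
suggestion, assuming the xs, ys and model defined further down in this file;
prefetch lets the host prepare the next batch while the current one trains):

    # assumes the xs, ys and model defined below in this file
    dataset = tf.data.Dataset.from_tensor_slices((xs, ys)).repeat().batch(2).prefetch(1)
    model.fit(dataset, epochs=500, steps_per_epoch=3)

Note that fit_generator was deprecated in later TensorFlow releases in favor of
passing generators or tf.data datasets directly to model.fit.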
10 | """ 11 | 12 | # Hiding a GPU 13 | import os 14 | os.environ["CUDA_VISIBLE_DEVICES"] = '-1' 15 | 16 | # Importing tf and numpy 17 | import tensorflow as tf 18 | from tensorflow import keras 19 | import numpy as np 20 | 21 | # Defining and running the model 22 | model = keras.Sequential([keras.layers.Dense(units=1, input_shape=[1])]) 23 | model.compile(optimizer='sgd', loss='mean_squared_error') 24 | 25 | xs = np.array([-1.0, 0.0, 1.0, 2.0, 3.0, 4.0], dtype=float) 26 | ys = np.array([-3.0, -1.0, 1.0, 3.0, 5.0, 7.0], dtype=float) 27 | 28 | model.fit(xs, ys, epochs=500) -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Andrew Stepin 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /2_estimators/2_1_linear_model.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import tensorflow as tf\n", 10 | "import tensorflow.feature_column as fc \n", 11 | "\n", 12 | "import os\n", 13 | "import sys\n", 14 | "\n", 15 | "import matplotlib.pyplot as plt\n", 16 | "from IPython.display import clear_output" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "tf.enable_eager_execution()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "! 
git clone --depth 1 https://github.com/tensorflow/models" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [] 43 | } 44 | ], 45 | "metadata": { 46 | "kernelspec": { 47 | "display_name": "Python 3", 48 | "language": "python", 49 | "name": "python3" 50 | }, 51 | "language_info": { 52 | "codemirror_mode": { 53 | "name": "ipython", 54 | "version": 3 55 | }, 56 | "file_extension": ".py", 57 | "mimetype": "text/x-python", 58 | "name": "python", 59 | "nbconvert_exporter": "python", 60 | "pygments_lexer": "ipython3", 61 | "version": "3.6.8" 62 | } 63 | }, 64 | "nbformat": 4, 65 | "nbformat_minor": 2 66 | } 67 | -------------------------------------------------------------------------------- /2_estimators/.ipynb_checkpoints/2_1_linear_model-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import tensorflow as tf\n", 10 | "import tensorflow.feature_column as fc \n", 11 | "\n", 12 | "import os\n", 13 | "import sys\n", 14 | "\n", 15 | "import matplotlib.pyplot as plt\n", 16 | "from IPython.display import clear_output" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "tf.enable_eager_execution()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "! git clone --depth 1 https://github.com/tensorflow/models" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [] 43 | } 44 | ], 45 | "metadata": { 46 | "kernelspec": { 47 | "display_name": "Python 3", 48 | "language": "python", 49 | "name": "python3" 50 | }, 51 | "language_info": { 52 | "codemirror_mode": { 53 | "name": "ipython", 54 | "version": 3 55 | }, 56 | "file_extension": ".py", 57 | "mimetype": "text/x-python", 58 | "name": "python", 59 | "nbconvert_exporter": "python", 60 | "pygments_lexer": "ipython3", 61 | "version": "3.6.8" 62 | } 63 | }, 64 | "nbformat": 4, 65 | "nbformat_minor": 2 66 | } 67 | -------------------------------------------------------------------------------- /19_lingvo/19_1_task_config.py: -------------------------------------------------------------------------------- 1 | def Task(cls): 2 | p = model.AsrModel.Params() 3 | p.name = 'librispeech' 4 | 5 | # Initialize encoder params. 6 | ep = p.encoder 7 | 8 | # Data consists 240 dimensional frames (80 x 3 frames), which we 9 | # re-interpret as individual 80 dimensional frames. See also, 10 | # LibrispeechCommonAsrInputParams. 11 | ep.input_shape = [None, None, 80, 1] 12 | ep.lstm_cell_size = 1024 13 | ep.num_lstm_layers = 4 14 | ep.conv_filter_shapes = [(3, 3, 1, 32), (3, 3, 32, 32)] 15 | ep.conv_filter_strides = [(2, 2), (2, 2)] 16 | ep.cnn_tpl.params_init = py_utils.WeightInit.Gaussian(0.001) 17 | 18 | # Disable conv LSTM layers. 19 | ep.num_conv_lstm_layers = 0 20 | 21 | # Initialize decoder params. 22 | dp = p.decoder 23 | dp.rnn_cell_dim = 1024 24 | dp.rnn_layers = 2 25 | dp.source_dim = 2048 26 | # Use functional while based unrolling. 
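# Note: the flag below is named for the alternative implementation, so setting
# use_while_loop_based_unrolling to False is what selects the functional
# unrolling mentioned above.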
27 | dp.use_while_loop_based_unrolling = False
28 | 
29 | tp = p.train
30 | tp.learning_rate = 2.5e-4
31 | tp.lr_schedule = lr_schedule.ContinuousLearningRateSchedule.Params().Set(
32 | start_step=50000, half_life_steps=100000, min=0.01)
33 | 
34 | # Setting p.eval.samples_per_summary to a large value ensures that dev,
35 | # devother, test, testother are evaluated completely (since num_samples for
36 | # each of these sets is less than 5000), while train summaries will be
37 | # computed on 5000 examples.
38 | p.eval.samples_per_summary = 5000
39 | p.eval.decoder_samples_per_summary = 0
40 | 
41 | # Use variational weight noise to prevent overfitting.
42 | p.vn.global_vn = True
43 | p.train.vn_std = 0.075
44 | p.train.vn_start_step = 20000
45 | 
46 | return p
47 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Bootstrap ML
2 | 
3 | ![Bootstrap ML Logo](logo.png)
4 | 
5 | **Bootstrap ML** is a comprehensive collection of pre-written code for machine learning and deep learning use cases, all in one convenient place. Whether you're a seasoned practitioner or just starting your ML journey, this repository provides a solid foundation to build upon.
6 | 
7 | ## What Is It About?
8 | 
9 | **Bootstrap ML** aims to accelerate your machine learning and deep learning projects by providing reusable, well-documented code snippets and notebooks. It covers a range of use cases, from quick starts to advanced neural network implementations.
10 | 
11 | ### Folder Overview
12 | 
13 | - **0_quick_start**:
14 | - `0_logging_device_placement.py`: Logs device placement to help identify performance bottlenecks.
15 | 
16 | - **1_keras_api**:
17 | - `1_numbers_classification.ipynb`: Notebook demonstrating number classification using Keras.
18 | - `2_sequential_model.py`: Basic Sequential model example using Keras.
19 | - `3_basic_classification.ipynb`: Notebook for basic classification using Keras.
20 | - `4_text_classification.ipynb`: Notebook for text classification using Keras.
21 | 
22 | - **2_estimators**:
23 | - `2_1_linear_model.ipynb`: Notebook demonstrating a linear model implementation using TensorFlow Estimators.
24 | 
25 | - **19_lingvo**:
26 | - `19_1_task_config.py`: Task configuration example using the Lingvo framework.
27 | 
28 | - **20_tf2**:
29 | - `20_1_actor_critic_agent.ipynb`: Notebook demonstrating an Actor-Critic agent.
30 | - `20_2_a2c.py`: Advantage Actor-Critic (A2C) implementation.
31 | 
32 | - **777_workarounds**:
33 | - `777_1_tf2_cuda10.py`: Workaround for TensorFlow 2.x with CUDA 10 compatibility issues.
34 | 
35 | ## Benefits
36 | 
37 | - **Plug-and-Play**: Pre-written, reusable code that can be easily integrated into your projects.
38 | - **Wide Range of Use Cases**: From data preprocessing to advanced neural network models.
39 | - **Scalable and Efficient**: Optimized for both small-scale experiments and large-scale production workloads.
40 | - **Customizable**: Easily modify and extend the code to suit your specific needs.
41 | 
42 | ## TODO List
43 | 
44 | - [ ] Add more examples for TensorFlow 2.x.
45 | - [ ] Add the most used deep learning architectures with practical examples.
46 | - [ ] Expand the Lingvo framework examples.
47 | - [ ] Add PyTorch models and examples.
48 | - [ ] Add a benchmarking suite for model comparisons.
49 | 
50 | ## Contributing
51 | 
52 | I've been working on this repo in my free time, contributing on and off. Here's how you can get involved:
53 | 
54 | 1. Fork the repository.
55 | 2. Create a new branch (`git checkout -b feature-branch`).
56 | 3. Make your changes and commit them (`git commit -m 'Add new feature'`).
57 | 4. Push to your branch (`git push origin feature-branch`).
58 | 5. Create a new Pull Request.
59 | 
60 | Feel free to reach out for questions, suggestions, or feedback!
61 | 
62 | -- Andrew
63 | 
--------------------------------------------------------------------------------
/1_keras_api/1_numbers_classification.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf\n",
10 | "\n",
11 | "tf.__version__\n",
12 | "\n",
13 | "mnist = tf.keras.datasets.mnist # 28x28 images of hand-written digits 0-9\n",
14 | "\n",
15 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
16 | "\n",
17 | "x_train = tf.keras.utils.normalize(x_train, axis=1)\n",
18 | "x_test = tf.keras.utils.normalize(x_test, axis=1)\n",
19 | "\n",
20 | "model = tf.keras.models.Sequential()\n",
21 | "model.add(tf.keras.layers.Flatten())\n",
22 | "model.add(tf.keras.layers.Dense(128, activation=tf.nn.relu))\n",
23 | "model.add(tf.keras.layers.Dense(128, activation=tf.nn.relu))\n",
24 | "model.add(tf.keras.layers.Dense(10, activation=tf.nn.softmax))\n",
25 | "\n",
26 | "model.compile(optimizer='adam',\n",
27 | " loss='sparse_categorical_crossentropy',\n",
28 | " metrics=['accuracy'])\n",
29 | "\n",
30 | "# 1. Loss is the degree of error, what you've got wrong\n",
31 | "# 2. Neural networks always try to minimize loss\n",
32 | "# 3. Use adam as the default go-to optimizer in most cases\n",
33 | "\n",
34 | "model.fit(x_train, y_train, epochs=3)\n",
35 | "\n",
36 | "# Validation loss and accuracy calculation\n",
37 | "\n",
38 | "val_loss, val_acc = model.evaluate(x_test, y_test)\n",
39 | "print(val_loss, val_acc)\n",
40 | "\n",
41 | "# 1. Expect loss to be slightly higher, and accuracy to be slightly lower\n",
42 | "# 2. 
If there's a huge delta then you've probably overfit the model" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "import matplotlib.pyplot as plt\n", 52 | "\n", 53 | "plt.imshow(x_train[0], cmap = plt.cm.binary)\n", 54 | "plt.show()\n", 55 | "\n", 56 | "# print(x_train[0])" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "model.save('num_reader.model')" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "new_model = tf.keras.models.load_model('num_reader.model')" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "predictions = new_model.predict([x_test])\n", 84 | "print(predictions)" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": null, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "import numpy as np\n", 94 | "\n", 95 | "print(np.argmax(predictions[0]))" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "plt.imshow(x_test[0], cmap = plt.cm.binary)\n", 105 | "plt.show()" 106 | ] 107 | } 108 | ], 109 | "metadata": { 110 | "kernelspec": { 111 | "display_name": "Python 3", 112 | "language": "python", 113 | "name": "python3" 114 | }, 115 | "language_info": { 116 | "codemirror_mode": { 117 | "name": "ipython", 118 | "version": 3 119 | }, 120 | "file_extension": ".py", 121 | "mimetype": "text/x-python", 122 | "name": "python", 123 | "nbconvert_exporter": "python", 124 | "pygments_lexer": "ipython3", 125 | "version": "3.7.1" 126 | } 127 | }, 128 | "nbformat": 4, 129 | "nbformat_minor": 2 130 | } 131 | -------------------------------------------------------------------------------- /20_tf2/20_2_a2c.py: -------------------------------------------------------------------------------- 1 | import gym 2 | import logging 3 | import numpy as np 4 | import tensorflow as tf 5 | import matplotlib.pyplot as plt 6 | import tensorflow.keras.layers as kl 7 | import tensorflow.keras.losses as kls 8 | import tensorflow.keras.optimizers as ko 9 | 10 | 11 | class ProbabilityDistribution(tf.keras.Model): 12 | def call(self, logits): 13 | # sample a random categorical action from given logits 14 | return tf.squeeze(tf.random.categorical(logits, 1), axis=-1) 15 | 16 | 17 | class Model(tf.keras.Model): 18 | def __init__(self, num_actions): 19 | super().__init__('mlp_policy') 20 | # no tf.get_variable(), just simple Keras API 21 | self.hidden1 = kl.Dense(128, activation='relu') 22 | self.hidden2 = kl.Dense(128, activation='relu') 23 | self.value = kl.Dense(1, name='value') 24 | # logits are unnormalized log probabilities 25 | self.logits = kl.Dense(num_actions, name='policy_logits') 26 | self.dist = ProbabilityDistribution() 27 | 28 | def call(self, inputs): 29 | # inputs is a numpy array, convert to Tensor 30 | x = tf.convert_to_tensor(inputs) 31 | # separate hidden layers from the same input tensor 32 | hidden_logs = self.hidden1(x) 33 | hidden_vals = self.hidden2(x) 34 | return self.logits(hidden_logs), self.value(hidden_vals) 35 | 36 | def action_value(self, obs): 37 | # executes call() under the hood 38 | logits, value = self.predict(obs) 39 | action = self.dist.predict(logits) 40 | # a simpler option, will become clear 
later why we don't use it 41 | # action = tf.random.categorical(logits, 1) 42 | return np.squeeze(action, axis=-1), np.squeeze(value, axis=-1) 43 | 44 | 45 | class A2CAgent: 46 | def __init__(self, model): 47 | # hyperparameters for loss terms, gamma is the discount coefficient 48 | self.params = { 49 | 'gamma': 0.99, 50 | 'value': 0.5, 51 | 'entropy': 0.0001 52 | } 53 | self.model = model 54 | self.model.compile( 55 | optimizer=ko.RMSprop(lr=0.0007), 56 | # define separate losses for policy logits and value estimate 57 | loss=[self._logits_loss, self._value_loss] 58 | ) 59 | 60 | def train(self, env, batch_sz=32, updates=1000): 61 | # storage helpers for a single batch of data 62 | actions = np.empty((batch_sz,), dtype=np.int32) 63 | rewards, dones, values = np.empty((3, batch_sz)) 64 | observations = np.empty((batch_sz,) + env.observation_space.shape) 65 | # training loop: collect samples, send to optimizer, repeat updates times 66 | ep_rews = [0.0] 67 | next_obs = env.reset() 68 | for update in range(updates): 69 | for step in range(batch_sz): 70 | observations[step] = next_obs.copy() 71 | actions[step], values[step] = self.model.action_value(next_obs[None, :]) 72 | next_obs, rewards[step], dones[step], _ = env.step(actions[step]) 73 | 74 | ep_rews[-1] += rewards[step] 75 | if dones[step]: 76 | ep_rews.append(0.0) 77 | next_obs = env.reset() 78 | logging.info("Episode: %03d, Reward: %03d" % (len(ep_rews)-1, ep_rews[-2])) 79 | 80 | _, next_value = self.model.action_value(next_obs[None, :]) 81 | returns, advs = self._returns_advantages(rewards, dones, values, next_value) 82 | # a trick to input actions and advantages through same API 83 | acts_and_advs = np.concatenate([actions[:, None], advs[:, None]], axis=-1) 84 | # performs a full training step on the collected batch 85 | # note: no need to mess around with gradients, Keras API handles it 86 | losses = self.model.train_on_batch(observations, [acts_and_advs, returns]) 87 | logging.debug("[%d/%d] Losses: %s" % (update+1, updates, losses)) 88 | return ep_rews 89 | 90 | def test(self, env, render=False): 91 | obs, done, ep_reward = env.reset(), False, 0 92 | while not done: 93 | action, _ = self.model.action_value(obs[None, :]) 94 | obs, reward, done, _ = env.step(action) 95 | ep_reward += reward 96 | if render: 97 | env.render() 98 | return ep_reward 99 | 100 | def _returns_advantages(self, rewards, dones, values, next_value): 101 | # next_value is the bootstrap value estimate of a future state (the critic) 102 | returns = np.append(np.zeros_like(rewards), next_value, axis=-1) 103 | # returns are calculated as discounted sum of future rewards 104 | for t in reversed(range(rewards.shape[0])): 105 | returns[t] = rewards[t] + self.params['gamma'] * returns[t+1] * (1-dones[t]) 106 | returns = returns[:-1] 107 | # advantages are returns - baseline, value estimates in our case 108 | advantages = returns - values 109 | return returns, advantages 110 | 111 | def _value_loss(self, returns, value): 112 | # value loss is typically MSE between value estimates and returns 113 | return self.params['value']*kls.mean_squared_error(returns, value) 114 | 115 | def _logits_loss(self, acts_and_advs, logits): 116 | # a trick to input actions and advantages through same API 117 | actions, advantages = tf.split(acts_and_advs, 2, axis=-1) 118 | # polymorphic CE loss function that supports sparse and weighted options 119 | # from_logits argument ensures transformation into normalized probabilities 120 | cross_entropy = 
kls.CategoricalCrossentropy(from_logits=True) 121 | # policy loss is defined by policy gradients, weighted by advantages 122 | # note: we only calculate the loss on the actions we've actually taken 123 | # thus under the hood a sparse version of CE loss will be executed 124 | actions = tf.cast(actions, tf.int32) 125 | policy_loss = cross_entropy(actions, logits, sample_weight=advantages) 126 | # entropy loss can be calculated via CE over itself 127 | entropy_loss = cross_entropy(logits, logits) 128 | # here signs are flipped because optimizer minimizes 129 | return policy_loss - self.params['entropy']*entropy_loss 130 | 131 | 132 | if __name__ == '__main__': 133 | logging.basicConfig(level=logging.INFO) 134 | 135 | env = gym.make('CartPole-v0') 136 | model = Model(num_actions=env.action_space.n) 137 | agent = A2CAgent(model) 138 | 139 | rewards_history = agent.train(env) 140 | print("Finished training.") 141 | print("Total Episode Reward: %d out of 200" % agent.test(env, True)) 142 | 143 | plt.style.use('seaborn') 144 | plt.plot(np.arange(0, len(rewards_history), 25), rewards_history[::25]) 145 | plt.xlabel('Episode') 146 | plt.ylabel('Total Reward') 147 | plt.show() 148 | -------------------------------------------------------------------------------- /1_keras_api/4_text_classification.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "1.12.0\n" 13 | ] 14 | } 15 | ], 16 | "source": [ 17 | "import tensorflow as tf\n", 18 | "from tensorflow import keras\n", 19 | "\n", 20 | "import numpy as np\n", 21 | "\n", 22 | "print(tf.__version__)" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 2, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "imdb = keras.datasets.imdb\n", 32 | "\n", 33 | "(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 3, 39 | "metadata": {}, 40 | "outputs": [ 41 | { 42 | "name": "stdout", 43 | "output_type": "stream", 44 | "text": [ 45 | "Training entries: 25000, labels: 25000\n" 46 | ] 47 | } 48 | ], 49 | "source": [ 50 | "print(\"Training entries: {}, labels: {}\".format(len(train_data), len(train_labels)))" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 4, 56 | "metadata": {}, 57 | "outputs": [ 58 | { 59 | "name": "stdout", 60 | "output_type": "stream", 61 | "text": [ 62 | "[1, 14, 22, 16, 43, 530, 973, 1622, 1385, 65, 458, 4468, 66, 3941, 4, 173, 36, 256, 5, 25, 100, 43, 838, 112, 50, 670, 2, 9, 35, 480, 284, 5, 150, 4, 172, 112, 167, 2, 336, 385, 39, 4, 172, 4536, 1111, 17, 546, 38, 13, 447, 4, 192, 50, 16, 6, 147, 2025, 19, 14, 22, 4, 1920, 4613, 469, 4, 22, 71, 87, 12, 16, 43, 530, 38, 76, 15, 13, 1247, 4, 22, 17, 515, 17, 12, 16, 626, 18, 2, 5, 62, 386, 12, 8, 316, 8, 106, 5, 4, 2223, 5244, 16, 480, 66, 3785, 33, 4, 130, 12, 16, 38, 619, 5, 25, 124, 51, 36, 135, 48, 25, 1415, 33, 6, 22, 12, 215, 28, 77, 52, 5, 14, 407, 16, 82, 2, 8, 4, 107, 117, 5952, 15, 256, 4, 2, 7, 3766, 5, 723, 36, 71, 43, 530, 476, 26, 400, 317, 46, 7, 4, 2, 1029, 13, 104, 88, 4, 381, 15, 297, 98, 32, 2071, 56, 26, 141, 6, 194, 7486, 18, 4, 226, 22, 21, 134, 476, 26, 480, 5, 144, 30, 5535, 18, 51, 36, 28, 224, 92, 25, 104, 4, 226, 65, 16, 38, 1334, 88, 12, 16, 283, 5, 16, 4472, 113, 103, 32, 15, 16, 
5345, 19, 178, 32]\n"
63 | ]
64 | }
65 | ],
66 | "source": [
67 | "print(train_data[0])"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": 5,
73 | "metadata": {},
74 | "outputs": [
75 | {
76 | "data": {
77 | "text/plain": [
78 | "(218, 189)"
79 | ]
80 | },
81 | "execution_count": 5,
82 | "metadata": {},
83 | "output_type": "execute_result"
84 | }
85 | ],
86 | "source": [
87 | "len(train_data[0]), len(train_data[1])"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": 6,
93 | "metadata": {},
94 | "outputs": [],
95 | "source": [
96 | "word_index = imdb.get_word_index()\n",
97 | "\n",
98 | "word_index = {k:(v+3) for k,v in word_index.items()} \n",
99 | "word_index[\"<PAD>\"] = 0\n",
100 | "word_index[\"<START>\"] = 1\n",
101 | "word_index[\"<UNK>\"] = 2 \n",
102 | "word_index[\"<UNUSED>\"] = 3\n",
103 | "\n",
104 | "word_index = imdb.get_word_index()\n",
105 | "\n",
106 | "word_index = {k:(v+3) for k,v in word_index.items()} \n",
107 | "word_index[\"<PAD>\"] = 0\n",
108 | "word_index[\"<START>\"] = 1\n",
109 | "word_index[\"<UNK>\"] = 2 \n",
110 | "word_index[\"<UNUSED>\"] = 3\n",
111 | "\n",
112 | "reverse_word_index = dict([(value, key) for (key, value) in word_index.items()])\n",
113 | "\n",
114 | "def decode_review(text):\n",
115 | " return ' '.join([reverse_word_index.get(i, '?') for i in text])"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": 8,
121 | "metadata": {},
122 | "outputs": [
123 | {
124 | "data": {
125 | "text/plain": [
126 | "\" this film was just brilliant casting location scenery story direction everyone's really suited the part they played and you could just imagine being there robert is an amazing actor and now the same being director father came from the same scottish island as myself so i loved the fact there was a real connection with this film the witty remarks throughout the film were great it was just brilliant so much that i bought the film as soon as it was released for and would recommend it to everyone to watch and the fly fishing was amazing really cried at the end it was so sad and you know what they say if you cry at a film it must have been good and this definitely was also to the two little boy's that played the of norman and paul they were just brilliant children are often left out of the list i think because the stars that play them all grown up are such a big profile for the whole film but these children are amazing and should be praised for what they have done don't you think the whole story was so lovely because it was true and was someone's life after all that was shared with us all\""
127 | ]
128 | },
129 | "execution_count": 8,
130 | "metadata": {},
131 | "output_type": "execute_result"
132 | }
133 | ],
134 | "source": [
135 | "decode_review(train_data[0])"
136 | ]
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": 10,
141 | "metadata": {},
142 | "outputs": [],
143 | "source": [
144 | "train_data = keras.preprocessing.sequence.pad_sequences(train_data,\n",
145 | " value=word_index[\"<PAD>\"],\n",
146 | " padding='post',\n",
147 | " maxlen=256)\n",
148 | "\n",
149 | "test_data = keras.preprocessing.sequence.pad_sequences(test_data,\n",
150 | " value=word_index[\"<PAD>\"],\n",
151 | " padding='post',\n",
152 | " maxlen=256)"
153 | ]
154 | },
155 | {
156 | "cell_type": "code",
157 | "execution_count": 12,
158 | "metadata": {},
159 | "outputs": [
160 | {
161 | "data": {
162 | "text/plain": [
163 | "(256, 256)"
164 | ]
165 | },
166 | "execution_count": 12,
167 | "metadata": {},
168 | "output_type": "execute_result"
169 | }
170 | ], 171 | "source": [ 172 | "len(train_data[0]), len(train_data[1])" 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": 13, 178 | "metadata": {}, 179 | "outputs": [ 180 | { 181 | "name": "stdout", 182 | "output_type": "stream", 183 | "text": [ 184 | "[ 1 14 22 16 43 530 973 1622 1385 65 458 4468 66 3941\n", 185 | " 4 173 36 256 5 25 100 43 838 112 50 670 2 9\n", 186 | " 35 480 284 5 150 4 172 112 167 2 336 385 39 4\n", 187 | " 172 4536 1111 17 546 38 13 447 4 192 50 16 6 147\n", 188 | " 2025 19 14 22 4 1920 4613 469 4 22 71 87 12 16\n", 189 | " 43 530 38 76 15 13 1247 4 22 17 515 17 12 16\n", 190 | " 626 18 2 5 62 386 12 8 316 8 106 5 4 2223\n", 191 | " 5244 16 480 66 3785 33 4 130 12 16 38 619 5 25\n", 192 | " 124 51 36 135 48 25 1415 33 6 22 12 215 28 77\n", 193 | " 52 5 14 407 16 82 2 8 4 107 117 5952 15 256\n", 194 | " 4 2 7 3766 5 723 36 71 43 530 476 26 400 317\n", 195 | " 46 7 4 2 1029 13 104 88 4 381 15 297 98 32\n", 196 | " 2071 56 26 141 6 194 7486 18 4 226 22 21 134 476\n", 197 | " 26 480 5 144 30 5535 18 51 36 28 224 92 25 104\n", 198 | " 4 226 65 16 38 1334 88 12 16 283 5 16 4472 113\n", 199 | " 103 32 15 16 5345 19 178 32 0 0 0 0 0 0\n", 200 | " 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", 201 | " 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n", 202 | " 0 0 0 0]\n" 203 | ] 204 | } 205 | ], 206 | "source": [ 207 | "print(train_data[0])" 208 | ] 209 | }, 210 | { 211 | "cell_type": "code", 212 | "execution_count": 14, 213 | "metadata": {}, 214 | "outputs": [ 215 | { 216 | "name": "stdout", 217 | "output_type": "stream", 218 | "text": [ 219 | "_________________________________________________________________\n", 220 | "Layer (type) Output Shape Param # \n", 221 | "=================================================================\n", 222 | "embedding (Embedding) (None, None, 16) 160000 \n", 223 | "_________________________________________________________________\n", 224 | "global_average_pooling1d (Gl (None, 16) 0 \n", 225 | "_________________________________________________________________\n", 226 | "dense (Dense) (None, 16) 272 \n", 227 | "_________________________________________________________________\n", 228 | "dense_1 (Dense) (None, 1) 17 \n", 229 | "=================================================================\n", 230 | "Total params: 160,289\n", 231 | "Trainable params: 160,289\n", 232 | "Non-trainable params: 0\n", 233 | "_________________________________________________________________\n" 234 | ] 235 | } 236 | ], 237 | "source": [ 238 | "vocab_size = 10000\n", 239 | "\n", 240 | "model = keras.Sequential()\n", 241 | "model.add(keras.layers.Embedding(vocab_size, 16))\n", 242 | "model.add(keras.layers.GlobalAveragePooling1D())\n", 243 | "model.add(keras.layers.Dense(16, activation=tf.nn.relu))\n", 244 | "model.add(keras.layers.Dense(1, activation=tf.nn.sigmoid))\n", 245 | "\n", 246 | "model.summary()" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": 15, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "model.compile(optimizer=tf.train.AdamOptimizer(),\n", 256 | " loss='binary_crossentropy',\n", 257 | " metrics=['accuracy'])" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": 16, 263 | "metadata": {}, 264 | "outputs": [], 265 | "source": [ 266 | "x_val = train_data[:10000]\n", 267 | "partial_x_train = train_data[10000:]\n", 268 | "\n", 269 | "y_val = train_labels[:10000]\n", 270 | "partial_y_train = train_labels[10000:]" 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": 
17, 276 | "metadata": {}, 277 | "outputs": [ 278 | { 279 | "name": "stdout", 280 | "output_type": "stream", 281 | "text": [ 282 | "Train on 15000 samples, validate on 10000 samples\n", 283 | "Epoch 1/40\n", 284 | "15000/15000 [==============================] - 4s 273us/step - loss: 0.6921 - acc: 0.6027 - val_loss: 0.6901 - val_acc: 0.7397\n", 285 | "Epoch 2/40\n", 286 | "15000/15000 [==============================] - 1s 66us/step - loss: 0.6865 - acc: 0.7416 - val_loss: 0.6826 - val_acc: 0.7406\n", 287 | "Epoch 3/40\n", 288 | "15000/15000 [==============================] - 1s 54us/step - loss: 0.6742 - acc: 0.7606 - val_loss: 0.6663 - val_acc: 0.7586\n", 289 | "Epoch 4/40\n", 290 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.6505 - acc: 0.7718 - val_loss: 0.6397 - val_acc: 0.7692\n", 291 | "Epoch 5/40\n", 292 | "15000/15000 [==============================] - 1s 55us/step - loss: 0.6154 - acc: 0.7965 - val_loss: 0.6021 - val_acc: 0.7884\n", 293 | "Epoch 6/40\n", 294 | "15000/15000 [==============================] - 1s 53us/step - loss: 0.5711 - acc: 0.8144 - val_loss: 0.5594 - val_acc: 0.8023\n", 295 | "Epoch 7/40\n", 296 | "15000/15000 [==============================] - 1s 53us/step - loss: 0.5221 - acc: 0.8331 - val_loss: 0.5147 - val_acc: 0.8219\n", 297 | "Epoch 8/40\n", 298 | "15000/15000 [==============================] - 1s 54us/step - loss: 0.4736 - acc: 0.8485 - val_loss: 0.4727 - val_acc: 0.8355\n", 299 | "Epoch 9/40\n", 300 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.4296 - acc: 0.8613 - val_loss: 0.4359 - val_acc: 0.8450\n", 301 | "Epoch 10/40\n", 302 | "15000/15000 [==============================] - 1s 52us/step - loss: 0.3902 - acc: 0.8759 - val_loss: 0.4050 - val_acc: 0.8526\n", 303 | "Epoch 11/40\n", 304 | "15000/15000 [==============================] - 1s 49us/step - loss: 0.3573 - acc: 0.8833 - val_loss: 0.3821 - val_acc: 0.8568\n", 305 | "Epoch 12/40\n", 306 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.3301 - acc: 0.8903 - val_loss: 0.3600 - val_acc: 0.8655\n", 307 | "Epoch 13/40\n", 308 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.3058 - acc: 0.8975 - val_loss: 0.3449 - val_acc: 0.8690\n", 309 | "Epoch 14/40\n", 310 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.2859 - acc: 0.9031 - val_loss: 0.3316 - val_acc: 0.8731\n", 311 | "Epoch 15/40\n", 312 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.2687 - acc: 0.9076 - val_loss: 0.3215 - val_acc: 0.8757\n", 313 | "Epoch 16/40\n", 314 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.2540 - acc: 0.9116 - val_loss: 0.3133 - val_acc: 0.8786\n", 315 | "Epoch 17/40\n", 316 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.2399 - acc: 0.9171 - val_loss: 0.3066 - val_acc: 0.8787\n", 317 | "Epoch 18/40\n", 318 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.2277 - acc: 0.9219 - val_loss: 0.3011 - val_acc: 0.8809\n", 319 | "Epoch 19/40\n", 320 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.2163 - acc: 0.9258 - val_loss: 0.2968 - val_acc: 0.8823\n", 321 | "Epoch 20/40\n", 322 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.2063 - acc: 0.9291 - val_loss: 0.2932 - val_acc: 0.8826\n", 323 | "Epoch 21/40\n", 324 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.1965 - acc: 0.9328 - val_loss: 0.2904 - val_acc: 0.8836\n", 325 | "Epoch 
22/40\n", 326 | "15000/15000 [==============================] - 1s 49us/step - loss: 0.1876 - acc: 0.9367 - val_loss: 0.2886 - val_acc: 0.8841\n", 327 | "Epoch 23/40\n", 328 | "15000/15000 [==============================] - 1s 55us/step - loss: 0.1795 - acc: 0.9405 - val_loss: 0.2874 - val_acc: 0.8845\n", 329 | "Epoch 24/40\n", 330 | "15000/15000 [==============================] - 1s 53us/step - loss: 0.1713 - acc: 0.9445 - val_loss: 0.2858 - val_acc: 0.8842\n", 331 | "Epoch 25/40\n", 332 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.1642 - acc: 0.9481 - val_loss: 0.2852 - val_acc: 0.8852\n", 333 | "Epoch 26/40\n", 334 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.1571 - acc: 0.9499 - val_loss: 0.2854 - val_acc: 0.8854\n", 335 | "Epoch 27/40\n", 336 | "15000/15000 [==============================] - 1s 57us/step - loss: 0.1511 - acc: 0.9529 - val_loss: 0.2860 - val_acc: 0.8851\n", 337 | "Epoch 28/40\n", 338 | "15000/15000 [==============================] - 1s 76us/step - loss: 0.1448 - acc: 0.9558 - val_loss: 0.2859 - val_acc: 0.8869\n", 339 | "Epoch 29/40\n", 340 | "15000/15000 [==============================] - 1s 61us/step - loss: 0.1390 - acc: 0.9567 - val_loss: 0.2865 - val_acc: 0.8862\n", 341 | "Epoch 30/40\n", 342 | "15000/15000 [==============================] - 1s 53us/step - loss: 0.1340 - acc: 0.9599 - val_loss: 0.2880 - val_acc: 0.8867\n", 343 | "Epoch 31/40\n", 344 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.1279 - acc: 0.9619 - val_loss: 0.2896 - val_acc: 0.8863\n", 345 | "Epoch 32/40\n", 346 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.1233 - acc: 0.9641 - val_loss: 0.2915 - val_acc: 0.8859\n", 347 | "Epoch 33/40\n", 348 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.1179 - acc: 0.9662 - val_loss: 0.2936 - val_acc: 0.8852\n", 349 | "Epoch 34/40\n", 350 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.1135 - acc: 0.9678 - val_loss: 0.2961 - val_acc: 0.8851\n", 351 | "Epoch 35/40\n", 352 | "15000/15000 [==============================] - 1s 57us/step - loss: 0.1095 - acc: 0.9693 - val_loss: 0.2980 - val_acc: 0.8854\n", 353 | "Epoch 36/40\n", 354 | "15000/15000 [==============================] - 1s 56us/step - loss: 0.1046 - acc: 0.9710 - val_loss: 0.3011 - val_acc: 0.8846\n", 355 | "Epoch 37/40\n", 356 | "15000/15000 [==============================] - 1s 51us/step - loss: 0.1007 - acc: 0.9727 - val_loss: 0.3042 - val_acc: 0.8840\n", 357 | "Epoch 38/40\n", 358 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.0973 - acc: 0.9730 - val_loss: 0.3073 - val_acc: 0.8833\n", 359 | "Epoch 39/40\n", 360 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.0930 - acc: 0.9751 - val_loss: 0.3099 - val_acc: 0.8834\n", 361 | "Epoch 40/40\n", 362 | "15000/15000 [==============================] - 1s 50us/step - loss: 0.0893 - acc: 0.9775 - val_loss: 0.3134 - val_acc: 0.8825\n" 363 | ] 364 | } 365 | ], 366 | "source": [ 367 | "history = model.fit(partial_x_train,\n", 368 | " partial_y_train,\n", 369 | " epochs=40,\n", 370 | " batch_size=512,\n", 371 | " validation_data=(x_val, y_val),\n", 372 | " verbose=1)" 373 | ] 374 | }, 375 | { 376 | "cell_type": "code", 377 | "execution_count": 18, 378 | "metadata": {}, 379 | "outputs": [ 380 | { 381 | "name": "stdout", 382 | "output_type": "stream", 383 | "text": [ 384 | "25000/25000 [==============================] - 1s 54us/step\n", 385 | 
"[0.3345097375965118, 0.87236]\n" 386 | ] 387 | } 388 | ], 389 | "source": [ 390 | "results = model.evaluate(test_data, test_labels)\n", 391 | "\n", 392 | "print(results)" 393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": 19, 398 | "metadata": {}, 399 | "outputs": [ 400 | { 401 | "data": { 402 | "text/plain": [ 403 | "dict_keys(['val_loss', 'val_acc', 'loss', 'acc'])" 404 | ] 405 | }, 406 | "execution_count": 19, 407 | "metadata": {}, 408 | "output_type": "execute_result" 409 | } 410 | ], 411 | "source": [ 412 | "history_dict = history.history\n", 413 | "history_dict.keys()" 414 | ] 415 | }, 416 | { 417 | "cell_type": "code", 418 | "execution_count": 22, 419 | "metadata": {}, 420 | "outputs": [ 421 | { 422 | "data": { 423 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzt3Xl8VNX9//HXJyxiAAVZrIqEYJGKihQRrStK9YvVora4YKpQF7CK/LRuoFVBS93bum9VtEoFtVax2tpW625VUESRqoAsEZVFoCooSz6/P86d4SZMkiHJZJa8n4/HfWTmbvOZO5P7mXPOveeYuyMiIgJQlO0AREQkdygpiIhIkpKCiIgkKSmIiEiSkoKIiCQpKYg0IDNrke0YROpDSUGknsxshJm9YWaLgeVm1jXbMYnUVfNsB9BUmNl8YFtgQ2x2c2Cau++flaCk3sxsDDAU+Jm7v5vteETqSyWFxvVjd2+TmIAzsh2Q1J2ZtQYuBI5UQpBCoaSQQ8xsFzN73sxWmtksMxsczT/ezL6Kpg1m9k3iebS8yMzGmNlcM1tuZg+b2TbRsm5m5lEVx2Iz+9TMzou95jgzezD2/LZo/e9WE+PzsddfYmYTYsu2NrM/mtlSM1tgZr8ys5Tfseh110X7WWlmfzGzttGy4Wb2ipndbGarzOy/ZjawyuvcE72XT8zs12bWLLatm9m5sfV/FM37dWze5dH2X5nZmqgkV93nMjj6PFZG73+XaNGuwJfALWa2wszmmNnp0TbfMbPVZtYhtp89o2PTwszuqxJP1edHmtmM6DVfNbPesWXzzeyHseenmdnzsefJz8/MukbvL/4Zj4w+n6/M7Gszq7Zbg9r2VWXdAWZWEfuuJr6vw6PltX2u25vZVDP7In4so2XVfl+i5aeY2ezoc3jGzEqqvIfRZjbPzJaZ2XWJ72UU08vR4yIzm2xmD8WWp/3+C4WSQo6w0ED5JPAPoDNwNjDJzHq6+5RY6eIlYFTsOcBo4GjgIGB7YAVwa5WXOBjoARwGjImfVGIx9AAOTyPcUdFr7w+cZ2a7RfNvBrYGukexnAz8vIb9TIn20xUoBYbFlu0NzAM6ApcDj1mU6ID7gfXAd4HvR+/ptNi2c6rs6zRgdux9fg8YCxwcvf6PqwvQzHYGHgLOAToBTwNPmllLoDiK/RNgO2AI8BszG+junwHPA8fFdvczYLK7rwMqqOb/z8z6AvcCI4EOwJ3AVDPboro4a3AlsDy279bAbcCw6L3vUdd9VWNxldLwa1WW1/S5PgSUE77DyWMZ2zbl98XMjgYuBn5C+IxeivYVdwzQD+gLHAWckiL2Wwjf35PdvSLF8nTef95TUsgd+wBtgKvdfa27Pwf8lVBfXZuRwCXuXu7u3wLjgCFmFm8zGu/uX0fVHBOr2e9VhC9+upoT2khWRb/UjwfGuvuX7j4fuAE4KY39NCN8F+P/cEuA37v7OnefAnwAHGFm2xIS1znR+1kC/A44Ibbt58B8M/uBmXUGSoA3YsstFn9tjgeecvd/Rifz64EtgX2j5RXARe7+jbvPAP4Qe8/3ExIB0fEZCjwQLVsIHGBmrVK85unAne7+urtvcPf7gW8J35G0RaWLH0RxJBRFMW9We2I1+6qL6j7XHQk/Mqo7lnFVvy8jgavcfba7rwd+A/SJlxaAa9z9C3dfCPyeKt//qJR2MPDT6HOmyvKGev85T0khd2wPLKryC2UBsEMa25YAf4mK1SsJv4o3EBq2ExZV2e/28R2Y2d7A90jvS39T9DqzgHvdfRHhl1/LaN/pxn9ctJ+lwNeEklLCJ165t8ZEzCVAC+DT2Pu9k1C6ivsDoYQwHPhjfIG7zwZ+BbxkoQru8Rpi3D7+nqLPZ1H0vr4FvnD3L6t5z08AvcysO3AosMrdE8npVuAb4PPoPZwY20cJoQS2MvYed6TyZ/Z4bNlN1cR+DXApkDzJRbGeCvzRzFYDb9Xw3mvcVx1V97luT83HEqr/vpQAN8aOxxeExB/ftqbvf19CSaIjoZSbSkO9/5ynpJA7FgM7WuU6+ETVRG0WAYe7e7vY1Mrd49vuWGW/i6vs41pgjLtvoHaj3b0dsA2wv5kNBZYR/mHiv85qi//haD/FwLuEkkXCDmZmseeJmBcRTsYdY+91K3fftcq+/wbsR6hieIBNPRztZzdC1Vt1FsffUxTTjtH7WghsE6/bJvae3f2b6HXKCL94k3G4+1J3P9Tdt46OwZ9i+1gETKjyeRa7e7xK5OjEMkL1YVWHEE5yD6dY9hfCZ3Uw4YRYm5r2tbmq+1wXU8OxjFT3fVkEjKxyvLZ091dj29b0/V8F/BC4BLg3KtXFNeT7z3lKCrnjdcKvnwstNEQOINR1T05j2zuACYnispl1MrOjqqxzqZkVm9muhHr+KbFlhwDu7n/dzJg3AA50ipLJw1EcbaNYfgmk0yhXkdhPbF5nYHR0LI4FdgGedvdPCe0uN5jZVlHj4E5mdlB8h1E81wAPuvsXKV7zduC6qJqrJg8TqjcGRu0+5xGSyatR0n0ZuMrMWkVVDKcCk2Lb/5FQWhlMescC4G7gDDPb24LWZnZElRNmbcYBF1T5VZ5wDTDV3V9vgH1truo+10XAq9R8LBOqfl/uAMZG3+3EhQjHVtnmAjNrH1VT/T8qf//nuvun7n4X8D/g/CrbjqPh3n/O030KOcLd11q42ug2QiPoJ4QGr/+msfmNhOLyP8xse0K97RRC9UXCC4QG2CLgenf/R2zZdtT8
a7mqW8zs98Ba4O/APdH8swmNzfMIVSN3ExpMq3N81Ei4AZhJqEtPeJ3QML6M0EYwxN0TdcgnA1cD7wNto9e7purO3X1iqhc1sxOB7xDqlmvk7h+Y2c+i97UDMINwafHaaJUyQvXVp4Rqi8vc/Z+x7V8xswrgrTQSUGKbaRauvLmFcAzWEJLPi+lsH3nb3Z+vOtPM9gOOIJSQ6rWvOqrpcx1KOMEvJlwscXn8WFLN98Xd/2JmbYDJ0Y+RVcA/gUdi2z4BTCc0JN/Hxu9sVacBb5rZ4+7+QTSvId9/zrMmkvyaLDPrBnwMtIga4XKehUsYT/MCuanPzJ4D/uTuf8h2LNmUrc/VwiW3Pdx9TmO+br5SSUEkg8xsLzZeBimS89SmIJIhZnY/8C/C5bNf1ra+SC5Q9ZGIiCSppCAiIkl516bQsWNH79atW7bDEBHJK9OnT1/m7p1qWy/vkkK3bt2YNm1atsMQEckrZrag9rVUfSQiIjFKCiIikqSkICIiSRltUzCzQYQuGJoBf3D3q6ss/x2hUy4InVx1jjq8EpF6WrduHeXl5XzzzTfZDkUaUatWrejSpQstWrSo0/YZSwpRT4O3EroMLif0JzLV3d9PrOPu8dGxziYMmCIiDaC8vJy2bdvSrVs3KndMKoXK3Vm+fDnl5eWUlpbWaR+ZrD7qD8xx93lR52GTqflW/6FsOlpSg5g0Cbp1g6Ki8HdSqn4XRQrMN998Q4cOHZQQmhAzo0OHDvUqHWYyKexA5YEtyqlmwJWoZ8NS4Llqlo8ws2lmNm3p0qWbFcSkSTBiBCxYAO7h74gRSgzSNCghND31/cwzmRRSRVZdnxonAI9WN8CLu9/l7v3cvV+nTrXee1HJJZfA6tWV561eHeYnqCQhIhJkMimUU3m0oy5sOtpXwglkqOpo4cLU8xdEt3GoJCGSOc2aNaNPnz7JqWvXrowaNSrbYaVlw4YNXHvttey777707duXu+++O9shNYpMJoU3gR5mVmpmLQkn/qlVVzKznkB74LVMBNG1a+r5zZrB6NFw3nm1lyREmoJMlJi33HJLZsyYkZyuuOKK+u+0kYwbN46vv/6aZ599lrfeeovTTz+99o0KQMaSQjSgyyjgGcJA8g+7+ywzuyIaYSxhKDA5U0PdTZgAxcWV57VsCXvsAXffDZ9/nnq76koYIoUoGyXmBQsWMHDgQHr37s3AgQNZuHAhc+fOTZYq4qWMxYsXM3fuXAYNGsSee+7JAQccwH//GwYlHD58OGeccQYHHHAAO++8M3/9axhV9r777kuWSj744AOaN2/Oo48+ukkcM2bMYJ999qF3794cc8wxrFixIjomk3jppZfo379/Mr4vv/yS0tJS1q1bB8D//vc/unXrxrp16+jWrRvLli0DQkK5/vrrAWqMOx7Pbrvtxvz585k/fz677RYGxlu3bh3du3dPvo+5c+fSv39/+vTpQ2lpKcOHD2/QzwQyfPOauz/t7ju7+07uPiGad5m7T42tM87dx2QqhrIyuOsuKCkBs/D33nth+nRYuhQ6dky9XbyEoTYHKXTptL01tFGjRnHyySczc+ZMysrKGD16NDvttFOyVBEvZWy//faMGDGCm2++menTp3P99ddz5plnJvc1f/58XnjhBZ566inOOOOMTa6+ufTSS/ne976XMo6TTz6Za665hpkzZ7L77rszfvx4AD7++GOGDRvGu+++m4yvbdu2DBgwgKeeegqAyZMn89Of/pQWLVpQVFREqt+2NcVdm7vuuos2bdokn992220cd9xxzJgxg+uuuy7t/WyOJnFHc1kZzJ8PFRXhb1lZmN+mDfz+95uWJMxg2LDwWG0O0hRUVzLOZIn5tdde48QTTwTgpJNO4uWXX6523a+++opXX32VY489lj59+jBy5Eg+/fTT5PLjjjuOoqIievToQffu3ZO/xgGmT59ORUUF/fr122S/q1atYuXKlRx00EEADBs2jBdfDENhFxUVpYzvtNNOY+LEMPz3xIkT+fnPfw5Aly5dePvttzcr7gsuuCBZGpo7d26lbVevXs3EiRP5xS9+kZzXrFkzvvwys+M15V0vqQ0tkSAuuST8A3TuHJLHlVfCN9/A5MnV/4JKbCuS77p23XjxRdX5jaWmSykrKipo164dM2bMSGvb+PNf/epX/Pa3v+Waa67ZrHjatm2bcp/77bdfsmSyYcOGZFXPb37zG0aNGsWFF17IZ599xvnnn19r3Ndddx1DhgwBSO4n4fe//z0jRoygZcuWyXnnnHMOQ4YMYeLEiTRr1iyZzBpSkygp1CZekvjsM5g7F04/Ha69Nju/oEQaW6q2t+LiMD9T9t13XyZPngyE+vv999+/2nW32morSktLeeSRR4Bw5+4777yTXP7II49QUVHB3LlzmTdvHj179gTghRdeYLvttmOXXXZJud+tt96a9u3b89JLLwHwwAMPJE+0e+21V7XxnXzyyQwdOjRZSgDYf//9k9VdZ5xxRlpxV2fVqlU8/vjjnHLKKZXmd+jQgRYtWvDUU0+p+qgxtW0Ld94JTz8drlJKpTF/QYlkWqq2t7vuymxp+KabbmLixIn07t2bBx54gBtvvLHG9SdNmsQ999zDHnvswa677soTTzyRXNazZ08OOuggDj/8cO644w5atWoFwEcffcS4ceNq3O/999/PBRdcQO/evZkxYwaXXXYZALfccgt33nknvXv35sEHH6wUX1lZGStWrGDo0KG1vs+a4q5OeXk55513Hs2bV67MOffccxk+fDi77757rfuoq7wbo7lfv37emIPs3HknnHUWbIjdVldcnPl/GJH6mj17drW/kAvJ8OHDOfLII5PVMI3h0Ucf5YknnuCBBx5otNfcHKk+ezOb7u6bNqxU0eTbFGozcmRokB49Gr74Alq3hjvuUEIQaarOPvts/va3v/H0009nO5SMUFJIQ1lZmMaPh3HjYPbsbEckIgn33Xdfo77ezTff3Kiv19jUprAZLrssNED/5jdwyy0b5+s+BhEpFCopbAYzuO22cBf06NGw3XbhstURIzZetpq4jwFUxSQi+Uclhc3UvDk89BDss0846avvJBEpJEoKdVBcDE8+GaqK1HeSiBQSVR/VUYcO8MwzsNNOlS9XTdB9DCKhW4b4NfVffPEFgwcP5pZ4o5zkFJUU6qGkBFL1BJzpO0FF8kU+d53dVCkp1NPFF4cpoTHuBBUpBLnSdfaAAQPo2bNnpdcFeP755znwwAM55phj6NWrF2eccQYVFRUAPPTQQ+y+++7stttuXHTRRcl9JWL+7ne/y9ChQ5O9pj744IPJLq9HjhzJhqh6oU2bNpx33nn07duXgQMHkhhueMCAAUybNo0NGzYwePDgZAd8ifkQ+nSK96DaUFR91AAmTIAWLcJ9DHfcAYMGZTsikcrOOQeq6ZOtzvr0Cb0M11Wi6+xhw4Zx7733Mnr0aB5//PFk53Ft2rSp1JHcwIEDueOOO+jRowevv/46Z555Js89F4Z1T3RQN3fuXA4++GDmzJlT6bVq6jobQlcUiV5U4yfaN954g/f
ff5+SkhIGDRrEY489xr777stFF13E9OnTad++PYcddhiPP/44Rx99dLJktGbNGkpLS1m5ciWfffYZU6ZM4ZVXXqFFixaceeaZTJo0iZNPPpmvv/6avn37csMNN3DFFVcwfvz4SlVrI0eOZJ999qnUxxLAkiVLePbZZ+t45GumpNBAxo6FKVPgF7+A994Ldz6LSPVee+01HnvsMSB0TX3hhRdWu268C+qEb7/9Nvm4rl1n16Z///50794dgKFDh/Lyyy/TokULBgwYQGK8+LKyMl588UWOPvpo1qxZQ58+fSgvL+foo4+mffv2TJo0ienTp7PXXnsBsGbNGjp37gyE7rmPP/54AH72s5/xk5/8JPna48aN44033mDRokWbxHXllVdy8cUXp9X30uZSUmggW2wRqo0OPBAuvxyiQZdEckJ9ftE3llzrOru6/dbUX1yipLB+/XoOPfRQXn31VdydYcOGcdVVV23W622xxRaMHDmSCRMmVGqLmT9/Pu+9917G7qxWm0IDOuCAcOPa734Hb72V7WhEclsudJ1dmzfeeIOPP/6YiooKpkyZwv7778/ee+/NCy+8wLJly9iwYQMPPfTQJuMaNG/enOLiYpYtW8bAgQN59NFHWbJkCRCuwFoQDV5RUVGRbOf405/+VOkYjB07lksvvZSpU6cya9as5Pzx48cnR4fLBCWFBnbNNWGgntNPh/Xrwzx1gyGyqVzpOrsmP/jBDxgzZgy77bYbpaWlHHPMMWy33XZcddVVHHzwweyxxx707duXo446CiBZfbTrrrvSunVrBg0aRK9evfj1r3/NYYcdRu/evTn00EOTo6+1bt2aWbNmseeee/Lcc88lu+1OaNmyJbfeeisjRoxINnJ36dKFAw88sM7vqVbunlfTnnvu6bnu4Yfdwf2GG9wffNC9uDg8T0zFxWG+SCa9//772Q6hUQwbNswfeeSRBt/vv//9bz/iiCMafL9xrVu3zsh+U332wDRP4xyrNoUMGDIEjjwSLr0U2rfXcJ4ikj80yE6GLFwIvXrB11+nXm4Whv8UyZSmMsiObKo+g+yoTSFDunat+a5mdYMhjSHffvRJ/dX3M89oUjCzQWb2gZnNMbMx1axznJm9b2azzOxPmYynsY0aBaWlm85XNxjSGFq1asXy5cuVGJoQd2f58uXJhva6yFibgpk1A24FDgXKgTfNbKq7vx9bpwcwFtjP3VeYWedMxZMNzZrBY4/BnnvClluGtoRECULtCZJpXbp0oby8PNl1gjQNrVq1okuXLnXePpMNzf2BOe4+D8DMJgNHAe/H1jkduNXdVwC4+5IMxpMVffrA+efDtdfCiy+GexlEGkOLFi0oTVVUFalBJquPdgDi92eXR/PidgZ2NrNXzOw/Zpay1yAzG2Fm08xsWj7+6rn8cth++zAgjxqXRSSXZTIppLpnvWrlZnOgBzAAGAr8wczabbKR+13u3s/d+yX6G8kniTaEN98M/SOJiOSqTCaFcmDH2PMuwOIU6zzh7uvc/WPgA0KSKDgnnRSqksaODeM6i4jkokwmhTeBHmZWamYtgROAqVXWeRw4GMDMOhKqk+ZlMKasadYsdJK3YAHcdFO2oxERSS1jScHd1wOjgGeA2cDD7j7LzK4ws8HRas8Ay83sfeDfwAXuvjxTMWXbwIFwxBGhKmnZsmxHIyKyKd3R3Mjefx923x3OOkslBhFpPLqjOUf16hV6UL39dvjww2xHIyJSmZJCFowfD61awUUXqVttEckt6iU1C7bdFsaMgV/9Cv72N0iMKrhgQRikB3THs4hkh0oKWXLuueGKpNgws8DGbrVFRLJBSSFLiothw4bUyxYubNxYREQSlBSyqLrus9Wttohki5JCFv3mN7DFFpXnqVttEckmJYUsKiuDe+4J3WoDdOkCd92lRmYRyR4lhSwrK4Np08Ilqcceq4QgItmlpJADevWCYcPg1lvVyCwi2aWkkCPGjwezMPaCiEi2KCnkiB13DGM6//GPMGtWtqMRkaZKSSGHjB0Lbdro5jURyR4lhRzSoQNceCE88QS8+mq2oxGRpkhJIcecc87GvpHyrFdzESkASgo5pnVruOwyeOml0FmeiEhjUlLIQaedBt27hzaGiopsRyMiTYmSQg5q2RJ+/WuYOTNckaTxFkSksSgp5Kjjj4eSErjjjjDOgvvG8RaUGEQkU5QUclRREaxZs2ljs8ZbEJFMUlLIYUuWpJ6vrjBEJFOUFHJYSUnq+RpvQUQyRUkhh02YEMZXiNN4CyKSSUoKOaysLIyvsP324XmbNhpvQUQyK6NJwcwGmdkHZjbHzMakWD7czJaa2YxoOi2T8eSjsjL45JNwp/Pq1bD77tmOSEQKWcaSgpk1A24FDgd6AUPNrFeKVae4e59o+kOm4sl3l10G7drBL3+p7i9EJHMyWVLoD8xx93nuvhaYDByVwdcraO3bh7EWnn0Wnnoq29GISKHKZFLYAVgUe14ezavqp2Y208weNbMdU+3IzEaY2TQzm7Z06dJMxJoXfvEL6NkTzj8f1q3LdjQiUogymRQsxbyqFR9PAt3cvTfwL+D+VDty97vcvZ+79+vUqVMDh5k/WrSA66+HDz6A22/PdjQiUogymRTKgfgv/y7A4vgK7r7c3b+Nnt4N7JnBeArCEUfAD38I48bBF19kOxoRKTSZTApvAj3MrNTMWgInAFPjK5jZdrGng4HZGYynIJjBDTfAqlVw5ZXZjkZECk3GkoK7rwdGAc8QTvYPu/ssM7vCzAZHq402s1lm9g4wGhieqXgKSe/ecOqpcMst8OGH2Y5GRAqJeZ5d39ivXz+fNm1atsPIus8/hx494OCDw/CdIiI1MbPp7t6vtvV0R3Oe2nZbuPhimDo1PNZ4CyLSEJpnOwCpu223DW0Mid5UE+MtgLrCEJG6UUkhj40fr/EWRKRhKSnkserGVdB4CyJSV0oKeay6cRU03oKI1JWSQh5LNd5Cy5Yab0FE6k5JIY8lxltIjNDWogW0bg1HHpnduEQkfykp5LmyMpg/PzQ4v/JKuNP5oouyHZWI5CslhQKy115w7rlw553wwgvZjkZE8pGSQoG54gro3h1OOw3WrMl2NCKSb5QUCkxxMdx9N8yZE3pSFRHZHEoKBeiQQ0JJ4frrYfr0bEcjIvlESaFAXXdd6Abj1FM1SpuIpE9JoUC1awe33QbvvBMShIhIOtJKCmb2k1RTpoOT+jn6aOjfP/SFZKZeVEWkdun2kjqFMFDONDaOvezAY5kIShrGpEnw7rsbn6sXVRGpTbrVR7sB/wXaAFe7+8/d/ZTMhSUN4ZJLNr0sVb2oikhN0iopuPsHwHFm1hf4rZktBsa5+ycZjU7qRb2oisjmSispmNnNhOoigHnAQcBHQHG1G0nWde0aqoyq2nbbxo9FRPJDum0KVQdF1iDJeWDChNCGsHr1xnlmsGEDrFgB7dtnLzYRyU1ptSm4+/3AQ8DbwFvAQ9E8yWHxXlTNwt/LL4eVK+Hkk6GiItsRikiuSbf66EfAncBcwtVHpWY20t3/lsngpP7Kyj
a90qhDBzj7bLj6arj44uzEJSK5Kd3qo98CB7v7HAAz2wl4ClBSyENnnRW62b70Uth7bxg4MNsRiUiuSPeS1CWJhBCZByzJQDzSCMxCp3k9e8LQofCJriETkUi6SWGWmT1tZsPNbBjwJPBmbXc2m9kgM/vAzOaY2Zga1htiZm5m/TYzfqmjNm3gz38OjdDHHQdr12Y7IhHJBekmhVbA54RLUQcAS4FtgB8DKQd/NLNmwK3A4UAvYKiZ9UqxXltgNPD6ZsYu9bTLLnDPPfDqq3DssfDtt9mOSESyLd2b135eh333B+a4+zwAM5sMHAW8X2W9K4FrgfPr8BpST8cfD8uXh3aGwYPhL38JYzKISNOUbod4O5vZs2b2XvS8t5n9qpbNdgAWxZ6XR/Pi+/0+sKO7/7WW1x9hZtPMbNrSpUvTCVnSNGkSXHttePyPf0C/fvDll9mNSUSyJ93qo7uBscA6AHefCZxQyzaWYp4nF5oVAb8Dzqvtxd39Lnfv5+79OnXqlGbIUptJk8LNbfG7nmfPhj33DPcyiEjTk25SKHb3N6rMW1/LNuXAjrHnXYDFsedtCR3tPW9m84F9gKlqbG48l1xS+W7nhI8+CpepLl/e+DGJSHalmxSWRfcmOISrhYBPa9nmTaCHmZWaWUtCyWJqYqG7r3L3ju7ezd27Af8BBru7utBoJDV1jDdrFgwYAJ9/3mjhiEgOSDcpnEW4o/l7ZvYJcA5wRk0buPt6YBTwDGEshofdfZaZXWFmg+sRszSQrl1Tzy8pgaeegnnz4MADoby8ceMSkewxd699JbPvuPtnZtYaKHL3rDVF9uvXz6dNU2GiISTaFOJVSMXFob+ksjJ4+WX40Y+gY0f417+ge/fsxSoi9WNm09291ur5dEsKTwO4+9fZTAjSsFJ1mJdICAD77w/PPgurVsF++1UexU1EClO6SUEKVFkZzJ8fekydP3/TzvP22gteegmaNQtVSa++mo0oRaSxpJsUepvZ/2LTl2b2v4xGJjmjV69QldSxIxx6KDzzTLYjEpFMSTcpvOvuW8Wmtu6+VUYjk5zSrVtIDDvvDD/+MUyZku2IRCQTVH0kadt2W/j3v0N320OHwp13ZjsiEWlo6SaFn2Y0CslJkyaFEkJRUfg7aRK0axeqj370IzjjDLjqKkjjAjYRyRPpJoW1ZvYXM1tqZp+b2Z/NrEtGI5OsineB4R7+jhgR5hcXh47zTjxZWYAMAAATJUlEQVQxjNw2ejSsWZPtiEWkIaSbFCYS7kbejtCp3ZPRPClQqbrAWL06zAdo0QIeeADOOQduuQV69w5VSyKS39JNCp3dfaK7r4+m+wD1TFfAqusCIz6/qAh+97twY5s7HHIInHKK+kwSyWfpJoWlZvYzM2sWTT8D9K9fwKrrAiPV/IEDw41tY8bAH/8YBu956CG1NYjko3STwinAccBnhI7whkTzpEBNmLDpYDvFxWF+KltuGRqdp08PjdInnghHHBFuiBOR/JFWUnD3he4+2N07uXtndz/a3RfUvqXkq9q6wKjOHnvAa6/BjTfCiy/CrruGQXy++aZx4haR+qmxQzwzu6mmjd19dINHVAt1iJc/Fi6EUaPgySdD6eHqq+G440KSEZHG1VAd4h0FTK9hEqlW164wdSr885+w1VZwwgmhY73XXst2ZCJSndqSwhfufn91U6NEKDkr1c1tqfzwh/DWW3DPPaGNYd994fjj4eOPGzFYEUlLbUlB149ISjXd3JZKs2bhctUPP4TLLgtVSt/7HlxwgS5hFckl6vtI6qS2m9uq06YNjB8fxoE+8US44Qbo0gVOPRXefjtz8YpIempLCntU6TJbXWcLkN7NbTXZYQeYOBHeew+GD4fJk6Fv39DmMHkyrF3bYKGKyGaoMSm4e7MqXWar62wBNu/mtpr06gW33w6ffBLujl6yJPTAWlIC48bBp5/WO1QR2QyqPpI62dyb22rTrl3oR+mDD+Dpp+H73w/VTF27hpvgHnwQvtRAsCIZp6QgdVLXm9tqU1QEhx8eEsNHH8G554YuNE46CTp3hiFD4M9/Vq+sIplS481ruUg3rzU9FRXwn/+EtoaHH4bPPw8N1kcfHS5tPeSQTUstIoXim29gzpxw5d4ee8BOO9VtPw1185pInaV7H0NtiorCvQ033QTl5aFX1hNOgKeeCkODbrMN/N//hTaJ//5XHfFJ/lmzJpSM//738D0fNQoOOyz83xQXw+67w09/GkrQmaaSgmRE4j6G+GWrxcUNU8WUsHYtPP98+Ef6+99h9uwwv6QEBg0K0yGHhLupRbJl/XpYtAjmzQv385SXhwsryss3Tl98UXmbrbYK46FXnXr2DKXkuki3pJDRpGBmg4AbgWbAH9z96irLzwDOAjYAXwEj3P39mvappJAfunUL/wBVlZRkrufUBQs2Joh//Qu++iqUMnr3Dpe6JqbNvUJKpCbu4QbMjz/eOM2bt3FasAA2bKi8TefO4f6cLl3C5dmJv927h5N/584N30dY1pOCmTUDPgQOBcqBN4Gh8ZO+mW3l7v+LHg8GznT3QTXtV0khPxQVpa7GMQttBJm2di28+moYDe6VV0KbxNdfh2VdumxMEP37w267QevWmY9J8teqVRtP+PPnV348f374ARLXqVM4wVedSkpg++1hiy0a/z2kmxSaZzCG/sAcd58XBTSZ0MFeMikkEkKkNepWo2B07Zq6pNBYv9JbtoQBA8IEoQg/c2ZIEIlpypSwzAy++91QoujdOzTm9e4dSjvq0bWwuYcqzqVLYdmycJ/MggWVf/F//DGsWFF5u7ZtobQ0nOgHDgzfldLS8Ld797A8X2UyKewALIo9Lwf2rrqSmZ0F/BJoCRySakdmNgIYAdBVZf+8MGFC6jaFut7HUF/Nm4c7pvv2hbPPDvMWLgwd9c2cGaZ33oHHHttYwmnbNowil6jLTUzf/a6udsp1GzaEE3zV+vtPPglXry1btjERpBrro2XLjSf6vfcOf+NT+/aF+4Mhk9VHxwL/5+6nRc9PAvq7+9nVrH9itP6wmvar6qP8MWlS6Atp4cJQQpgwoXIjc23Ls+Grr2DWrJAgZs4MVzN9+GFoKIzr2jUki9LS8LikZOO0ww4hCUnDWrs2nMiXLg0n/CVLKj9OTJ9+Gqb16ytv36JFqLr5zndC9U7Hjhv/xh937QrbbReqQAtJLrQp/AAY5+7/Fz0fC+DuV1WzfhGwwt23rmm/SgqFoTGuTmpIX38dLhn88MNw1/WHH4Zp/vxwIoorKgqJYccdw0mmQ4dw2WyHDhunbbYJvzbbtt04tWpVuL8+49zD8VyxIlx1s2JF5cdffFH55J94vGpV6v01bx4aZjt3Dif273xnYyNufOrYsfBO9JsjF5JCc0JD80DgE0JD84nuPiu2Tg93/yh6/GPg8tqCVlIoDNm4OilT1qwJpZ2FC8N7Skzl5eGqlMRU25CkzZqF5NCmTfjbrt3G5LHNNpWn9u1D43jLlmHaYovKf1u2DPusqEg9bdgA334bprVrNz5OTOvXhwSVaioqqrx9qunLL
8NJfOXKyn8Tj6v+iq96HDp12jglTvbxadttN85v165pJNP6ynpDs7uvN7NRwDOES1LvdfdZZnYFMM3dpwKjzOyHwDpgBVBj1ZEUjvr2sppLttxyY3tDTVavDr+CE0lixYpQXfXll5WnxLyVK0M1yKxZYbv/5Um/xGYhsW29dThhb711+PXes2d4vPXWlZNd+/aVH7dtq5N8NmW05tPdnwaerjLvstjj/5fJ15fcle2rk7KhuDhMXbrUbfv160OiSFSzrF698Vd+qr+JX/XVTVtsUbmEEZ8SbSIVFaG6p+oU336LLULVV3xbndTzl5rDJCty7eqkfNC8+cZGUZFMacLNLpJN6fSy2lB9J4lI+lRSkKwpK6v+SqOqVyclxoBObCcimaGSguSkuo4BLSL1o6QgOamQrk4SySdKCpKTGmoMaBHZPEoKkpNqGwNajdAimaGkIDmppquTEo3QCxaEa+YTjdBKDCL1p5HXJO8UUhcZIo1FYzRLwVIjtEjmKClI3lEjtEjmKClI3qmtERrUEC1SV0oKkndq6yJDDdEidaeGZik4aogW2ZQamqXJUkO0SN0pKUjBSachWm0OIqkpKUjBSeduaLU5iKSmpCAFp7aGaPXAKlI9JQUpSGVloVG5oiL8jY/BkE6bg6qXpKlSUpAmp7Y2B1UvSVOmpCBNTm1tDqpekqZMSUGanNraHHRJqzRlSgrSJNXU5qBLWqUpU1IQqUKXtEpTltGkYGaDzOwDM5tjZmNSLP+lmb1vZjPN7FkzK8lkPCLp0CWt0pRlLCmYWTPgVuBwoBcw1Mx6VVntbaCfu/cGHgWuzVQ8IpujPpe0qmpJ8lkmSwr9gTnuPs/d1wKTgaPiK7j7v9098ZvrP0CXDMYj0iBqanNQ1ZLku0wmhR2ARbHn5dG86pwK/C3VAjMbYWbTzGza0qVLGzBEkc1XU5uDqpYk32UyKViKeSn76TaznwH9gOtSLXf3u9y9n7v369SpUwOGKLL5ampz0N3Sku+aZ3Df5cCOseddgMVVVzKzHwKXAAe5+7cZjEekwZSVVW5nSOjaNfVYDlXvlk6UJhLVS4l9imRbJksKbwI9zKzUzFoCJwBT4yuY2feBO4HB7r4kg7GINIqGuFtaJQnJpowlBXdfD4wCngFmAw+7+ywzu8LMBkerXQe0AR4xsxlmNrWa3YnkhfreLa2Gask2Dccp0ohqGypUQ4lKpmg4TpEcVFv1khqqJduUFEQaUW3VS+rWW7JNSUGkkdV0t7QaqiXblBREcogaqiXb1NAskkfUUC11pYZmkQJU34ZqVS1JbZQURPJIfRqqVbUk6VBSEMkzdW2oViO1pENJQaSA1KezPpUkBJQURApOdSWJ2u6BUElCQElBpMloiEZqlSQKn5KCSBNR37upVZJoGpQURJqQ+txN3RAlCSWN3KekICJA5ksSqn7KD0oKIpKUyZJEOklDpYjsU1IQkbTUtyRRU9JQKSJ3KCmISNrqU5KoKWmoETt3KCmISIOorSRRU9LQ5bC5Q0lBRBpMTSWJmpKGLofNHUoKItJoqksauhw2dygpiEjW5cLlsEoagQbZEZGclzipx0/8xcUbE0dRUTjZV2UWSiW1DT5U2/4LgQbZEZGCkcnLYUH3UMRlNCmY2SAz+8DM5pjZmBTLDzSzt8xsvZkNyWQsIpLfMnU5LNT/HopCShoZSwpm1gy4FTgc6AUMNbNeVVZbCAwH/pSpOESk8NXnclio3z0UBZc03D0jE/AD4JnY87HA2GrWvQ8Yks5+99xzTxcR2VwPPuheUuJuFv4++GDlZcXF7uG0Hqbi4jDfrPL8xGQWti0pSb28pKT2fTcmYJqncY7NZPXRDsCi2PPyaN5mM7MRZjbNzKYtXbq0QYITkaYlU/dQ1Le9AnKrJJHJpGAp5tXpUid3v8vd+7l7v06dOtUzLBGRTdX1Hor6Jo1cq37KZFIoB3aMPe8CLM7g64mINLhMtldA7nU5nsmk8CbQw8xKzawlcAIwNYOvJyKSEXWteoLMdzne0DKWFNx9PTAKeAaYDTzs7rPM7AozGwxgZnuZWTlwLHCnmc3KVDwiIplSn6RR3+qnhqY7mkVEsqi2u6lruxs7XbqjWUQkD9S3+qmhNc/MbkVEJF1lZdX3sZSYf8klocqoa9eQEDLVJ5OSgohIjqspaTQ0VR+JiEiSkoKIiCQpKYiISJKSgoiIJCkpiIhIUt7dvGZmS4EUt3IA0BFY1ojhbK5cjk+x1Y1iqxvFVjf1ia3E3WvtUTTvkkJNzGxaOnfsZUsux6fY6kax1Y1iq5vGiE3VRyIikqSkICIiSYWWFO7KdgC1yOX4FFvdKLa6UWx1k/HYCqpNQURE6qfQSgoiIlIPSgoiIpJUMEnBzAaZ2QdmNsfMxmQ7njgzm29m75rZDDPL6ghBZnavmS0xs/di87Yxs3+a2UfR3/Y5FNs4M/skOnYzzOxHWYptRzP7t5nNNrNZZvb/ovlZP3Y1xJb1Y2dmrczsDTN7J4ptfDS/1Mxej47blGjI3lyJ7T4z+zh23Po0dmyxGJuZ2dtm9tfoeeaPm7vn/QQ0A+YC3YGWwDtAr2zHFYtvPtAx23FEsRwI9AXei827FhgTPR4DXJNDsY0Dzs+B47Yd0Dd63Bb4EOiVC8euhtiyfuwAA9pEj1sArwP7AA8DJ0Tz7wB+kUOx3QcMyfZ3Lorrl8CfgL9GzzN+3AqlpNAfmOPu89x9LTAZOCrLMeUkd38R+KLK7KOA+6PH9wNHN2pQkWpiywnu/qm7vxU9/pIw7vgO5MCxqyG2rPPgq+hpi2hy4BDg0Wh+to5bdbHlBDPrAhwB/CF6bjTCcSuUpLADsCj2vJwc+aeIOPAPM5tuZiOyHUwK27r7pxBOMEDnLMdT1SgzmxlVL2WlaivOzLoB3yf8ssypY1clNsiBYxdVgcwAlgD/JJTqV7r7+miVrP2/Vo3N3RPHbUJ03H5nZltkIzbg98CFQEX0vAONcNwKJSlYink5k/GB/dy9L3A4cJaZHZjtgPLI7cBOQB/gU+CGbAZjZm2APwPnuPv/shlLVSliy4lj5+4b3L0P0IVQqt8l1WqNG1X0olViM7PdgLHA94C9gG2Aixo7LjM7Elji7tPjs1Os2uDHrVCSQjmwY+x5F2BxlmLZhLsvjv4uAf5C+MfIJZ+b2XYA0d8lWY4nyd0/j/5xK4C7yeKxM7MWhJPuJHd/LJqdE8cuVWy5dOyieFYCzxPq7duZWWI44Kz/v8ZiGxRVx7m7fwtMJDvHbT9gsJnNJ1SHH0IoOWT8uBVKUngT6BG1zLcETgCmZjkmAMystZm1TTwGDgPeq3mrRjcVGBY9HgY8kcVYKkmccCPHkKVjF9Xn3gPMdvffxhZl/dhVF1suHDsz62Rm7aLHWwI/JLR5/BsYEq2WreOWKrb/xpK8EersG/24uftYd+/i7t0I57Pn3L2Mxjhu2W5db6gJ+BHhqou5wCXZjicWV3fC1VDvALOyHRvwEKEqYR2hhHUqoa7yWeCj6O82ORTbA8C7wEzCCXi7LMW2P6GoPhOYEU0/
yoVjV0NsWT92QG/g7SiG94DLovndgTeAOcAjwBY5FNtz0XF7D3iQ6AqlbE3AADZefZTx46ZuLkREJKlQqo9ERKQBKCmIiEiSkoKIiCQpKYiISJKSgoiIJCkpiMSY2d5Rj6PvRL2O3hXdKSzSJCgpiFTWCjjJ3fdw910I17H/IcsxiTQaJQWRGHd/wd3LY89vB3Y2s1PNbFWsj/1PzGwcgJn1MbP/RB2o/cXM2ptZczN708wGROtcZWYTosfzzaxj9PhBi40fIZJtSgoiVZjZBbGT/wzCXaRLgJfcvY+HDtR+F9vkj8BF7t6bcCfs5R56shwO3G5mhwKDgPFVXmd3YLfMvyOR9CkpiFTh7tclTv5RAphZ3bpmtjXQzt1fiGbdTxgsCHefRehq4kngFA9jfcT9Gri8wd+ASD0oKYjUwMy2InQ9XddxEnYHVgLbVpm/L/AVoU8skZyhpCASY2bDzez70eNmhDEI/k7oaHET7r4KWGFmB0SzTgJeiLb/CaHDvAOBmxI9ckbGAZdl4j2I1IeSgkhls4DfmtlbhERgwGm1bDMMuM7MZhJKFVdEDclXA6e6+4fALcCNsW1ed/eUiUYkm9RLqoiIJKmkICIiSUoKIiKSpKQgIiJJSgoiIpKkpCAiIklKCiIikqSkICIiSf8f2E/fPMrDAQoAAAAASUVORK5CYII=\n", 424 | "text/plain": [ 425 | "
" 426 | ] 427 | }, 428 | "metadata": { 429 | "needs_background": "light" 430 | }, 431 | "output_type": "display_data" 432 | } 433 | ], 434 | "source": [ 435 | "import matplotlib.pyplot as plt\n", 436 | "\n", 437 | "acc = history.history['acc']\n", 438 | "val_acc = history.history['val_acc']\n", 439 | "loss = history.history['loss']\n", 440 | "val_loss = history.history['val_loss']\n", 441 | "\n", 442 | "epochs = range(1, len(acc) + 1)\n", 443 | "\n", 444 | "plt.plot(epochs, loss, 'bo', label='Потери обучения')\n", 445 | "plt.plot(epochs, val_loss, 'b', label='Потери проверки')\n", 446 | "plt.title('Потери во время обучения и проверки')\n", 447 | "plt.xlabel('Эпохи')\n", 448 | "plt.ylabel('Потери')\n", 449 | "plt.legend()\n", 450 | "\n", 451 | "plt.show()" 452 | ] 453 | }, 454 | { 455 | "cell_type": "code", 456 | "execution_count": 23, 457 | "metadata": {}, 458 | "outputs": [ 459 | { 460 | "data": { 461 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzt3XmcFNW5//HPl01WEWHUALLoxRURlEWN4q64REUTfyAqrmQjLokmojEQEq9xJfHGq8ENoygxJBpjvG5BjAoaBgUUXMImjriwCAiIOMPz++NUDzU9Pd09S0/3zDzv16te3bX2UzU99fQ5p+qUzAznnHMunWb5DsA551zh82ThnHMuI08WzjnnMvJk4ZxzLiNPFs455zLyZOFcHklqke8YnMuGJwvn6pGk/pL+Kmm5pPXAT/Idk3PZkN9nkXuSNsZG2wJfAWXR+HfNbGr9R+Xqm6Q9gX8TEsQjZrY1zyE5lzVPFvVM0nLgEjN7Id+xuPolaQrwjpndlO9YnKsur4YqAJJ2kPRbSSuj4beSdojmTZD0cGzZ8nFJvSRZvN5b0sOSJsTGL5W0WNJaSU9K6hqbt7+k56N5n0q6VtKhkjZGw9eStsbGe0i6QNIrWe7XBZLKonU3SJohqVs2sSVtJ7GfiTjelnRUbL5JukzSUkmrJd0iqVls/kWS3pH0uaRnJfVMWnd+bLx59DcoiU07XNI8SV9En78t/vlJsXaN9mVttG+XxmYPBvaXVCJplaSHJHWM1vuHpB8lbWuBpDMkHZUUT/J4V0l/iba5TNJlsXnJ358W0T73isanSPp1bP7T8e+UpH0lvRr9/TZGf88Lqtj3tNtKsfxySV/G/q5bJM2Mza/y7yqpmaSfS/pA0meS/hg7lpm+L/vEvvfvSTo7aR/ujuZ/IemlFN+X/4renybpQ0l71GT/GxpPFoXhOuAQoD9wIOGk8vNo3jZq+HeSdAxwI3A28A3gA2BaNK8D8ALwDNAV+C/gn2Y228zam1l7YCpwc2LczFbUIIzZ0bZ2IVS/XZkptjR2AjoAjwG3Js0bDgwEDgJOBy6KPucM4FrgTKAIeBl4NGndVpIGRe9PAdYlzb8VeBzYMdqXlWlifBQoIRzTbwP/LenYaF5b4DBgKNAbaAf8Ppr3IHBuYiOSDgS6AU+T5jsQnTz/DsyPlj8WuELSiWliTCk6ofZLmjweeAfYOdr32bXYVirfin3fxqaYn/LvClwQDUcDewDt2X4sEyp9XyS1A54HHiF8J0cC/ytp/9h6o4BfAV2AeYT/g+T9OxK4GzjZzJammH8U2e1/g+HJojCMAiaa2Wdmtgr4JXBeNG8FMEjSTjXc7v1m9oaZfQWMAw6NflWeCnxiZreZ2RYz+8LMXq/1nlStWTSsySK2dAQ0j20n4SYzWxsltN8STgIA3wVuNLN3zKwU+G+gf/zXInAfcEn0/pJoPNVnKm1g0u7A4cDPomM6D7iX7X9LgNvNbKmZbYz2eUT0y/NvQB9JfaLlzgP+FLVrfAjsEiWQZIOAIjObaGZboxPXPcCIdLGmiF3AzcAvkmcR9j3rc0WabdVEVX/XUVR9LCuEQ8Xvy6nAcjN7wMxKzewN4C+ExJ7wDzP7V/S9vI7wvdw9Nn8A8CQwyszeSg64jve/YHiyKAxdCb+sEz6IpkH4tT0fWCZpHXBNivVXS1oXzT87Nr3CdqN/qjWEX6C7A0tqGO8h0eetlTRL0sBMyxJ+rfcGpmQRW1VWAxuBK4Dkev8PY+/jx68n8LvY8VlLOIHEP+cp4KioeuEbwNykbY8FTgO2RNtIWV0WTV9rZl8kxZL4rK+o/HduAewanZgeA86NSgsjgYcAzGwZMBF4Pvr8p2Lb6Al0TexfNP9aYNfYMmfH5q2uIvazCcd/RtL0awm/3DdH6x9SxfrZbKsmqvq7pvqfaUHF/U71fekJDEk6XqOA3VJ9ZvS9XEvFv/m9wH+A46uIuS73v2B4sigMKwlf4oQe0TSiX6jfNrNOZrYT8JsU63cxs52i+Y9Vtd2oCN4Z+IjwD7FnDeN9LfqsIkKRPrn4n2rZ1sDDbE8W6WKrShcza0uojviLpDaxefFffuXHj7Cf300cn2hoY2azYsuXEqqZpsfiK2dmcwgno+uifamqGmolsHNUxRePJbFPK6j8dy4FPo3GHyScuI4FNptZeZVPVHLYJfr8U2Pb+BBYlrR/Hczs5Ngyj8W+H11SxN2SUO3ysxT7voTwY+UP0fqvVbHvGbdVQ1X9XVP9z8SPJaT+vnwIvJR0vNqb2fdTfaak9sDOVPybX0H4G1ws6aCkeOt6/wuGJ4vC8Cjwc0lFkroQiq8PZ1gnG48AFypc278DoQrmdTNbTvh1upukKxQa2DtIGlKdjZtZGbCe7L5HRrhcuCiL2DIpAzoCrWLTrpbUKaouuBz4UzT9bmBcok5aUkdJ30mxzcmEuvlU9dNnE05Gk9LuoNmHwCzgRkmtJfUDLo5t81HgSkm9o5PQfxOqmkqj9WcT2iduIypVZOHfwAZJP5PURqGBvm+sDSYb5wGzzGxB8gxJhwBnEKp5arWtGqrq75r2WCaJf1+eAvaSdJ6kltEwSNK+seVPVri
ooRXhxP969LdNeNnMPgGuAh6Q1DI2r673v2B4sigMvwaKgQXAW8Ab0bRaMbN/AtcT6mQ/JpQkRkTzviAUo78FfEIoVh+d5aYHKVzRU0L4JXx5mmUPVbjPZD2hkXlsptjSWBdt64+E0sL62Ly/EaqP5gH/IGp3MLPHCVUQ0yRtAN4GTkrecFT3PdLMKjRuS+pESBKXVnEiSjYS6EX4Jfo4MN7Mno/mTSH8CPgXsAzYQuVG3T8CB5Dlj4UoYX+LcHHEMkLVy72Ek2O2OhH+FhVEJ8F7gMvNbENttlULKf+uwP2EhBo/lj9KWrfS9yX63p9A+K6tJHz3bwJ2iK33CKFhfy1wMOE7XomZPUQoqVwbm1zX+18w/D4L1+BJMqCPmS3Odyy1Jel8YIyZHZ7vWPItH39XhXthSszs55mWbWq8ZOFcgZDUFvgBoUrMuYLiycK5AhDdF7GK0ED7SJ7Dca4Sr4ZyzjmXkZcsnHPOZdQo+iwB6NKli/Xq1SvfYTjnXIMyd+7c1WZWlGm5RpMsevXqRXFxcb7DcM65BkXSB5mX8moo55xzWfBk4ZxzLiNPFs455zJqNG0WqXz99deUlJSwZcuWfIfiClDr1q3p3r07LVu2zLywc01co04WJSUldOjQgV69ehG6mHcuMDPWrFlDSUkJvXv3znc4zhW8Rl0NtWXLFjp37uyJwlUiic6dO3up0zVoU6dCr17QrFl4nVqpz+S606iTBeCJwlXJvxuu0KVLBlOnwpgx8MEHYBZex4zJXcJo9MnCOecaokzJ4LrrYPPmiuts3hym54Inixxas2YN/fv3p3///uy2225069atfHzr1q35Dq/OvPXWW5x11lkMGTKEQYMGUVZWlu+QnCsYmUoHVc3LlAxWrEj9eVVNrzUzaxTDwQcfbMkWLVpUaVo6Dz9s1rOnmRReH364WqunNX78eLvlllvqboMF4tNPP7UhQ4bYm2++me9QaqS63xHnkqU7bzz8sFnbtmahbBCGtm3D9HTzzML24vMSgxTm9+yZen7PntWLHyi2LM6xXrKI1Hf93+23307fvn3p27cvv/3tbwGYOXMmp566/fHKvXr1YvXq1Sxfvpy+ffuWT58+fToXXHABAB988AHHHnss/fr149hjj2VF9LPi008/Zfjw4Rx44IEceOCBzJo1i6uvvrpSKecXv/hFpc9Nxcy4+uqr6du3LwcccAB/+tOfymNp1qwZ55xzToV9uf766/nd735Xvv51113HHXfcwZQpUxg7dvvD4dq3b1/+/pZbbmHQoEH069eP8ePHA6Td9wsuuIDp06cDcO+99yKJ1atXA/CrX/2Kvffem/79+9OmTRuWL1+e4S/imqpMjcS1aTdIVzrIVHLo0SN1vInpN9wAbdtWnNe2bZieE9lklIYw1LZkUVdZuirxkkVxcbH17dvXNm7caF988YXtt99+9sYbb9hLL71kJ598ciymnrZq1SpbtmyZ7b///uXT//znP9vo0aPNzOzUU0+1KVOmmJnZfffdZ6effrqZmZ199tk2adIkMzMrLS21devWpYzFzOzFF1+0U045JW3806dPt+OOO85KS0vtk08+sd13391WrlxpEyZMsP3337/SvixbtswGDBhgZmZlZWW2xx572OrVq+3BBx+0H/zgB+XbbdeunZmZPfvss3bppZfatm3brKyszE455RR76aWX0u776NGj7c9//rN9+eWXdvDBB9suu+xiq1atsvXr11tRUZFt3rzZzMz2339/W7ZsWcr98pJF41fTX/7ZzM903khXOshUcsj02Zn2LVt4yaJ66rP+75VXXmH48OG0a9eO9u3bc+aZZ/Lyyy/TvXt33nnnnZSXcy5ZsqS8vePqq68unz579mzOOeccAM477zxeeeUVAGbMmMH3v/99AJo3b07Hjukfyfzyyy/Tv39/BgwYwP33358y5pEjR9K8eXN23XVXjjzySObMmYOZceaZZ1bal169etG5c2fefPNNnnvuOQYMGEDnzp3p3r078+fPZ9u2bRW2/9xzz5Uvd9BBB/Huu+/yn//8J+2+J9x5552MHj2aNm3alE8zM7788su0++waj6p+/de2kbi27QbpSgeZSg6jRsHkydCzJ0jhdfLkMD1h1ChYvhy2bQuvo1I+LbxuNOqb8qqjR4/wRUo1va6FZF7ZHnvswTnnnMNBBx1Eq1atWLlyZfm8Pffck3nz5gGhKuapp55KuY2aXg56xBFH8NRTT7F69Wr22WcfRowYQdtYGbeqmHfccUfWrVuXct4ll1zClClT+OSTT7jooosAOOqoo9h777054IADKtw5bWaMGzeO7373uxW2sXz58rT7vmHDBh599FFmzZrFbbfdVh7TxIkT2XPPPenRowdLliypwRFxhWTq1HCCXrEi/E/ecMP2E2MiISRO6omEAOlP9qNGZT7ZZ5MM0p03brihYmxQsaoo3TwIMeYyAVSHlywi9Vn/N3ToUJ544gk2b97Mpk2bePzxxzniiCMA+PWvf82iRYuYN28eXbt2zbitww47jGnTpgEwdepUDj/8cACOPfZY7rrrLgDKysrYsGFDVrF16NCBFi1aVLqiaejQofzpT3+irKyMVatW8a9//YvBgwczZMgQHn/88ZT7Mnz4cJ555hnmzJnDiSeeCECzZs247777WLhwYXkCADjxxBO5//772bhxIwAfffQRn332WcZ4J02axGWXXUarVq0qTN9ll1049dRTmT9/PnvuuWdW++4KU21KB7X55Z/N/EznjXSlg2xKDoXEk0WkPv9wBx10EBdccEH5yfaSSy5hwIABNdrWHXfcwQMPPEC/fv146KGHyhuVf/e73/Hiiy9ywAEHcPDBB7Nw4cK025k1axaHH344hxxyCFdeeSUdOnSoMH/48OH069ePAw88kGOOOYabb76Z3XbbjW9+85t85zvfYeDAgQwZMoRLL720fF9atWrF0Ucfzdlnn03z5s3Tfv4JJ5zAOeecw6GHHsoBBxzAt7/9bb744ouM+29mnHvuuRWmLV68mFtvvZW777474/quMOTqEtLanuxrkwwS0lUV1Wc1Uq1l07DREIa6uHTW1a2ysjI78MAD7f333893KFXy70j9qE0jc20uIa2LRuJcXlJfCMiygTvvJ/m6GjxZFJaFCxda79697cc//nG+Q0nLvyN1ozbJINMVRZnmZ3NFU2M+2deWJwvzE4HLzL8j2avqpJvLy0uz2X662Fxm2SYLvxrKOZdRLq84ynRFUaIev6qroRLLFHR9fyOQ0wZuScMkvSdpsaRrUszvKemfkhZImimpe2xemaR50fBkLuN0ztW8kbm2VxxlcyVig2oIbqRyliwkNQfuBE4C9gNGStovabFbgT+aWT9gInBjbN6XZtY/Gk7LVZzONRW16bYil1ccNbRLSJusbOqqajIAhwLPxsbHAeOSllkIdI/eC9gQm7exOp/nbRauJprKdySXjczeptCwUQDdfXQDPoyNl0TT4uYDZ0XvhwMdJHWOxltLKpb0mqQzUn2ApDHRMsWrVq2qy9jrRFPpotwVhl
x2d52udFBo3VK4HMkmo9RkAL4D3BsbPw/4n6RlugJ/Bd4EfkdIKB0T86LXPYDlwJ7pPq/QSxaNtYvyhq6QviO1UR/dXXvpoHGiAEoWJcDusfHuwMr4Ama20szONLMBwHXRtPWJedHrUmAmULNbnAtUQ+uifObMmXTs2LG8ZNStWzcmTJgAhP6errjiCg477DD69u3Lv//9bwDWrl3LGWecQb9+/TjkkENYsGABABMmTKBbt27069ePffbZhxkzZgCwatUqzjrrLAYNGsSgQYN49dVXy5c/77zzOOaYY+jTpw/33HNPpeP10ksvMWTIENavX19h+tq1a+nYsSO33nprzf5QBaQ2JQdvZHa1lctLZ+cAfST1Bj4CRgDnxBeQ1AVYa2bbCG0a90fTOwGbzeyraJlvAjfXJpgrroBYV0R1on9/iM7z1TJ37lweeOABXn/9dcyMIUOGcOSRR9KsWbNEiStrY8eO5fzzz2f06NHcf//9XHbZZTzxxBNcdtllHHnkkTz++OOUlZWxceNGDjvsMCCcfNu3b89VV10FhJNuNhKdDQLceuut5f04AWzatIlZs2bxr3/9i4suuoi3336b8ePHM2DAAJ544glmzJjB+eefX94f1JVXXslVV13FLbfcwlNPPcUxxxzD5ZdfzpVXXsnhhx/OihUrOPHEE3nnnXcAWLBgAa+99hqbNm1iwIABnHLKKeWf/dZbb3H55Zfz9NNPV+pd98Ybb6Rnz57VOqaFKN2lq9lcnpqpQ7tsLk91TVvOShZmVgqMBZ4F3gEeM7OFkiZKSlzddBTwnqT3gV2BxO+YfYFiSfOBF4HfmNmiXMVa3xpiF+WZjBw5EggdDm7YsIF169bxyiuvcN555wFwzDHHsGbNGtavXw+EDgD3228/brrpJi688EIAXnjhBcaOHUv//v057bTT2LBhQ3n/UKeffjpt2rShS5cuHH300eWll5UrV3LSSScxevToSh0vfvTRR7z22msMHz682vuTL1WVHmpbcvB2BVdbOb0pz8yeBp5OmvaL2PvpwPQU680CDqjLWGpSAsiVqkoPhdxFeSbJnysp5X4mlkuULF544QV+8pOf8Nxzz7Ft2zZmz55d4bkU6bYP8O677zJt2jR++tOfcu6551JUVFS+zC9/+Uuuv/56Zs2alfV+5FO60kNtSw7gN6652vFeZ/OgIXZRnkniMauvvPIKHTt2pGPHjgwdOpSp0U/jmTNn0qVLF3bccccK6+24447lj0I94YQT+P3vf18+L96F+d/+9je2bNnCmjVrmDlzJoMGDQJCieW0007j2muv5fLLLy9ffsmSJSxfvpwTTjihWvuRazVtd6iLkoNzteHdfeRBvItyoNZdlF900UXccsstFBUV8cADDwChi/IxY8Zw33330bx5c+666y4OPfTQKreT6KJ806ZNKbsoz6RTp04cdthhbNiwobwaa8KECVx44YX069ePtm3b8uCDD5YvP2nSJB5++GFKS0vLG5/vuOMOfvjDH9KvXz9KS0sZOnRoeTfjgwcP5pRTTmHFihVcf/31dO3alffff798e+effz5Tp07l6aefpm3btrz77rvlx6JQ1Kbd4aGHvOTg8iybS6YawlDol842ZkceeaTNmTMnZ9vP5WXHdf0dSXd5aV30ruqXrrq6RgFcOutco5OrLjMgu24xvAHa5U02GaUhDF6ycDVRne9Irp/LkPgMLz24+oSXLIJwLJyrrLrfjVx2mZHgpQdXqBp1smjdujVr1qzxhOEqMTPWrFlD69atK0xPV81U2664/Yol15A16quhunfvTklJCYXYyaDLv9atW9O9e/kjVDJerZTpIT1+r4Nr1LKpq2oIQ6o2C+dSqapdoLbPek63becKFf5YVecqq81d0v54T9eUyRpJff7AgQOtuLg432G4AterV+qqpERfg1XNW748l1E5lz+S5prZwEzLNeoGbtc01bSROpurlZxrqjxZuEYl041x6a5Y8quVnKuaJwvX4NTmIUB+l7RzNePJwjUote1Sw0sPztWMJwtXcHL5+FDw0oNzNeHJwhWUXHfG55yrmZwmC0nDJL0nabGka1LM7ynpn5IWSJopqXts3mhJ/4mG0bmM0xWO+nh8qHOu+nKWLCQ1B+4ETgL2A0ZK2i9psVuBP5pZP2AicGO07s7AeGAIMBgYL6lTrmJ19as2/S95Z3zO5UcuSxaDgcVmttTMtgLTgNOTltkP+Gf0/sXY/BOB581srZl9DjwPDMthrK6e1ObSVvCSg3P5kstk0Q34MDZeEk2Lmw+cFb0fDnSQ1DnLdZE0RlKxpGLvLLBhqO2lreAlB+fyIZfJQimmJfctchVwpKQ3gSOBj4DSLNfFzCab2UAzG1hUVFTbeF098EtbnWuYcpksSoDdY+PdgZXxBcxspZmdaWYDgOuiaeuzWdcVtqraJfzSVucaplwmizlAH0m9JbUCRgBPxheQ1EVSIoZxwP3R+2eBEyR1ihq2T4imuQYgXbuEX9rqXMOUs2RhZqXAWMJJ/h3gMTNbKGmipNOixY4C3pP0PrArcEO07lrgV4SEMweYGE1zDUC6dgmvZnKuYfIuyl2da9YslCiSSaFqyTlXOLLtotwffuRqZOrUqh8ClOnxo67h27Il/O0/+KDi8NlnoVqxffvUQ8uWYd0vvwxD/P2XX4Zt77BD9kOrVtvft24N7dpV/LxWrfJ7nBoTTxau2jI9qzqbZ1E3ZWbw9dfbT5SpXrdsga++gq1bw2t8KC2FFi3CiTc+tGoVXsvK4PPPYe3a8Jr8fvPmsExpaXiNvy8thebNq962BCtXwqefVtynZs2ge3fYddcwf+PG7UMiCaQiQZs24UTfpk0YT97f2mjZcnviaNcuxJk8SNtLw19/XXHYunX7+xYtYMcdw9Chw/b3ifH27cP3vF27iq+J98mJs6ElMq+GctWW7mlziSfKpSt5NERmsH49fPJJGD7/PPUJJTG+cWP6E3Zpaf3EvcMOsPPO0KnT9qFdu5AQWrRI/VpWVvUJs6wMvvGN8LdODL16QbduYf1Uyspg06ZwTLZurZgcEgmoKokTeHICSZVEv/pq++ckhi++2P5+06ZQDWoWXuND4jSYKkEmhtLSsL0NG1IPW7ZU72/TqtX2xNG27fYnuyfHtW1b+Lt07Ag77VT5daedoHdvGD68ep+fkG01lCcLV22NsU1i61ZYuhTefz8MS5fCxx9vTw6ffFK9k4EU/pnjJ+r4+3btwskyfuKMv6aremnRIpy4kn8FJ4ZmzbZ/Tps2uTtmrqKyslBqSwybNlV8H09iyQlt8+btJZxUpZ7S0vBjZf16WLcuDIlxMzj0UJg1q2Zxe5uFq5WG2iZhFko38+aFf8TkX5KJ9199VTE5LFtWMdHtvDN07Qq77QaHHx5+Te+22/ahU6ftvzyTf4G2bBl+KTbzPp2blObNQ3VUhw7195nbtoXveW2r67LhycJV0lDaJLZtg8WLYe5ceOON7cO6ddmt37Yt7LUXHHwwnHNOeL/XXtCnT0gGzhW6Zs1Cm0l98GooV0khtkmsW
wdvvVVxmD8//KqC8Ou+X79w4j/oIBgwIJQO4kX7+PsWLaBLl/T15c41Bd5m4Wos320SGzbAjBkwe/b2xFBSsn1+x45wwAFw4IHbk8N++4XqH+dc9Xibhaux+m6T2LYtVB89+2wYZs8ODXotW8K++8KRR4bkkBi6d/cSgXP1zZOFq6Q+2iRWrIAXXwzJ4fnnYfXqMH3AALjqKjjxRDjssIZ3LbpzjZUnC1dJou2hrtoktm2DRYvg5ZfhlVfC64fR00p23RVOOikkh+OPh112qZt9cM7VLW+zcDmxbh3cey+89BK8+mq4EQ3CJahHHBEuRx06NFQr+SWmzuWPt1m4tHJ1NdO2bTBlCowbF/oJ2ntvOOuskByOOCLcaertDc41PJ4smqBM91HU1Ouvw49+BHPmhPaG//u/cKWSc67h8wqAJijTc7Cr69NP4cIL4ZBDwiWuDz0U2iY8UTjXeHiyaIIyPQc7W19/DZMmhbuep06Fn/4U3nsPzj3Xq5qca2y8GqoJqs19FF99FUoNzzwDf/1r6F9p2DD47W9D+4RzrnHKaclC0jBJ70laLOmaFPN7SHpR0puSFkg6OZreS9KXkuZFw925jLOpqe5zsJcvh7vugtNPh86d4bjj4I47QmP1k0/C0097onCusctZspDUHLgTOAnYDxgpab+kxX5OeDb3AGAE8L+xeUvMrH80fC9XcTZWU6eGPp6aNQuvU6dun5fNc7DXroVf/CLcQd27N/zgB7BgAZx/fkgQa9bACy/At77lVU7ONQW5rIYaDCw2s6UAkqYBpwOLYssYkOgzsSOwMofxNBnZXO00alTqK5/Wrw/tEJMmhX72jzsOvvvdcOPcXnt5YnCuqcplNVQ34MPYeEk0LW4CcK6kEuBp4Eexeb2j6qmXJB2R6gMkjZFULKl41apVdRh6w1aTq502boQbbwyliF/+MiSJBQvguefgiitCNZMnCuearlwmi1SnluTbxUcCU8ysO3Ay8JCkZsDHQI+oeurHwCOSKvXabmaTzWygmQ0sKiqq4/Abrupc7fTll3D77bDHHnDtteH+iLlz4S9/gb59cxunc67hyGWyKAF2j413p3I108XAYwBmNhtoDXQxs6/MbE00fS6wBNgrh7E2KlVd1RSfbgb33Qd77gk/+Uno7nv2bHjqKb8/wjlXWS6TxRygj6TekloRGrCfTFpmBXAsgKR9CclilaSiqIEcSXsAfYClOYy1Ucl0tdPixXDMMXDJJaHaaebM0PPrIYfUe6jOuQYiZ8nCzEqBscCzwDuEq54WSpoo6bRosZ8Al0qaDzwKXGChZ8OhwIJo+nTge2a2NlexNjZVXe30//4f3HRT6LzvzTfDtJdfDs+LcM65dLzX2SbijTdCSeIRhjQ+AAAVFklEQVTNN2H4cPj976Fr13xH5ZzLt2x7nfXuPhqodPdRxG3eHLrhGDwYPv44NFz/9a+eKJxz1ePdfTRA2fYaW1wMI0bAkiWhVHHzzdCpU/3H65xr+Lxk0QBlcx/FCy/AUUeFZ1nPmAH33OOJwjlXc16yaIAy3Ufx5z+HEsY++4RnXH/jG/UXm3OucapWyUJSa0ntchWMy066+yjuvjtc9TR4cHikqScK51xdyDpZSLqQ0H3HfyRdlbuQXCap7qNo0wYGDYLvfx9OPjl00+HVTs65ulKdksVYYB+gN6GbDpcnyfdR9OgBQ4fC9OnhwUOPP145mTjnXG1UJ1nIzNaY2VfAplwF5LIzalR4zsRXX4VE8eyzocO/Bx+Eli3zHZ1zrrHJ2MAt6e+EDgD3kPQkoYPA5OdSuDzYsgXOOis8fOiGG2DcOO8Z1jmXG9lcDXVr9HpbLgNx1XfFFSFR3H13eOaEc87lSjbVUEeb2UvJQ84jc2nv0n7kEfjDH+BnP/NE4ZzLvWxKFqcRHlLk6lG6u7QPPji8P/xw+PWv8xejc67pyCZZ7CLpx8kTzez2HMTjIlXdpT1uHHTsGC6VnTYNWvhtlc65epDNqaY50J7UT75zOVLVXdoffgglJfB//wfdkh9S65xzOZJNsvjEzCbmPBJXQY8eoeopleuugxNPrN94nHNNWzYN3M/nPApXSaq7tAH23RcmTKj3cJxzTVw2yeKvkjokRiR1kDQkhzE5Kt6lDeFGu44d4Z//hObN8xubc67pySZZ3AVsjI1viqa5HBs1CpYtC114lJaGBxd5x4DOuXzIJlnIYs9eNbNtZNm1uaRhkt6TtFjSNSnm95D0oqQ3JS2QdHJs3rhovfckNdka+vvug4cfhvHj4dhj8x2Nc66pyiZZLJV0maSW0XA5sDTTSpKaA3cCJxG6BxkpKbmbkJ8Dj5nZAGAE8L/RuvtF4/sDw4D/jbbXpPzjHzB2LBx3HPz85/mOxjnXlGWTLL4HHAZ8FA1DgDFZrDcYWGxmS81sKzANOD1pGQN2jN53BFZG708HppnZV2a2DFgcba9RSXeH9rRpcMYZ0LdveO/tFM65fMpYnWRmnxF+5VdXN8LzLxJKCIkmbgLwnKQfAe2A42Lrvpa0bqW7CiSNIUpcPap6IlCBSneH9qZN8L3vwRFHwN//DjvuWPV2nHOuPmQsWUjqLulxSZ9J+lTSXyR1z2LbqW7is6TxkcAUM+sOnAw8JKlZlutiZpPNbKCZDSwqKsoipMJR1R3aY8eGvp5OOgmeecYThXOuMGRTDfUA8CTQlfDr/u/RtExKgN1j493ZXs2UcDHwGICZzQZaA12yXLdBq+oO7XXrwmNRH388dOnhnHOFIJtkUWRmD5hZaTRMAbL5GT8H6COpt6RWhKqsJ5OWWQEcCyBpX0KyWBUtN0LSDpJ6A32Af2e1Rw1EVbVm7duHKqpWreo3HuecSyebZLFa0rmSmkfDucCaTCuZWSnhUazPAu8QrnpaKGmipNOixX4CXCppPvAocIEFCwkljkXAM8APzays+rtXuFLdod2iBdx1lzdmO+cKj2K3UKReQOoB/B44lNBuMAu43Myq6LkoPwYOHGjFxcX5DqNaHnooNGRv3gw77QT/8z/hBjznnKsvkuaa2cBMy2VzNdQKwjMtXB376KOQKG66CX7603xH45xzVcvmGdwPkPpKpItyElET8eKL4YqoESPg6qvzHY1zzqWXTbcdT0WvNwP++7cOfPRRSBJ77QX33APyJ4U45wpcNtVQfwGQ9PPEe1dzX38dLo3dtCmULtq3z3dEzjmXWXUeypm+Jdxl5Zpr4NVX4ZFHYL/knrKcc65AZdNm8RYhUfyXpAWEu6vNzPrlOrjGZvp0uP32cJf2yJH5jsY557KXTcni1JxH0UhNnRoasVesCM+hWLsWhgyB227Ld2TOOVc92SSLL3IeRSOU3FHgyqizkpEj/e5s51zDk80d3HOB4uh1ZWzcpZGqo0CASZPqPxbnnKutbK6G6p14L+nN6EFFLoOqOgqsarpzzhWybEoWAESdAXoFSpa6VXr6RtDAHrvhnHNAdldD/T16uy+hsz+XwQsvwMaNlae3bRs6EHTOuYYmm5LFrYS7t483s+tzHE+DkvxY1AcfDH08HX887LZb
SAw9e4Y7tHv2hMmTYdSofEftnHPVl7HXWQBJBwJHRKMvm9n8nEZVA/Xd62zy1U4QkoJZeNLd7bdX7oLcOecKTba9zmbzWNXLganALtHwcPTM7CYt1dVOZlBUBHff7YnCOde4ZHOfxcXAEDPbBCDpJmA28D+5DKzQVXVV0+rV9RuHc87Vh2zaLATEn1JXFk1r0qq6qsmvdnLONUZVJgtJiVLHA8DrkiZImgC8BtyXzcYlDZP0nqTFkq5JMX+SpHnR8L6kdbF5ZbF5yc/uzrsLL6w8za92cs41Vumqof4NHGRmt0uaCRxOKFFcaGZvZtqwpObAncDxQAkwR9KTZrYosYyZXRlb/kdA/Ia/L82sf3V2pr589FFolygqgh12COM9eoRE4Vc7Oecao3TJoryqyczeAN6o5rYHA4vNbCmApGnA6cCiKpYfCYyv5mfUuy+/hDPOCPdRzJ4NffvmOyLnnMu9dMmiSNKPq5ppZrdn2HY34MPYeAkwJNWCknoCvYEZscmtJRUDpcBvzOyJFOuNAcYA9KiHxgIzuPhimDsXnnjCE4VzrulIlyyaA+2peWN2qvWquqljBDDdzOIN6T3MbKWkPYAZkt4ysyUVNmY2GZgM4T6LGsaZtZtugkcfDdVNp52W609zzrnCkS5ZfGxmE2ux7RJg99h4d0KvtamMAH4Yn2BmK6PXpVGbyQBgSeVVcyf+PIouXcJlsSNGwLhx9RmFc87lX7pLZ2t7eewcoI+k3lEnhCOASlc1Sdob6ES4dyMxrZOkHaL3XYBvUnVbR04k7tD+4INQ/bRqVbhD+/jjw6tzzjUl6UoWx9Zmw2ZWKmks8CyhSut+M1soaSJQbGaJxDESmGYV+x3ZF/iDpG2EhPab+FVUdWnbNvj8c1i3bvuwfj1cfnnlO7S3bYOJE+Gii3IRiXPOFa6s+oZqCGraN9THH0PXrtkvL4Wk4ZxzjUG2fUNl091Ho9a5M9xxB3TsCDvttP31lFPC/RPJ/A5t51xT1OSTRatW8KMU3SLedFPlXmX9Dm3nXFOV9ZPymppRo8LzJ/x5FM455yWLtEaN8uTgnHPgJQvnnHNZ8GThnHMuI08WzjnnMvJk4ZxzLiNPFs455zLyZOGccy4jTxbOOecy8mThnHMuI08WzjnnMvJk4ZxzLiNPFs455zLyZOGccy4jTxbOOecy8mThnHMuo5wmC0nDJL0nabGka1LMnyRpXjS8L2ldbN5oSf+JhtG5jNM551x6OXuehaTmwJ3A8UAJMEfSk2a2KLGMmV0ZW/5HwIDo/c7AeGAgYMDcaN3PcxWvc865quWyZDEYWGxmS81sKzANOD3N8iOBR6P3JwLPm9naKEE8DwzLYazOOefSyGWy6AZ8GBsviaZVIqkn0BuYUZ11JY2RVCypeNWqVXUStHPOucpymSyUYppVsewIYLqZlVVnXTObbGYDzWxgUVFRDcN0zjmXSS6TRQmwe2y8O7CyimVHsL0KqrrrOuecy7FcJos5QB9JvSW1IiSEJ5MXkrQ30AmYHZv8LHCCpE6SOgEnRNOcc87lQc6uhjKzUkljCSf55sD9ZrZQ0kSg2MwSiWMkMM3MLLbuWkm/IiQcgIlmtjZXsTrnnEtPsXN0gzZw4EArLi7OdxjOOdegSJprZgMzLed3cDvnnMvIk4VzzrmMPFk455zLyJOFc865jDxZOOecy8iThXPOuYw8WTjnnMvIk4VzzrmMPFk455zLyJOFc865jDxZOOecy8iThXPOuYw8WTjnnMvIk4VzzrmMPFk455zLyJOFc865jDxZOOecyyinyULSMEnvSVos6Zoqljlb0iJJCyU9EpteJmleNFR6drdzzrn6k7NncEtqDtwJHA+UAHMkPWlmi2LL9AHGAd80s88l7RLbxJdm1j9X8TnnnMteLksWg4HFZrbUzLYC04DTk5a5FLjTzD4HMLPPchiPc865GsplsugGfBgbL4mmxe0F7CXpVUmvSRoWm9daUnE0/YxUHyBpTLRM8apVq+o2euecc+VyVg0FKMU0S/H5fYCjgO7Ay5L6mtk6oIeZrZS0BzBD0ltmtqTCxswmA5MBBg4cmLxt55xzdSSXJYsSYPfYeHdgZYpl/mZmX5vZMuA9QvLAzFZGr0uBmcCAHMbqnHMujVwmizlAH0m9JbUCRgDJVzU9ARwNIKkLoVpqqaROknaITf8msAjnnHN5kbNqKDMrlTQWeBZoDtxvZgslTQSKzezJaN4JkhYBZcDVZrZG0mHAHyRtIyS038SvonLOOVe/ZNY4qvoHDhxoxcXF+Q7DOecaFElzzWxgpuX8Dm7nnHMZebJwzjmXkScL55xzGXmycM45l5EnC+eccxl5snDOOZeRJwvnnHMZebJwzjmXkScL55xzGXmycM45l5EnC+eccxl5snDOOZeRJwvnnHMZebJwzjmXkScL55xzGXmycM45l1GTTxZTp0KvXtCsWXidOjXfETnnXOHJabKQNEzSe5IWS7qmimXOlrRI0kJJj8Smj5b0n2gYnYv4pk6FMWPggw/ALLyOGeMJwznnkuXssaqSmgPvA8cDJcAcYGT8WdqS+gCPAceY2eeSdjGzzyTtDBQDAwED5gIHm9nnVX1eTR6r2qtXSBDJevaE5curtSnnnGuQCuGxqoOBxWa21My2AtOA05OWuRS4M5EEzOyzaPqJwPNmtjaa9zwwrK4DXLGietOdc66pymWy6AZ8GBsviabF7QXsJelVSa9JGlaNdZE0RlKxpOJVq1ZVO8AePao33TnnmqpcJgulmJZc59UC6AMcBYwE7pW0U5brYmaTzWygmQ0sKiqqdoA33ABt21ac1rZtmO6cc267XCaLEmD32Hh3YGWKZf5mZl+b2TLgPULyyGbdWhs1CiZPDm0UUnidPDlMd845t10uG7hbEBq4jwU+IjRwn2NmC2PLDCM0eo+W1AV4E+jP9kbtg6JF3yA0cK+t6vNq0sDtnHNNXbYN3C1yFYCZlUoaCzwLNAfuN7OFkiYCxWb2ZDTvBEmLgDLgajNbE+3ArwgJBmBiukThnHMut3JWsqhvXrJwzrnqK4RLZ51zzjUSniycc85l5MnCOedcRo2mzULSKiBF5x3lugCr6ymc6vLYasZjqxmPrWYaa2w9zSzjjWqNJllkIqk4m0acfPDYasZjqxmPrWaaemxeDeWccy4jTxbOOecyakrJYnK+A0jDY6sZj61mPLaaadKxNZk2C+ecczXXlEoWzjnnasiThXPOuYwafbLI5jng+SRpuaS3JM2TlNfOrSTdL+kzSW/Hpu0s6fnoWejPS+pUQLFNkPRRdOzmSTo5D3HtLulFSe9Ez5G/PJqe9+OWJrZCOG6tJf1b0vwotl9G03tLej06bn+S1KqAYpsiaVnsuPWv79hiMTaX9Kakp6Lx3B83M2u0A6G32yXAHkArYD6wX77jSopxOdAl33FEsQwldAv/dmzazcA10ftrgJsKKLYJwFV5PmbfAA6K3ncgdMu/XyEctzSxFcJxE9A+et8SeB04BHgMGBFNvxv4fgHFNgX4dj6PWyzGHwOPAE9
F4zk/bo29ZJHNc8BdxMz+BSR3BX868GD0/kHgjHoNKlJFbHlnZh+b2RvR+y+AdwiPAM77cUsTW95ZsDEabRkNBhwDTI+m5+u4VRVbQZDUHTgFuDcaF/Vw3Bp7ssjqWd55ZsBzkuZKGpPvYFLY1cw+hnDyAXbJczzJxkpaEFVT5aWKLEFSL2AA4ZdoQR23pNigAI5bVJUyD/gMeJ5QC7DOzEqjRfL2/5ocm5kljtsN0XGbJGmHfMQG/Bb4KbAtGu9MPRy3xp4ssnqWd55908wOAk4CfihpaL4DakDuAvYkPF3xY+C2fAUiqT3wF+AKM9uQrzhSSRFbQRw3Myszs/6ExyYPBvZNtVj9RhV9aFJskvoC44B9gEHAzsDP6jsuSacCn5nZ3PjkFIvW+XFr7MmiXp7lXRtmtjJ6/Qx4nPBPU0g+lfQNgOj1szzHU87MPo3+qbcB95CnYyepJeFkPNXM/hpNLojjliq2QjluCWa2DphJaBfYKXokMxTA/2sstmFRtZ6Z2VfAA+TnuH0TOE3SckK1+jGEkkbOj1tjTxZzgD7RlQKtgBHAk3mOqZykdpI6JN4DJwBvp1+r3j0JjI7ejwb+lsdYKkicjCPDycOxi+qL7wPeMbPbY7Pyftyqiq1AjluRpJ2i922A4whtKi8C344Wy9dxSxXbu7HkL0KbQL0fNzMbZ2bdzawX4Xw2w8xGUR/HLd+t+rkegJMJV4EsAa7LdzxJse1BuEJrPrAw3/EBjxKqJb4mlMouJtSH/hP4T/S6cwHF9hDwFrCAcHL+Rh7iOpxQ5F8AzIuGkwvhuKWJrRCOWz/gzSiGt4FfRNP3AP4NLAb+DOxQQLHNiI7b28DDRFdM5WsAjmL71VA5P27e3YdzzrmMGns1lHPOuTrgycI551xGniycc85l5MnCOedcRp4snHPOZeTJwrksSBoS9eA6P+rFdXJ0Z7RzTYInC+ey0xo4z8wONLN9Cdfh35vnmJyrN54snMuCmb1kZiWx8buAvSRdLGl97BkHH0maACCpv6TXoo7nHpfUSVILSXMkHRUtc6OkG6L3yyV1id4/rNizO5zLN08WzmVJ0tWxpDCPcNfsZ8DLZtbfQsdzk2Kr/BH4mZn1I9z5O95Cz6AXAHdJOh4YBvwy6XMOAPrmfo+cy54nC+eyZGa3JJJClBgWVLWspI7ATmb2UjTpQcIDnDCzhYQuN/4OXGThWStxvwbG1/kOOFcLniycqwFJOxK6+K7pcyoOANYBuyZNPwzYSOgvzLmC4cnCuSxIukDSgOh9c8IzIJ4hdFBZiZmtBz6XdEQ06TzgpWj9MwkdDQ4F7kj0cBqZAPwiF/vgXG14snAuOwuB2yW9QUgQAi7JsM5o4BZJCwilkIlRA/ZvgIvN7H3g98DvYuu8bmYpE5Bz+eS9zjrnnMvISxbOOecy8mThnHMuI08WzjnnMvJk4ZxzLiNPFs455zLyZOGccy4jTxbOOecy+v/0+VRcZLmz7wAAAABJRU5ErkJggg==\n", 462 | "text/plain": [ 463 | "
" 464 | ] 465 | }, 466 | "metadata": { 467 | "needs_background": "light" 468 | }, 469 | "output_type": "display_data" 470 | } 471 | ], 472 | "source": [ 473 | "plt.clf() # Очистим график\n", 474 | "acc_values = history_dict['acc']\n", 475 | "val_acc_values = history_dict['val_acc']\n", 476 | "\n", 477 | "plt.plot(epochs, acc, 'bo', label='Точность обучения')\n", 478 | "plt.plot(epochs, val_acc, 'b', label='Точность проверки')\n", 479 | "plt.title('Точность во время обучения и проверки')\n", 480 | "plt.xlabel('Эпохи')\n", 481 | "plt.ylabel('Точность')\n", 482 | "plt.legend()\n", 483 | "\n", 484 | "plt.show()" 485 | ] 486 | } 487 | ], 488 | "metadata": { 489 | "kernelspec": { 490 | "display_name": "Python 3", 491 | "language": "python", 492 | "name": "python3" 493 | }, 494 | "language_info": { 495 | "codemirror_mode": { 496 | "name": "ipython", 497 | "version": 3 498 | }, 499 | "file_extension": ".py", 500 | "mimetype": "text/x-python", 501 | "name": "python", 502 | "nbconvert_exporter": "python", 503 | "pygments_lexer": "ipython3", 504 | "version": "3.6.7" 505 | } 506 | }, 507 | "nbformat": 4, 508 | "nbformat_minor": 2 509 | } 510 | -------------------------------------------------------------------------------- /20_tf2/20_1_actor_critic_agent.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Setup" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import gym\n", 17 | "import logging\n", 18 | "import numpy as np\n", 19 | "import tensorflow as tf\n", 20 | "import tensorflow.keras.layers as kl\n", 21 | "import tensorflow.keras.losses as kls\n", 22 | "import tensorflow.keras.optimizers as ko" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 2, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "import matplotlib\n", 32 | "import matplotlib.pyplot as plt\n", 33 | "%matplotlib inline " 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 3, 39 | "metadata": {}, 40 | "outputs": [ 41 | { 42 | "name": "stdout", 43 | "output_type": "stream", 44 | "text": [ 45 | "TensorFlow Ver: 1.13.0-dev20190117\n", 46 | "Eager Execution: True\n" 47 | ] 48 | } 49 | ], 50 | "source": [ 51 | "print(\"TensorFlow Ver: \", tf.__version__)\n", 52 | "print(\"Eager Execution:\", tf.executing_eagerly())" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": 4, 58 | "metadata": {}, 59 | "outputs": [ 60 | { 61 | "name": "stdout", 62 | "output_type": "stream", 63 | "text": [ 64 | "1 + 2 + 3 + 4 + 5 = tf.Tensor(15, shape=(), dtype=int32)\n" 65 | ] 66 | } 67 | ], 68 | "source": [ 69 | "# eager by default!\n", 70 | "print(\"1 + 2 + 3 + 4 + 5 =\", tf.reduce_sum([1, 2, 3, 4, 5]))" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "# Advantage Actor-Critic with TensorFlow 2.0" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "## Policy & Value Model Class" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 5, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "class ProbabilityDistribution(tf.keras.Model):\n", 94 | " def call(self, logits):\n", 95 | " # sample a random categorical action from given logits\n", 96 | " return tf.squeeze(tf.random.categorical(logits, 1), axis=-1)\n", 97 | "\n", 98 | "class 
Model(tf.keras.Model):\n", 99 | " def __init__(self, num_actions):\n", 100 | " super().__init__('mlp_policy')\n", 101 | " # no tf.get_variable(), just simple Keras API\n", 102 | " self.hidden1 = kl.Dense(128, activation='relu')\n", 103 | " self.hidden2 = kl.Dense(128, activation='relu')\n", 104 | " self.value = kl.Dense(1, name='value')\n", 105 | " # logits are unnormalized log probabilities\n", 106 | " self.logits = kl.Dense(num_actions, name='policy_logits')\n", 107 | " self.dist = ProbabilityDistribution()\n", 108 | "\n", 109 | " def call(self, inputs):\n", 110 | " # inputs is a numpy array, convert to Tensor\n", 111 | " x = tf.convert_to_tensor(inputs)\n", 112 | " # separate hidden layers from the same input tensor\n", 113 | " hidden_logs = self.hidden1(x)\n", 114 | " hidden_vals = self.hidden2(x)\n", 115 | " return self.logits(hidden_logs), self.value(hidden_vals)\n", 116 | "\n", 117 | " def action_value(self, obs):\n", 118 | " # executes call() under the hood\n", 119 | " logits, value = self.predict(obs)\n", 120 | " action = self.dist.predict(logits)\n", 121 | " # a simpler option, will become clear later why we don't use it\n", 122 | " # action = tf.random.categorical(logits, 1)\n", 123 | " return np.squeeze(action, axis=-1), np.squeeze(value, axis=-1)" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | "## Advantage Actor-Critic Agent Class" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": 6, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "class A2CAgent:\n", 140 | " def __init__(self, model):\n", 141 | " # hyperparameters for loss terms, gamma is the discount coefficient\n", 142 | " self.params = {\n", 143 | " 'gamma': 0.99,\n", 144 | " 'value': 0.5,\n", 145 | " 'entropy': 0.0001\n", 146 | " }\n", 147 | " self.model = model\n", 148 | " self.model.compile(\n", 149 | " optimizer=ko.RMSprop(lr=0.0007),\n", 150 | " # define separate losses for policy logits and value estimate\n", 151 | " loss=[self._logits_loss, self._value_loss]\n", 152 | " )\n", 153 | " \n", 154 | " def train(self, env, batch_sz=32, updates=1000):\n", 155 | " # storage helpers for a single batch of data\n", 156 | " actions = np.empty((batch_sz,), dtype=np.int32)\n", 157 | " rewards, dones, values = np.empty((3, batch_sz))\n", 158 | " observations = np.empty((batch_sz,) + env.observation_space.shape)\n", 159 | " # training loop: collect samples, send to optimizer, repeat updates times\n", 160 | " ep_rews = [0.0]\n", 161 | " next_obs = env.reset()\n", 162 | " for update in range(updates):\n", 163 | " for step in range(batch_sz):\n", 164 | " observations[step] = next_obs.copy()\n", 165 | " actions[step], values[step] = self.model.action_value(next_obs[None, :])\n", 166 | " next_obs, rewards[step], dones[step], _ = env.step(actions[step])\n", 167 | "\n", 168 | " ep_rews[-1] += rewards[step]\n", 169 | " if dones[step]:\n", 170 | " ep_rews.append(0.0)\n", 171 | " next_obs = env.reset()\n", 172 | " logging.info(\"Episode: %03d, Reward: %03d\" % (len(ep_rews)-1, ep_rews[-2]))\n", 173 | "\n", 174 | " _, next_value = self.model.action_value(next_obs[None, :])\n", 175 | " returns, advs = self._returns_advantages(rewards, dones, values, next_value)\n", 176 | " # a trick to input actions and advantages through same API\n", 177 | " acts_and_advs = np.concatenate([actions[:, None], advs[:, None]], axis=-1)\n", 178 | " # performs a full training step on the collected batch\n", 179 | " # note: no need to mess around with gradients, 
Keras API handles it\n", 180 | " losses = self.model.train_on_batch(observations, [acts_and_advs, returns])\n", 181 | " logging.debug(\"[%d/%d] Losses: %s\" % (update+1, updates, losses))\n", 182 | " return ep_rews\n", 183 | "\n", 184 | " def test(self, env, render=False):\n", 185 | " obs, done, ep_reward = env.reset(), False, 0\n", 186 | " while not done:\n", 187 | " action, _ = self.model.action_value(obs[None, :])\n", 188 | " obs, reward, done, _ = env.step(action)\n", 189 | " ep_reward += reward\n", 190 | " if render:\n", 191 | " env.render()\n", 192 | " return ep_reward\n", 193 | "\n", 194 | " def _returns_advantages(self, rewards, dones, values, next_value):\n", 195 | " # next_value is the bootstrap value estimate of a future state (the critic)\n", 196 | " returns = np.append(np.zeros_like(rewards), next_value, axis=-1)\n", 197 | " # returns are calculated as discounted sum of future rewards\n", 198 | " for t in reversed(range(rewards.shape[0])):\n", 199 | " returns[t] = rewards[t] + self.params['gamma'] * returns[t+1] * (1-dones[t])\n", 200 | " returns = returns[:-1]\n", 201 | " # advantages are returns - baseline, value estimates in our case\n", 202 | " advantages = returns - values\n", 203 | " return returns, advantages\n", 204 | " \n", 205 | " def _value_loss(self, returns, value):\n", 206 | " # value loss is typically MSE between value estimates and returns\n", 207 | " return self.params['value']*kls.mean_squared_error(returns, value)\n", 208 | "\n", 209 | " def _logits_loss(self, acts_and_advs, logits):\n", 210 | " # a trick to input actions and advantages through same API\n", 211 | " actions, advantages = tf.split(acts_and_advs, 2, axis=-1)\n", 212 | " # polymorphic CE loss function that supports sparse and weighted options\n", 213 | " # from_logits argument ensures transformation into normalized probabilities\n", 214 | " cross_entropy = kls.CategoricalCrossentropy(from_logits=True)\n", 215 | " # policy loss is defined by policy gradients, weighted by advantages\n", 216 | " # note: we only calculate the loss on the actions we've actually taken\n", 217 | " # thus under the hood a sparse version of CE loss will be executed\n", 218 | " actions = tf.cast(actions, tf.int32)\n", 219 | " policy_loss = cross_entropy(actions, logits, sample_weight=advantages)\n", 220 | " # entropy loss can be calculated via CE over itself\n", 221 | " entropy_loss = cross_entropy(logits, logits)\n", 222 | " # here signs are flipped because optimizer minimizes\n", 223 | " return policy_loss - self.params['entropy']*entropy_loss" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": 7, 229 | "metadata": {}, 230 | "outputs": [ 231 | { 232 | "name": "stdout", 233 | "output_type": "stream", 234 | "text": [ 235 | "WARNING:tensorflow:From /home/inoryy/anaconda3/envs/tf2/lib/python3.6/site-packages/tensorflow/python/ops/resource_variable_ops.py:655: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n", 236 | "Instructions for updating:\n", 237 | "Colocations handled automatically by placer.\n" 238 | ] 239 | }, 240 | { 241 | "data": { 242 | "text/plain": [ 243 | "(array(1), array([0.00197717], dtype=float32))" 244 | ] 245 | }, 246 | "execution_count": 7, 247 | "metadata": {}, 248 | "output_type": "execute_result" 249 | } 250 | ], 251 | "source": [ 252 | "env = gym.make('CartPole-v0')\n", 253 | "model = Model(num_actions=env.action_space.n)\n", 254 | "model.action_value(env.reset()[None, :])" 255 | ] 256 | }, 257 | { 258 | 
"cell_type": "markdown", 259 | "metadata": {}, 260 | "source": [ 261 | "# Training A2C Agent & Results" 262 | ] 263 | }, 264 | { 265 | "cell_type": "code", 266 | "execution_count": 8, 267 | "metadata": {}, 268 | "outputs": [], 269 | "source": [ 270 | "env = gym.make('CartPole-v0')\n", 271 | "model = Model(num_actions=env.action_space.n)\n", 272 | "agent = A2CAgent(model)" 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "metadata": {}, 278 | "source": [ 279 | "## Testing with Random Weights" 280 | ] 281 | }, 282 | { 283 | "cell_type": "code", 284 | "execution_count": 9, 285 | "metadata": {}, 286 | "outputs": [ 287 | { 288 | "name": "stdout", 289 | "output_type": "stream", 290 | "text": [ 291 | "Total Episode Reward: 12 out of 200\n" 292 | ] 293 | } 294 | ], 295 | "source": [ 296 | "rewards_sum = agent.test(env)\n", 297 | "print(\"Total Episode Reward: %d out of 200\" % agent.test(env))" 298 | ] 299 | }, 300 | { 301 | "cell_type": "code", 302 | "execution_count": 10, 303 | "metadata": {}, 304 | "outputs": [ 305 | { 306 | "name": "stderr", 307 | "output_type": "stream", 308 | "text": [ 309 | "INFO:root:Episode: 001, Reward: 019\n", 310 | "INFO:root:Episode: 002, Reward: 023\n", 311 | "INFO:root:Episode: 003, Reward: 027\n", 312 | "INFO:root:Episode: 004, Reward: 016\n", 313 | "INFO:root:Episode: 005, Reward: 035\n", 314 | "INFO:root:Episode: 006, Reward: 021\n", 315 | "INFO:root:Episode: 007, Reward: 012\n", 316 | "INFO:root:Episode: 008, Reward: 023\n", 317 | "INFO:root:Episode: 009, Reward: 021\n", 318 | "INFO:root:Episode: 010, Reward: 026\n", 319 | "INFO:root:Episode: 011, Reward: 059\n", 320 | "INFO:root:Episode: 012, Reward: 021\n", 321 | "INFO:root:Episode: 013, Reward: 012\n", 322 | "INFO:root:Episode: 014, Reward: 018\n", 323 | "INFO:root:Episode: 015, Reward: 016\n", 324 | "INFO:root:Episode: 016, Reward: 027\n", 325 | "INFO:root:Episode: 017, Reward: 032\n", 326 | "INFO:root:Episode: 018, Reward: 013\n", 327 | "INFO:root:Episode: 019, Reward: 017\n", 328 | "INFO:root:Episode: 020, Reward: 041\n", 329 | "INFO:root:Episode: 021, Reward: 015\n", 330 | "INFO:root:Episode: 022, Reward: 015\n", 331 | "INFO:root:Episode: 023, Reward: 045\n", 332 | "INFO:root:Episode: 024, Reward: 014\n", 333 | "INFO:root:Episode: 025, Reward: 018\n", 334 | "INFO:root:Episode: 026, Reward: 037\n", 335 | "INFO:root:Episode: 027, Reward: 017\n", 336 | "INFO:root:Episode: 028, Reward: 025\n", 337 | "INFO:root:Episode: 029, Reward: 044\n", 338 | "INFO:root:Episode: 030, Reward: 010\n", 339 | "INFO:root:Episode: 031, Reward: 014\n", 340 | "INFO:root:Episode: 032, Reward: 013\n", 341 | "INFO:root:Episode: 033, Reward: 017\n", 342 | "INFO:root:Episode: 034, Reward: 022\n", 343 | "INFO:root:Episode: 035, Reward: 021\n", 344 | "INFO:root:Episode: 036, Reward: 039\n", 345 | "INFO:root:Episode: 037, Reward: 013\n", 346 | "INFO:root:Episode: 038, Reward: 041\n", 347 | "INFO:root:Episode: 039, Reward: 036\n", 348 | "INFO:root:Episode: 040, Reward: 020\n", 349 | "INFO:root:Episode: 041, Reward: 041\n", 350 | "INFO:root:Episode: 042, Reward: 020\n", 351 | "INFO:root:Episode: 043, Reward: 028\n", 352 | "INFO:root:Episode: 044, Reward: 023\n", 353 | "INFO:root:Episode: 045, Reward: 077\n", 354 | "INFO:root:Episode: 046, Reward: 010\n", 355 | "INFO:root:Episode: 047, Reward: 021\n", 356 | "INFO:root:Episode: 048, Reward: 012\n", 357 | "INFO:root:Episode: 049, Reward: 031\n", 358 | "INFO:root:Episode: 050, Reward: 049\n", 359 | "INFO:root:Episode: 051, Reward: 034\n", 360 | "INFO:root:Episode: 052, Reward: 
016\n", 361 | "INFO:root:Episode: 053, Reward: 034\n", 362 | "INFO:root:Episode: 054, Reward: 027\n", 363 | "INFO:root:Episode: 055, Reward: 031\n", 364 | "INFO:root:Episode: 056, Reward: 015\n", 365 | "INFO:root:Episode: 057, Reward: 012\n", 366 | "INFO:root:Episode: 058, Reward: 024\n", 367 | "INFO:root:Episode: 059, Reward: 082\n", 368 | "INFO:root:Episode: 060, Reward: 038\n", 369 | "INFO:root:Episode: 061, Reward: 026\n", 370 | "INFO:root:Episode: 062, Reward: 012\n", 371 | "INFO:root:Episode: 063, Reward: 018\n", 372 | "INFO:root:Episode: 064, Reward: 011\n", 373 | "INFO:root:Episode: 065, Reward: 048\n", 374 | "INFO:root:Episode: 066, Reward: 044\n", 375 | "INFO:root:Episode: 067, Reward: 018\n", 376 | "INFO:root:Episode: 068, Reward: 016\n", 377 | "INFO:root:Episode: 069, Reward: 012\n", 378 | "INFO:root:Episode: 070, Reward: 023\n", 379 | "INFO:root:Episode: 071, Reward: 013\n", 380 | "INFO:root:Episode: 072, Reward: 021\n", 381 | "INFO:root:Episode: 073, Reward: 014\n", 382 | "INFO:root:Episode: 074, Reward: 032\n", 383 | "INFO:root:Episode: 075, Reward: 016\n", 384 | "INFO:root:Episode: 076, Reward: 033\n", 385 | "INFO:root:Episode: 077, Reward: 022\n", 386 | "INFO:root:Episode: 078, Reward: 019\n", 387 | "INFO:root:Episode: 079, Reward: 022\n", 388 | "INFO:root:Episode: 080, Reward: 082\n", 389 | "INFO:root:Episode: 081, Reward: 016\n", 390 | "INFO:root:Episode: 082, Reward: 017\n", 391 | "INFO:root:Episode: 083, Reward: 049\n", 392 | "INFO:root:Episode: 084, Reward: 020\n", 393 | "INFO:root:Episode: 085, Reward: 023\n", 394 | "INFO:root:Episode: 086, Reward: 032\n", 395 | "INFO:root:Episode: 087, Reward: 029\n", 396 | "INFO:root:Episode: 088, Reward: 030\n", 397 | "INFO:root:Episode: 089, Reward: 029\n", 398 | "INFO:root:Episode: 090, Reward: 030\n", 399 | "INFO:root:Episode: 091, Reward: 038\n", 400 | "INFO:root:Episode: 092, Reward: 070\n", 401 | "INFO:root:Episode: 093, Reward: 018\n", 402 | "INFO:root:Episode: 094, Reward: 051\n", 403 | "INFO:root:Episode: 095, Reward: 052\n", 404 | "INFO:root:Episode: 096, Reward: 058\n", 405 | "INFO:root:Episode: 097, Reward: 020\n", 406 | "INFO:root:Episode: 098, Reward: 043\n", 407 | "INFO:root:Episode: 099, Reward: 038\n", 408 | "INFO:root:Episode: 100, Reward: 023\n", 409 | "INFO:root:Episode: 101, Reward: 025\n", 410 | "INFO:root:Episode: 102, Reward: 038\n", 411 | "INFO:root:Episode: 103, Reward: 050\n", 412 | "INFO:root:Episode: 104, Reward: 034\n", 413 | "INFO:root:Episode: 105, Reward: 022\n", 414 | "INFO:root:Episode: 106, Reward: 020\n", 415 | "INFO:root:Episode: 107, Reward: 022\n", 416 | "INFO:root:Episode: 108, Reward: 033\n", 417 | "INFO:root:Episode: 109, Reward: 021\n", 418 | "INFO:root:Episode: 110, Reward: 038\n", 419 | "INFO:root:Episode: 111, Reward: 042\n", 420 | "INFO:root:Episode: 112, Reward: 014\n", 421 | "INFO:root:Episode: 113, Reward: 081\n", 422 | "INFO:root:Episode: 114, Reward: 029\n", 423 | "INFO:root:Episode: 115, Reward: 025\n", 424 | "INFO:root:Episode: 116, Reward: 029\n", 425 | "INFO:root:Episode: 117, Reward: 022\n", 426 | "INFO:root:Episode: 118, Reward: 109\n", 427 | "INFO:root:Episode: 119, Reward: 048\n", 428 | "INFO:root:Episode: 120, Reward: 022\n", 429 | "INFO:root:Episode: 121, Reward: 024\n", 430 | "INFO:root:Episode: 122, Reward: 029\n", 431 | "INFO:root:Episode: 123, Reward: 023\n", 432 | "INFO:root:Episode: 124, Reward: 042\n", 433 | "INFO:root:Episode: 125, Reward: 023\n", 434 | "INFO:root:Episode: 126, Reward: 013\n", 435 | "INFO:root:Episode: 127, Reward: 034\n", 436 | 
"INFO:root:Episode: 128, Reward: 033\n", 437 | "INFO:root:Episode: 129, Reward: 034\n", 438 | "INFO:root:Episode: 130, Reward: 063\n", 439 | "INFO:root:Episode: 131, Reward: 060\n", 440 | "INFO:root:Episode: 132, Reward: 018\n", 441 | "INFO:root:Episode: 133, Reward: 039\n", 442 | "INFO:root:Episode: 134, Reward: 015\n", 443 | "INFO:root:Episode: 135, Reward: 035\n", 444 | "INFO:root:Episode: 136, Reward: 132\n", 445 | "INFO:root:Episode: 137, Reward: 035\n", 446 | "INFO:root:Episode: 138, Reward: 033\n", 447 | "INFO:root:Episode: 139, Reward: 028\n", 448 | "INFO:root:Episode: 140, Reward: 015\n", 449 | "INFO:root:Episode: 141, Reward: 013\n", 450 | "INFO:root:Episode: 142, Reward: 101\n", 451 | "INFO:root:Episode: 143, Reward: 028\n", 452 | "INFO:root:Episode: 144, Reward: 066\n", 453 | "INFO:root:Episode: 145, Reward: 200\n", 454 | "INFO:root:Episode: 146, Reward: 059\n", 455 | "INFO:root:Episode: 147, Reward: 077\n", 456 | "INFO:root:Episode: 148, Reward: 021\n", 457 | "INFO:root:Episode: 149, Reward: 030\n", 458 | "INFO:root:Episode: 150, Reward: 053\n", 459 | "INFO:root:Episode: 151, Reward: 019\n", 460 | "INFO:root:Episode: 152, Reward: 035\n", 461 | "INFO:root:Episode: 153, Reward: 035\n", 462 | "INFO:root:Episode: 154, Reward: 069\n", 463 | "INFO:root:Episode: 155, Reward: 108\n", 464 | "INFO:root:Episode: 156, Reward: 079\n", 465 | "INFO:root:Episode: 157, Reward: 021\n", 466 | "INFO:root:Episode: 158, Reward: 026\n", 467 | "INFO:root:Episode: 159, Reward: 045\n", 468 | "INFO:root:Episode: 160, Reward: 025\n", 469 | "INFO:root:Episode: 161, Reward: 069\n", 470 | "INFO:root:Episode: 162, Reward: 016\n", 471 | "INFO:root:Episode: 163, Reward: 036\n", 472 | "INFO:root:Episode: 164, Reward: 063\n", 473 | "INFO:root:Episode: 165, Reward: 039\n", 474 | "INFO:root:Episode: 166, Reward: 075\n", 475 | "INFO:root:Episode: 167, Reward: 035\n", 476 | "INFO:root:Episode: 168, Reward: 059\n", 477 | "INFO:root:Episode: 169, Reward: 025\n", 478 | "INFO:root:Episode: 170, Reward: 069\n", 479 | "INFO:root:Episode: 171, Reward: 063\n", 480 | "INFO:root:Episode: 172, Reward: 024\n", 481 | "INFO:root:Episode: 173, Reward: 023\n", 482 | "INFO:root:Episode: 174, Reward: 082\n", 483 | "INFO:root:Episode: 175, Reward: 048\n", 484 | "INFO:root:Episode: 176, Reward: 049\n", 485 | "INFO:root:Episode: 177, Reward: 076\n", 486 | "INFO:root:Episode: 178, Reward: 024\n", 487 | "INFO:root:Episode: 179, Reward: 067\n", 488 | "INFO:root:Episode: 180, Reward: 045\n", 489 | "INFO:root:Episode: 181, Reward: 035\n", 490 | "INFO:root:Episode: 182, Reward: 044\n", 491 | "INFO:root:Episode: 183, Reward: 044\n", 492 | "INFO:root:Episode: 184, Reward: 026\n", 493 | "INFO:root:Episode: 185, Reward: 068\n", 494 | "INFO:root:Episode: 186, Reward: 020\n", 495 | "INFO:root:Episode: 187, Reward: 047\n", 496 | "INFO:root:Episode: 188, Reward: 028\n", 497 | "INFO:root:Episode: 189, Reward: 053\n", 498 | "INFO:root:Episode: 190, Reward: 089\n", 499 | "INFO:root:Episode: 191, Reward: 042\n", 500 | "INFO:root:Episode: 192, Reward: 023\n", 501 | "INFO:root:Episode: 193, Reward: 079\n", 502 | "INFO:root:Episode: 194, Reward: 051\n", 503 | "INFO:root:Episode: 195, Reward: 038\n", 504 | "INFO:root:Episode: 196, Reward: 116\n", 505 | "INFO:root:Episode: 197, Reward: 067\n", 506 | "INFO:root:Episode: 198, Reward: 082\n", 507 | "INFO:root:Episode: 199, Reward: 122\n", 508 | "INFO:root:Episode: 200, Reward: 113\n", 509 | "INFO:root:Episode: 201, Reward: 035\n", 510 | "INFO:root:Episode: 202, Reward: 061\n", 511 | "INFO:root:Episode: 203, 
Reward: 132\n", 512 | "INFO:root:Episode: 204, Reward: 033\n", 513 | "INFO:root:Episode: 205, Reward: 093\n", 514 | "INFO:root:Episode: 206, Reward: 125\n", 515 | "INFO:root:Episode: 207, Reward: 040\n", 516 | "INFO:root:Episode: 208, Reward: 044\n", 517 | "INFO:root:Episode: 209, Reward: 034\n", 518 | "INFO:root:Episode: 210, Reward: 059\n", 519 | "INFO:root:Episode: 211, Reward: 063\n", 520 | "INFO:root:Episode: 212, Reward: 116\n", 521 | "INFO:root:Episode: 213, Reward: 061\n", 522 | "INFO:root:Episode: 214, Reward: 086\n", 523 | "INFO:root:Episode: 215, Reward: 065\n", 524 | "INFO:root:Episode: 216, Reward: 031\n", 525 | "INFO:root:Episode: 217, Reward: 064\n", 526 | "INFO:root:Episode: 218, Reward: 153\n", 527 | "INFO:root:Episode: 219, Reward: 200\n", 528 | "INFO:root:Episode: 220, Reward: 088\n", 529 | "INFO:root:Episode: 221, Reward: 035\n", 530 | "INFO:root:Episode: 222, Reward: 113\n", 531 | "INFO:root:Episode: 223, Reward: 080\n", 532 | "INFO:root:Episode: 224, Reward: 048\n", 533 | "INFO:root:Episode: 225, Reward: 044\n", 534 | "INFO:root:Episode: 226, Reward: 061\n", 535 | "INFO:root:Episode: 227, Reward: 077\n", 536 | "INFO:root:Episode: 228, Reward: 025\n" 537 | ] 538 | }, 539 | { 540 | "name": "stderr", 541 | "output_type": "stream", 542 | "text": [ 543 | "INFO:root:Episode: 229, Reward: 026\n", 544 | "INFO:root:Episode: 230, Reward: 054\n", 545 | "INFO:root:Episode: 231, Reward: 120\n", 546 | "INFO:root:Episode: 232, Reward: 074\n", 547 | "INFO:root:Episode: 233, Reward: 122\n", 548 | "INFO:root:Episode: 234, Reward: 098\n", 549 | "INFO:root:Episode: 235, Reward: 034\n", 550 | "INFO:root:Episode: 236, Reward: 086\n", 551 | "INFO:root:Episode: 237, Reward: 126\n", 552 | "INFO:root:Episode: 238, Reward: 200\n", 553 | "INFO:root:Episode: 239, Reward: 175\n", 554 | "INFO:root:Episode: 240, Reward: 059\n", 555 | "INFO:root:Episode: 241, Reward: 045\n", 556 | "INFO:root:Episode: 242, Reward: 029\n", 557 | "INFO:root:Episode: 243, Reward: 027\n", 558 | "INFO:root:Episode: 244, Reward: 128\n", 559 | "INFO:root:Episode: 245, Reward: 104\n", 560 | "INFO:root:Episode: 246, Reward: 133\n", 561 | "INFO:root:Episode: 247, Reward: 101\n", 562 | "INFO:root:Episode: 248, Reward: 043\n", 563 | "INFO:root:Episode: 249, Reward: 053\n", 564 | "INFO:root:Episode: 250, Reward: 065\n", 565 | "INFO:root:Episode: 251, Reward: 072\n", 566 | "INFO:root:Episode: 252, Reward: 093\n", 567 | "INFO:root:Episode: 253, Reward: 200\n", 568 | "INFO:root:Episode: 254, Reward: 156\n", 569 | "INFO:root:Episode: 255, Reward: 053\n", 570 | "INFO:root:Episode: 256, Reward: 057\n", 571 | "INFO:root:Episode: 257, Reward: 121\n", 572 | "INFO:root:Episode: 258, Reward: 051\n", 573 | "INFO:root:Episode: 259, Reward: 095\n", 574 | "INFO:root:Episode: 260, Reward: 096\n", 575 | "INFO:root:Episode: 261, Reward: 053\n", 576 | "INFO:root:Episode: 262, Reward: 193\n", 577 | "INFO:root:Episode: 263, Reward: 083\n", 578 | "INFO:root:Episode: 264, Reward: 060\n", 579 | "INFO:root:Episode: 265, Reward: 100\n", 580 | "INFO:root:Episode: 266, Reward: 113\n", 581 | "INFO:root:Episode: 267, Reward: 120\n", 582 | "INFO:root:Episode: 268, Reward: 038\n", 583 | "INFO:root:Episode: 269, Reward: 084\n", 584 | "INFO:root:Episode: 270, Reward: 049\n", 585 | "INFO:root:Episode: 271, Reward: 066\n", 586 | "INFO:root:Episode: 272, Reward: 166\n", 587 | "INFO:root:Episode: 273, Reward: 144\n", 588 | "INFO:root:Episode: 274, Reward: 053\n", 589 | "INFO:root:Episode: 275, Reward: 057\n", 590 | "INFO:root:Episode: 276, Reward: 092\n", 591 | 
"INFO:root:Episode: 277, Reward: 122\n", 592 | "INFO:root:Episode: 278, Reward: 153\n", 593 | "INFO:root:Episode: 279, Reward: 131\n", 594 | "INFO:root:Episode: 280, Reward: 200\n", 595 | "INFO:root:Episode: 281, Reward: 074\n", 596 | "INFO:root:Episode: 282, Reward: 147\n", 597 | "INFO:root:Episode: 283, Reward: 079\n", 598 | "INFO:root:Episode: 284, Reward: 120\n", 599 | "INFO:root:Episode: 285, Reward: 136\n", 600 | "INFO:root:Episode: 286, Reward: 133\n", 601 | "INFO:root:Episode: 287, Reward: 133\n", 602 | "INFO:root:Episode: 288, Reward: 088\n", 603 | "INFO:root:Episode: 289, Reward: 057\n", 604 | "INFO:root:Episode: 290, Reward: 185\n", 605 | "INFO:root:Episode: 291, Reward: 087\n", 606 | "INFO:root:Episode: 292, Reward: 154\n", 607 | "INFO:root:Episode: 293, Reward: 200\n", 608 | "INFO:root:Episode: 294, Reward: 114\n", 609 | "INFO:root:Episode: 295, Reward: 118\n", 610 | "INFO:root:Episode: 296, Reward: 089\n", 611 | "INFO:root:Episode: 297, Reward: 069\n", 612 | "INFO:root:Episode: 298, Reward: 155\n", 613 | "INFO:root:Episode: 299, Reward: 109\n", 614 | "INFO:root:Episode: 300, Reward: 095\n", 615 | "INFO:root:Episode: 301, Reward: 200\n", 616 | "INFO:root:Episode: 302, Reward: 200\n", 617 | "INFO:root:Episode: 303, Reward: 139\n", 618 | "INFO:root:Episode: 304, Reward: 200\n", 619 | "INFO:root:Episode: 305, Reward: 099\n", 620 | "INFO:root:Episode: 306, Reward: 133\n", 621 | "INFO:root:Episode: 307, Reward: 152\n", 622 | "INFO:root:Episode: 308, Reward: 177\n", 623 | "INFO:root:Episode: 309, Reward: 140\n", 624 | "INFO:root:Episode: 310, Reward: 167\n", 625 | "INFO:root:Episode: 311, Reward: 134\n", 626 | "INFO:root:Episode: 312, Reward: 200\n", 627 | "INFO:root:Episode: 313, Reward: 200\n", 628 | "INFO:root:Episode: 314, Reward: 154\n", 629 | "INFO:root:Episode: 315, Reward: 200\n", 630 | "INFO:root:Episode: 316, Reward: 141\n", 631 | "INFO:root:Episode: 317, Reward: 200\n", 632 | "INFO:root:Episode: 318, Reward: 072\n", 633 | "INFO:root:Episode: 319, Reward: 128\n", 634 | "INFO:root:Episode: 320, Reward: 190\n", 635 | "INFO:root:Episode: 321, Reward: 200\n", 636 | "INFO:root:Episode: 322, Reward: 108\n", 637 | "INFO:root:Episode: 323, Reward: 038\n", 638 | "INFO:root:Episode: 324, Reward: 200\n", 639 | "INFO:root:Episode: 325, Reward: 102\n", 640 | "INFO:root:Episode: 326, Reward: 200\n", 641 | "INFO:root:Episode: 327, Reward: 200\n", 642 | "INFO:root:Episode: 328, Reward: 200\n", 643 | "INFO:root:Episode: 329, Reward: 151\n", 644 | "INFO:root:Episode: 330, Reward: 200\n", 645 | "INFO:root:Episode: 331, Reward: 129\n", 646 | "INFO:root:Episode: 332, Reward: 086\n", 647 | "INFO:root:Episode: 333, Reward: 174\n", 648 | "INFO:root:Episode: 334, Reward: 157\n", 649 | "INFO:root:Episode: 335, Reward: 200\n", 650 | "INFO:root:Episode: 336, Reward: 060\n", 651 | "INFO:root:Episode: 337, Reward: 200\n", 652 | "INFO:root:Episode: 338, Reward: 200\n", 653 | "INFO:root:Episode: 339, Reward: 036\n", 654 | "INFO:root:Episode: 340, Reward: 111\n", 655 | "INFO:root:Episode: 341, Reward: 200\n", 656 | "INFO:root:Episode: 342, Reward: 200\n", 657 | "INFO:root:Episode: 343, Reward: 200\n", 658 | "INFO:root:Episode: 344, Reward: 193\n", 659 | "INFO:root:Episode: 345, Reward: 200\n", 660 | "INFO:root:Episode: 346, Reward: 174\n", 661 | "INFO:root:Episode: 347, Reward: 200\n", 662 | "INFO:root:Episode: 348, Reward: 146\n", 663 | "INFO:root:Episode: 349, Reward: 150\n", 664 | "INFO:root:Episode: 350, Reward: 146\n", 665 | "INFO:root:Episode: 351, Reward: 148\n", 666 | "INFO:root:Episode: 352, 
Reward: 144\n", 667 | "INFO:root:Episode: 353, Reward: 162\n", 668 | "INFO:root:Episode: 354, Reward: 200\n", 669 | "INFO:root:Episode: 355, Reward: 200\n", 670 | "INFO:root:Episode: 356, Reward: 133\n", 671 | "INFO:root:Episode: 357, Reward: 152\n", 672 | "INFO:root:Episode: 358, Reward: 096\n", 673 | "INFO:root:Episode: 359, Reward: 069\n", 674 | "INFO:root:Episode: 360, Reward: 039\n", 675 | "INFO:root:Episode: 361, Reward: 115\n", 676 | "INFO:root:Episode: 362, Reward: 130\n", 677 | "INFO:root:Episode: 363, Reward: 077\n", 678 | "INFO:root:Episode: 364, Reward: 128\n", 679 | "INFO:root:Episode: 365, Reward: 098\n", 680 | "INFO:root:Episode: 366, Reward: 129\n", 681 | "INFO:root:Episode: 367, Reward: 033\n", 682 | "INFO:root:Episode: 368, Reward: 200\n", 683 | "INFO:root:Episode: 369, Reward: 140\n", 684 | "INFO:root:Episode: 370, Reward: 155\n", 685 | "INFO:root:Episode: 371, Reward: 130\n", 686 | "INFO:root:Episode: 372, Reward: 167\n", 687 | "INFO:root:Episode: 373, Reward: 170\n", 688 | "INFO:root:Episode: 374, Reward: 180\n", 689 | "INFO:root:Episode: 375, Reward: 147\n", 690 | "INFO:root:Episode: 376, Reward: 114\n", 691 | "INFO:root:Episode: 377, Reward: 054\n", 692 | "INFO:root:Episode: 378, Reward: 200\n", 693 | "INFO:root:Episode: 379, Reward: 072\n", 694 | "INFO:root:Episode: 380, Reward: 200\n", 695 | "INFO:root:Episode: 381, Reward: 200\n", 696 | "INFO:root:Episode: 382, Reward: 200\n", 697 | "INFO:root:Episode: 383, Reward: 182\n", 698 | "INFO:root:Episode: 384, Reward: 200\n", 699 | "INFO:root:Episode: 385, Reward: 200\n", 700 | "INFO:root:Episode: 386, Reward: 193\n", 701 | "INFO:root:Episode: 387, Reward: 200\n", 702 | "INFO:root:Episode: 388, Reward: 095\n", 703 | "INFO:root:Episode: 389, Reward: 200\n", 704 | "INFO:root:Episode: 390, Reward: 125\n", 705 | "INFO:root:Episode: 391, Reward: 158\n", 706 | "INFO:root:Episode: 392, Reward: 148\n", 707 | "INFO:root:Episode: 393, Reward: 083\n", 708 | "INFO:root:Episode: 394, Reward: 200\n", 709 | "INFO:root:Episode: 395, Reward: 200\n", 710 | "INFO:root:Episode: 396, Reward: 200\n", 711 | "INFO:root:Episode: 397, Reward: 156\n", 712 | "INFO:root:Episode: 398, Reward: 068\n" 713 | ] 714 | }, 715 | { 716 | "name": "stdout", 717 | "output_type": "stream", 718 | "text": [ 719 | "Finished training.\n" 720 | ] 721 | } 722 | ], 723 | "source": [ 724 | "# set to logging.WARNING to disable logs or logging.DEBUG to see losses as well\n", 725 | "logging.basicConfig(level=logging.INFO)\n", 726 | "\n", 727 | "rewards_history = agent.train(env)\n", 728 | "print(\"Finished training.\")" 729 | ] 730 | }, 731 | { 732 | "cell_type": "markdown", 733 | "metadata": {}, 734 | "source": [ 735 | "## Testing with Trained Model" 736 | ] 737 | }, 738 | { 739 | "cell_type": "code", 740 | "execution_count": 11, 741 | "metadata": {}, 742 | "outputs": [ 743 | { 744 | "name": "stdout", 745 | "output_type": "stream", 746 | "text": [ 747 | "Total Episode Reward: 200 out of 200\n" 748 | ] 749 | } 750 | ], 751 | "source": [ 752 | "print(\"Total Episode Reward: %d out of 200\" % agent.test(env))" 753 | ] 754 | }, 755 | { 756 | "cell_type": "markdown", 757 | "metadata": {}, 758 | "source": [ 759 | "## Training Rewards History" 760 | ] 761 | }, 762 | { 763 | "cell_type": "code", 764 | "execution_count": 12, 765 | "metadata": {}, 766 | "outputs": [ 767 | { 768 | "data": { 769 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfIAAAFYCAYAAACoFn5YAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzs3Xd4VGXe//H3TCaT3isklAAJJQEhIEVF6oJgV1BcwbXg+hNd2yqWdcWy64Lruj66xVURFFwL6CqrIFLERelVEiChSEkoySQhZdKT8/sjEEFIQklmMjOf13U9l5uT5Jzv/ZyQT8597mIyDMNAREREXJLZ2QWIiIjI+VOQi4iIuDAFuYiIiAtTkIuIiLgwBbmIiIgLU5CLiIi4MIuzCzgfubnFzXq+sDB/CgpKm/WcrYG7tgvct21ql2tRu1yLK7crKiqowc/piRywWLycXUKLcNd2gfu2Te1yLWqXa3HXdinIRUREXJiCXERExIUpyEVERFyYglxERMSFKchFRERcmIJcRETEhSnIRUREXJiCXERExIW16Mpur7zyCmvXrqWqqoq7776b/v37M3XqVIqLi4mNjeXll1/GarWyZMkS3n77bSoqKpg4cSLjxo1rybJERETcRosF+fr169mxYwcfffQRx44d45prrmHQoEHceOONjB07lhkzZrBgwQKuuOIKXnrpJT799FMsFgvXX389Y8aMISAgoKVKExERcRstFuR9+vTh1VdfBSAoKIiqqirWrFnDc889B8CIESOYO3cucXFxpKSkEBRUt45samoqGzZsYMiQIS1VmoiINGDPoUK+3XYEu73CIdfrEBtEcsdwh1zLXbVYkFssFiyWutPPnz+fIUOGsHz5cnx9fQEIDw/HZrORm5tLePhPNzEiIgKbzdboucPC/Jt9zdzGFqR3Ze7aLnDftqldrsXd2vXEv1aTU1DmsOtZvMzMeXY0gf5Wh1zP3e4XOGD3s6VLl/Lxxx8za9YsVq5cWX/cMAxMJhPe3t6nfP2J441p7t1roqKCmn1HtdbAXdsF7ts2tcu1uFu7SsuryCkoI6l9KGMHdGjx623dbeObzdl89f1ehvSOa/HrufL9auwPkBYN8pUrV/KPf/yDmTNnEhwcTEBAAGVlZfj5+WGz2YiOjiYqKoq8vLz677HZbAwcOLAlyxIRkTPIttkB6JEQQa/OES1+vbjIAL7ZnM2a9KMOCXJ31WLTz4qLi5k+fTpvvvkmYWFhAAwePJhly5YBsGTJEoYMGUKvXr3IyMiguLgYu93O1q1b6devX0uVJSIiDcjOrQvyDrHBDrleRIgvSe1CyTh4jLzCcodc0x212BP5woULKSws5OGHH64/Nn36dJ544glmzZpFQkICY8eOxWKx8MADD3DrrbdiNpu577776t+ji4iI42TllgDQoY3j3iMPSo4h8+Ax1mw/wpWDOjrsuu6kxYL85ptv5uabbz7t+Jw5c047NmbMGMaMGdNSpYiIyFnIzrVjAtrFBFFc6JgBb/26RfP+kkzWpB9l7MAOTY6RktNpZTcREcEwDLJtdqLC/PC1tvg46HoBvt706hxJts3OwZwSh13XnSjIRUSEInslJWVVxEU6fjGuQckxAKxJP+rwa7sDBbmIiJB1fMR6XFSgw6/dq3MEfj4W1u44Sm2t4fDruzoFuYiI1I9Yj49y/BO5t8WLi7tFUVBcQcaBAodf39UpyEVEhOzjI9ad0bUOMCg5FoDV29W9fq4U5CIiQrbNjpfZREy4v1Oun9gulPBgHzZm5FBZVeOUGlyVglxExMPVHh+xHhvhj8XLObFgNpkY0COGsooatu7Ja/obpJ6CXETEw+UVllNRWeO0bvUTTnSvr0k/4tQ6XI2CXETEw50Y6OaMEesni48KJD4qkB/25FFSVuXUWlyJglxExMNl2+oGujljxPrPDUqJoabWYP3OHGeX4jIU5CIiHq61PJEDDOgegwl1r58LBbmIiIfLyrVj9TYTGeL8DavCg33p2j6UXVmF2I45Zr13V6cgFxHxYNU1tRzJtxMXGYC5lWxYUj/oTXPKz4qCXETEg+UUlFFdYxAX6fxu9RP6do3G4mVmdfoRDENLtjZFQS4i4sGy69dYd/5AtxP8fS307hLB4bxSDhzVjmhNUZCLiHiwrONbh7amIIeTlmzVoLcmKchFRDxY/RN5K+paB+jZOYIAXwtrt2tHtKYoyEVEPFh2bgkBvhZCA63OLuUUFi8zF3eLptBeyY792hGtMQpyEREPVVlVQ05BGXFRgZhayYj1kw3Ukq1nRUEuIuKhDueVYtD63o+f0CU+hIhgXzZk5lKhHdEapCAXEfFQWcf3II938mYpDTGbTAxMjqGisoYtu2zOLqfVUpCLiHion6aeta6BbidT93rTFOQiIh7qxBrrbVvpEzlAXGQA7WMCSfsxn+LSSmeX0yopyEVEPFS2rYTQQCuBft7OLqVRg5JjtSNaIxTkIiIeqLS8ivyiilbdrX5C/+4xmExaHKYhCnIREQ904v14a9iDvClhQT507xDGnuwicgpKnV1Oq6MgFxHxQPV7kLeyFd0aoh3RGmZpyZNnZmYyZcoUbr/9diZOnMgDDzxAQUHdCj3Hjh2jd+/e3HPPPVx99dWkpKQAEBYWxmuvvdaSZYmIeLz6IHeBJ3KA1KQo3lucwer0o1x9ScdWuYCNs7RYkJeWlvLCCy8waNCg+mMnB/RTTz3FuHHjAEhISGDOnDktVYqIiPxMtq0EE9A2wjWC3M/HQp/ESNbtyGHfkWIS2gQ7u6RWo8W61q1WK2+99RbR0dGnfW7v3r0UFBRw0UUXtdTlRUSkAYZhkJVrJyrUDx+rl7PLOWsDtSPaGbVYkFssFnx9fc/4uffee4/bbrut/mObzcaUKVOYMGECCxYsaKmSREQEKCqtoqSsymW61U9ISQgn0M+bdduPUlNb6+xyWo0WfUd+JmVlZaxevZpnnnkGgNDQUH7zm99w7bXXUlpayvjx4xkwYAAxMTENniMszB+LpXn/ioyKCmrW87UW7toucN+2qV2uxRXbdaggF4DEDuEN1t9a23V5nzgWrtpHdkE5fbs1nBMNaa3tuhAOD/JNmzaRmpqK2VzXGRAYGMj48eOBuu745ORkfvzxx0aDvKCZpx9ERQWRm1vcrOdsDdy1XeC+bVO7XIurtittd12Qh/l7n7H+1tyu3p0jWLhqH4tX/Uj7CP9z+t7W3K6mNPYHiMOnn23dupWkpKT6j9evX8/TTz8NQHl5ORkZGSQkJDi6LBERj5F9YrMUF+taB+jcNpioUF82ZdqoqNSOaNCCQZ6WlsakSZP4z3/+w3vvvcekSZM4duwYubm5pwyA69OnDwDjx4/ntttu4+677270aVxERC5Mts2Ol9lETPi5PdG2BiaTiYE9YqmoqmHzrlxnl9MqtFjXekpKyhmnlE2bNu3UAiwW/vCHP7RUGSIicpJawyDbZic2wh+Ll2uuCTYwOYb/rtrH6vSj9SPZPZlr3kURETkv+YXlVFTWENeKdzxrSp
[... base64-encoded PNG image data omitted: the cell's display_data output, a line plot of Total Reward vs. Episode produced by the plotting code below ...]\n", 770 | "text/plain": [ 771 | "
" 772 | ] 773 | }, 774 | "metadata": {}, 775 | "output_type": "display_data" 776 | } 777 | ], 778 | "source": [ 779 | "plt.style.use('seaborn')\n", 780 | "plt.plot(np.arange(0, len(rewards_history), 25), rewards_history[::25])\n", 781 | "plt.xlabel('Episode')\n", 782 | "plt.ylabel('Total Reward')\n", 783 | "plt.show()" 784 | ] 785 | }, 786 | { 787 | "cell_type": "markdown", 788 | "metadata": {}, 789 | "source": [ 790 | "# Static Computational Graph" 791 | ] 792 | }, 793 | { 794 | "cell_type": "code", 795 | "execution_count": 13, 796 | "metadata": {}, 797 | "outputs": [ 798 | { 799 | "name": "stdout", 800 | "output_type": "stream", 801 | "text": [ 802 | "Eager Execution: False\n", 803 | "WARNING:tensorflow:From /home/inoryy/anaconda3/envs/tf2/lib/python3.6/site-packages/tensorflow/python/ops/init_ops.py:1253: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 804 | "Instructions for updating:\n", 805 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n" 806 | ] 807 | }, 808 | { 809 | "name": "stderr", 810 | "output_type": "stream", 811 | "text": [ 812 | "WARNING:tensorflow:From /home/inoryy/anaconda3/envs/tf2/lib/python3.6/site-packages/tensorflow/python/ops/init_ops.py:1253: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 813 | "Instructions for updating:\n", 814 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 815 | "INFO:root:Episode: 001, Reward: 020\n" 816 | ] 817 | }, 818 | { 819 | "name": "stdout", 820 | "output_type": "stream", 821 | "text": [ 822 | "WARNING:tensorflow:From /home/inoryy/anaconda3/envs/tf2/lib/python3.6/site-packages/tensorflow/python/keras/engine/base_layer_utils.py:123: calling Zeros.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 823 | "Instructions for updating:\n", 824 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n" 825 | ] 826 | }, 827 | { 828 | "name": "stderr", 829 | "output_type": "stream", 830 | "text": [ 831 | "WARNING:tensorflow:From /home/inoryy/anaconda3/envs/tf2/lib/python3.6/site-packages/tensorflow/python/keras/engine/base_layer_utils.py:123: calling Zeros.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 832 | "Instructions for updating:\n", 833 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 834 | "INFO:root:Episode: 002, Reward: 027\n", 835 | "INFO:root:Episode: 003, Reward: 032\n", 836 | "INFO:root:Episode: 004, Reward: 017\n", 837 | "INFO:root:Episode: 005, Reward: 012\n", 838 | "INFO:root:Episode: 006, Reward: 015\n", 839 | "INFO:root:Episode: 007, Reward: 025\n", 840 | "INFO:root:Episode: 008, Reward: 010\n", 841 | "INFO:root:Episode: 009, Reward: 026\n", 842 | "INFO:root:Episode: 010, Reward: 014\n", 843 | "INFO:root:Episode: 011, Reward: 054\n", 844 | "INFO:root:Episode: 012, Reward: 012\n", 845 | "INFO:root:Episode: 013, Reward: 038\n", 846 | "INFO:root:Episode: 014, Reward: 024\n", 847 | "INFO:root:Episode: 015, Reward: 022\n", 848 | "INFO:root:Episode: 016, Reward: 034\n", 849 | "INFO:root:Episode: 017, Reward: 072\n", 850 | "INFO:root:Episode: 018, Reward: 022\n", 851 | "INFO:root:Episode: 019, Reward: 029\n", 852 | "INFO:root:Episode: 
020, Reward: 020\n", 853 | "INFO:root:Episode: 021, Reward: 017\n", 854 | "INFO:root:Episode: 022, Reward: 013\n", 855 | "INFO:root:Episode: 023, Reward: 067\n", 856 | "INFO:root:Episode: 024, Reward: 100\n", 857 | "INFO:root:Episode: 025, Reward: 056\n", 858 | "INFO:root:Episode: 026, Reward: 096\n", 859 | "INFO:root:Episode: 027, Reward: 019\n", 860 | "INFO:root:Episode: 028, Reward: 016\n", 861 | "INFO:root:Episode: 029, Reward: 023\n", 862 | "INFO:root:Episode: 030, Reward: 013\n", 863 | "INFO:root:Episode: 031, Reward: 020\n", 864 | "INFO:root:Episode: 032, Reward: 023\n", 865 | "INFO:root:Episode: 033, Reward: 026\n", 866 | "INFO:root:Episode: 034, Reward: 070\n", 867 | "INFO:root:Episode: 035, Reward: 033\n", 868 | "INFO:root:Episode: 036, Reward: 028\n", 869 | "INFO:root:Episode: 037, Reward: 059\n", 870 | "INFO:root:Episode: 038, Reward: 047\n", 871 | "INFO:root:Episode: 039, Reward: 026\n", 872 | "INFO:root:Episode: 040, Reward: 028\n", 873 | "INFO:root:Episode: 041, Reward: 034\n", 874 | "INFO:root:Episode: 042, Reward: 065\n", 875 | "INFO:root:Episode: 043, Reward: 014\n", 876 | "INFO:root:Episode: 044, Reward: 028\n", 877 | "INFO:root:Episode: 045, Reward: 018\n", 878 | "INFO:root:Episode: 046, Reward: 011\n", 879 | "INFO:root:Episode: 047, Reward: 011\n", 880 | "INFO:root:Episode: 048, Reward: 020\n", 881 | "INFO:root:Episode: 049, Reward: 015\n", 882 | "INFO:root:Episode: 050, Reward: 031\n", 883 | "INFO:root:Episode: 051, Reward: 017\n", 884 | "INFO:root:Episode: 052, Reward: 025\n", 885 | "INFO:root:Episode: 053, Reward: 027\n", 886 | "INFO:root:Episode: 054, Reward: 026\n", 887 | "INFO:root:Episode: 055, Reward: 060\n", 888 | "INFO:root:Episode: 056, Reward: 020\n", 889 | "INFO:root:Episode: 057, Reward: 056\n", 890 | "INFO:root:Episode: 058, Reward: 051\n", 891 | "INFO:root:Episode: 059, Reward: 036\n", 892 | "INFO:root:Episode: 060, Reward: 022\n", 893 | "INFO:root:Episode: 061, Reward: 013\n", 894 | "INFO:root:Episode: 062, Reward: 026\n", 895 | "INFO:root:Episode: 063, Reward: 030\n", 896 | "INFO:root:Episode: 064, Reward: 019\n", 897 | "INFO:root:Episode: 065, Reward: 044\n", 898 | "INFO:root:Episode: 066, Reward: 078\n", 899 | "INFO:root:Episode: 067, Reward: 047\n", 900 | "INFO:root:Episode: 068, Reward: 019\n", 901 | "INFO:root:Episode: 069, Reward: 020\n", 902 | "INFO:root:Episode: 070, Reward: 066\n", 903 | "INFO:root:Episode: 071, Reward: 026\n", 904 | "INFO:root:Episode: 072, Reward: 037\n", 905 | "INFO:root:Episode: 073, Reward: 037\n", 906 | "INFO:root:Episode: 074, Reward: 023\n", 907 | "INFO:root:Episode: 075, Reward: 010\n", 908 | "INFO:root:Episode: 076, Reward: 039\n", 909 | "INFO:root:Episode: 077, Reward: 033\n", 910 | "INFO:root:Episode: 078, Reward: 063\n", 911 | "INFO:root:Episode: 079, Reward: 016\n", 912 | "INFO:root:Episode: 080, Reward: 053\n", 913 | "INFO:root:Episode: 081, Reward: 037\n", 914 | "INFO:root:Episode: 082, Reward: 035\n", 915 | "INFO:root:Episode: 083, Reward: 054\n", 916 | "INFO:root:Episode: 084, Reward: 014\n", 917 | "INFO:root:Episode: 085, Reward: 061\n", 918 | "INFO:root:Episode: 086, Reward: 012\n", 919 | "INFO:root:Episode: 087, Reward: 040\n", 920 | "INFO:root:Episode: 088, Reward: 059\n", 921 | "INFO:root:Episode: 089, Reward: 031\n", 922 | "INFO:root:Episode: 090, Reward: 114\n", 923 | "INFO:root:Episode: 091, Reward: 017\n", 924 | "INFO:root:Episode: 092, Reward: 023\n", 925 | "INFO:root:Episode: 093, Reward: 042\n", 926 | "INFO:root:Episode: 094, Reward: 025\n", 927 | "INFO:root:Episode: 095, Reward: 027\n", 928 | 
"INFO:root:Episode: 096, Reward: 013\n", 929 | "INFO:root:Episode: 097, Reward: 051\n", 930 | "INFO:root:Episode: 098, Reward: 048\n", 931 | "INFO:root:Episode: 099, Reward: 071\n", 932 | "INFO:root:Episode: 100, Reward: 034\n", 933 | "INFO:root:Episode: 101, Reward: 032\n", 934 | "INFO:root:Episode: 102, Reward: 045\n", 935 | "INFO:root:Episode: 103, Reward: 096\n", 936 | "INFO:root:Episode: 104, Reward: 030\n", 937 | "INFO:root:Episode: 105, Reward: 071\n", 938 | "INFO:root:Episode: 106, Reward: 048\n", 939 | "INFO:root:Episode: 107, Reward: 037\n", 940 | "INFO:root:Episode: 108, Reward: 027\n", 941 | "INFO:root:Episode: 109, Reward: 024\n", 942 | "INFO:root:Episode: 110, Reward: 036\n", 943 | "INFO:root:Episode: 111, Reward: 080\n", 944 | "INFO:root:Episode: 112, Reward: 037\n", 945 | "INFO:root:Episode: 113, Reward: 048\n", 946 | "INFO:root:Episode: 114, Reward: 024\n", 947 | "INFO:root:Episode: 115, Reward: 042\n", 948 | "INFO:root:Episode: 116, Reward: 057\n", 949 | "INFO:root:Episode: 117, Reward: 104\n", 950 | "INFO:root:Episode: 118, Reward: 017\n", 951 | "INFO:root:Episode: 119, Reward: 020\n", 952 | "INFO:root:Episode: 120, Reward: 029\n", 953 | "INFO:root:Episode: 121, Reward: 041\n", 954 | "INFO:root:Episode: 122, Reward: 070\n", 955 | "INFO:root:Episode: 123, Reward: 049\n", 956 | "INFO:root:Episode: 124, Reward: 029\n", 957 | "INFO:root:Episode: 125, Reward: 029\n", 958 | "INFO:root:Episode: 126, Reward: 030\n", 959 | "INFO:root:Episode: 127, Reward: 065\n", 960 | "INFO:root:Episode: 128, Reward: 024\n", 961 | "INFO:root:Episode: 129, Reward: 018\n", 962 | "INFO:root:Episode: 130, Reward: 062\n", 963 | "INFO:root:Episode: 131, Reward: 033\n", 964 | "INFO:root:Episode: 132, Reward: 020\n", 965 | "INFO:root:Episode: 133, Reward: 050\n", 966 | "INFO:root:Episode: 134, Reward: 029\n", 967 | "INFO:root:Episode: 135, Reward: 016\n", 968 | "INFO:root:Episode: 136, Reward: 056\n", 969 | "INFO:root:Episode: 137, Reward: 026\n", 970 | "INFO:root:Episode: 138, Reward: 025\n", 971 | "INFO:root:Episode: 139, Reward: 047\n", 972 | "INFO:root:Episode: 140, Reward: 038\n", 973 | "INFO:root:Episode: 141, Reward: 033\n", 974 | "INFO:root:Episode: 142, Reward: 017\n", 975 | "INFO:root:Episode: 143, Reward: 068\n", 976 | "INFO:root:Episode: 144, Reward: 023\n", 977 | "INFO:root:Episode: 145, Reward: 168\n", 978 | "INFO:root:Episode: 146, Reward: 046\n", 979 | "INFO:root:Episode: 147, Reward: 044\n", 980 | "INFO:root:Episode: 148, Reward: 022\n", 981 | "INFO:root:Episode: 149, Reward: 026\n", 982 | "INFO:root:Episode: 150, Reward: 037\n", 983 | "INFO:root:Episode: 151, Reward: 091\n", 984 | "INFO:root:Episode: 152, Reward: 025\n", 985 | "INFO:root:Episode: 153, Reward: 038\n", 986 | "INFO:root:Episode: 154, Reward: 039\n", 987 | "INFO:root:Episode: 155, Reward: 047\n", 988 | "INFO:root:Episode: 156, Reward: 025\n", 989 | "INFO:root:Episode: 157, Reward: 047\n", 990 | "INFO:root:Episode: 158, Reward: 013\n", 991 | "INFO:root:Episode: 159, Reward: 069\n", 992 | "INFO:root:Episode: 160, Reward: 019\n", 993 | "INFO:root:Episode: 161, Reward: 035\n", 994 | "INFO:root:Episode: 162, Reward: 039\n", 995 | "INFO:root:Episode: 163, Reward: 028\n", 996 | "INFO:root:Episode: 164, Reward: 021\n", 997 | "INFO:root:Episode: 165, Reward: 049\n", 998 | "INFO:root:Episode: 166, Reward: 119\n", 999 | "INFO:root:Episode: 167, Reward: 043\n", 1000 | "INFO:root:Episode: 168, Reward: 067\n", 1001 | "INFO:root:Episode: 169, Reward: 124\n", 1002 | "INFO:root:Episode: 170, Reward: 021\n", 1003 | "INFO:root:Episode: 171, 
Reward: 049\n", 1004 | "INFO:root:Episode: 172, Reward: 051\n", 1005 | "INFO:root:Episode: 173, Reward: 088\n", 1006 | "INFO:root:Episode: 174, Reward: 056\n", 1007 | "INFO:root:Episode: 175, Reward: 144\n", 1008 | "INFO:root:Episode: 176, Reward: 085\n", 1009 | "INFO:root:Episode: 177, Reward: 116\n", 1010 | "INFO:root:Episode: 178, Reward: 090\n", 1011 | "INFO:root:Episode: 179, Reward: 020\n", 1012 | "INFO:root:Episode: 180, Reward: 038\n", 1013 | "INFO:root:Episode: 181, Reward: 127\n", 1014 | "INFO:root:Episode: 182, Reward: 037\n", 1015 | "INFO:root:Episode: 183, Reward: 053\n", 1016 | "INFO:root:Episode: 184, Reward: 059\n", 1017 | "INFO:root:Episode: 185, Reward: 022\n", 1018 | "INFO:root:Episode: 186, Reward: 068\n", 1019 | "INFO:root:Episode: 187, Reward: 033\n", 1020 | "INFO:root:Episode: 188, Reward: 072\n", 1021 | "INFO:root:Episode: 189, Reward: 077\n", 1022 | "INFO:root:Episode: 190, Reward: 041\n", 1023 | "INFO:root:Episode: 191, Reward: 038\n", 1024 | "INFO:root:Episode: 192, Reward: 074\n", 1025 | "INFO:root:Episode: 193, Reward: 028\n", 1026 | "INFO:root:Episode: 194, Reward: 027\n", 1027 | "INFO:root:Episode: 195, Reward: 036\n", 1028 | "INFO:root:Episode: 196, Reward: 040\n", 1029 | "INFO:root:Episode: 197, Reward: 028\n", 1030 | "INFO:root:Episode: 198, Reward: 030\n", 1031 | "INFO:root:Episode: 199, Reward: 034\n", 1032 | "INFO:root:Episode: 200, Reward: 044\n", 1033 | "INFO:root:Episode: 201, Reward: 113\n", 1034 | "INFO:root:Episode: 202, Reward: 089\n", 1035 | "INFO:root:Episode: 203, Reward: 147\n", 1036 | "INFO:root:Episode: 204, Reward: 077\n", 1037 | "INFO:root:Episode: 205, Reward: 056\n", 1038 | "INFO:root:Episode: 206, Reward: 024\n", 1039 | "INFO:root:Episode: 207, Reward: 091\n", 1040 | "INFO:root:Episode: 208, Reward: 033\n", 1041 | "INFO:root:Episode: 209, Reward: 078\n", 1042 | "INFO:root:Episode: 210, Reward: 044\n", 1043 | "INFO:root:Episode: 211, Reward: 110\n", 1044 | "INFO:root:Episode: 212, Reward: 163\n", 1045 | "INFO:root:Episode: 213, Reward: 053\n", 1046 | "INFO:root:Episode: 214, Reward: 102\n", 1047 | "INFO:root:Episode: 215, Reward: 136\n", 1048 | "INFO:root:Episode: 216, Reward: 128\n", 1049 | "INFO:root:Episode: 217, Reward: 066\n", 1050 | "INFO:root:Episode: 218, Reward: 034\n" 1051 | ] 1052 | }, 1053 | { 1054 | "name": "stderr", 1055 | "output_type": "stream", 1056 | "text": [ 1057 | "INFO:root:Episode: 219, Reward: 069\n", 1058 | "INFO:root:Episode: 220, Reward: 164\n", 1059 | "INFO:root:Episode: 221, Reward: 054\n", 1060 | "INFO:root:Episode: 222, Reward: 061\n", 1061 | "INFO:root:Episode: 223, Reward: 090\n", 1062 | "INFO:root:Episode: 224, Reward: 109\n", 1063 | "INFO:root:Episode: 225, Reward: 161\n", 1064 | "INFO:root:Episode: 226, Reward: 200\n", 1065 | "INFO:root:Episode: 227, Reward: 062\n", 1066 | "INFO:root:Episode: 228, Reward: 059\n", 1067 | "INFO:root:Episode: 229, Reward: 102\n", 1068 | "INFO:root:Episode: 230, Reward: 181\n", 1069 | "INFO:root:Episode: 231, Reward: 031\n", 1070 | "INFO:root:Episode: 232, Reward: 107\n", 1071 | "INFO:root:Episode: 233, Reward: 037\n", 1072 | "INFO:root:Episode: 234, Reward: 113\n", 1073 | "INFO:root:Episode: 235, Reward: 102\n", 1074 | "INFO:root:Episode: 236, Reward: 029\n", 1075 | "INFO:root:Episode: 237, Reward: 023\n", 1076 | "INFO:root:Episode: 238, Reward: 145\n", 1077 | "INFO:root:Episode: 239, Reward: 062\n", 1078 | "INFO:root:Episode: 240, Reward: 068\n", 1079 | "INFO:root:Episode: 241, Reward: 157\n", 1080 | "INFO:root:Episode: 242, Reward: 073\n", 1081 | "INFO:root:Episode: 
243, Reward: 077\n", 1082 | "INFO:root:Episode: 244, Reward: 146\n", 1083 | "INFO:root:Episode: 245, Reward: 067\n", 1084 | "INFO:root:Episode: 246, Reward: 130\n", 1085 | "INFO:root:Episode: 247, Reward: 080\n", 1086 | "INFO:root:Episode: 248, Reward: 034\n", 1087 | "INFO:root:Episode: 249, Reward: 188\n", 1088 | "INFO:root:Episode: 250, Reward: 142\n", 1089 | "INFO:root:Episode: 251, Reward: 186\n", 1090 | "INFO:root:Episode: 252, Reward: 049\n", 1091 | "INFO:root:Episode: 253, Reward: 048\n", 1092 | "INFO:root:Episode: 254, Reward: 056\n", 1093 | "INFO:root:Episode: 255, Reward: 061\n", 1094 | "INFO:root:Episode: 256, Reward: 138\n", 1095 | "INFO:root:Episode: 257, Reward: 076\n", 1096 | "INFO:root:Episode: 258, Reward: 125\n", 1097 | "INFO:root:Episode: 259, Reward: 161\n", 1098 | "INFO:root:Episode: 260, Reward: 053\n", 1099 | "INFO:root:Episode: 261, Reward: 045\n", 1100 | "INFO:root:Episode: 262, Reward: 141\n", 1101 | "INFO:root:Episode: 263, Reward: 050\n", 1102 | "INFO:root:Episode: 264, Reward: 089\n", 1103 | "INFO:root:Episode: 265, Reward: 123\n", 1104 | "INFO:root:Episode: 266, Reward: 082\n", 1105 | "INFO:root:Episode: 267, Reward: 064\n", 1106 | "INFO:root:Episode: 268, Reward: 088\n", 1107 | "INFO:root:Episode: 269, Reward: 189\n", 1108 | "INFO:root:Episode: 270, Reward: 081\n", 1109 | "INFO:root:Episode: 271, Reward: 041\n", 1110 | "INFO:root:Episode: 272, Reward: 140\n", 1111 | "INFO:root:Episode: 273, Reward: 107\n", 1112 | "INFO:root:Episode: 274, Reward: 105\n", 1113 | "INFO:root:Episode: 275, Reward: 174\n", 1114 | "INFO:root:Episode: 276, Reward: 112\n", 1115 | "INFO:root:Episode: 277, Reward: 080\n", 1116 | "INFO:root:Episode: 278, Reward: 195\n", 1117 | "INFO:root:Episode: 279, Reward: 186\n", 1118 | "INFO:root:Episode: 280, Reward: 036\n", 1119 | "INFO:root:Episode: 281, Reward: 087\n", 1120 | "INFO:root:Episode: 282, Reward: 133\n", 1121 | "INFO:root:Episode: 283, Reward: 037\n", 1122 | "INFO:root:Episode: 284, Reward: 114\n", 1123 | "INFO:root:Episode: 285, Reward: 065\n", 1124 | "INFO:root:Episode: 286, Reward: 031\n", 1125 | "INFO:root:Episode: 287, Reward: 071\n", 1126 | "INFO:root:Episode: 288, Reward: 168\n", 1127 | "INFO:root:Episode: 289, Reward: 121\n", 1128 | "INFO:root:Episode: 290, Reward: 200\n", 1129 | "INFO:root:Episode: 291, Reward: 046\n", 1130 | "INFO:root:Episode: 292, Reward: 048\n", 1131 | "INFO:root:Episode: 293, Reward: 100\n", 1132 | "INFO:root:Episode: 294, Reward: 088\n", 1133 | "INFO:root:Episode: 295, Reward: 158\n", 1134 | "INFO:root:Episode: 296, Reward: 151\n", 1135 | "INFO:root:Episode: 297, Reward: 037\n", 1136 | "INFO:root:Episode: 298, Reward: 136\n", 1137 | "INFO:root:Episode: 299, Reward: 096\n", 1138 | "INFO:root:Episode: 300, Reward: 047\n", 1139 | "INFO:root:Episode: 301, Reward: 121\n", 1140 | "INFO:root:Episode: 302, Reward: 041\n", 1141 | "INFO:root:Episode: 303, Reward: 128\n", 1142 | "INFO:root:Episode: 304, Reward: 163\n", 1143 | "INFO:root:Episode: 305, Reward: 181\n", 1144 | "INFO:root:Episode: 306, Reward: 104\n", 1145 | "INFO:root:Episode: 307, Reward: 121\n", 1146 | "INFO:root:Episode: 308, Reward: 142\n", 1147 | "INFO:root:Episode: 309, Reward: 200\n", 1148 | "INFO:root:Episode: 310, Reward: 200\n", 1149 | "INFO:root:Episode: 311, Reward: 198\n", 1150 | "INFO:root:Episode: 312, Reward: 181\n", 1151 | "INFO:root:Episode: 313, Reward: 062\n", 1152 | "INFO:root:Episode: 314, Reward: 159\n", 1153 | "INFO:root:Episode: 315, Reward: 123\n", 1154 | "INFO:root:Episode: 316, Reward: 097\n", 1155 | "INFO:root:Episode: 
317, Reward: 200\n", 1156 | "INFO:root:Episode: 318, Reward: 080\n", 1157 | "INFO:root:Episode: 319, Reward: 070\n", 1158 | "INFO:root:Episode: 320, Reward: 200\n", 1159 | "INFO:root:Episode: 321, Reward: 095\n", 1160 | "INFO:root:Episode: 322, Reward: 142\n", 1161 | "INFO:root:Episode: 323, Reward: 138\n", 1162 | "INFO:root:Episode: 324, Reward: 141\n", 1163 | "INFO:root:Episode: 325, Reward: 175\n", 1164 | "INFO:root:Episode: 326, Reward: 092\n", 1165 | "INFO:root:Episode: 327, Reward: 124\n", 1166 | "INFO:root:Episode: 328, Reward: 200\n", 1167 | "INFO:root:Episode: 329, Reward: 113\n", 1168 | "INFO:root:Episode: 330, Reward: 191\n", 1169 | "INFO:root:Episode: 331, Reward: 177\n", 1170 | "INFO:root:Episode: 332, Reward: 200\n", 1171 | "INFO:root:Episode: 333, Reward: 074\n", 1172 | "INFO:root:Episode: 334, Reward: 034\n", 1173 | "INFO:root:Episode: 335, Reward: 159\n", 1174 | "INFO:root:Episode: 336, Reward: 127\n", 1175 | "INFO:root:Episode: 337, Reward: 200\n", 1176 | "INFO:root:Episode: 338, Reward: 140\n", 1177 | "INFO:root:Episode: 339, Reward: 135\n", 1178 | "INFO:root:Episode: 340, Reward: 200\n", 1179 | "INFO:root:Episode: 341, Reward: 200\n", 1180 | "INFO:root:Episode: 342, Reward: 177\n", 1181 | "INFO:root:Episode: 343, Reward: 187\n", 1182 | "INFO:root:Episode: 344, Reward: 149\n", 1183 | "INFO:root:Episode: 345, Reward: 191\n", 1184 | "INFO:root:Episode: 346, Reward: 155\n", 1185 | "INFO:root:Episode: 347, Reward: 157\n", 1186 | "INFO:root:Episode: 348, Reward: 164\n", 1187 | "INFO:root:Episode: 349, Reward: 158\n", 1188 | "INFO:root:Episode: 350, Reward: 200\n", 1189 | "INFO:root:Episode: 351, Reward: 138\n", 1190 | "INFO:root:Episode: 352, Reward: 144\n", 1191 | "INFO:root:Episode: 353, Reward: 147\n", 1192 | "INFO:root:Episode: 354, Reward: 200\n", 1193 | "INFO:root:Episode: 355, Reward: 145\n", 1194 | "INFO:root:Episode: 356, Reward: 150\n", 1195 | "INFO:root:Episode: 357, Reward: 062\n", 1196 | "INFO:root:Episode: 358, Reward: 149\n", 1197 | "INFO:root:Episode: 359, Reward: 187\n", 1198 | "INFO:root:Episode: 360, Reward: 164\n", 1199 | "INFO:root:Episode: 361, Reward: 144\n", 1200 | "INFO:root:Episode: 362, Reward: 200\n", 1201 | "INFO:root:Episode: 363, Reward: 200\n", 1202 | "INFO:root:Episode: 364, Reward: 104\n", 1203 | "INFO:root:Episode: 365, Reward: 127\n", 1204 | "INFO:root:Episode: 366, Reward: 200\n", 1205 | "INFO:root:Episode: 367, Reward: 111\n", 1206 | "INFO:root:Episode: 368, Reward: 200\n", 1207 | "INFO:root:Episode: 369, Reward: 200\n", 1208 | "INFO:root:Episode: 370, Reward: 124\n", 1209 | "INFO:root:Episode: 371, Reward: 200\n", 1210 | "INFO:root:Episode: 372, Reward: 200\n", 1211 | "INFO:root:Episode: 373, Reward: 178\n", 1212 | "INFO:root:Episode: 374, Reward: 200\n", 1213 | "INFO:root:Episode: 375, Reward: 200\n", 1214 | "INFO:root:Episode: 376, Reward: 057\n", 1215 | "INFO:root:Episode: 377, Reward: 166\n", 1216 | "INFO:root:Episode: 378, Reward: 118\n", 1217 | "INFO:root:Episode: 379, Reward: 200\n", 1218 | "INFO:root:Episode: 380, Reward: 082\n", 1219 | "INFO:root:Episode: 381, Reward: 118\n", 1220 | "INFO:root:Episode: 382, Reward: 058\n", 1221 | "INFO:root:Episode: 383, Reward: 200\n", 1222 | "INFO:root:Episode: 384, Reward: 171\n", 1223 | "INFO:root:Episode: 385, Reward: 113\n", 1224 | "INFO:root:Episode: 386, Reward: 169\n", 1225 | "INFO:root:Episode: 387, Reward: 103\n", 1226 | "INFO:root:Episode: 388, Reward: 141\n", 1227 | "INFO:root:Episode: 389, Reward: 191\n", 1228 | "INFO:root:Episode: 390, Reward: 200\n", 1229 | "INFO:root:Episode: 
391, Reward: 171\n", 1230 | "INFO:root:Episode: 392, Reward: 052\n", 1231 | "INFO:root:Episode: 393, Reward: 171\n" 1232 | ] 1233 | }, 1234 | { 1235 | "name": "stdout", 1236 | "output_type": "stream", 1237 | "text": [ 1238 | "Finished training, testing...\n", 1239 | "Total Episode Reward: 200 out of 200\n" 1240 | ] 1241 | } 1242 | ], 1243 | "source": [ 1244 | "with tf.Graph().as_default():\n", 1245 | " print(\"Eager Execution:\", tf.executing_eagerly()) # False\n", 1246 | "\n", 1247 | " model = Model(num_actions=env.action_space.n)\n", 1248 | " agent = A2CAgent(model)\n", 1249 | "\n", 1250 | " rewards_history = agent.train(env)\n", 1251 | " print(\"Finished training, testing...\")\n", 1252 | " print(\"Total Episode Reward: %d out of 200\" % agent.test(env))" 1253 | ] 1254 | }, 1255 | { 1256 | "cell_type": "markdown", 1257 | "metadata": {}, 1258 | "source": [ 1259 | "# Benchmarks" 1260 | ] 1261 | }, 1262 | { 1263 | "cell_type": "code", 1264 | "execution_count": 18, 1265 | "metadata": {}, 1266 | "outputs": [], 1267 | "source": [ 1268 | "# Note: comparing wall time isn't exactly fair due to specifics of how things are executed on multi-core CPU" 1269 | ] 1270 | }, 1271 | { 1272 | "cell_type": "code", 1273 | "execution_count": 14, 1274 | "metadata": {}, 1275 | "outputs": [], 1276 | "source": [ 1277 | "env = gym.make('CartPole-v0')\n", 1278 | "obs = np.repeat(env.reset()[None, :], 100000, axis=0)" 1279 | ] 1280 | }, 1281 | { 1282 | "cell_type": "markdown", 1283 | "metadata": {}, 1284 | "source": [ 1285 | "## Eager Benchmark" 1286 | ] 1287 | }, 1288 | { 1289 | "cell_type": "code", 1290 | "execution_count": 15, 1291 | "metadata": {}, 1292 | "outputs": [ 1293 | { 1294 | "name": "stdout", 1295 | "output_type": "stream", 1296 | "text": [ 1297 | "Eager Execution: True\n", 1298 | "Eager Keras Model: True\n", 1299 | "CPU times: user 639 ms, sys: 736 ms, total: 1.38 s\n", 1300 | "Wall time: 116 ms\n" 1301 | ] 1302 | } 1303 | ], 1304 | "source": [ 1305 | "%%time\n", 1306 | "\n", 1307 | "model = Model(env.action_space.n)\n", 1308 | "model.run_eagerly = True\n", 1309 | "\n", 1310 | "print(\"Eager Execution: \", tf.executing_eagerly())\n", 1311 | "print(\"Eager Keras Model:\", model.run_eagerly)\n", 1312 | "\n", 1313 | "_ = model(obs)\n", 1314 | "# _ = model.predict(obs)" 1315 | ] 1316 | }, 1317 | { 1318 | "cell_type": "markdown", 1319 | "metadata": {}, 1320 | "source": [ 1321 | "## Static Benchmark" 1322 | ] 1323 | }, 1324 | { 1325 | "cell_type": "code", 1326 | "execution_count": 16, 1327 | "metadata": {}, 1328 | "outputs": [ 1329 | { 1330 | "name": "stdout", 1331 | "output_type": "stream", 1332 | "text": [ 1333 | "Eager Execution: False\n", 1334 | "Eager Keras Model: False\n", 1335 | "CPU times: user 793 ms, sys: 79.7 ms, total: 873 ms\n", 1336 | "Wall time: 656 ms\n" 1337 | ] 1338 | } 1339 | ], 1340 | "source": [ 1341 | "%%time\n", 1342 | "\n", 1343 | "with tf.Graph().as_default():\n", 1344 | " model = Model(env.action_space.n)\n", 1345 | "\n", 1346 | " print(\"Eager Execution: \", tf.executing_eagerly())\n", 1347 | " print(\"Eager Keras Model:\", model.run_eagerly)\n", 1348 | "\n", 1349 | " _ = model.predict(obs)" 1350 | ] 1351 | }, 1352 | { 1353 | "cell_type": "markdown", 1354 | "metadata": {}, 1355 | "source": [ 1356 | "## Default Benchmark" 1357 | ] 1358 | }, 1359 | { 1360 | "cell_type": "code", 1361 | "execution_count": 17, 1362 | "metadata": {}, 1363 | "outputs": [ 1364 | { 1365 | "name": "stdout", 1366 | "output_type": "stream", 1367 | "text": [ 1368 | "Eager Execution: True\n", 1369 | "Eager 
Keras Model: False\n", 1370 | "CPU times: user 994 ms, sys: 23.1 ms, total: 1.02 s\n", 1371 | "Wall time: 769 ms\n" 1372 | ] 1373 | } 1374 | ], 1375 | "source": [ 1376 | "%%time\n", 1377 | "\n", 1378 | "model = Model(env.action_space.n)\n", 1379 | "\n", 1380 | "print(\"Eager Execution: \", tf.executing_eagerly())\n", 1381 | "print(\"Eager Keras Model:\", model.run_eagerly)\n", 1382 | "\n", 1383 | "_ = model.predict(obs)" 1384 | ] 1385 | } 1386 | ], 1387 | "metadata": { 1388 | "kernelspec": { 1389 | "display_name": "Python 3", 1390 | "language": "python", 1391 | "name": "python3" 1392 | }, 1393 | "language_info": { 1394 | "codemirror_mode": { 1395 | "name": "ipython", 1396 | "version": 3 1397 | }, 1398 | "file_extension": ".py", 1399 | "mimetype": "text/x-python", 1400 | "name": "python", 1401 | "nbconvert_exporter": "python", 1402 | "pygments_lexer": "ipython3", 1403 | "version": "3.6.8" 1404 | } 1405 | }, 1406 | "nbformat": 4, 1407 | "nbformat_minor": 2 1408 | } 1409 | --------------------------------------------------------------------------------