├── .gitignore ├── utils.py ├── normalize_env.py ├── random_process.py ├── models.py ├── shared_adam.py ├── ddpg.py ├── main.py ├── replay_memory.py └── plotUtil.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .idea 3 | .idea/* 4 | 5 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Variable 3 | 4 | def to_numpy(var): 5 | return var.data.numpy() # for CUDA this would be: var.cpu().data.numpy() if use_cuda else var.data.numpy() 6 | 7 | def to_tensor(x, volatile=False, requires_grad=True, dtype=torch.FloatTensor): 8 | x = torch.from_numpy(x).float() 9 | x = Variable(x, volatile=volatile, requires_grad=requires_grad and not volatile).type(dtype) # fix: volatile was accepted but ignored; in pre-0.4 PyTorch it is mutually exclusive with requires_grad, so gradients are disabled when volatile is set 10 | return x 11 | 12 | def weightSync(target_model, source_model, tau=0.001): 13 | for parameter_target, parameter_source in zip(target_model.parameters(), source_model.parameters()): 14 | parameter_target.data.copy_((1 - tau) * parameter_target.data + tau * parameter_source.data) 15 | -------------------------------------------------------------------------------- /normalize_env.py: -------------------------------------------------------------------------------- 1 | import gym 2 | 3 | class NormalizeAction(gym.ActionWrapper): 4 | def _action(self, action): 5 | # tanh outputs lie in (-1, 1); rescale them to [action_space.low, action_space.high] 6 | act_k = (self.action_space.high - self.action_space.low)/ 2. 7 | act_b = (self.action_space.high + self.action_space.low)/ 2. 8 | return act_k * action + act_b 9 | 10 | def _reverse_action(self, action): 11 | # inverse of the map above 12 | act_k_inv = 2./(self.action_space.high - self.action_space.low) 13 | act_b = (self.action_space.high + self.action_space.low)/ 2. 14 | return act_k_inv * (action - act_b) 15 |
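# --- Added editorial note (illustrative; not part of the original file) ---
# The wrapper applies the affine map a_env = k * a + b with k = (high - low) / 2
# and b = (high + low) / 2, so tanh outputs in (-1, 1) land inside [low, high].
# A minimal worked example, assuming Pendulum-v0's torque bounds low = -2.0,
# high = 2.0:
#   act_k = (2.0 - (-2.0)) / 2.0   # -> 2.0
#   act_b = (2.0 + (-2.0)) / 2.0   # -> 0.0
#   act_k * 0.5 + act_b            # -> 1.0: a policy output of 0.5 becomes torque 1.0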
-------------------------------------------------------------------------------- /random_process.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | class GaussianNoise(object): 5 | def __init__(self, dimension, num_epochs, mu=0.0, var=0.3): 6 | self.mu = mu 7 | self.var = var 8 | self.dimension = dimension 9 | self.epochs = 0 10 | self.num_epochs = num_epochs 11 | self.min_epsilon = 0.01 # minimum exploration probability 12 | self.epsilon = 1.0 13 | self.decay_rate = 5.0/num_epochs # exponential decay rate for exploration prob 14 | self.iter = 0 15 | 16 | def sample(self): 17 | x = np.random.normal(self.mu, np.sqrt(self.epsilon*self.var), size=self.dimension) 18 | return x 19 | 20 | def reset(self): 21 | self.iter += 1 # bug fix: the counter was never advanced, so epsilon stayed at 1.0 forever (compare OrnsteinUhlenbeckProcess.reset below) 22 | self.epsilon = self.min_epsilon + (1.0 - self.min_epsilon)*np.exp(-self.decay_rate*self.iter) 23 | 24 | class OrnsteinUhlenbeckProcess(object): 25 | def __init__(self, dimension, num_steps, theta=0.25, mu=0.0, sigma=0.05, dt=0.01): 26 | self.theta = theta 27 | self.mu = mu 28 | self.sigma = sigma 29 | self.dt = dt 30 | self.x = np.zeros((dimension,)) 31 | self.iter = 0 32 | self.num_steps = num_steps 33 | self.dimension = dimension 34 | self.min_epsilon = 0.01 # minimum exploration probability 35 | self.epsilon = 1.0 36 | self.decay_rate = 5.0/num_steps # exponential decay rate for exploration prob 37 | 38 | def sample(self): 39 | self.x = self.x + self.theta*(self.mu-self.x)*self.dt + \ 40 | self.sigma*np.sqrt(self.dt)*np.random.normal(size=self.dimension) 41 | return self.epsilon*self.x 42 | 43 | def reset(self): 44 | self.x = 0*self.x 45 | self.iter += 1 46 | self.epsilon = self.min_epsilon + (1.0 - self.min_epsilon)*np.exp(-self.decay_rate*self.iter) 47 | 48 | if __name__ == '__main__': 49 | noise = GaussianNoise(dimension=1, num_epochs=1000) 50 | y = np.zeros((1000)) 51 | for i in range(1000): 52 | y[i] = noise.sample() 53 | plt.plot(range(1000),y) 54 | plt.show()
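# --- Added editorial note (illustrative; not part of the original file) ---
# OrnsteinUhlenbeckProcess.sample() above is the Euler-Maruyama discretization
# of the OU SDE dx = theta * (mu - x) * dt + sigma * sqrt(dt) * dW: the state
# decays toward mu at rate theta while sigma injects temporally correlated
# noise, which is why DDPG commonly uses it for exploration. A minimal usage
# sketch with the classes defined above:
#   ou = OrnsteinUhlenbeckProcess(dimension=1, num_steps=1000)
#   trace = [ou.sample() for _ in range(200)]  # smooth, correlated noise samples
#   ou.reset()  # zero the state and decay epsilon before the next episode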
-------------------------------------------------------------------------------- /models.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | import torch 5 | 6 | def fanin_init(size, fanin=None): 7 | fanin = fanin or size[0] 8 | v = 1. / np.sqrt(fanin) 9 | return torch.Tensor(size).normal_(0.0, v) 10 | 11 | # ---------------------------------------------------- 12 | # actor model, MLP 13 | # ---------------------------------------------------- 14 | # 2 hidden layers, 400 units per layer, tanh output to bound outputs between -1 and 1 15 | class actor(nn.Module): 16 | def __init__(self, input_size, output_size): 17 | super(actor, self).__init__() 18 | self.fc1 = nn.Linear(input_size, 400) 19 | #self.bn1 = nn.BatchNorm1d(400) 20 | self.fc2 = nn.Linear(400, 400) 21 | #self.bn2 = nn.BatchNorm1d(400) 22 | self.fc3 = nn.Linear(400, output_size) 23 | self.init_weights() 24 | 25 | def init_weights(self, init_w=10e-3): # init_w is currently unused; the output layer uses a fixed small std below 26 | self.fc1.weight.data = fanin_init(self.fc1.weight.data.size()) 27 | self.fc2.weight.data = fanin_init(self.fc2.weight.data.size()) 28 | self.fc3.weight.data.normal_(0, 3e-3) 29 | 30 | def forward(self, state): 31 | out = self.fc1(state) 32 | #out = self.bn1(out) 33 | out = F.relu(out) 34 | out = self.fc2(out) 35 | #out = self.bn2(out) 36 | out = F.relu(out) 37 | action = F.tanh(self.fc3(out)) 38 | return action 39 | 40 | 41 | # ---------------------------------------------------- 42 | # critic model, MLP 43 | # ---------------------------------------------------- 44 | # 2 hidden layers, 300 units per layer; outputs an unbounded Q-value estimate (not a reward) 45 | # Action not to be included until 2nd layer of critic (from paper). Make sure 46 | # to formulate your critic.forward() accordingly 47 | 48 | class critic(nn.Module): 49 | def __init__(self, state_size, action_size, output_size): 50 | super(critic, self).__init__() 51 | self.fc1 = nn.Linear(state_size, 300) 52 | #self.bn1 = nn.BatchNorm1d(300) 53 | self.fc2 = nn.Linear(300 + action_size, 300) 54 | self.fc3 = nn.Linear(300, output_size) 55 | self.init_weights() 56 | 57 | def init_weights(self, init_w=10e-3): # init_w is currently unused; the output layer uses a fixed small std below 58 | self.fc1.weight.data = fanin_init(self.fc1.weight.data.size()) 59 | self.fc2.weight.data = fanin_init(self.fc2.weight.data.size()) 60 | self.fc3.weight.data.normal_(0, 3e-4) 61 | 62 | def forward(self, state, action): 63 | out = self.fc1(state) 64 | #out = self.bn1(out) 65 | out = F.relu(out) 66 | out = F.relu(self.fc2(torch.cat([out,action],1))) 67 | qvalue = self.fc3(out) 68 | return qvalue 69 | -------------------------------------------------------------------------------- /shared_adam.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | class SharedAdam(torch.optim.Adam): 4 | def __init__(self, params, lr=1e-3, betas=(0.9, 0.9), eps=1e-8, # note: beta2 = 0.9 here, not Adam's usual 0.999 5 | weight_decay=0): 6 | super(SharedAdam, self).__init__(params, lr=lr, betas=betas, eps=eps, weight_decay=weight_decay) 7 | # State initialization 8 | for group in self.param_groups: 9 | for p in group['params']: 10 | state = self.state[p] 11 | state['step'] = 0 12 | state['exp_avg'] = torch.zeros_like(p.data) 13 | state['exp_avg_sq'] = torch.zeros_like(p.data) 14 | 15 | # share in memory 16 | state['exp_avg'].share_memory_() 17 | state['exp_avg_sq'].share_memory_() 18 | 19 | # 20 | # 21 | # 22 | # import math 23 | # 24 | # import torch 25 | # import torch.optim as optim 26 | # 27 | # class SharedAdam(optim.Adam): 28 | # """Implements Adam algorithm with shared states. 
29 | # """ 30 | # 31 | # def __init__(self, 32 | # params, 33 | # lr=1e-3, 34 | # betas=(0.9, 0.999), 35 | # eps=1e-8, 36 | # weight_decay=0): 37 | # super(SharedAdam, self).__init__(params, lr, betas, eps, weight_decay) 38 | # 39 | # for group in self.param_groups: 40 | # for p in group['params']: 41 | # state = self.state[p] 42 | # state['step'] = torch.zeros(1) 43 | # state['exp_avg'] = p.data.new().resize_as_(p.data).zero_() 44 | # state['exp_avg_sq'] = p.data.new().resize_as_(p.data).zero_() 45 | # 46 | # def share_memory(self): 47 | # for group in self.param_groups: 48 | # for p in group['params']: 49 | # state = self.state[p] 50 | # state['step'].share_memory_() 51 | # state['exp_avg'].share_memory_() 52 | # state['exp_avg_sq'].share_memory_() 53 | # 54 | # def step(self, closure=None): 55 | # """Performs a single optimization step. 56 | # Arguments: 57 | # closure (callable, optional): A closure that reevaluates the model 58 | # and returns the loss. 59 | # """ 60 | # loss = None 61 | # if closure is not None: 62 | # loss = closure() 63 | # 64 | # for group in self.param_groups: 65 | # for p in group['params']: 66 | # if p.grad is None: 67 | # continue 68 | # grad = p.grad.data 69 | # state = self.state[p] 70 | # 71 | # exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] 72 | # beta1, beta2 = group['betas'] 73 | # 74 | # state['step'] += 1 75 | # 76 | # if group['weight_decay'] != 0: 77 | # grad = grad.add(group['weight_decay'], p.data) 78 | # 79 | # # Decay the first and second moment running average coefficient 80 | # exp_avg.mul_(beta1).add_(1 - beta1, grad) 81 | # exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad) 82 | # 83 | # denom = exp_avg_sq.sqrt().add_(group['eps']) 84 | # 85 | # bias_correction1 = 1 - beta1**state['step'][0] 86 | # bias_correction2 = 1 - beta2**state['step'][0] 87 | # step_size = group['lr'] * math.sqrt( 88 | # bias_correction2) / bias_correction1 89 | # 90 | # p.data.addcdiv_(-step_size, exp_avg, denom) 91 | # 92 | # return loss -------------------------------------------------------------------------------- /ddpg.py: -------------------------------------------------------------------------------- 1 | from models import actor, critic 2 | import torch 3 | import torch.optim as optim 4 | import torch.nn as nn 5 | from random_process import OrnsteinUhlenbeckProcess, GaussianNoise 6 | from utils import * 7 | from replay_memory import Replay#SequentialMemory as Replay 8 | 9 | class DDPG: 10 | def __init__(self, obs_dim, act_dim, env, memory_size=50000, batch_size=64,\ 11 | lr_critic=1e-4, lr_actor=1e-4, gamma=0.99, tau=0.001, n_steps = 1): 12 | 13 | self.gamma = gamma 14 | self.batch_size = batch_size 15 | self.obs_dim = obs_dim 16 | self.act_dim = act_dim 17 | self.memory_size = memory_size 18 | self.tau = tau 19 | self.env = env 20 | self.n_steps = n_steps 21 | self.n_step_gamma = self.gamma ** self.n_steps 22 | 23 | # actor 24 | self.actor = actor(input_size = obs_dim, output_size = act_dim) 25 | self.actor_target = actor(input_size = obs_dim, output_size = act_dim) 26 | self.actor_target.load_state_dict(self.actor.state_dict()) 27 | 28 | # critic 29 | self.critic = critic(state_size = obs_dim, action_size = act_dim, output_size=1) 30 | self.critic_target = critic(state_size = obs_dim, action_size = act_dim, output_size=1) 31 | self.critic_target.load_state_dict(self.critic.state_dict()) 32 | 33 | # optimizers 34 | self.optimizer_actor = optim.Adam(self.actor.parameters(), lr=lr_actor) 35 | self.optimizer_critic = 
optim.Adam(self.critic.parameters(), lr=lr_critic) 36 | 37 | # critic loss 38 | self.critic_loss = nn.MSELoss() 39 | 40 | # noise 41 | # self.noise = OrnsteinUhlenbeckProcess(dimension=act_dim, num_steps=5000) 42 | self.noise = GaussianNoise(dimension=act_dim, num_epochs=5000) 43 | 44 | # replay buffer 45 | #self.replayBuffer = Replay(self.memory_size, window_length=1) 46 | self.replayBuffer = Replay(self.memory_size, self.env) 47 | 48 | def hard_update(self): 49 | self.actor_target.load_state_dict(self.actor.state_dict()) 50 | self.critic_target.load_state_dict(self.critic.state_dict()) 51 | 52 | def share_memory(self): 53 | self.actor.share_memory() 54 | self.critic.share_memory() 55 | 56 | def assign_global_optimizer(self, optimizer_global_actor, optimizer_global_critic): 57 | self.optimizer_global_actor = optimizer_global_actor 58 | self.optimizer_global_critic = optimizer_global_critic 59 | 60 | def copy_gradients(self, model_local, model_global): 61 | for param_local, param_global in zip(model_local.parameters(), model_global.parameters()): 62 | if param_global.grad is not None: 63 | return # already aliased: after the first hand-off the global grads share memory with the local grads (A3C-style), so later backward passes update them in place 64 | param_global._grad = param_local.grad # alias the local gradient tensors into the global model 65 | 66 | def sync_grad_with_global_model(self, global_model): 67 | self.copy_gradients(self.actor, global_model.actor) 68 | self.copy_gradients(self.critic, global_model.critic) 69 | 70 | def update_target_parameters(self): 71 | # Soft update of actor_target 72 | for parameter_target, parameter_source in zip(self.actor_target.parameters(), self.actor.parameters()): 73 | parameter_target.data.copy_((1 - self.tau) * parameter_target.data + self.tau * parameter_source.data) 74 | # Soft update of critic_target 75 | for parameter_target, parameter_source in zip(self.critic_target.parameters(), self.critic.parameters()): 76 | parameter_target.data.copy_((1 - self.tau) * parameter_target.data + self.tau * parameter_source.data) 77 | 78 | def sync_local_global(self, global_model): 79 | self.actor.load_state_dict(global_model.actor.state_dict()) 80 | self.critic.load_state_dict(global_model.critic.state_dict()) 81 | 82 | def train(self, global_model): 83 | # sample from Replay 84 | #states, actions, rewards, next_states, terminates = self.replayBuffer.sample_and_split(self.batch_size) 85 | states, actions, rewards, next_states, terminates = self.replayBuffer.sample(self.batch_size) 86 | 87 | # update critic (create target for Q function) 88 | target_qvalues = self.critic_target(to_tensor(next_states, volatile=True),\ 89 | self.actor_target(to_tensor(next_states, volatile=True))) 90 | y = to_numpy(to_tensor(rewards) +\ 91 | self.n_step_gamma*to_tensor(1-terminates)*target_qvalues) # detached via the numpy round-trip: y is a fixed regression target 92 | 93 | q_values = self.critic(to_tensor(states), 94 | to_tensor(actions)) 95 | qvalue_loss = self.critic_loss(q_values, to_tensor(y, requires_grad=False)) 96 | 97 | 98 | # critic optimizer and backprop step (feed in target and predicted values to self.critic_loss) 99 | self.critic.zero_grad() 100 | qvalue_loss.backward() 101 | self.copy_gradients(self.critic, global_model.critic) 102 | self.optimizer_global_critic.step() 103 | 104 | # update actor (formulate the loss wrt which actor is updated) 105 | policy_loss = -self.critic(to_tensor(states),\ 106 | self.actor(to_tensor(states))) 107 | policy_loss = policy_loss.mean() 108 | 109 | # actor optimizer and backprop step (loss_actor.backward()) 110 | self.actor.zero_grad() 111 | policy_loss.backward() 112 | self.copy_gradients(self.actor, global_model.actor) 113 | self.optimizer_global_actor.step() 114 | 115 | # copy global network weights to local 116 | self.sync_local_global(global_model) 117 | 118 | # soft-update of target 119 | self.update_target_parameters()
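# --- Added editorial note (illustrative; not part of the original file) ---
# In equations, train() above fits the critic to an n-step bootstrapped target
# and then soft-updates the target networks. With n = n_steps and the stored
# reward already equal to r_t + gamma*r_{t+1} + ... + gamma^(n-1)*r_{t+n-1}
# (the callers accumulate it before the transition is stored):
#   y      = reward + gamma^n * (1 - done) * Q'(s_{t+n}, mu'(s_{t+n}))
#   theta' <- (1 - tau) * theta' + tau * theta        (Polyak averaging)
# The actor follows the deterministic policy gradient: it ascends the batch
# mean of Q(s, mu(s)), here written as descending policy_loss = -Q(s, mu(s)).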
-------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | import gym 3 | import argparse 4 | from ddpg import DDPG 5 | from utils import * 6 | from shared_adam import SharedAdam 7 | import numpy as np 8 | from normalize_env import NormalizeAction 9 | import torch.multiprocessing as mp 10 | from pdb import set_trace as bp 11 | import datetime 12 | import time 13 | import pickle 14 | 15 | # Parameters 16 | parser = argparse.ArgumentParser(description='async_ddpg') 17 | 18 | #parser.add_argument('--seed', type=int, default=1, help='random seed (default: 1)') 19 | parser.add_argument('--n_workers', type=int, default=2, help='how many training processes to use (default: 2)') 20 | parser.add_argument('--rmsize', default=60000, type=int, help='memory size') 21 | parser.add_argument('--tau', default=0.001, type=float, help='moving average for target network') 22 | parser.add_argument('--ou_theta', default=0.15, type=float, help='noise theta') 23 | parser.add_argument('--ou_sigma', default=0.2, type=float, help='noise sigma') 24 | parser.add_argument('--ou_mu', default=0.0, type=float, help='noise mu') 25 | parser.add_argument('--bsize', default=64, type=int, help='minibatch size') 26 | parser.add_argument('--gamma', default=0.99, type=float, help='discount factor') 27 | parser.add_argument('--env', default='Pendulum-v0', type=str, help='Environment to use') 28 | parser.add_argument('--max_steps', default=500, type=int, help='Maximum steps per episode') 29 | parser.add_argument('--n_eps', default=2000, type=int, help='Maximum number of episodes') 30 | parser.add_argument('--debug', default=True, type=bool, help='Print debug statements') # caveat: argparse's type=bool treats any non-empty string (even "False") as True 31 | parser.add_argument('--warmup', default=10000, type=int, help='time without training but only filling the replay memory') 32 | #parser.add_argument('--num_states', default=4, type=int) 33 | parser.add_argument('--multithread', default=0, type=int, help='To activate multithread') 34 | parser.add_argument('--logfile', default='train_logs', type=str, help='File name for the train log data') 35 | parser.add_argument('--n_steps', default=5, type=int, help='number of steps to rollout') 36 | 37 | args = parser.parse_args() 38 | 39 | env = NormalizeAction(gym.make(args.env)) 40 | discrete = isinstance(env.action_space, gym.spaces.Discrete) 41 | 42 | # Get observation and action space dimensions 43 | obs_dim = env.observation_space.shape[0] 44 | act_dim = env.action_space.n if discrete else env.action_space.shape[0] 45 | 46 | def configure_env_params(): 47 | if args.env == 'Pendulum-v0': 48 | args.v_min = -1000.0 49 | args.v_max = 100 50 | elif args.env == 'InvertedPendulum-v2': 51 | args.v_min = -100 52 | args.v_max = 500 53 | elif args.env == 'HalfCheetah-v1': 54 | args.v_min = -1000 55 | args.v_max = 1000 56 | else: 57 | print("Undefined environment. Configure v_max and v_min for the environment")
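# --- Added editorial note (illustrative; not part of the original file) ---
# args.v_min / args.v_max set above are not consumed anywhere in the code shown
# here; they look like value-distribution bounds kept around for a
# distributional (D4PG-style) variant of this repo. The plain DDPG workers
# below run without them.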
58 | 59 | 60 | class Worker(object): 61 | def __init__(self, name, optimizer_global_actor, optimizer_global_critic): 62 | self.env = NormalizeAction(gym.make(args.env).env) 63 | self.env._max_episode_steps = args.max_steps 64 | self.name = name 65 | 66 | self.ddpg = DDPG(obs_dim=obs_dim, act_dim=act_dim, env=self.env, memory_size=args.rmsize,\ 67 | batch_size=args.bsize, tau=args.tau, gamma = args.gamma, n_steps = args.n_steps) 68 | self.ddpg.assign_global_optimizer(optimizer_global_actor, optimizer_global_critic) 69 | print('Initialized worker :',self.name) 70 | 71 | def warmup(self): 72 | n_steps = 0 73 | self.ddpg.actor.eval() 74 | # for i in range(args.n_eps): 75 | # state = self.env.reset() 76 | # for j in range(args.max_steps): 77 | # 78 | state = self.env.reset() 79 | for n_steps in range(args.warmup): 80 | action = np.random.uniform(-1.0, 1.0, size=act_dim) 81 | next_state, reward, done, _ = self.env.step(action) 82 | self.ddpg.replayBuffer.add(state, action, reward, next_state, done) # fixed: Replay exposes add(), not the keras-rl style append() 83 | 84 | if done: 85 | state = self.env.reset() 86 | else: 87 | state = next_state 88 | 89 | 90 | def work(self, global_ddpg): 91 | avg_reward = 0. 92 | n_steps = 0 93 | #self.warmup() 94 | 95 | self.ddpg.sync_local_global(global_ddpg) 96 | self.ddpg.hard_update() 97 | 98 | # Logging variables 99 | self.start_time = datetime.datetime.utcnow() 100 | self.train_logs = {} 101 | self.train_logs['avg_reward'] = [] 102 | self.train_logs['total_reward'] = [] 103 | self.train_logs['time'] = [] 104 | self.train_logs['x_val'] = [] 105 | self.train_logs['info_summary'] = "DDPG" 106 | self.train_logs['x'] = 'steps' 107 | step_counter = 0 108 | 109 | for i in range(args.n_eps): 110 | state = self.env.reset() 111 | total_reward = 0. 112 | 113 | episode_states = [] 114 | episode_rewards = [] 115 | episode_actions = [] 116 | 117 | for j in range(args.max_steps): 118 | self.ddpg.actor.eval() 119 | 120 | state = state.reshape(1, -1) 121 | noise = self.ddpg.noise.sample() 122 | action = np.clip(to_numpy(self.ddpg.actor(to_tensor(state))).reshape(-1, ) + noise, -1.0, 1.0) 123 | # action = to_numpy(self.ddpg.actor(to_tensor(state))).reshape(-1, ) + noise 124 | next_state, reward, done, _ = self.env.step(action) 125 | total_reward += reward 126 | 127 | #### n-steps buffer 128 | episode_states.append(state) 129 | episode_actions.append(action) 130 | episode_rewards.append(reward) 131 | 132 | if j >= args.n_steps-1: 133 | cum_reward = 0.
134 | exp_gamma = 1 135 | for k in range(-args.n_steps, 0): 136 | cum_reward += exp_gamma * episode_rewards[k] 137 | exp_gamma *= args.gamma 138 | self.ddpg.replayBuffer.add(episode_states[-args.n_steps].reshape(-1), episode_actions[-args.n_steps], cum_reward, next_state, done) 139 | # self.ddpg.replayBuffer.add_experience(state.reshape(-1), action, reward, next_state, done) 140 | #self.ddpg.replayBuffer.append(state.reshape(-1), action, reward, done) 141 | 142 | self.ddpg.actor.train() 143 | self.ddpg.train(global_ddpg) 144 | step_counter += 1 145 | n_steps += 1 146 | 147 | if done: 148 | break 149 | 150 | 151 | state = next_state 152 | # print("Episode ", i, "\t Step count: ", n_steps) 153 | 154 | self.ddpg.noise.reset() 155 | avg_reward = 0.95*avg_reward + 0.05*total_reward 156 | if i%1==0: 157 | print('Episode ',i,'\tWorker :',self.name,'\tAvg Reward :',avg_reward,'\tTotal reward :',total_reward,'\tSteps :',n_steps) 158 | self.train_logs['avg_reward'].append(avg_reward) 159 | self.train_logs['total_reward'].append(total_reward) 160 | self.train_logs['time'].append((datetime.datetime.utcnow()-self.start_time).total_seconds()/60) 161 | self.train_logs['x_val'].append(step_counter) 162 | with open(args.logfile, 'wb') as fHandle: 163 | pickle.dump(self.train_logs, fHandle, protocol=pickle.HIGHEST_PROTOCOL) 164 | with open(args.logfile_latest, 'wb') as fHandle: 165 | pickle.dump(self.train_logs, fHandle, protocol=pickle.HIGHEST_PROTOCOL) 166 | 167 | 168 | if __name__ == '__main__': 169 | configure_env_params() 170 | args.logfile_latest = args.logfile + '_' + args.env + '_latest_DDPG' + '.pkl' 171 | args.logfile = args.logfile + '_' + args.env + '_DDPG_' + time.strftime("%Y%m%d-%H%M%S") + '.pkl' 172 | 173 | 174 | 175 | global_ddpg = DDPG(obs_dim=obs_dim, act_dim=act_dim, env=env, memory_size=args.rmsize,\ 176 | batch_size=args.bsize, tau=args.tau) 177 | optimizer_global_actor = SharedAdam(global_ddpg.actor.parameters(), lr=5e-5) 178 | optimizer_global_critic = SharedAdam(global_ddpg.critic.parameters(), lr=5e-5)#, weight_decay=1e-02) 179 | 180 | # optimizer_global_actor.share_memory() 181 | # optimizer_global_critic.share_memory() 182 | global_ddpg.share_memory() 183 | 184 | if not args.multithread: 185 | worker = Worker(str(1), optimizer_global_actor, optimizer_global_critic) 186 | worker.work(global_ddpg) 187 | else: 188 | processes = [] 189 | for i in range(args.n_workers): 190 | worker = Worker(str(i), optimizer_global_actor, optimizer_global_critic) 191 | p = mp.Process(target=worker.work, args=[global_ddpg]) 192 | p.start() 193 | processes.append(p) 194 | 195 | for p in processes: 196 | p.join() -------------------------------------------------------------------------------- /replay_memory.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from collections import deque, namedtuple 3 | import warnings 4 | import random 5 | import numpy as np 6 | 7 | # [reference] https://github.com/matthiasplappert/keras-rl/blob/master/rl/memory.py 8 | 9 | # This is to be understood as a transition: Given `state0`, performing `action` 10 | # yields `reward` and results in `state1`, which might be `terminal`. 11 | Experience = namedtuple('Experience', 'state0, action, reward, state1, terminal1') 12 | 13 | 14 | def sample_batch_indexes(low, high, size): 15 | if high - low >= size: 16 | # We have enough data. Draw without replacement, that is each index is unique in the 17 | # batch. 
We cannot use `np.random.choice` here because it is horribly inefficient as 18 | # the memory grows. See https://github.com/numpy/numpy/issues/2764 for a discussion. 19 | # `random.sample` does the same thing (drawing without replacement) and is way faster. 20 | try: 21 | r = xrange(low, high) 22 | except NameError: 23 | r = range(low, high) 24 | batch_idxs = random.sample(r, size) 25 | else: 26 | # Not enough data. Help ourselves with sampling from the range, but the same index 27 | # can occur multiple times. This is not good and should be avoided by picking a 28 | # large enough warm-up phase. 29 | warnings.warn( 30 | 'Not enough entries to sample without replacement. Consider increasing your warm-up phase to avoid oversampling!') 31 | batch_idxs = np.random.randint(low, high, size=size) # was np.random.random_integers(low, high - 1), removed in NumPy 1.24; randint's exclusive upper bound is equivalent 32 | assert len(batch_idxs) == size 33 | return batch_idxs 34 | 35 | 36 | class RingBuffer(object): 37 | def __init__(self, maxlen): 38 | self.maxlen = maxlen 39 | self.start = 0 40 | self.length = 0 41 | self.data = [None for _ in range(maxlen)] 42 | 43 | def __len__(self): 44 | return self.length 45 | 46 | def __getitem__(self, idx): 47 | if idx < 0 or idx >= self.length: 48 | raise KeyError() 49 | return self.data[(self.start + idx) % self.maxlen] 50 | 51 | def append(self, v): 52 | if self.length < self.maxlen: 53 | # We have space, simply increase the length. 54 | self.length += 1 55 | elif self.length == self.maxlen: 56 | # No space, "remove" the first item. 57 | self.start = (self.start + 1) % self.maxlen 58 | else: 59 | # This should never happen. 60 | raise RuntimeError() 61 | self.data[(self.start + self.length - 1) % self.maxlen] = v 62 | 63 | 64 | def zeroed_observation(observation): 65 | if hasattr(observation, 'shape'): 66 | return np.zeros(observation.shape) 67 | elif hasattr(observation, '__iter__'): 68 | out = [] 69 | for x in observation: 70 | out.append(zeroed_observation(x)) 71 | return out 72 | else: 73 | return 0. 74 | 75 | 76 | class Memory(object): 77 | def __init__(self, window_length, ignore_episode_boundaries=False): 78 | self.window_length = window_length 79 | self.ignore_episode_boundaries = ignore_episode_boundaries 80 | 81 | self.recent_observations = deque(maxlen=window_length) 82 | self.recent_terminals = deque(maxlen=window_length) 83 | 84 | def sample(self, batch_size, batch_idxs=None): 85 | raise NotImplementedError() 86 | 87 | def append(self, observation, action, reward, terminal, training=True): 88 | self.recent_observations.append(observation) 89 | self.recent_terminals.append(terminal) 90 | 91 | def get_recent_state(self, current_observation): 92 | # This code is slightly complicated by the fact that subsequent observations might be 93 | # from different episodes. We ensure that an experience never spans multiple episodes. 94 | # This is probably not that important in practice but it seems cleaner. 95 | state = [current_observation] 96 | idx = len(self.recent_observations) - 1 97 | for offset in range(0, self.window_length - 1): 98 | current_idx = idx - offset 99 | current_terminal = self.recent_terminals[current_idx - 1] if current_idx - 1 >= 0 else False 100 | if current_idx < 0 or (not self.ignore_episode_boundaries and current_terminal): 101 | # The previously handled observation was terminal, don't add the current one. 102 | # Otherwise we would leak into a different episode. 
103 | break 104 | state.insert(0, self.recent_observations[current_idx]) 105 | while len(state) < self.window_length: 106 | state.insert(0, zeroed_observation(state[0])) 107 | return state 108 | 109 | def get_config(self): 110 | config = { 111 | 'window_length': self.window_length, 112 | 'ignore_episode_boundaries': self.ignore_episode_boundaries, 113 | } 114 | return config 115 | 116 | 117 | class SequentialMemory(Memory): 118 | def __init__(self, limit, **kwargs): 119 | super(SequentialMemory, self).__init__(**kwargs) 120 | 121 | self.limit = limit 122 | 123 | # Do not use deque to implement the memory. This data structure may seem convenient but 124 | # it is way too slow on random access. Instead, we use our own ring buffer implementation. 125 | self.actions = RingBuffer(limit) 126 | self.rewards = RingBuffer(limit) 127 | self.terminals = RingBuffer(limit) 128 | self.observations = RingBuffer(limit) 129 | 130 | def sample(self, batch_size, batch_idxs=None): 131 | if batch_idxs is None: 132 | # Draw random indexes such that we have at least a single entry before each 133 | # index. 134 | batch_idxs = sample_batch_indexes(0, self.nb_entries - 1, size=batch_size) 135 | batch_idxs = np.array(batch_idxs) + 1 136 | assert np.min(batch_idxs) >= 1 137 | assert np.max(batch_idxs) < self.nb_entries 138 | assert len(batch_idxs) == batch_size 139 | 140 | # Create experiences 141 | experiences = [] 142 | for idx in batch_idxs: 143 | terminal0 = self.terminals[idx - 2] if idx >= 2 else False 144 | while terminal0: 145 | # Skip this transition because the environment was reset here. Select a new, random 146 | # transition and use this instead. This may cause the batch to contain the same 147 | # transition twice. 148 | idx = sample_batch_indexes(1, self.nb_entries, size=1)[0] 149 | terminal0 = self.terminals[idx - 2] if idx >= 2 else False 150 | assert 1 <= idx < self.nb_entries 151 | 152 | # This code is slightly complicated by the fact that subsequent observations might be 153 | # from different episodes. We ensure that an experience never spans multiple episodes. 154 | # This is probably not that important in practice but it seems cleaner. 155 | state0 = [self.observations[idx - 1]] 156 | for offset in range(0, self.window_length - 1): 157 | current_idx = idx - 2 - offset 158 | current_terminal = self.terminals[current_idx - 1] if current_idx - 1 > 0 else False 159 | if current_idx < 0 or (not self.ignore_episode_boundaries and current_terminal): 160 | # The previously handled observation was terminal, don't add the current one. 161 | # Otherwise we would leak into a different episode. 162 | break 163 | state0.insert(0, self.observations[current_idx]) 164 | while len(state0) < self.window_length: 165 | state0.insert(0, zeroed_observation(state0[0])) 166 | action = self.actions[idx - 1] 167 | reward = self.rewards[idx - 1] 168 | terminal1 = self.terminals[idx - 1] 169 | 170 | # Okay, now we need to create the follow-up state. This is state0 shifted one timestep 171 | # to the right. Again, we need to be careful to not include an observation from the next 172 | # episode if the last state is terminal. 173 | state1 = [np.copy(x) for x in state0[1:]] 174 | state1.append(self.observations[idx]) 175 | 176 | assert len(state0) == self.window_length 177 | assert len(state1) == len(state0) 178 | experiences.append(Experience(state0=state0, action=action, reward=reward, 179 | state1=state1, terminal1=terminal1)) 180 | assert len(experiences) == batch_size 181 | return experiences 182 | 183 | def sample_and_split(self, batch_size, batch_idxs=None): 184 | experiences = self.sample(batch_size, batch_idxs) 185 | 186 | state0_batch = [] 187 | reward_batch = [] 188 | action_batch = [] 189 | terminal1_batch = [] 190 | state1_batch = [] 191 | for e in experiences: 192 | state0_batch.append(e.state0) 193 | state1_batch.append(e.state1) 194 | reward_batch.append(e.reward) 195 | action_batch.append(e.action) 196 | terminal1_batch.append(0. if e.terminal1 else 1.) 197 | 198 | # Prepare and validate parameters. 199 | state0_batch = np.array(state0_batch).reshape(batch_size, -1) 200 | state1_batch = np.array(state1_batch).reshape(batch_size, -1) 201 | terminal1_batch = np.array(terminal1_batch).reshape(batch_size, -1) 202 | reward_batch = np.array(reward_batch).reshape(batch_size, -1) 203 | action_batch = np.array(action_batch).reshape(batch_size, -1) 204 | 205 | return state0_batch, action_batch, reward_batch, state1_batch, terminal1_batch 206 | 207 | def append(self, observation, action, reward, terminal, training=True): 208 | super(SequentialMemory, self).append(observation, action, reward, terminal, training=training) 209 | 210 | # This needs to be understood as follows: in `observation`, take `action`, obtain `reward` 211 | # and whether the next state is `terminal` or not. 212 | if training: 213 | self.observations.append(observation) 214 | self.actions.append(action) 215 | self.rewards.append(reward) 216 | self.terminals.append(terminal) 217 | 218 | @property 219 | def nb_entries(self): 220 | return len(self.observations) 221 | 222 | def get_config(self): 223 | config = super(SequentialMemory, self).get_config() 224 | config['limit'] = self.limit 225 | return config 226 | 227 | 228 | class EpisodeParameterMemory(Memory): 229 | def __init__(self, limit, **kwargs): 230 | super(EpisodeParameterMemory, self).__init__(**kwargs) 231 | self.limit = limit 232 | 233 | self.params = RingBuffer(limit) 234 | self.intermediate_rewards = [] 235 | self.total_rewards = RingBuffer(limit) 236 | 237 | def sample(self, batch_size, batch_idxs=None): 238 | if batch_idxs is None: 239 | batch_idxs = sample_batch_indexes(0, self.nb_entries, size=batch_size) 240 | assert len(batch_idxs) == batch_size 241 | 242 | batch_params = [] 243 | batch_total_rewards = [] 244 | for idx in batch_idxs: 245 | batch_params.append(self.params[idx]) 246 | batch_total_rewards.append(self.total_rewards[idx]) 247 | return batch_params, batch_total_rewards 248 | 249 | def append(self, observation, action, reward, terminal, training=True): 250 | super(EpisodeParameterMemory, self).append(observation, action, reward, terminal, training=training) 251 | if training: 252 | self.intermediate_rewards.append(reward) 253 | 254 | def finalize_episode(self, params): 255 | total_reward = sum(self.intermediate_rewards) 256 | self.total_rewards.append(total_reward) 257 | self.params.append(params) 258 | self.intermediate_rewards = [] 259 | 260 | @property 261 | def nb_entries(self): 262 | return len(self.total_rewards) 263 | 264 | def get_config(self): 265 | config = super(EpisodeParameterMemory, self).get_config() # fixed: was super(SequentialMemory, self), which breaks get_config for this class 266 | config['limit'] = self.limit 267 | return config
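# --- Added editorial note (illustrative; not part of the original file) ---
# The keras-rl style memories above are kept for reference; ddpg.py actually
# uses the Replay class below. A minimal sketch of its contract, assuming a
# continuous-control gym env:
#   buf = Replay(max_size=50000, env=env)             # pre-fills 1000 transitions
#   buf.add(state, action, reward, next_state, done)  # store one transition
#   s, a, r, s2, t = buf.sample(64)                   # numpy arrays of shape (64, -1)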
268 | 269 | 270 | 271 | 272 | 273 | 274 | 275 | 276 | 277 | 278 | class Replay(object): 279 | def __init__(self, max_size, env, n_steps=1, gamma=0.99): 280 | self.buffer = [] 281 | self.capacity = max_size 282 | self.position = 0 283 | self.env = env 284 | self.n_steps = n_steps 285 | self.gamma = gamma 286 | self.initialize(init_length=1000) 287 | 288 | def add(self, state, action, reward, next_state, done): 289 | if len(self.buffer) < self.capacity: 290 | self.buffer.append(None) 291 | self.buffer[self.position] = (np.asarray(state), action, reward,\ 292 | np.asarray(next_state), done) 293 | self.position = (self.position+1)%self.capacity 294 | 295 | def initialize(self, init_length): 296 | state = self.env.reset() 297 | step_counter = 0 298 | episode_states = [] 299 | episode_rewards = [] 300 | episode_actions = [] 301 | 302 | 303 | while True: 304 | action = np.random.uniform(-1.0, 1.0, size=self.env.action_space.shape) 305 | next_state, reward, done, _ = self.env.step(action) 306 | 307 | #### n-steps buffer 308 | episode_states.append(state) 309 | episode_actions.append(action) 310 | episode_rewards.append(reward) 311 | 312 | if step_counter >= self.n_steps - 1: 313 | cum_reward = 0. 314 | exp_gamma = 1 315 | for k in range(-self.n_steps, 0): 316 | cum_reward += exp_gamma * episode_rewards[k] 317 | exp_gamma *= self.gamma 318 | self.add(episode_states[-self.n_steps].reshape(-1), episode_actions[-self.n_steps], cum_reward, 319 | next_state, done) 320 | # self.add(state, action, reward, next_state, done) 321 | 322 | if len(self.buffer) >= init_length: 323 | break 324 | if done: 325 | state = self.env.reset() 326 | step_counter = 0 327 | episode_states = [] 328 | episode_rewards = [] 329 | episode_actions = [] 330 | else: 331 | state = next_state 332 | step_counter += 1 333 | 334 | 335 | def sample(self, batch_size): 336 | states = [] 337 | actions = [] 338 | rewards = [] 339 | next_states = [] 340 | terminates = [] 341 | samples = random.sample(self.buffer, batch_size) 342 | for state, action, reward, next_state, done in samples: 343 | states.append(state) 344 | actions.append(action) 345 | rewards.append(reward) 346 | next_states.append(next_state) 347 | terminates.append(done) 348 | 349 | states = np.array(states, dtype=np.float64).reshape(batch_size,-1) # np.float64: the bare np.float alias was removed in NumPy 1.24 350 | actions = np.array(actions, dtype=np.float64).reshape(batch_size,-1) 351 | rewards = np.array(rewards, dtype=np.float64).reshape(batch_size,-1) 352 | next_states = np.array(next_states, dtype=np.float64).reshape(batch_size,-1) 353 | terminates = np.array(terminates, dtype=np.float64).reshape(batch_size,-1) 354 | return states, actions, rewards, next_states, terminates 355 | 356 | 357 | 358 | 359 | 360 | # 361 | # 362 | # class Replay(object): 363 | # def __init__(self, max_size, env): 364 | # self.buffer = [] 365 | # self.capacity = max_size 366 | # self.position = 0 367 | # self.env = env 368 | # self.initialize(init_length=1000) 369 | # 370 | # def add_experience(self, state, action, reward, next_state, done): 371 | # if len(self.buffer) < self.capacity: 372 | # self.buffer.append(None) 373 | # self.buffer[self.position] = (np.asarray(state), action, reward,\ 374 | # np.asarray(next_state), done) 375 | # self.position = (self.position+1)%self.capacity 376 | # 377 | # def initialize(self, init_length): 378 | # state = self.env.reset() 379 | # while True: 380 | # action = np.random.uniform(-1.0, 1.0, size=self.env.action_space.shape) 381 | # next_state, reward, done, _ = self.env.step(action) 382 | # self.add_experience(state, 
action, reward, next_state, done) 383 | # if len(self.buffer) >= init_length: 384 | # break 385 | # if done: 386 | # state = self.env.reset() 387 | # else: 388 | # state = next_state 389 | # 390 | # def sample(self, batch_size): 391 | # states = [] 392 | # actions = [] 393 | # rewards = [] 394 | # next_states = [] 395 | # terminates = [] 396 | # samples = random.sample(self.buffer, batch_size) 397 | # for state, action, reward, next_state, done in samples: 398 | # states.append(state) 399 | # actions.append(action) 400 | # rewards.append(reward) 401 | # next_states.append(next_state) 402 | # terminates.append(done) 403 | # 404 | # states = np.array(states, dtype=np.float).reshape(batch_size,-1) 405 | # actions = np.array(actions, dtype=np.float).reshape(batch_size,-1) 406 | # rewards = np.array(rewards, dtype=np.float).reshape(batch_size,-1) 407 | # next_states = np.array(next_states, dtype=np.float).reshape(batch_size,-1) 408 | # terminates = np.array(terminates, dtype=np.float).reshape(batch_size,-1) 409 | # return states, actions, rewards, next_states, terminates 410 | -------------------------------------------------------------------------------- /plotUtil.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pickle\n", 10 | "import matplotlib.pyplot as plt\n", 11 | "import numpy as np" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 15, 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "name": "stdout", 21 | "output_type": "stream", 22 | "text": [ 23 | "/home/aditya/ADITYA/Project/d3pg-pytorch\r\n" 24 | ] 25 | } 26 | ], 27 | "source": [ 28 | "! 
pwd" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 4, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "def numpy_ewma_vectorized_v2(data, window=20):\n", 38 | "\n", 39 | " alpha = 2 /(window + 1.0)\n", 40 | " alpha_rev = 1-alpha\n", 41 | " n = data.shape[0]\n", 42 | "\n", 43 | " pows = alpha_rev**(np.arange(n+1))\n", 44 | "\n", 45 | " scale_arr = 1/pows[:-1]\n", 46 | " offset = data[0]*pows[1:]\n", 47 | " pw0 = alpha*alpha_rev**(n-1)\n", 48 | "\n", 49 | " mult = data*pw0*scale_arr\n", 50 | " cumsums = mult.cumsum()\n", 51 | " out = offset + cumsums*scale_arr[::-1]\n", 52 | " return out\n", 53 | "\n", 54 | "def plot_logs(train_logs, time=True):\n", 55 | " avg_reward = numpy_ewma_vectorized_v2(np.array(train_logs['total_reward']))\n", 56 | " plt.plot(train_logs['time'],avg_reward , label = train_logs['info_summary'])\n", 57 | " plt.xlabel('time(in minutes)')\n", 58 | " plt.ylabel('avg_reward')\n", 59 | " plt.title('Avg reward vs Time')\n", 60 | " plt.legend()\n", 61 | " plt.show() \n", 62 | "\n", 63 | " \n", 64 | " plt.plot(avg_reward, label = train_logs['info_summary'])\n", 65 | " plt.xlabel(train_logs['x'])\n", 66 | " plt.ylabel('avg_reward')\n", 67 | " plt.title('Avg reward vs ' + train_logs['x'])\n", 68 | " plt.legend()\n", 69 | " plt.show() \n", 70 | " " 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 13, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZgAAAEWCAYAAABbgYH9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzt3XdcVff5wPHPwxJRhiKIgogDNU40\nuJI4khijGTWjaWxmM5pVO5KmbfLr79espm2atmnTjNambUxSm6QZ1UyjSdQs90ZUcAKKgICAyLo8\nvz/uwaCCAnK5F+7zfr3uy3u+59xznoN6H77jfL+iqhhjjDGtLcDbARhjjOmYLMEYY4zxCEswxhhj\nPMISjDHGGI+wBGOMMcYjLMEYY4zxCEswxrQzIqIiMtCL1y8Tkf7eur5pPyzBmHZHRJaKSJGIdPJ2\nLB2NiPyPk0DKRKRCRFz1ttMAVLWrqu7ydqzG91mCMe2KiCQBkwAFvtFG1wxqi+v4wnVV9VdOAukK\n3AV8VbetqsPaOh7TvlmCMe3NTcAK4EXg5rpCERkvIrkiEliv7EoR2eS87ywi85yaT7qI/FREshu7\niNMM9T0RyQAynLIhIrJYRApFZLuIfMsp7ycixSIS4Gz/TUTy6p3rZRH5kfP+Fuf6pSKyS0TurHfc\nVBHJFpGfiUgu8E+n/CcickBE9ovIraeI+VoRWXNC2b0istB5f4mIbHWunSMi95/uh32Kn81A5/2L\nIvKciHzg1HK+EJE4Efmj87PeJiKj6322t4i8KSL5IrJbRH7QkhhM+2AJxrQ3NwH/cl4Xi0hPAFVd\nCRwBLqh37HXAfOf9Q0AS0B+4CLihCde6AhgPDBWRLsBi53yxwGzgOREZqqq7gRKg7ot0MlAmImc5\n21OAZc77POAyIAK4BXhKRMbUu2Yc0B3oC9whIjOA+52Yk4Fpp4j3HWCwiCQ38jP4O3CnqoYDw4FP\nmvAzaIpvAf8L9AAqga+Adc72G8AfAJwE/A6wEYgHLgR+JCIXt1IcxsdYgjHthoich/uL93VVXQvs\nxP0FWuffwLedY8OBS5wycH8J/kpVi1Q1G3i6CZf8taoWqupR3Elhj6r+U1VrVHU98CZwjXPsMmCK\niMQ522842/1wJ5ONAKr6nqruVLdlwEe4m/zq1AIPqWqlc91vAf9U1S2qegR4uLFgVbUcWFDvZ5AM\nDAEWOodU406WEc7PYV0TfgZN8baqrlXVCuBtoEJVX1JVF/AaXyfesUCMqj6qqlVOP87fcCdr0wFZ\ngjHtyc3AR6pa4GzPp14zmbN9ldP5fxWwTlX3Ovt6A1n1jq3/vjH1j+kLjHeawopFpBi4HneNA9wJ\nZiru2styYCnumssU4DNVrQUQkZkissJpZivGnQR71LtOvvNFXefEuPdyavNxEgzu5PtfJ/EAXO1c\nb6+ILBORiac5V1MdrPf+aAPbXZ33fYHeJ/wM/wfo2UpxGB/jlc5LY5pLRDrj/m0+0OmfAOgERInI\nKFXdqKpbRWQvMJPjm4YADgAJwFZnu08TLlt/qvEsYJmqXtTIscuAJ4Fs5/3nwF+ACmcbJ/G9ibuZ\nb4GqVovIfwFp5Jp1cdePNfE0MS8GYkQkBXeiuffYiVVXA7NEJBiYA7xO034OrSUL2K2qyac90nQI\nVoMx7cUVgAsYCqQ4r7OAz3B/YdeZD/wQd03iP/XKXwceFJFuIhKP+wu2Od4FBonIjSIS7LzG1vWz\nqGoG7t/Wb8CdiEpw/yZ/NV/3v4TgTor5QI2IzASmn+a6rwPfEZGhIhKGuy+pUapajfu+n8Tdl7MY\nQERCROR6EYl0jinB3RzXllYBpc4ghs4iEigiw0VkbBvHYdqIJRjTXtyMuy9in6rm1r2AZ4Dr6w3p\n/TfuZqlP6jWlATyKu3axG1iCu4+ksqkXV9VS3MlgNrAfyAWewJ0w6iwDDqlqVr1twd3hXXeOH+BO\nGkW4a1kLOQVV/QD4I+4O+Uya1jE/H/dggP+oak298huBPSJSgnsI
8vVNOFercfpkLsP9y8FuoAB4\nAYhsyzhM2xFbcMz4IxG5G5itqlO8HYsxHZXVYIxfEJFeInKuiASIyGDgx7hHPBljPMQ6+Y2/CAH+\nCvQDioFXgee8GpExHZw1kRljjPEIayIzxhjjEX7dRNajRw9NSkrydhjGGNOurF27tkBVY053nF8n\nmKSkJNasWXP6A40xxhzjPNB8WtZEZowxxiMswRhjjPEISzDGGGM8wq/7YBpSXV1NdnY2FRUVpz+4\nAwoNDSUhIYHg4GBvh2KMaecswZwgOzub8PBwkpKSEJHTf6ADUVUOHTpEdnY2/fr183Y4xph2zprI\nTlBRUUF0dLTfJRcAESE6Otpva2/GmNZlCaYB/phc6vjzvRtjWpclGGOM8QM7Dpby+posql1ttwyQ\nJRgfFBgYSEpKCsOGDWPUqFH8/ve/p7bW/Y9i6dKlREZGMnr0aAYPHszkyZN59913j3324YcfJj4+\nnpSUFIYPH87ChV8vN/LKK68wcuTIY+e9/fbbKS4ubvP7M8a0rbV7i7j6+S/56RubuPzPn7N2b2Gb\nXNc6+X1Q586d2bBhAwB5eXlcd911lJSU8MgjjwAwadKkY0llw4YNXHHFFXTu3JkLL7wQgHvvvZf7\n77+f9PR0Jk2aRF5eHh999BFPPfUUH3zwAfHx8bhcLubNm8fBgweJioryzo0a40EFZZVszjlM/x5d\n6BvdxdvhtLkaVy3bcktZvaeQJxdtJza8E/936VD+uGQH1/zlK5befz6J0WEejcESjI+LjY1l7ty5\njB07locffvik/SkpKfziF7/gmWeeOZZg6px11lkEBQVRUFDA448/zu9+9zvi4+MBdy3p1ltvbYtb\nMKZNVFS7eGXFXtbtK2Jj1mFyio8CEBIUwP9eehY3TujbofsYa2uV7QdL+SKzgK92HmLl7kLKKt0L\nmg7tFcGLt4wlNiKUS0f2Yun2fI8nF7AEc0qPvJPG1v0lrXrOob0jeOjyYc36TP/+/XG5XOTl5TW4\nf8yYMTz55JMnla9cuZKAgABiYmJIS0tjzJgxLYrZGF/nqlV+8O/1fLT1IIndwxidGMV3zknirF4R\nvPD5Ln6xII0Pt+Qy54KBTOx//CjR9AMlZOaVceFZsYSFBFF0pIp3N+3ngy259I/pwl1TBpDQrXW/\njGtrlY3ZxXy6LY+Pt+VxqKyKW85N4qaJSXQOCWzyeQ6XV7MoLZflGfl8tfMQh45UAZAUHcY3Unoz\nvl93xiR2I6Fb52P33KVTEJeO7NWq99MYSzAdwIlr+jz11FO88sorhIeH89prr530W9vmzZu58cYb\nKS0t5Ve/+hXXXnttW4ZrzBmpcdVSU6uEBru/iFWVx97dykdbD/LQ5UO55dzjn+E6Z0A0L6/Yy58/\nyeS6v60kpU8Ut0/qR1bhURZsyGFbbikAUWHBjEyI4qudBVS7lP4xXVi9p5BXV2XxwMwh3D6p/2nj\nUiA4sOGu7dpaZeXuQt7ZtJ+P0nIpKKsiQCC1b3eiYoP59Qfb+MuynVw1JoGBsV15f/MB9hWWMysl\nnhvGJxIbEQrA0SoXS9IPsmDDfpbtyKPapcSGd2LyoBjOGRDNOQN7EB/V+Qx/yq3DJxOMiDwJXA5U\nATuBW1S12Nn3IHAb4AJ+oKqLnPIZwJ+AQOAFVf3NmcbR3JqGp+zatYvAwEBiY2NJT08/af/69es5\n66yzjm3X9cHUN2zYMNatW8f555/PiBEj2LBhA3PmzOHo0aMej9+Y1rB1fwmvr8ni3U37Kamo4ZLh\ncSRGd+GDzQfIyCvj9vP6nZRcAAIChJvPSeLasX34z9ps5i7fyZz56wEYkxjFo7OG0b9HV178cg87\nDpZy88QkrhqTwNDeEewvPspDC9P45XvpRIQGM2t0b5ZszSO6awgT+kcDcODwUf61Yh//XrWPKlct\nV6TEc+3YPgyPjwQg42Apr652x32wpJLOwYFccFYs04f2ZMqgGKLCQgBYvaeQf36xm3lf7qGmVkns\nHkZi9zD+/EkGz32aycwRvYjuEsKb67IpraihZ0Qnbp6YxDdSejMiPtInm/98MsEAi4EHVbVGRJ4A\nHgR+JiJDgdnAMKA3sEREBjmfeRa4CMgGVovIQlXd6oXYW1V+fj533XUXc+bMafAf0KZNm3jsscd4\n4YUXTnmeBx98kPvvv58FCxaQkJAAYMnF+DxVZcWuQp5ftpPlO/IJCQrgwiGxdO8SwsKN+ymrrGFs\nUncev3I43x6beMpzhQYHcuOEvnx7bB++3HmIpOgux/VDnJfc46TP9I7qzLPXjeG2eat54K1NPPHh\ntmPNUOcOjCayczCL0g5Sq8qFQ3oSHhrE62uyeHnFXobHR9C9Syd33IEBTBkcw+WjejPNaYo70dik\n7oxN6k5BWSUFZZUM7hmOiLCn4Agvr9jL62uyqKh2MXN4L2aP68P4ftEEBvheUqnPJxOMqn5Ub3MF\n8E3n/SzgVVWtBHaLSCYwztmXqaq7AETkVefYdplgjh49SkpKCtXV1QQFBXHjjTdy3333Hdv/2Wef\nMXr0aMrLy4mNjeXpp58+qYP/RJdccgn5+fnMnDkTl8tFVFQUw4cP5+KLL/b07RhzTGlFNfuLKxgc\nF37aY9ftK+Kxd7eyfl8xPbqG8NMZg7l+XF8iw9zz5P3fZUMpr3LRvUtIs2IICgxg8qDTrpV1TEhQ\nAH+54WzmzF9HYEAAN07sS2ZeGc9+momrVrn9vH7cMKEvfbq7k9XDlw9jwcYcXl2VxY7cUn580SCu\nn9C3yXH26NqJHl07HdtO6tGF/7tsKD+5eDBVrloiQtvPPIFyYvu9rxGRd4DXVPUVEXkGWKGqrzj7\n/g584Bw6Q1Vvd8pvBMar6pwGzncHcAdAYmLi2Xv3Hr9uTnp6+nHNTf7IfgbGEyqqXXzrr1+xJecw\nv75qBNeOTaTaVcvnmQUM7hlO73r9BnsPHeHyP39Ol05B3DN1ANek9jnW5+Irql21qLoTkL8RkbWq\nmnq647xWgxGRJUBcA7t+rqoLnGN+DtQA/2qt66rqXGAuQGpqqm9nV2M6CFXlZ29uYnPOYYb3juRn\nb25mY/ZhPs8oYF9hOZ2CArjtvH7cPqk/ocEB3PnyWgIChNfvnHisZuBrGuvMN1/zWoJR1Wmn2i8i\n3wEuAy7Ur6tZOUCfeoclOGWcotwY4wUlFdVc/7eVKEqvyM4s3nqQn1w8mO9O6s+9r29g/sp9DI+P\n4E+zU/h0Wx7PLd3JX5btpFdkZw4cPsqLt4zz2eRimsYn+2CcEWE/Baaoanm9XQuB+SLyB9yd/MnA\nKkCAZBHphzuxzAaua+n1VdUnR2S0BV9vMjXtg6ryszc2sfVACWcndmPZjnyuSOnNPVMHICI8PXs0\n379g4LGO7Fkp8dw1dQDvbjzAp9vzuH1Sv2b1kxjf5JMJBngG6AQsdr7oV6jqXaqaJiKv4+68rwG+\np6ouABGZAyzCPUz5H6q
a1pILh4aGcujQIb+csr9uPZjQ0FBvh2LauXlf7uGDLbk8OHMId04ZQI2r\nlsAAOfZ/KjBAGBIXcdxnhsRFMCQugvsvHuyNkI0H+HwnvyelpqbqmjVrjiuzFS1tRUtzZpZuz+O7\nL61hcnIMf7splQAfH0prms/nO/l9VXBwsK3maEwLzftyD4+8k8bguAh+/61Rllz8nCUYY8wZU1We\nWryDpz/JZNpZPfnT7BS6dLKvF39n/wKMMWekotrF0x9n8NzSnVyb2odfXTXC558wN23DEowxpskq\na1zsyC0jbf9hth4oYXPOYbbkHKbapcwe24dfXTnCmsXMMZZgjDGNqnHVEuQ8UPjyir089s5Wqpwl\nd7t2CmJorwhuPa8fE/pFM2VQjCUXcxxLMMaYBq3YdYhbX1zNpSN6ce7AHvxiwRbOG9iD2WMTGdY7\ngsTuYZZQzClZgjHGnCQzr5Q7XlpDl05BvLEum/+szSalTxRzb0xt1oJYxr9ZgjHGHJNVWM7Cjft5\n6as9hAQF8Nbd55BVVM6ba3N48JIhllxMs1iCMcYAkF1UzvSnlnO02sXZfbvxyDeG0ad7GH26h3HO\ngJPXSjHmdCzBGGMAeHVVFpU1Lhb9aHKT1msx5nRsvmljDNWuWl5dncX5g2MtuZhWYwnGGMPirQcp\nKKvk+gmnXnbYmOawBGOMn1NV5n25h/iozkwZFOvtcEwHYgnGGD/34ZZcVu4u5Lbz+tkUL6ZVWYIx\nxo+VVlTz8DtpDO0VwU0T+3o7HNPB2CgyY/zY7z/aQV5pJXNvTD02JYwxrcX+RRnjpzIOlvLyir1c\nPz6RUX2ivB2O6YAswRjjp375XjphIYHcd5EtUWw8w6cTjIj8WERURHo42yIiT4tIpohsEpEx9Y69\nWUQynNfN3ovaGN/3weYDLNuRzw8vTKZ7lxBvh2M6KJ/tgxGRPsB0YF+94plAsvMaDzwPjBeR7sBD\nQCqgwFoRWaiqRW0btTG+Zev+EuZ9uYf1WUVUu5RzBkSzr7CczzIKGBDThZsmJnk7RNOB+WyCAZ4C\nfgosqFc2C3hJVRVYISJRItILmAosVtVCABFZDMwA/t22IRvjOyprXNzx8hoOl1eTmtSNwADhv+tz\nCOsUxM9mDOH6CYmEBPl0I4Zp53wywYjILCBHVTeKHDcuPx7Iqred7ZQ1Vt7Que8A7gBITLSnlk3H\n9fJXe8kuOsort43nvGT3ZJU1rlpExJ53MW3CawlGRJYAcQ3s+jnwP7ibx1qdqs4F5gKkpqaqJ65h\njLcdLq/mz59kMnlQzLHkAthQZNOmvJZgVHVaQ+UiMgLoB9TVXhKAdSIyDsgB+tQ7PMEpy8HdTFa/\nfGmrB21MO/HcskxKKqp5YMYQb4di/JjP/TqjqptVNVZVk1Q1CXdz1xhVzQUWAjc5o8kmAIdV9QCw\nCJguIt1EpBvu2s8ib92DMd5UXF7Fy1/t5fKRvRnaO8Lb4Rg/5pN9MKfwPnAJkAmUA7cAqGqhiDwG\nrHaOe7Suw98YfzPvy72UV7m45/wB3g7F+DmfTzBOLabuvQLfa+S4fwD/aKOwjPFJ5VU1vPjlbi4c\nEsuQOKu9GO/yuSYyY0zLvboqi6Lyau6earUX432WYIzpIArKKvnTxxmcMyCa1KTu3g7HGEswxnQU\nj727laNVLh6dNdzboRgDWIIxpkP4PKOABRv2c/fUAQyM7ertcIwBLMEY0yH8ZdlOekeG2sgx41Ms\nwRjTzu0uOMLnmQVcNz6RTkGB3g7HmGMswRjTzs1fuZegAOFbqX1Of7AxbcgSjDHtWEW1i/+szWb6\nsJ7ERoR6OxxjjmMJxph27L1NBygur+aG8X29HYoxJ7EEY0w7VVVTy58+zmBIXDgTB0R7OxxjTmIJ\nxph2av7KvewrLOdnM4dwwrpJxvgESzDGtEOlFdU8/UkmE/tHM3VQjLfDMaZBlmCMaWdUlUfe2Urh\nkSoevMRqL8Z3WYIxpp356/JdvLE2mx9emMzIhChvh2NMoyzBGNOOfJ5RwBMfbuOykb340bRkb4dj\nzClZgjGmnaitVX79QTp9uoXxu2tGWdOY8XmWYIxpJxal5ZK2v4QfXphMaLBNCWN8nyUYY9oBV63y\n1JIdDIjpwhWj470djjFN4rMJRkS+LyLbRCRNRH5br/xBEckUke0icnG98hlOWaaIPOCdqI3xjA+3\n5LLjYBk/mjaIwABrGjPtQ5C3A2iIiJwPzAJGqWqliMQ65UOB2cAwoDewREQGOR97FrgIyAZWi8hC\nVd3a9tEb0/oWbswhLiKUS0f08nYoxjSZr9Zg7gZ+o6qVAKqa55TPAl5V1UpV3Q1kAuOcV6aq7lLV\nKuBV51hj2r2KahfLdxQwbWgsAVZ7Me2IryaYQcAkEVkpIstEZKxTHg9k1Tsu2ylrrNyYdu+LzAKO\nVru4aGict0Mxplm81kQmIkuAhv7H/Bx3XN2BCcBY4HUR6d9K170DuAMgMTGxNU5pjEct3nqQrp2C\nmNC/u7dDMaZZvJZgVHVaY/tE5G7gLVVVYJWI1AI9gByg/qpKCU4Zpyg/8bpzgbkAqamp2uIbMKYN\n1NYqS9LzmDI4xlarNO2OrzaR/Rc4H8DpxA8BCoCFwGwR6SQi/YBkYBWwGkgWkX4iEoJ7IMBCr0Ru\nTCvakF1MQVklF53V09uhGNNsPjmKDPgH8A8R2QJUATc7tZk0EXkd2ArUAN9TVReAiMwBFgGBwD9U\nNc07oRvTej5JzyMwQDh/cKy3QzGm2XwywTgjwW5oZN/jwOMNlL8PvO/h0IxpU8sz8knpE0VkWLC3\nQzGm2Xy1icwYv1d4pIrNOYeZnGzrvZj2yRKMMT7qs4x8VGHKYEswpn2yBGOMj1q+o4CosGBGxEd6\nOxRjWsQSjDE+SFX5LCOf8wb2sLnHTLtlCcYYH7Qtt5S80komD7LmMdN+nXYUmYjcd6r9qvqH1gvH\nGAOwYMN+RLAOftOuNWWYcrjz52Dc07bUPcB4Oe6HHI0xrehgSQUvfrmbWaN6ExcZ6u1wjGmx0yYY\nVX0EQESWA2NUtdTZfhh4z6PRGeOH/vRxBq5a5b6LBns7FGPOSHP6YHrifqq+TpVTZoxpJbvyy3ht\ndRbXjUskMTrM2+EYc0aa8yT/S7gnnnzb2b4CeLHVIzLGjz39cQadggKYc0Gyt0Mx5ow1OcGo6uMi\n8gEwySm6RVXXeyYsY/xPba3yybY8Lh/Zm5jwTt4Ox5gz1qQEIyKBQJqqDgHWeTYkYxpXVVPLm+uy\n+SKzgMTuYfx4+uCTnhPJKixn1e5Cpg/rSXho+5nDa/vBUkoqahjXz9Z9MR1DkxKMqrpEZLuIJKrq\nPk8HZUxj/vxJBn/+JJMeXUN4d9MBDhyu4HfXjDqWZGpctdz58lq2HighbEEg9100iNsntcpadR63\nek8hgCUY02E0pw+mG+7p8lcBR+oKVfUbrR6VMQ1QVd7ddIBzB0bzym3jefbTTH73
0Q7SD5Rw2che\nXDkmgfc27WfrgRIemDmEzzLyeeLDbUwfGtdoh/nBkgrCQ4MIC/H+xOKrdhcSFxFKQrfO3g7FmFbR\nnP9V/+exKIxpgoy8MnYXHOG28/ohIsy5IJlekZ3596p9/O6jHfxh8Q4CA4SLhvbkzsn9uSIlnilP\nfspTS3bw1LUpDZ7zyme/4IKzYvnlFSPa+G6Op6qs3lPIuH7RiNjUMKZjaE4n/zJPBmLM6Xy4JRcR\nmD7069HxV5+dwNVnJ5BdVM6/V+1j7d4iHp01DBEhLjKUW87tx1+X7+S7k/oztHfEcec7VFbJ/sMV\nLNuRf6xs9Z5COgUFMDIhqs3uCyCr8CgHSyoZl9StTa9rjCc1+TkYEZkgIqtFpExEqkTEJSIlngzO\nmPo+3JLL2YndiI04+en2hG5h/OTiIbx6x0R6RX7dxHT3lAFEdg7mx//ZyNEq13GfycgrA9xf7tlF\n5VS7arnr5bU8+NZmz95IA1Y5/S9jrf/FdCDNedDyGeDbQAbQGbgdeNYTQRlzoj0FR9h6oIQZw+Oa\n9bnIsGCeujaFbbklPPDWJtwrb7vVJRiAlbsKWbo9n0NHqth6oISiI1UNnc5jVu8uJLJzMINiw09/\nsDHtRLNmU1bVTCBQVV2q+k9ghmfCMuZ4zy/dSUhgAJeO7NXsz54/OJb7pw9mwYb9vLvpwLHyzIOl\ndAkJpFtYMF/tOsQba7MIChBU4atdh1oz/NNau6+Is/t2I8Cm5jcdSHMSTLmIhAAbROS3InJvMz/f\nZCKSIiIrRGSDiKwRkXFOuYjI0yKSKSKbRGRMvc/cLCIZzutmT8RlvGN3wRHeWJfNdeMTj2v+ao67\npwwgObYrz36aeawWk5lfxsCe4YzvF83S7fl8si2PGyb0pUtIIF/uLGjNWzilw0erycwrY3Sftu33\nMcbTmpMgbnSOn4N7mHIf4GpPBAX8FnhEVVOAXzjbADOBZOd1B/A8gIh0Bx4CxgPjgIdExHpLO4g/\nLtlBcKBwz/kDWnyOgADh7qkD2JZbyifb8gDIOFhGcmxXJvTvTkFZJdUu5dvjEhnXrztfZrprMKv3\nFJJdVN4q99GYjVnFAIxOtH+ypmNpToIZCIiqlqjqI6p6n9Nk5gkK1A35iQT2O+9nAS+p2wogSkR6\nARcDi1W1UFWLgMVY812HUHikioUb93PTxCRiw89s6vrLR/UmoVtnnvk0k8Pl1eSVVroTzIBoAEYm\nRDI4LpxzB/ZgV8ERXl+dxey5K3jkna2tcSuN2pBVjAiM7GNLI5uOpTkJ5iZgo9N09aSIXO7BWsKP\ngCdFJAv4HfCgUx4PZNU7Ltspa6z8JCJyh9PstiY/P7+hQ4wPWb2nENXjhya3VHBgAHdNGcD6fcX8\n8eMdACT37Mqg2HAuHBLLPVPdNaRzBvQA4KdvbsJVq3yRWUBljavR856p9fuKSI7tSkQ7mtbGmKZo\ncoJR1ZtVdRBwFe4v82eBFn9Di8gSEdnSwGsWcDdwr6r2Ae4F/t7S65xIVeeqaqqqpsbE2GqBvm7N\nnkJCggIYkdA6v93PHtuHEfGR/POLPQAMjAknIED4+3fGMmO4ewDBkLhworuE0KNrCI98YxjlVS7W\n7ClqleufSFVZn1XM6D7WPGY6niY/aCkiN+CeSXkEUIB72PJnLb2wqk47xbVeAn7obP4HeMF5n4O7\n76dOglOWA0w9oXxpS2MzvmPVniJSEqLoFBTYKucLCgzgN1eP4BvPfEFwoBDfwLQsAQHC325OJbJz\nMHERoTz+XjrLduRz7sAerRJDfXsOlVNcXk1KonXwm46nOU1kfwRSgL8BP1DV36rqV54Ji/3AFOf9\nBbifvQH3cs03OaPJJgCHVfUAsAiYLiLdnGa76U6ZacfKq2pIyzlMais/3T6sdyQPzhzC7LGJJ83E\nXGdMYjcGxHSlS6cgxvbrxtLtea0aw9q9RXycfpD1+9w1o9GWYEwH1JypYnqIyDBgMvC4iCQD21X1\nRg/E9V3gTyISBFTgHjEG8D5wCZAJlAO3OLEVishjwGrnuEdVtdADcZk2tCGrmJpa9cjT7c2ZYXnq\noFgefz+d/cVH6R3VOhNR/s9bm9l+sJSI0CC6hASSbA9Ymg6oOVPFRACJQF8gCfforlpPBKWqn6vq\n2ao6SlXHq+pap1xV9XuqOkAUaJuVAAAbpElEQVRVR6jqmnqf+YeqDnRe//REXKZtrd5dhIi7NuFN\nUwe7++qWbm+dQSHlVTVk5JUyJjGKmlplQv/oRmtSxrRnzZlN+fN6r2dUNdszIRnjtmZvIUPiIojs\n7N3RVQNjuxIf1Zml2/O4bnziGZ8v/UAJtQp3Tx3IuKTuBHjkcWVjvK85TWQjAUQkTFU9++SZ8Xuq\nyuacw8xs5txjniAiTBkcw4L1OVTV1BISdGYZYXP2YQBGxEcSGWZDk03H1ZwmsokishXY5myPEpHn\nPBaZ8Wt5pZUUl1czuKdv9E1MGRTDkSoXa/aeedfe5pwSenTtRM+ITq0QmTG+q7mjyC4GDgGo6kbc\nHf7GtLptuaUADOkVcZoj28a5A3sQHCjHrR3TUltyDjM8PsIWFjMdXnNnU846ochzjzcbv7Y9173U\n0JA436jBdO0URGrf7iw7w47+o1UuMvJKGRFv08KYjq85CSZLRM4BVESCReR+IN1DcRk/t+1AKT0j\nOhEVFuLtUI6ZOjiGbbmlHDh8tMXn2Op08A+3BGP8QHMSzF3A93DP8ZWD+6HL73kiKGO25ZYyOM43\nmsfqTB0cC3BGtZgtOV938BvT0TUpwYhIIHCjql6vqj1VNVZVb1DVtl2VyfiFGlctmfllPtM8VmdQ\nz670igzl0zN4qn9zzmGiu4TQK/LMZoY2pj1oUoJRVRdwnYdjMQaAPYeOUFVT6zMjyOqICBcMiWX5\njgIqqlvW/bg5+zDD4yOtg9/4heY0kX0uIs+IyCQRGVP38lhkxm+lH6gbQeZbCQbg4mFxHK128XlG\n81e8LKusYUdeKSm2cqXxE815kj/F+fPRemWKezJKY1rN9txSAgOEgbFdvR3KSSb0jyY8NIhFablM\na+YaNZuyi1HFZk42fqM5T/Kff6r9InKzqs4785CMv9uQVUxybNdWm6K/NYUEBXDhkFiWpB+kxlVL\nUGDTGwE2Zrk7+FMSLMEY/9CasyD98PSHGHNqlTUuVu8pZKKzjLEvmj4sjqLyalY3YRGy/NJKsgrd\nMyttyCoiKTqMbl18Z+i1MZ7UmgnGei3NGVu/r5jKmtpjyxb7oimDYggJCmBRWu5pj73/Pxu5+vkv\nqah2sSGrmFHW/2L8SGsmGG3Fcxk/9eXOQwQIjPPAGjCtpUunIKYOiuGdjfs5WtX4aLKKahdf7TpE\nXmklf/o4g4MlldbBb/yK1WCMT1mx8xDD4yO9PkX/6dw+qT+HjlTx+prjZ0/65btbue+1DQCs3lNI\nVU0tkZ2D+cuynQCWYIxfac0E80Urnsv
4ofKqGtZnFfl0/0udcf26k9q3G3OX76La5V53r7ZWeWNd\nNm+tz2HfoXI+yyggJDCAJ64egSoEBwpDe/vW7ATGeFKTR5GJyH0NFB8G1qrqBlWd03phGX+0Zk8R\n1S716f6X+u45fwC3vriGhRv2c/XZCaTnllBcXg3Aq6v38VlGAWP6RnHxsDhGJ0YRHBjgkyPjjPGU\n5tRgUnHPRxbvvO4EZgB/E5GfNvfCInKNiKSJSK2IpJ6w70ERyRSR7SJycb3yGU5Zpog8UK+8n4is\ndMpfExEbptMObcgqBuDsvt5dIrmpzh8cy5C4cF74fDcAX+10z5w0KiGS+av2kX6ghEnJMYgIL906\njhduTj3V6YzpcJqTYBKAMar6Y1X9MXA2EIt7TZjvtODaW4CrgOX1C0VkKDAbGIY7gT0nIoHOfGjP\nAjOBocC3nWMBngCeUtWBQBFwWwviMV62PbeUxO5hdO3UnOd/vUdEuH58IukHSkjbf5ivdh6iX48u\n/ODC5GM1mfMGumtj4aHBRIT6dr+SMa2tOQkmFqist10N9FTVoyeUN4mqpqvq9gZ2zQJeVdVKVd0N\nZALjnFemqu5S1SrgVWCWuCd1ugB4w/n8POCK5sZjvC89t8TnJrg8nctH9SYkMIDXVmexarf7+Z0p\ng2LoFRlKVFiwTctv/FpzflX8F7BSRBY425cD80WkC7C1FWOKB1bU2852ygCyTigfD0QDxapa08Dx\nJxGRO4A7ABITE1spZHOmKqpd7Ck4wmUje3s7lGaJCgvhoqE9mb9yHzW1ysT+0QQFBvCbq0dScrSa\nwAAbXGn8V3OminlMRD4AznWK7lLVNc776xv6jIgsAeIa2PVzVV3QQLnHqepcYC5AamqqPbvjIzIO\nllGrvrOCZXN8MzWB9zYfANxzlYH7YUxj/F1zRpE9jbvp6k9N/YyqTmtBTDlAn3rbCU4ZjZQfAqJE\nJMipxdQ/3rQT6T62RHJzTBrYg9jwTkSFBRMT3snb4RjjM5rTRLYW+F8RGQy8jTvZrDnNZ1piIe6m\ntz8AvYFkYBXuBzmTRaQf7gQyG7hOVVVEPgW+ibtf5mbAK7Uj03Lbc0sJDQ6gb3QXb4fSbEGBATx3\n/RhrDjPmBE3u5FfVeap6CTAW2A48ISIZLb2wiFwpItnAROA9EVnkXCcNeB13v86HwPdU1eXUTuYA\ni4B04HXnWICfAfeJSCbuPpm/tzQu4x3bcksY1DO83X5JpyZ1Z3Ri+xhebUxbacl40IHAEKAv7i/6\nFlHVt3HXhBra9zjweAPl7wPvN1C+C/coM9NObc8t5XxnzXtjTMfQ5BqMiPzWqbE8CmwGUlX1co9F\nZvxGfmklBWVVDOll06gY05E0pwazEzgH6A90AkaKCKq6/NQfM+bU5q/cB7SfJ/iNMU3TnARTC3yC\ne5TWBmAC8BW2ZLI5A3sPHeHZpZlcOrKXzTRsTAfTnCf5f4C7g3+vs3zyaKDYI1EZv/HQwjRCAgP4\nxWVDT3+wMaZdaU6CqVDVCgAR6aSq24DBngnL+IOd+WUs3Z7PPecPoGdEqLfDMca0suY0kWWLSBTw\nX2CxiBQBez0TlvEHH6UdBOCKlEZn9jHGtGPNmSrmSuftw86DjZG4n1MxpkU+2prLiPhIekd19nYo\nxhgPaNG86Kq6rLUDMf7lYEkF6/cVc//0Qd4OxRjjIa25ZLIxTbZ4q7t5bPqwhuZCNcZ0BJZgjFd8\nuCWXfj26kBzb1duhGGM8xBKMaXN7Dx3hi50FfGNUb9zrxRljOiJLMKbNzV+5jwARvj3OFnwzpiOz\nBGPaVEW1i9fWZDF9aE/iIu3ZF2M6Mkswpk29t+kAxeXV3Dihr7dDMcZ4mCUY06aW7cgnLiKUiQOi\nvR2KMcbDLMGYNpWRV8aQXuHWuW+MH7AEY9qMq1bZlV/GwBgbmmyMP7AEY9pMdlE5lTW1JPe0BGOM\nP/BaghGRa0QkTURqRSS1XvlFIrJWRDY7f15Qb9/ZTnmmiDwtTjuLiHQXkcUikuH8aStX+aCMg2UA\nDIwN93Ikxpi24M0azBbgKuDEFTELgMtVdQRwM/ByvX3PA98Fkp3XDKf8AeBjVU0GPna2jY/JzK9L\nMFaDMcYfeC3BqGq6qm5voHy9qu53NtOAziLSSUR6ARGqukJVFXgJuMI5bhYwz3k/r1658SEZB8uI\nDe9EZOdgb4dijGkDvt4HczWwTlUrgXggu96+bKcMoKeqHnDe5wI9GzuhiNwhImtEZE1+fr4nYjaN\nyMwvs9qLMX7EowlGRJaIyJYGXrOa8NlhwBPAnc25plO70VPsn6uqqaqaGhMT05xTmzOgquzMK7PJ\nLY3xIy1aD6apVHVaSz4nIgnA28BNqrrTKc4BEuodluCUARwUkV6qesBpSstraczGM3JLKiirrLEa\njDF+xOeayJxlmd8DHlDVL+rKnSawEhGZ4IweuwlY4OxeiHtAAM6fCzA+xUaQGeN/vDlM+UoRyQYm\nAu+JyCJn1xxgIPALEdngvGKdffcALwCZwE7gA6f8N8BFIpIBTHO2jQ/JzHMnGHsGxhj/4dEmslNR\n1bdxN4OdWP5L4JeNfGYNMLyB8kPAha0do2k9q/cUEt0lhOguId4OxRjTRnyuicx0PHsKjrAoLZdr\nUvvYHGTG+BFLMMbj/rp8J0GBAdx6XpK3QzHGtCFLMMajDpZU8ObaHL6VmkBsuC0wZow/sQRjPOqf\nX+zBpcqdkwd4OxRjTBuzBGM8RlVZuCGHyck96NM9zNvhGGPamCUY4zHrs4rZf7iCy0b29nYoxhgv\nsARjPOa9TQcICQxg2tBGp4YzxnRglmCMR9TWKu9vPsDkQT1s9mRj/JQlGOMR67OKOXC4gktH9vJ2\nKMYYL7EEYzxi/b4iACYl24zVxvgrSzDGI/YVlhMeGmRTwxjjxyzBGI/Ye6icvtFhNjWMMX7MEozx\niL2HjtC3exdvh2GM8SJLMKbV1bhqyS46SmK0PVxpjD+zBGNa3YHDFdTUKkmWYIzxa5ZgTKvbc+gI\nAInWRGaMX7MEY1qspKKaoiNVJ5XvPVQOQF+rwRjj1yzBmBa786W1TP7tp3ycfvC48n2F5YQEBRAX\nYdPzG+PPvJZgROQaEUkTkVoRSW1gf6KIlInI/fXKZojIdhHJFJEH6pX3E5GVTvlrImIPX3jY9txS\nvtp1CARum7eGPy3JoLZWAfcKlondwwgIsCHKxvgzb9ZgtgBXAcsb2f8H4IO6DREJBJ4FZgJDgW+L\nyFBn9xPAU6o6ECgCbvNU0MbtXyv3EhIUwEf3Tuaq0fE8tWQHd72yltKKavYVltPXpuc3xu95LcGo\narqqbm9on4hcAewG0uoVjwMyVXWXqlYBrwKzxP0k3wXAG85x84ArPBe5OVJZw1vrcrh0RC96RXbm\n998axUOXD+XjbXlc8ewX7Dl0xIYoG2N8rw9GRLoCPwMeOWFXPJBVbzvbKYsGilW15oRy4yELN+6n
\nrLKGGyYkAiAi3HJuP165bTxF5dVUVNeSFG0jyIzxd0GePLmILAHiGtj1c1Vd0MjHHsbd3FXmiWlG\nROQO4A6AxMTEVj9/R1dVU8vc5bsYEhfOmMRux+2bOCCad75/Hn9dtpOZwxv6azfG+BOPJhhVndaC\nj40HvikivwWigFoRqQDWAn3qHZcA5ACHgCgRCXJqMXXljcU0F5gLkJqaqi2Iz29s3V/C+qwiJifH\nHFvy+MUvd7O74Agv3jK2wXnG4qM68+is4W0dqjHGB3k0wbSEqk6qey8iDwNlqvqMiAQBySLSD3cC\nmQ1cp6oqIp8C38TdL3Mz0FjtyDRRZY2Lu15Zy75C9zMtIxMi+c45STz9cSYXDoll6uBYL0dojPF1\n3hymfKWIZAMTgfdEZNGpjndqJ3OARUA68Lqq1g0C+Blwn4hk4u6T+bvnIvcPL325l32F5fz26pH8\n76VnUVxezX2vb6SyxsX/Xjb09Ccwxvg9UfXfVqLU1FRds2aNt8PwOYVHqpjy5Kec3bcbL94yDoBq\nVy3vbNxPWEggM4bbKpXG+DMRWauqJz2/eCKfayIz3rN8Rz7/XrWPjVnFlFe5+PklZx3bFxwYwFVj\nErwYnTGmvbEEYwB4Z+N+fvTaBmK6diKlTxSXjepFcs9wb4dljGnHLMH4OVXllRV7eWhhGqlJ3fnH\nd8bStZP9szDGnDn7JvFjBWWVPPDmJpak5zF1cAzPX382nUMCvR2WMaaDsATjx+59bQMrdxfyf5cN\n5ZZzkmxySmNMq/K5qWJM29iZX8ZnGQX88MJkbjuvnyUXY0yrswTjp/61Yh/BgcK3Uvuc/mBjjGkB\nSzB+6GiVizfWZnHxsDhiwjt5OxxjTAdlCcYPvbNpPyUVNVw/vq+3QzHGdGCWYDqgo1UuDpZUHFth\nso6qMn/lPh5emMbgnuFM6N/dSxEaY/yBjSLrAFSVNXuLeHt9Dmv2FJKZV0atQkhQACPi3ZNUhocG\n8eynmazeU8S5A6N58pujGpwN2RhjWoslmDP0UVou81fto6LaxY+nD2ZskudrBV/uLGD9vmLKq2rY\nlX+E9fuKyS2pICwkkAn9o5kxvBcxXUPIKjrK4q0H+f6/1wMQFxHKL68YznXjEm3UmDHG42yyyzOY\n7FJVOf93SymrrEFECAoQPrp3MuGhwa0SX0W1i8qaWiI7B6Oq5JVW8tsPt/PmumwAAgQSuoUxMiGS\nqYNjmTk8ji4nPIVfW6ss3ZFHaUUNM4f3IiTIWkWNMWfGJrtsAxuzD7PnUDlPXD2CwXERXPXcF/zq\n/W38+qoRZ3ReVeWtdTk8+u5WDh+tJtxJGqWVNQQGCHPOH8jdUwcQFhJ42maugADhgiE9zygeY4xp\nCUswZ+C/63MICQxgxvBeRHYO5ruT+vPX5bu4bGQvzh3Yo9nnU1U2ZBXzxyUZLNuRT2rfbkwf1pP9\nxRXUqpIU3YVzB/ZgcJxNQmmM8X2WYFqoxlXLu5v2c8GQWCI7u5vE7r1oEIvTD/K9+euYf/sEhvaO\nOHZ8ZY2L/cUVBAUIafsP8+6mA5RV1pAU3YWIzsGUHK1m3b4iNmUfpmunIB6+fCg3TbTpW4wx7Zcl\nmBb6YuchCsqquGJ072NlocGBvPidccye+xXXv7CCebeOY1DPcJZuz+PRd7ay/3DFsWN7dA2hZ0Qo\na/YUUVZZQ3hoEH26hfHYrGFcOSbBZjQ2xrR79i3WQgvW5xARGnTS2vSJ0WHM/+4EZs9dwTee+eJY\n+ZC4cH40bRAi0DuqM+P7dScoMABVpVYh0GoqxpgOxhJMCxytcrEoLZfLR/UmNPjk6e2TenThrXvO\n4cMtuVTW1BIb3olZKb0JCjx5BJeIEGi5xRjTAXktwYjINcDDwFnAOFVdU2/fSOCvQARQC4xV1QoR\nORt4EegMvA/8UFVVRLoDrwFJwB7gW6pa5KnYF6cf5EiVi1kp8Y0e0zuqM7ee189TIRhjjM/z5kMR\nW4CrgOX1C0UkCHgFuEtVhwFTgWpn9/PAd4Fk5zXDKX8A+FhVk4GPnW2P+e/6HHpFhjK+n021Yowx\njfFaglHVdFXd3sCu6cAmVd3oHHdIVV0i0guIUNUV6n469CXgCuczs4B5zvt59co9ETeDeoZzsy3Q\nZYwxp+SLfTCDABWRRUAM8Kqq/haIB7LrHZftlAH0VNUDzvtcoNEnC0XkDuAOgMTExGYHJyI8MHNI\nsz9njDH+xqMJRkSWAHEN7Pq5qi5o5GNBwHnAWKAc+FhE1gKHm3JNp0+m0flvVHUuMBfcU8U05ZzG\nGGOaz6MJRlWnteBj2cByVS0AEJH3gTG4+2US6h2XAOQ47w+KSC9VPeA0peWdQdjGGGNagS/OfLgI\nGCEiYU6H/xRgq9MEViIiE8Q9AddNQF0taCFws/P+5nrlxhhjvMRrCUZErhSRbGAi8J7T54IzvPgP\nwGpgA7BOVd9zPnYP8AKQCewEPnDKfwNcJCIZwDRn2xhjjBfZdP1nMF2/Mcb4o6ZO1++LTWTGGGM6\nAEswxhhjPMISjDHGGI/w6z4YEckH9jbzYz2AAg+E09Y6wn3YPfgGuwff0Vb30VdVY053kF8nmJYQ\nkTVN6dzydR3hPuwefIPdg+/wtfuwJjJjjDEeYQnGGGOMR1iCab653g6glXSE+7B78A12D77Dp+7D\n+mCMMcZ4hNVgjDHGeIQlGGOMMR5hCaYZRGSGiGwXkUwR8eiyzJ4gIv8QkTwR2eLtWFpKRPqIyKci\nslVE0kTkh96OqSVEJFREVonIRuc+HvF2TC0hIoEisl5E3vV2LC0lIntEZLOIbBCRdjk5oYhEicgb\nIrJNRNJFZKK3YwLrg2kyEQkEdgAX4V6zZjXwbVXd6tXAmkFEJgNlwEuqOtzb8bSEs95PL1VdJyLh\nwFrgivb09wDgLDnRRVXLRCQY+Bz4oaqu8HJozSIi9wGpuJczv8zb8bSEiOwBUuvWoGqPRGQe8Jmq\nviAiIUCYqhZ7Oy6rwTTdOCBTVXepahXwKjDLyzE1i6ouBwq9HceZUNUDqrrOeV8KpPP10tnthrqV\nOZvBzqtd/bYnIgnApbiX0DBeIiKRwGTg7wCqWuULyQUswTRHPJBVbzubdvjF1pGISBIwGljp3Uha\nxmle2oB7BdbFqtre7uOPwE+BWm8HcoYU+EhE1orIHd4OpgX6AfnAP53myhdEpIu3gwJLMKadEpGu\nwJvAj1S1xNvxtISqulQ1Bffy3+NEpN00W4rIZUCeqq71diyt4DxVHQPMBL7nNCW3J0G4l5V/XlVH\nA0cAn+gjtgTTdDlAn3rbCU6ZaWNOn8WbwL9U9S1vx3OmnOaMT4EZ3o6lGc4FvuH0X7wKXCAir3g3\npJZR1RznzzzgbdzN4e1JNpBdrwb8Bu6E43WWYJpuNZAsIv2cTrTZwEIvx+R3nM7xvwPpqvoHb8fT\nUiISIyJRzvvOuAePbPNuVE2nqg+qaoKqJuH+v/CJqt7
g5bCaTUS6OINFcJqVpgPtapSlquYCWSIy\n2Cm6EPCJQS9B3g6gvVDVGhGZAywCAoF/qGqal8NqFhH5NzAV6CEi2cBDqvp370bVbOcCNwKbnf4L\ngP9R1fe9GFNL9ALmOaMTA4DXVbXdDvVtx3oCb7t/byEImK+qH3o3pBb5PvAv55ffXcAtXo4HsGHK\nxhhjPMSayIwxxniEJRhjjDEeYQnGGGOMR1iCMcYY4xGWYIwxxniEJRjjd5yZZ++pt91bRN5oxfP/\nSERuct4/KiLTWuvc9a7R4vOeeP8t+PyrIpLc0s8b/2HDlI3fceYwe9cTM0qLSBCwDhijqjWtff7W\ncKb3LyJTgBtU9butGZfpeKwGY/zRb4ABzvofT4pIUt0aOSLyHRH5r4gsdtYJmSMi9zmTCK4Qke7O\ncQNE5ENngsTPRGSIc+4LgHV1yUVEXhSRbzrv94jIIyKyzll/ZMiJgTXj+qc9r4g8LCL31zv3Fie5\nHHf/zr6fiMhqEdkkzto0zlPu74l7zZotInKtc6rPgGlOMjWmUZZgjD96ANipqimq+pMG9g8HrgLG\nAo8D5c4kgl8BNznHzAW+r6pnA/cDzznl5+Jeo6YxBc7Eis87n2tIU67fkvPWOe7+RWQ6kIx7Dq4U\n4GxnwscZwH5VHeXUdj4EUNVaIBMYdZrrGD9nCcaYk32qqqWqmg8cBt5xyjcDSc5MzucA/3Gmq/kr\n7qlfcP7MP8W56ybnXAskteT6Z3Dexkx3XutxN+8NwZ1wNgMXicgTIjJJVQ/X+0we0LuZ1zF+xqq4\nxpysst772nrbtbj/zwQAxc5U+yc6CoQ24dwuGv//d7rrN/W8NRz/S2RjcQnwa1X960k7RMYAlwC/\nFJGPVfXReuc62sj5jAGsBmP8UykQ3tIPO+vP7BaRa8A9w7OI1DUXpQMDzzzEVrEHZ9p2J1H0c8pP\nvP9FwK1OzQwRiReRWBHpjbt57hXgSY6fAn4Q7WzWYdP2LMEYv6Oqh4AvnI7rJ1t4muuB20RkI5DG\n18tnf4B7+Vpf8CbQXUTSgDnADjj5/lX1I2A+8JWIbMa9nkg4MAJY5TQDPgT8EkBEegJHnWnijWmU\nDVM2ppWJyNvAT1U1w9uxeIKI3AuUtMOlHkwbsxqMMa3vAb7u9O+IioF53g7C+D6rwRhjjPEIq8EY\nY4zxCEswxhhjPMISjDHGGI+wBGOMMcYjLMEYY4zxiP8HqCc4cIa5K8oAAAAASUVORK5CYII=\n", 81 | "text/plain": [ 82 | "" 83 | ] 84 | }, 85 | "metadata": {}, 86 | "output_type": "display_data" 87 | }, 88 | { 89 | "data": { 90 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZoAAAEWCAYAAABfdFHAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzt3Xd8VfX9+PHXOxsISSAQVggh7L3C\nVBABFSfurbhHtbVabbV+22pb2zqq1Z/aiqNV0boHdbFkOdh7E8LIggzIYGS/f3+cE7yEBEjIzb1J\n3s/H4z4453PW+x6S+87ncz738xFVxRhjjPGWAF8HYIwxpnGzRGOMMcarLNEYY4zxKks0xhhjvMoS\njTHGGK+yRGOMMcarLNEY40dEREWku6/jqIqIXCcis+r4nPHuew6qy/Ma/2KJxvgVEZkvIvtFJNTX\nsZijqeo7qnq2r+MwDY8lGuM3RCQeGAsocFE9XdMnf0nbX/CmKbFEY/zJjcBi4D/A1IpCERkpIntE\nJNCj7BIRWesuNxORN92a0CYR+bWIpFZ3Ebep5h4R2QZsc8t6i8hsEdknIltE5Eq3vKuI5IpIgLv+\nqohkepzrbRH5pbt8s3v9AhFJFpE7PfYbLyKpIvIbEdkD/Nstf0hEMkQkXURuOU7MV4nI8kpl94vI\nDHf5PBHZ6F47TUQePM65bnHj3C8iM0WkS6V78ws3/mwRedrjvd8kIt+5yyIiz4lIpojki8g6Eenv\nbosUkbdEJEtEdonI/3mcI1BEnnHPnQycXym2SBF53b0naSLyZ8//d9NAqaq97OUXLyAJ+BkwDCgB\n2nls2w6c5bH+IfCwu/w3YAHQCogF1gKpx7mOArOB1kAzoAWQAtwMBAFDgGygr7v/bmCYu7wFSAb6\neGwb4i6fD3QDBDgDOAQMdbeNB0qBJ4FQ97qTgb1AfzeGd93YulcRc3OgAOjhUbYMuNpdzgDGusut\nKq5bxXmmuPe5j/te/w/4odK9mefemzhgK3Cbu+0m4Dt3+RxgBRDlvt8+QAd321vA50BLIN49x63u\ntruAzUBn9xrz3GsGuds/BV5x70cMsBS409c/m/Y6xd9tXwdgL3upKsDpbnJp465vBu732P5n4A13\nuSVwEOjiricD53jse9tJJJoJHutXAYsq7fMK8Ad3+W3gAaC9m2iecj8wuwK5QEA11/kMuM9dHg8U\nA2Ee298A/uax3rO6RONunw783l3u4Sae5u76buBOIOIE9/nrig99dz0AJyFW3EsFJnts/xkw1132\nTDQT3AQyyvP9A4Hu++zrUXYnMN9d/ha4y2Pb2RWJBmgHFAHNPLZfA8zz9c+nvU7tZU1nxl9MBWap\nara7/i4ezWfu+qVuJ4FLgZWqusvd1hGnRlLBc7k6nvt0AUa6TWS5IpILXIeTWMCpLY0HxgELgfk4\nNZYzcBJUOYCInCsii93mt1zgPKCNx3WyVLXQY71y3Ls4vndxPngBrgU+U9VD7vpl7vV2icgCERld\nzTm6AM97vM99ODWSTh77VI6pY+WTqOq3wIvAS0CmiEwTkQic9xtc6b3s8jj/8d5zF/fYDI/4XsGp\n2ZgGzBKN8TkRaQZcCZzhPovZA9wPDBKRQQCquhHnQ+lcnA/Zdz1OkYHTZFah80lc1nPY8hRggapG\nebzCVfVud/sCnE4K493l74DTcBLNAvc9hAIfA8/gNPlFAV/hfIhXdc2KuD1jjTtBzLOBtiIyGCfh\nHLkHqrpMVafgfCh/BnxQzTlScJqiPN9rM1X9wWOfyjGlV3UiVX1BVYcBfXFqYw/hNDmW4CQNz3Ok\nucvHe88pODWaNh6xRahqv2rei2kgLNEYf3AxUIbzgTXYffUBFuF0EKjwLnAfTs3iQ4/yD4BHRKSV\niHQC7q3h9b8AeorIDSIS7L6Gi0gfAFXdBhwGrsdJSPk4z1Yuw000QAjOs5csoFREzsVpFjqeD4Cb\nRKSviDQH/nC8nVW1BOd9P43zfGM2gIiEiPMdl0h3n3ygvJrT/AvnXvVzj40UkSsq7fOQey8749zv\n9yufxL0/I0UkGKcZsx
AoV9Uy9309ISIt3Y4GD+A0+1W851+ISKyItAIe9nh/GcAs4O8iEiEiASLS\nTUTOON59Mf7PEo3xB1OBf6vqblXdU/HCaZq5Tn7qCvxfnFrEtx5NbAB/BFKBHcAc4COcv4xPiqoW\n4CSFq3H+et/DTw/tKywAclQ1xWNdgJUe5/gFzgfpfpxa14wTXPdr4B84zy2S3H9P5F1gEvChqpZ6\nlN8A7BSRfJznR9dVc81P3ff2nrvvepxaoqfPcR70rwa+BF6v4lQRwKs473UXkIOTAAF+jpN8knFq\nf+/iPI/CPWYmsAbn3n1S6bw34iTtje65PwI6VPVeTMMhqjbxmWlcRORunN5Y9pdwDYmI4vRsS/J1\nLKbxsBqNafBEpIOInOY2tfQCfoXTTdYY4wfs28mmMQjB6Z1U0d34PeBln0ZkjDnCms6MMcZ4lTWd\nGWOM8aom3XTWpk0bjY+P93UYxhjToKxYsSJbVdue7P5NOtHEx8ezfPnyE+9ojDHmCBE50SgWR7Gm\nM2OMMV5licYYY4xXWaIxxhjjVU36GU1VSkpKSE1NpbCw8MQ7N0JhYWHExsYSHBzs61CMMY2EJZpK\nUlNTadmyJfHx8YjIiQ9oRFSVnJwcUlNT6dq1q6/DMcY0EtZ0VklhYSHR0dFNLskAiAjR0dFNtjZn\njPEOSzRVaIpJpkJTfu/GGO+wRGOMMU1ARt5h3l68i8PFZfV+bUs0figwMJDBgwfTr18/Bg0axN//\n/nfKy515rObPn09kZCRDhgyhV69ejBs3ji+++OLIsY899hidOnVi8ODB9O/fnxkzfpoSZfr06Qwc\nOPDIeW+77TZyc3Pr/f0ZY+pXWu5hrnzlR3732XrOfX4hS3fsq9frW2cAP9SsWTNWr14NQGZmJtde\ney35+fk8/vjjAIwdO/ZIclm9ejUXX3wxzZo1Y+LEiQDcf//9PPjgg2zatImxY8eSmZnJrFmzeO65\n5/j666/p1KkTZWVlvPnmm+zdu5eoqCjfvFFjvKiwpIy1qXk0Cw5kQGykr8PxiZwDRSxO3sffvtlE\n7qES/nxxf15ZuJ2rp/3IvAfH0yW6Rb3EYYnGz8XExDBt2jSGDx/OY489dsz2wYMH8/vf/54XX3zx\nSKKp0KdPH4KCgsjOzuaJJ57gmWeeoVOnToBTa7rlllvq4y0YU29S9h1i+pJdLN+5n3WpeRSXOS0B\nd56RwINn9yI4sHE34uQXlrA0eR8/bM/hh+3ZbN5TAEB0ixCm3zqSQZ2juGRIJ+Zvyaq3JAOWaI7r\n8f9tYGN6fp2es2/HCP5wYb8aHZOQkEBZWRmZmZlVbh86dChPP/30MeVLliwhICCAtm3bsmHDBoYO\nHVqrmI1pCLIPFHH1tMVkFhQyMDaKm0+LZ1iXVizYmsUrC5JZtDWbu8Z349z+7Y9KOEWlZfyQlENI\nUABjujk9TrMKipi5YQ+zNu6lVfNgfja+O73at6zTePfmF/Lt5kzmbspk+a59nNatDT+f2J3e7SNq\ndJ6MvMPM2rCXmRv2sGTHPsrKldCgABLjW/HQOb0Y3S2agZ0iCXLfc4vQIM4fWL+zY1uiaQQqzyn0\n3HPPMX36dFq2bMn7779/TE+ydevWccMNN1BQUMBf/vIXrrrqqvoM15hTVl6uFJeVExYcCDjNZHe8\ntZycg0V8fPcYBsb+1Bx8dr/2jOnWhmdmbeEX/11Fh8gwbhoTT5fo5nyzfg9zNmVyoKgUgB4x4bRu\nEcLSnftQha5tWrByVxGfr07njnEJPHJu7+P2zCwuLScoQAgIqHqfzPxCZqxJZ8aadNam5gHQKaoZ\nY3u0Zd7mTL5cl8HYHm24cGBHVuzaz6JtWYxKiOa6UV0YGhd15No7sw/yzYY9fLN+D6tTnOes3WPC\nuXNcAmN7tGVIXNSRe+MP/DLRiMjTwIVAMbAduFlVc91tjwC3AmXAL1R1pls+GXgeCAReU9W/nWoc\nNa15eEtycjKBgYHExMSwadOmY7avWrWKPn36HFmveEbjqV+/fqxcuZIzzzyTAQMGsHr1au69914O\nHz7s9fiNqSvZB4p4b+lu3l+eQkZuIZP6tKN7TDifrkojLfcwL1839KgkU+H8gR04t3975m3J5LVF\nO/jr15sBiGoezPkDOjB5QHtyDxXzn+93knuohJ9P6MH5AzrQs104eYdLePKbzUxbmEx0ixCmjoln\nzqa9dIhsxrAurQDYureAf3+/g09XpdEmPJRrR8ZxZWJn2oSHUlxazlfrMvh4ZSrfJ2VTrjCgUyQP\nndOLiX1i6NWuJSJC7qFi3vpxF+8t3c2vP15Li5BARiVEM2vjXj5ZlUafDhGc2789C7ZmsWLXfgAG\nxjrnOadfe7rHhNfff0QN+WWiAWYDj6hqqYg8CTwC/EZE+gJXA/2AjsAcEenpHvMScBaQCiwTkRmq\nutEHsdeprKws7rrrLu69994q/5Jau3Ytf/rTn3jttdeOe55HHnmEBx98kM8//5zY2FgASzKmwdiV\nc5BpC5P5aEUqRaXljE6IZmLvdvxvTTrfbNjDad2j+eOUfkzs067acwQECBP7tGNin3Zsysgn91AJ\nifGtjmpGu2RI7DHHRTUP4YmLB1BQWMpfv97MKwuT2XewGICz+rajsKSMRduyCQ0K4KJBHUndf5in\nvtnCc7O3MqF3DGtT88jIK6Rz62bce2Z3LhrcqcqkENU8hF9M7ME9Z3ZnQ3oe3WPCaR4SxMGiUj5f\nnc70xbt4dvZWEtq24JFze3P+wA7EtmpeB3fX+/wy0ajqLI/VxcDl7vIU4D1VLQJ2iEgSMMLdlqSq\nyQAi8p67b4NMNIcPH2bw4MGUlJQQFBTEDTfcwAMPPHBk+6JFixgyZAiHDh0iJiaGF1544ZiOAJWd\nd955ZGVlce6551JWVkZUVBT9+/fnnHPO8fbbMeYY5eXK+vQ8BnSKPG5TVGZ+IY9/sZGv12UQFBDA\npUM7cfu4BLq1dT6of3teHwoKS4gOD63R9ft0qNlzkIAA4ZkrBlFcWk5ZuXLjmHjWpebyz/nbCQ8L\n4qFzenHNiDhatwgBICnzAO8s2cWM1el0jwnnr5cO4IyebU/qC9GBAXJUraxFaBDXjozjmhGdySoo\nom3L0Ab3xWqp3L7vb0Tkf8D7qjpdRF4EFqvqdHfb68DX7q6TVfU2t/wGYKSq3lvF+e4A7gCIi4sb\ntmvX0fP3bNq06ahmqKbI7oHxtr/P2sL/+zaJ28d25bfn9SH3UAmrU3MZ0y2a0CDn2UJ5uXLNq4tZ\nk5rL1DHx3HpaV2Iiwnwc+dGKS8sJEI48aG8qRGSFqiae7P4+q9GIyBygfRWbHlXVz919HgVKgXfq\n6rqqOg2YBpCYmOjfWdaYRmjl7v28NC+JjpFhvLpoB+l5hXyflE3uoRK6tmnBb8/rw4TeMfz7+x0s\n2bGPpy4byJXDO/s67CqFBDWtBFNbPks0qjrpeNtF5CbgAmCi/lTtSgM8f+Ji3TK
OU26M8bHVKbnc\nPX0FPdq1ZEf2AdpHhPH1L8fx5y828uGKVEYnRHPZsFhenpfE7W8tJ7pFCAVFpUzsHcMVicc+NzEN\ni18+o3F7kP0aOENVD3lsmgG8KyLP4nQG6AEsBQToISJdcRLM1cC1tb2+qja4NtC64u9NqabhKSkr\n5zcfraW4tJz03MPsySvkzZtHENksmCcvG8jt4xLoEROOiHDRoI7M2riHmRv2snvfIf562YAm+7vY\nmPhlogFeBEKB2e4P2WJVvUtVN4jIBzgP+UuBe1S1DEBE7gVm4nRvfkNVN9TmwmFhYeTk5DTJqQIq\n5qMJC/OvdnDTsE1bmMyWvQW8dmMik/q2o7Ss/MgzjYAAoWe7n74IGRIUwAUDO3LBwI6+Ctd4gV8m\nGlXtfpxtTwBPVFH+FfDVqV47NjaW1NRUsrKyTvVUDVLFDJvG1IVlO/fx/NxtnDegPZP6Ol2Pm9qD\nc+OnicaXgoODbXZJY+rA56vTeOjDtcS2asZjfvLlZ+MblmiMMXXuje928McvNjIqoTX/un4YUc1D\nfB2S8SFLNMaYOlNUWsZri3bw9MwtTO7XnheuGWJdgI0lGmNM7ZSXKztzDrIuLY81KXmsSc09MjT/\nhYM68tyVg+x5jAEs0RhjTpJnb7HNe/KZ+sZS9uYXARAWHEC/jpHcdFo8I+Jbc2bvGAKrGcHYND2W\naIwxJ7Q6JZcbXlvC6G7R3DW+G3e9vYIAEZ68bAADY6PoERNutRdTLUs0xpjj2pl9kFv/s4xmIYEs\n2JrFrI17iQgL4sO7xtT5ZGCmcbJEY4ypUkbeYT5blc5bP+6kXJUP7xiNAi/NS+K6kV0syZiTZonG\nGHOM7ANFnP3cQgoKSxkSF8VjF/YjwR2a/9krB/s4OtPQWKIxxhzj7R93UVBYyuf3nMagzsfOWGlM\nTdjTO2PMUQpLynh78S4m9o6xJGPqhCUaY8xRPlmZxr6Dxdw2NsHXoZhGwhKNMeaIkrJyXvsumf6d\nIhiV0NrX4ZhGwhKNMeaIVxZsJznrIL+Y0KPJTZNhvMcSjTEGgK17C3hhbhLnD+zA2f2qmmXdmNqx\nRGOMoaxceeijtYSHBfHHi2xIf1O3LNEYY/hweQprUnL5/QV9iQ4P9XU4ppGxRGNME5d3uISnZm5h\neHwrpgy2KZRN3fPrRCMivxIRFZE27rqIyAsikiQia0VkqMe+U0Vkm/ua6ruojWk4ysuVp77ZTO6h\nYh67qJ91ADBe4bcjA4hIZ+BsYLdH8blAD/c1EvgnMFJEWgN/ABIBBVaIyAxV3V+/URvjv2Zv3Mtn\nq9JYnZJL6xYhDI9vzaJtWWzLPMCNo7vQr2Okr0M0jZQ/12ieA36NkzgqTAHeUsdiIEpEOgDnALNV\ndZ+bXGYDk+s9YmP81K6cg9w9fQXLd+1jcFwUzUICeXvxTkTg+asH84cLrQOA8R6/rNGIyBQgTVXX\nVKrKdwJSPNZT3bLqyqs69x3AHQBxcXF1GLUx/uvZ2VsJChT+d+/pxESEAc6XM4MCxJrLjNf5LNGI\nyBygqs76jwK/xWk2q3OqOg2YBpCYmKgn2N2YBm9jej4z1qRz1xndjiQZgGCbqMzUE58lGlWdVFW5\niAwAugIVtZlYYKWIjADSgM4eu8e6ZWnA+Erl8+s8aGMaoL/P2kLL0CDuGtfN16GYJsrv/qRR1XWq\nGqOq8aoaj9MMNlRV9wAzgBvd3mejgDxVzQBmAmeLSCsRaYVTG5rpq/dgjL9Yn5bH3M2Z3D42gcjm\nwb4OxzRRfvmM5ji+As4DkoBDwM0AqrpPRP4ELHP3+6Oq7vNNiMb4j38u2E54aBA3jon3dSimCfP7\nROPWaiqWFbinmv3eAN6op7CM8Xs7sg/y9boM7hjXjchmVpsxvuN3TWfGmLrxr/nbCQoM4JbT430d\nimniLNEY0wgt27mPD1akcN3IOGJahp34AGO8yBKNMY3MwaJSfvXBGmJbNePBs3v5Ohxj/P8ZjTGm\nZp6euYWU/Yf47+2jaBFqv+LG96xGY0wjsv9gMe8u3c1ViZ0ZlRDt63CMASzRGNOofLwyleLScm46\nLd7XoRhzhCUaYxoJVeXdpbsZGhdF7/YRvg7HmCMs0RjTSCzZsY/krINcO7KLr0Mx5iiWaIxpJKYv\n3kVEWBAXDOzg61CMOYolGmMagQ3peXy5LoNrR3YhLDjQ1+EYcxRLNMY0cKrKX77aRGSzYO4ebyM0\nG/9jicaYBm7+liy+T8rhvok9bEwz45cs0RjTgOUcKOJ3n68nPro511knAOOn7GvDxjRQRaVl3Pn2\nCrIKinj/ztGEBNnfjcY/WaIxpoH60xcbWb5rPy9eO4TBnaN8HY4x1bI/gYxpgHZmH+S/S1OYOroL\nFwzs6OtwjDkuSzTGNEAvzksiKEC4Z0J3X4dizAlZojGmgdmVc5BPV6Vxrc01YxoIv000IvJzEdks\nIhtE5CmP8kdEJElEtojIOR7lk92yJBF52DdRG+N9ryxMJjBAuOsM+86MaRj8sjOAiJwJTAEGqWqR\niMS45X2Bq4F+QEdgjoj0dA97CTgLSAWWicgMVd1Y/9Eb4z2lZeV8vS6D8wd0oF2E1WZMw+CXiQa4\nG/ibqhYBqGqmWz4FeM8t3yEiScAId1uSqiYDiMh77r6WaEyjsnJ3LvsPlXBW33a+DsWYk+avTWc9\ngbEiskREFojIcLe8E5DisV+qW1ZduTGNyuyNewgJDGBcz7a+DsWYk+azGo2IzAHaV7HpUZy4WgOj\ngOHAByKSUEfXvQO4AyAuLq4uTmlMvVBVZm/cy6hu0YTbFM2mAfHZT6uqTqpum4jcDXyiqgosFZFy\noA2QBnT22DXWLeM45ZWvOw2YBpCYmKi1fgPG1LPtWQfZmXOIW0/v6utQjKkRf206+ww4E8B92B8C\nZAMzgKtFJFREugI9gKXAMqCHiHQVkRCcDgMzfBK5MV4yZ9NeACb2seczpmHx1/r3G8AbIrIeKAam\nurWbDSLyAc5D/lLgHlUtAxCRe4GZQCDwhqpu8E3oxnjHt5sy6dshgo5RzXwdijE14peJRlWLgeur\n2fYE8EQV5V8BX3k5NGN8oqCwhJW793P7uDp5VGlMvfLXpjNjjIcftudQWq6M62G9zUzDY4nGmAZg\n4dYsWoQEMqxLK1+HYkyNWaIxxs+pKgu3ZTG6W7TNOWMaJPupNcbP7cw5RMq+w/YlTdNgWaIxxs8t\n3JoFYM9nTIN1wl5nIvLA8bar6rN1F44xprKv12fQJbo58W1a+DoUY2rlZLo3t3T/7YUzHEzFFyEv\nxPmypDHGS9an5bE4eR8Pn9vb16EYU2snTDSq+jiAiCwEhqpqgbv+GPClV6Mzpol7/bsdtAgJ5JoR\nNi6fabhq8oymHc639CsUu2XGGC/IyDvM/9akc+XwzkQ2C/Z1OMbUWk1GBngLZ4DLT931i4H/1HlE\nxhgApi/eRbkqt5xmg2iahu2kE42qPi
EiXwNj3aKbVXWVd8IyxszfksXIrtF0bt3c16EYc0pOKtGI\nSCCwQVV7Ayu9G5IxJ7YpI5+v1+9hQ1oeFwzqwCVDYo/ZR1X5fHU6cdHNGdI5ChHxQaS1U1BYwqaM\nfH4+oYevQzHmlJ1UolHVMhHZIiJxqrrb20EZczz5hSVc+vIPFJWW0SY8lLmbM0nPLeRn47sdlUz+\nuzSF3366DoCENi144Zoh9O8U6auwa2TFrv2UK4zo2trXoRhzymrSGaAVzjD9c0VkRsXLW4EZU51v\n1u3hcEkZH941hu9+M4GLBnXk6ZlbuOjF7/nHnK0kZRaQnHWAP32xkdO7t+GpywdyoKiUX3+0lrLy\n4891tzP7YD29i+NbtnMfgQHCkLgoX4dizCmrSWeA33ktCmNq4LPVacRHN2donNMc9o+rBjO4cxRf\nrE3n+bnb+MecbYSHBhEaHMDfrxxEu4gwmgUH8vP/ruK/S3dz/aguVZ53XWoeF774HW/eMoIzfDzc\ny7Id++nfMYLmIX45k4cxNVKTzgALvBmIMSdjT14hPybn8IsJPY40kwUECLec3pVbTu9KZkEhM1an\nM3PDHu4e3412EWEAXDCwA+8s2cUzs7Zw/oAOtGoRcsy5V6XsB2D+lkzO6NmWotIyPlmZxuXDYgkO\nrL/RmopKy1idmsuN1SREYxqak/7tEZFRIrJMRA6ISLGIlIlIvjeDM6ay/61JRxWmDO5Y5faYlmHc\nNjaBD+8aw4TeP33NS0R47KJ+HCgs5Z53V1JcWn7MsRvTnR/nH5JyAPh0ZRqPfLKO2Rv3euGdVG9t\nah7FpeUMt+czppGoyZ9pLwLXANuAZsBtwEveCMqYqqgqn6xKY2BsJAltw2t8fO/2ETx52UB+2J7D\nwx+vxZkd/CcbM5xEs2VvAVkFRXy5LgOA75KyTz34Gli6Yx8Aw+Mt0ZjGoUbtAaqaBASqapmq/huY\n7J2wjDnWl+sy2JSRz/Uja9+kdNmwWB44qyefrErjvWUpR8pLy8rZsqfgSC+vr9Zl8MN2p2bzfT0n\nmpW79pPQtgWtq2jeM6YhqkmiOSQiIcBqEXlKRO6v4fEnTUQGi8hiEVktIstFZIRbLiLygogkicha\nERnqccxUEdnmvqZ6Iy7jO4UlZfz1q8306RDBZcOO/c5MTfx8QneGxkXxjzlbKSwpA2BH9kGKSsu5\nYlgsLcOCeG7OVsrKlasSO7Mr5xAp+w7Vxds4IVVldUouQzrbTJqm8ahJorjB3f9e4CDQGbjMG0EB\nTwGPq+pg4PfuOsC5QA/3dQfwTwARaQ38ARgJjAD+ICL2m9qIvLowmbTcw/z+gr4EBpzaFy9FhF9P\n7s3e/CLe+nEn8FOz2YDYSEZ2jSb3UAkJbVpw21hn+Jfvk7I5XFzGN+szjmlyq0up+w+Tc7CYwdat\n2TQiNUk03QFR1XxVfVxVH3Cb0rxBgQh3ORJId5enAG+pYzEQJSIdgHOA2aq6T1X3A7OxZr1Go7Ss\nnFcXJXNW33aM7hZdJ+cclRDNGT3b8vL87eQXlrAxPZ+QwAC6tQ1njHuN8wd2oHtMOO0iQlm0LZv7\n3lvFXdNX8mNyTp3EUJXVKbkADI61RGMaj5okmhuBNW6T1tMicqEXaw2/BJ4WkRTgGeARt7wTkOKx\nX6pbVl35MUTkDrc5bnlWVladB27q3rq0PPILS7loUNU9zWrroXN6kXe4hD/9byMbM/Lp2T6c4MAA\nJvdvz9C4KK4Y1hkR4bTubfhyXQaz3N5n3nxmszoll9CgAHp3aHninY1pIE460ajqVFXtCVyK86H+\nElDrT2oRmSMi66t4TQHuBu5X1c7A/cDrtb1OZao6TVUTVTWxbVubGrchqPhgH1NHtZkK/TtFcs/4\n7ny4IpXFyTn0ae9UojtGNeOTn51GXLQzmOXYHm0AuG5kHEPjovg+yXs1mjUpufTvFFmv39sxxttO\n+gubInI9zsjNA4BsnO7Oi2p7YVWddJxrvQXc565+CLzmLqfhPBuqEOuWpQHjK5XPr21sxr98n5RD\nnw4RRIeH1vm575vUg++SslmdkkvfjhFV7nP+gI4EBwZwTr/2vDB3Gy/NSyK/sISIsLqdI6akrJx1\naXnVjlxgTENVkz+b/gEMBl57wFFfAAAdtklEQVQFfqGqT6nqj94Ji3TgDHd5As53d8CZRvpGt/fZ\nKCBPVTOAmcDZItLKbc472y0zDVxhSRkrdu/ntDquzVQIDgzg+asHM7Jra8b3iqlyn5CgAC4Y6CSb\nMd3aUK6wJHlfncWwM/sgn6xMZUN6PkWl5QzqbM9nTONSkyFo2ohIP2Ac8ISI9AC2qOoNXojrduB5\nEQkCCnF6mAF8BZwHJAGHgJvd2PaJyJ+AZe5+f1TVuvskMD6zfOd+ikvLOc1tvvKGLtEteP/O0Se1\n79AuUYQFB/B9UjZn9a2bCWb/MWcrn61Op2WY8+s4xBKNaWRq0nQWAcQBXYB4nN5gx47jUQdU9Ttg\nWBXlCtxTzTFvAG94Ix7jO99vzyYoQBjhJ9+SDw0KZHh8a37cXnfPadal5dE9JpwCtzkutlWzOju3\nMf6gJkPDfufxelFVU70TkjE/+SEpmyFxUbQI9Z9RjMd0a8OT32wms6CQmJZhp3Sug0WlJGcf5L6J\nPbhzXDcOl5Q1qAnajDkZNel1NlBVfwZ8ZknG1Iei0jI2pOeT6Ce1mQoVvdAWbT31bs4bM/JRhQGd\nImkWEmjDzphGqSajN48WkY3AZnd9kIi87LXITJO3dc8BSsuVAX42K2bfDhG0CQ9l/tZT/x7W+rQ8\ngAYz86cxtVHTXmfnADkAqroGp2OAMV6xPt39EO7oXx/CAQHCGT3bsmhb1gln7DyRdWl5tG0ZemTe\nHGMao5qO3pxSqaisDmMx5ijr0/JoGRZE59b+93B8fK+25B4qYU1q7imdZ0NaPv2r+f6OMY1FTRJN\nioiMAVREgkXkQWCTl+Iyhg3p+fTrGOGXD8fH9mhDgMD8LbVvPjtcXMa2zAK/axo0pq7VJNHchdO1\nuBPON/EHU01XY2NOVWlZOZsy8v2u2axCVPMQBneOYsGWzFqfY9OefMoV+lmiMY3cSSUaEQkEblDV\n61S1narGqOr1quq9QZ9Mk7Y9y5kfxp8fko/vFcPatDxyDhTV6viKjgBWozGN3UklGlUtA671cizG\nHFHxIdzPj59fTOgdgyrMq2Xz2drUPKJbhNAh0joCmMatJk1n34nIiyIyVkSGVry8Fplp0jak5xMW\nHEBC23Bfh1Ktfh0jaB8Rxhx3+oCaWp2Sy+DOUX75DMqYulSTr1sPdv/9o0eZ4gx6aUydWp+eR98O\nEac8m6Y3iQiT+sbwyco0CkvKCAsOPOlj8wtL2J51oM7n2DHGH9VkZIAzq3gdSTIiMtU7IZqmprCk\njDUpuQ1iFOOJfdpxqLisxrNurk3JQxUGN4D3aMypqsvZle478S7GnNji5ByKSss5o6f/T0w3OiG
a\n5iGBJ9V8drColC17CgBYnbIfoEEkU2NOVV0mGv9t4zANyoKtWYQGBTAqwTtz0NSlsOBAxvVoy9xN\nmTiDi1fv/32bxPkvLGJ3ziFWp+SS0LYFkc3qdvI0Y/xRXSaaUxuLwxjXgi1ZjEqIrtEzD186p387\n9uQX8l3S8QfZXLA1i9Jy5aV5SUc6AhjTFFiNxviV3TmHSM4+yPhe/t9sVuG8AR1oFxHKy/O2H1U+\nc8MeJv9jIQeLSsk+UMSmjHwimwXz4YoUsg8U2wRnpsmoy0TzfR2eyzRRC7Y637RvCM9nKoQGBXLb\n6Qn8mJzDqt37j5T/b006m/cU8OW6DH5wJ0r726UDjvSkG9y5lU/iNaa+1WSagAeqeN0qIoMBVPVe\n74Vpmor5W7KIa92crm1a+DqUGrlmZByRzYJ5eb5Tq1FVluxwZhN/f1kK32/LpmVYEGf3a8/Vw+OI\nCAuiV/uWvgzZmHpTkxpNIs54Z53c153AZOBVEfl1TS8sIleIyAYRKReRxErbHhGRJBHZIiLneJRP\ndsuSRORhj/KuIrLELX9fRGz2qAZIVVm2cx+ndY9ucF9iDA8NYuqYeGZv3MuunIMkZx8kq6CInu3C\nWbFrP1+tz2BMt2gCA4TfX9iXOQ+cQUhQXTYoGOO/avKTHgsMVdVfqeqvgGFADM6cNDfV4trrgUuB\nhZ6FItIXuBroh5PIXhaRQHe8tZeAc4G+wDXuvgBPAs+pandgP3BrLeIxPpa6/zD5haV+Pb7Z8Vw7\nIo4AgY9WpLIk2anN/OWSAQQFCAWFpZzew2kODA4MIMbmnzFNSE0STQzgOXpgCdBOVQ9XKj8pqrpJ\nVbdUsWkK8J6qFqnqDiAJGOG+klQ1WVWLgfeAKeL86TsB+Mg9/k3g4prGY3xvg59OdHay2keGcXqP\ntny8IpUftmcT0zKUYV1aMbFPDACnd2/j4wiN8Y2aDEHzDrBERD531y8E3hWRFsDGOoypE7DYYz3V\nLQNIqVQ+EogGclW1tIr9jyEidwB3AMTFxdVRyKYurE/LJzBAGvSziyuGxfLz/65i7/o9nD+gAyLC\nQ+f0ZkhcK+Kjm/s6PGN84qQTjar+SUS+Bk5zi+5S1eXu8nVVHSMic4D2VWx6VFU/r6Lc61R1GjAN\nIDEx0b7740fWp+fRIya8wXx/pipn9W1HRFgQ+YWljExoDUD3mHC6x/jv4KDGeNtJJxoReQGnSev5\nkz1GVSfVIqY0oLPHeqxbRjXlOUCUiAS5tRrP/U0DoaqsT8vjjJ4xvg7llIQFB3LR4I5MX7ybkV39\nf2QDY+pDTZ7RrAD+T0S2i8gzlXuK1aEZwNUiEioiXYEewFJgGdDD7WEWgtNhYIY6437MAy53j58K\n+KS2ZGovs6CI7APF9O/kv/PPnKz7J/Xk2SsHWS3GGFdNRm9+U1XPA4YDW4AnRWRbbS8sIpeISCow\nGvhSRGa619kAfIDz3Ocb4B5VLXNrK/cCM4FNwAfuvgC/AR4QkSScZzav1zYu4xsVE5011B5nnqLD\nQ7l0aKyvwzDGb9SkM0CF7kBvoAvOB36tqOqnwKfVbHsCeKKK8q+Ar6ooT8bplWYaqPVp+YhAnw4N\nv0ZjjDlaTUYGeMqtwfwRWAckquqFXovMNCnr0/Po2qYF4aG1+dvHGOPPavJbvR0YAyQAocBAEUFV\nFx7/MGOOb9veAhZsyeKyYdbcZExjVJNEUw58i9OrazUwCvgRm8rZnILSsnIe/HAN4WFB/Orsnr4O\nxxjjBTXpdfYLnI4Au1T1TGAIkOuVqEyT8dp3O1iTmscfp/SjTXior8MxxnhBTRJNoaoWAohIqKpu\nBnp5JyzTFJSUlfPKgu1M6B3D+QM6+DocY4yX1KTpLFVEooDPgNkish/Y5Z2wTFPwXVI2+w+VcO2I\nuAY3WrMx5uTVZAiaS9zFx0RkHhCJ8z0XY2plxup0IpsFM64BTXJmjKm5WvUlVdUFdR2IaVoOF5cx\nc8MepgzuaPOyGNPI2W+48Ym5m/dyqLiMiwZVO9C2MaaRsERjfOKzVWm0iwhlRNfWvg7FGONllmhM\nvUvOOsDczZlcMawzgQHWCcCYxs4Sjal3ry7aQXBgAFPHxPs6FGNMPbBEY+pVVkERH69M5bKhsbRt\naV/QNKYpsERj6tVbP+6kpKyc28d29XUoxph6YonG1Ku5mzIZnRBNQlubFMyYpsISjak35eVKcvYB\nm3PGmCbGEo2pN2m5hyksKbcpjo1pYizRmHqTlHkAwBKNMU2MzxKNiFwhIhtEpFxEEj3KzxKRFSKy\nzv13gse2YW55koi8IO5IjCLSWkRmi8g2999WvnhP5viOJBp7PmNMk+LLGs164FKg8gyd2cCFqjoA\nmAq87bHtn8DtQA/3NdktfxiYq6o9gLnuuvEzSZkHiG4RQqsWIb4OxRhTj3yWaFR1k6puqaJ8laqm\nu6sbgGYiEioiHYAIVV2sqgq8BVzs7jcFeNNdftOj3PiRpKwDdLNmM2OaHH9/RnMZsFJVi4BOQKrH\ntlS3DKCdqma4y3uAdtWdUETuEJHlIrI8KyvLGzGbKqgqSZkH7PmMMU1QraYJOFkiMgdoX8WmR1X1\n8xMc2w94Eji7JtdUVRURPc72acA0gMTExGr3M3Ur+0AxeYdL7PmMMU2QVxONqk6qzXEiEgt8Ctyo\nqtvd4jQg1mO3WLcMYK+IdFDVDLeJLbO2MRvvsB5nxjRdftd05k4X/SXwsKp+X1HuNo3li8got7fZ\njUBFrWgGTscB3H+PW1sy9S8pyxKNMU2VL7s3XyIiqcBo4EsRmeluuhfoDvxeRFa7rxh328+A14Ak\nYDvwtVv+N+AsEdkGTHLXjR/ZnnmAFiGBdIgM83Uoxph65tWms+NR1U9xmscql/8Z+HM1xywH+ldR\nngNMrOsYTd1QVZbv2kePdi1xv/pkjGlC/K7pzDQ+PybnsD4tnysSY0+8szGm0bFEY7zuXwuSaRMe\nymVDLdEY0xRZojFetSE9j4Vbs7j5tHjCggN9HY4xxgcs0Rivem3RDlqEBHL9yC6+DsUY4yOWaIzX\n5BeW8NW6DC4Z2onI5sG+DscY4yOWaIzXfLk2g6LSci4f1tnXoRhjfMgSjfGaj1ak0j0mnEGxkb4O\nxRjjQ5ZojFfsyD7Iil37uXxYrH13xpgmzhKN8YpPVqYSIHDJkE4n3tkY06hZojFesXTHPgZ3jqJd\nhA05Y0xTZ4nGeMX2rIM2gKYxBrBEY7wgv7CE7ANFJNjcM8YYLNEYL0jOOghAQpsWPo7EGOMPLNGY\nOrfdneSsmzWdGWOwRGO8IDn7AEEBQlzr5r4OxRjjByzRmDqXnHWQuNbNCQ60Hy9jjCUaU0uFJWVk\n5hdWuW171gHrCGCMOcISjamV3322ntOfnMcHy1KOKi8rV3bmHKJbW+sIYIxx+CzRiMgVIrJBRMpF\nJLGK7XEickBEHvQomywiW0QkSUQe9ijvKiJL3PL3RS
Skvt5HU7T/YDGfr0knNCiAX3+8lt99tp7C\nkjIA0vYfpri0nARLNMYYly9rNOuBS4GF1Wx/Fvi6YkVEAoGXgHOBvsA1ItLX3fwk8Jyqdgf2A7d6\nK2gDH69Mpbi0nPfuHMXtY7vy9uJdXPLyD2zbW8D2bKfHmTWdGWMqBPnqwqq6CahywEURuRjYARz0\nKB4BJKlqsrvPe8AUEdkETACudfd7E3gM+Ke3Ym/KVJV3l+xmWJdW9OsYSb+OkYzuFs1DH67lwhe/\nY2hcKwC6WaIxxrj87hmNiIQDvwEer7SpE+D5QCDVLYsGclW1tFK58YIfk3NIzj7IdSPjjpRN6N2O\nr+8by/D41vywPYeo5sG0bmGtl8YYh1drNCIyB2hfxaZHVfXzag57DKcZ7IA3hpcXkTuAOwDi4uJO\nsLfxpKq8NC+JVs2DOW9Ah6O2xUSE8ebNI3hnyS6bFsAYcxSvJhpVnVSLw0YCl4vIU0AUUC4ihcAK\nwHOqxlggDcgBokQkyK3VVJRXF9M0YBpAYmKi1iK+JiEt9zCzN+xhdLc29GwXjogwa+Nevk/K4fGL\n+hEWHHjMMQEBwg2j4+s/WGOMX/PZM5rqqOrYimUReQw4oKovikgQ0ENEuuIkkquBa1VVRWQecDnw\nHjAVqK62ZE6CqvLQh2v4YXsOAPHRzbnrjG68PH87PduFH9VsZowxJ+LL7s2XiEgqMBr4UkRmHm9/\nt7ZyLzAT2AR8oKob3M2/AR4QkSScZzavey/yxm/elkx+2J7DLyf14K+XDiCiWTAPf7KO3fsO8fsL\n+hFk3/g3xtSAqDbd1qPExERdvny5r8PwK6Vl5Ux+fhFl5crMX44jJCgAVeXbzZlkFhRxzQirzRjT\n1InIClU95vuP1fG7pjPjG9v2FvDP+dtZm5ZHUuYB/nX9MEKCnJqLiDCxTzsfR2iMaags0Rg278nn\n2leXUFpWTmJ8a64ZEcc5/SyxGGPqhiWaJm7V7v3c+uZyggOFj+8+na42WZkxpo5ZommiSsvKeeHb\nJF6al0T7iDCm3zbSkowxxiss0TRRb/24ixfmbuPSIZ14bEo/IsKCfR2SMaaRskTTBKkq05fsYmhc\nFM9eNdjX4RhjGjn7QkQTtGTHPpKzDnLtyC6+DsUY0wRYommC3l2ym5ZhQZxfabwyY4zxBks0Tcy+\ng8V8s34Plw2NpVnIseOVGWNMXbNE08iUlysZeYcpKi07Ztv6tDyunvYjpeXl9g1/Y0y9sc4AjcDh\n4jK+Xp/B/9aks3zXfgoKSxGB2FbNuHxoZyb0juHdpbv4cHkq0eEhvHHTcHq1b+nrsI0xTYQlmjrw\n2qJkFm3L5kBRKVcmxnLVcO/VFg4Xl/HJqlRyDhSTd7iEjen5rEnN5VBxGXGtm3PBwI707dCSnIPF\nrNydy3NztvLcnK2EBAVwzYg4Hjy7F5HNrSuzMab+WKI5RSn7DvGXrzbRuXVzVOHx/21kfK8Y2kWE\nnfK5CwpLCA4MICw4kJKycpbt2MdvP13HzpxDAIQGBdCrfUsuGxrLBQM7MKJr62MmHUvKLODH7Tmc\n0689MXUQkzHG1JQlmlP07+93EiDCe3eMoqRUmfTsAp78ZjPPXln776eoKm98v5O/frWJ0nIlukUI\nuYdLKCtXOrduxju3jWRk19YnNVx/95iWdI+xZjJjjO9YojkFeYdLeH/Zbi4Y2IEOkc0AuHVsV/45\nfzs3jOrCkLhWNT5n6n6nhvTVuj1M6hPDgE5R7Mk/THSLULq2acG5A9rTPMT+24wxDYd9Yp2C95ft\n5mBxGbeNTThSds+Z3fl4RSq/fH81n9w9hujw0CPb9h90nqsAfJeUzdxNewkLDqRrmxaUqZK6/zAz\n1+9BBH49uRd3n9HtmKYwY4xpaCzR1FJJWTn//n4noxJa079T5JHy8NAg/nXDMK6Ztpjb31rO61OH\nIwL/WpDMG9/toLis/Mi+8dHNCRBh1sa9BIrQqkUw142M467x3Y7UkIwxpqGzRFNLX63LICOvkD9f\n3P+YbUPjWvGPqwbzs3dXMuRPs4+UXzq0E6d3b0NpuTKgUyS927dERCgrVwIEq70YYxolSzS1oKq8\ntmgHCW1bcGavmCr3OXdAB965dSQb0vMpLivn9O5tGNQ5qsp9AwMswRhjGi+fjQwgIleIyAYRKReR\nxErbBorIj+72dSIS5pYPc9eTROQFcasAItJaRGaLyDb335o/ha+BpTv2sS4tj1tP70rAcZLEmO5t\nuH1cAvec2b3aJGOMMY2dL4egWQ9cCiz0LBSRIGA6cJeq9gPGAyXu5n8CtwM93Ndkt/xhYK6q9gDm\nuute8+qiHbRqHsylQ2K9eRljjGkUfNZ0pqqboMrnEmcDa1V1jbtfjrtfByBCVRe7628BFwNfA1Nw\nEhLAm8B84Ddeipse7cIZHt/KBqU0xpiT4I/PaHoCKiIzgbbAe6r6FNAJSPXYL9UtA2inqhnu8h6g\nXXUnF5E7gDsA4uJqPlSMiPCbyb1rfJwxxjRVXk00IjIHaF/FpkdV9fNqDgsCTgeGA4eAuSKyAsg7\nmWuqqoqIHmf7NGAaQGJiYrX7GWOMqRteTTSqOqkWh6UCC1U1G0BEvgKG4jy38XwoEgukuct7RaSD\nqma4TWyZpxC2McaYOuSP89HMBAaISHO3Y8AZwEa3aSxfREa5vc1uBCpqRTOAqe7yVI9yY4wxPubL\n7s2XiEgqMBr40n0mg6ruB54FlgGrgZWq+qV72M+A14AkYDtORwCAvwFnicg2YJK7bowxxg+IatN9\nTJGYmKjLly/3dRjGGNOgiMgKVU088Z4Of2w6M8YY04hYojHGGONVlmiMMcZ4VZN+RiMiWcCuWh7e\nBsiuw3Dqkj/HBhbfqfDn2MC/4/Pn2MC/46scWxdVbXuyBzfpRHMqRGR5TR6G1Sd/jg0svlPhz7GB\nf8fnz7GBf8d3qrFZ05kxxhivskRjjDHGqyzR1N40XwdwHP4cG1h8p8KfYwP/js+fYwP/ju+UYrNn\nNMYYY7zKajTGGGO8yhKNMcYYr7JEU0MiMllEtohIkoh4dcrok4yns4jME5GNIrJBRO5zy1uLyGwR\n2eb+28qHMQaKyCoR+cJd7yoiS9x7+L6IhPgwtigR+UhENovIJhEZ7S/3TkTud/9P14vIf0UkzJf3\nTkTeEJFMEVnvUVblvRLHC26ca0VkqI/ie9r9v10rIp+KSJTHtkfc+LaIyDn1HZvHtl+JiIpIG3fd\nL+6dW/5z9/5tEJGnPMprdu9U1V4n+QICcUaNTgBCgDVAXx/H1AEY6i63BLYCfYGngIfd8oeBJ30Y\n4wPAu8AX7voHwNXu8r+Au30Y25vAbe5yCBDlD/cOZ/bYHUAzj3t2ky/vHTAOZ26o9R5lVd4r4Dyc\n0dUFGAUs8VF8ZwNB7vKTHvH1dX9/Q4Gu7u91YH3G5pZ3xpkaZRfQxs/u3ZnAHCDUXY+p7b2zGk3N\njACSVDVZV
YuB94ApvgxIVTNUdaW7XABswvmQmoLzIYr778W+iE9EYoHzcaZ3wJ1LaALwkR/EFonz\nC/Y6gKoWq2oufnLvcCYmbObOy9QcyMCH905VFwL7KhVXd6+mAG+pYzEQ5U5KWK/xqeosVS11Vxfz\n0+SJU3CmiS9S1R04U4+MqM/YXM8BvwY8e2X5xb0D7gb+pqpF7j4VE0rW+N5ZoqmZTkCKx3qqW+YX\nRCQeGAIsAdqpM1kcwB6gnY/C+gfOL1K5ux4N5Hr88vvyHnYFsoB/u017r4lIC/zg3qlqGvAMsBsn\nweQBK/Cfe1ehunvlj78rt/DTHFY+j09EpgBpqrqm0iafx+bqCYx1m2oXiMhwt7zG8VmiaSREJBz4\nGPilquZ7blOnvlvv/dhF5AIgU1VX1Pe1T1IQTnPBP1V1CHAQp/nnCB/eu1Y4fzl2BToCLYDJ9R1H\nTfjqXp0MEXkUKAXe8XUsACLSHPgt8Htfx3IcQUBrnOa7h4AP3BaJGrNEUzNpOG2qFWLdMp8SkWCc\nJPOOqn7iFu+tqG67/2ZWd7wXnQZcJCI7cZoZJwDP4zQFBLn7+PIepgKpqrrEXf8IJ/H4w72bBOxQ\n1SxVLQE+wbmf/nLvKlR3r/zmd0VEbgIuAK5zkyH4Pr5uOH9ErHF/P2KBlSLS3g9iq5AKfOI24S3F\naZVoU5v4LNHUzDKgh9vzJwS4Gpjhy4DcvzBeBzap6rMem2YAU93lqcDn9R2bqj6iqrGqGo9zr75V\n1euAecDlvozNjW8PkCIivdyiicBG/ODe4TSZjRKR5u7/cUVsfnHvPFR3r2YAN7o9qEYBeR5NbPVG\nRCbjNN1epKqHPDbNAK4WkVAR6Qr0AJbWV1yquk5VY1Q13v39SMXp1LMHP7l3wGc4HQIQkZ44nWWy\nqc2983Zvhsb2wukRshWnp8WjfhDP6TjNFWuB1e7rPJxnIXOBbTg9R1r7OM7x/NTrLMH9wUwCPsTt\n1eKjuAYDy9379xnQyl/uHfA4sBlYD7yN08vHZ/cO+C/O86ISnA/GW6u7Vzg9pl5yf0/WAYk+ii8J\n53lCxe/Gvzz2f9SNbwtwbn3HVmn7Tn7qdeYv9y4EmO7+/K0EJtT23tkQNMYYY7zKms6MMcZ4lSUa\nY4wxXmWJxhhjjFdZojHGGONVlmiMMcZ4lSUaY3xERP4oIpPq4DwH6iIeY7zFujcb08CJyAFVDfd1\nHMZUx2o0xtQhEbleRJaKyGoReUWcuXgOiMhz7pwec0Wkrbvvf0Tkcnf5b+LMKbRWRJ5xy+JF5Fu3\nbK6IxLnlXUXkRxFZJyJ/rnT9h0RkmXvM4/X9/o2piiUaY+qIiPQBrgJOU9XBQBlwHc6AmMtVtR+w\nAPhDpeOigUuAfqo6EKhIHv8PeNMtewd4wS1/Hmcg0AE43+auOM/ZOMOBjMAZ8WCYiIzzxns1piYs\n0RhTdyYCw4BlIrLaXU/AGYzwfXef6TjDBnnKAwqB10XkUqBiTK7ROBPGgTMETcVxp+EMGVJRXuFs\n97UKZ8iQ3jiJxxifCjrxLsaYkyQ4NZBHjioU+V2l/Y56MKqqpSIyAicxXQ7cizPS9fFU9XBVgL+q\n6is1itoYL7MajTF1Zy5wuYjEAIhIaxHpgvN7VjHi8rXAd54HuXMJRarqV8D9wCB30w84o16D0wS3\nyF3+vlJ5hZnALe75EJFOFbEY40tWozGmjqjqRhH5P2CWiATgjIR7D86EaiPcbZk4z3E8tQQ+F5Ew\nnFrJA275z3Fm/3wIZybQm93y+4B3ReQ3eEwToKqz3OdEP7rzUx0Arsc38+kYc4R1bzbGy6z7sWnq\nrOnMGGOMV1mNxhhjjFdZjcYYY4xXWaIxxhjjVZZojDHGeJUlGmOMMV5licYYY4xX/X/WUm74sjaj\nKgAAAABJRU5ErkJggg==\n", 91 | "text/plain": [ 92 | "" 93 | ] 94 | }, 95 | "metadata": {}, 96 | "output_type": "display_data" 97 | } 98 | ], 99 | "source": [ 100 | "with open('train_logs_Pendulum-v0_20180608-210703.pkl', 'rb') as fHandle:\n", 101 | " train_logs = pickle.load(fHandle)\n", 102 | "plot_logs(train_logs)" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [] 111 | } 112 | ], 113 | "metadata": { 114 | "kernelspec": { 115 | "display_name": "pytorch", 116 | "language": "python", 117 | "name": "pytorch" 118 | }, 119 | "language_info": { 120 | "codemirror_mode": { 121 | "name": "ipython", 122 | "version": 2 123 | }, 124 | "file_extension": ".py", 125 | "mimetype": "text/x-python", 126 | "name": "python", 127 | "nbconvert_exporter": "python", 128 | "pygments_lexer": "ipython2", 129 | "version": "2.7.12" 130 | } 131 | }, 132 | "nbformat": 4, 133 | "nbformat_minor": 2 134 | } 135 | --------------------------------------------------------------------------------
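
Note on the notebook above: plot_logs is defined in an earlier cell of plotUtil.ipynb that is not reproduced in this excerpt, so the final code cell is not self-contained on its own. The sketch below is hypothetical, not the repository's actual implementation; it assumes the pickled train_logs object is a dict mapping metric names to per-episode value lists, which the excerpt does not confirm.

    # Hypothetical sketch only -- the real plot_logs lives in an earlier
    # notebook cell and may use a different log structure.
    import pickle

    import matplotlib.pyplot as plt

    def plot_logs(train_logs):
        # Draw one figure per logged metric, episode index on the x-axis.
        # Assumes train_logs is {metric_name: list_of_per_episode_values};
        # the true structure is whatever the training script pickled.
        for name, values in train_logs.items():
            plt.figure()
            plt.plot(range(len(values)), values)
            plt.xlabel('Episode')
            plt.ylabel(name)
            plt.title(name)
        plt.show()

    with open('train_logs_Pendulum-v0_20180608-210703.pkl', 'rb') as fHandle:
        train_logs = pickle.load(fHandle)
    plot_logs(train_logs)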