├── GPN.ipynb ├── GPN_TSPTW.ipynb ├── Pointer-Net-Reproduce.ipynb ├── README.md └── figure └── tsptw.png /GPN_TSPTW.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "GPN-TSPTW.ipynb", 7 | "version": "0.3.2", 8 | "provenance": [], 9 | "collapsed_sections": [] 10 | }, 11 | "kernelspec": { 12 | "name": "python3", 13 | "display_name": "Python 3" 14 | }, 15 | "accelerator": "GPU" 16 | }, 17 | "cells": [ 18 | { 19 | "metadata": { 20 | "colab_type": "code", 21 | "id": "VgYUmJ06hwqH", 22 | "outputId": "096e40b1-a871-4fb2-b268-ed6348e4ff64", 23 | "colab": { 24 | "base_uri": "https://localhost:8080/", 25 | "height": 122 26 | } 27 | }, 28 | "cell_type": "code", 29 | "source": [ 30 | "from google.colab import drive\n", 31 | "drive.mount('/content/drive/')" 32 | ], 33 | "execution_count": 0, 34 | "outputs": [ 35 | { 36 | "output_type": "stream", 37 | "text": [ 38 | "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3Aietf%3Awg%3Aoauth%3A2.0%3Aoob&scope=email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdocs.test%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive.photos.readonly%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpeopleapi.readonly&response_type=code\n", 39 | "\n", 40 | "Enter your authorization code:\n", 41 | "··········\n", 42 | "Mounted at /content/drive/\n" 43 | ], 44 | "name": "stdout" 45 | } 46 | ] 47 | }, 48 | { 49 | "metadata": { 50 | "id": "6Xz6NnkPBbhU", 51 | "colab_type": "text" 52 | }, 53 | "cell_type": "markdown", 54 | "source": [ 55 | "# Neural Net Model" 56 | ] 57 | }, 58 | { 59 | "metadata": { 60 | "id": "BHWwRyM_SOza", 61 | "colab_type": "text" 62 | }, 63 | "cell_type": "markdown", 64 | "source": [ 65 | "## low layer" 66 | ] 67 | }, 68 | { 69 | "metadata": 
{ 70 | "id": "jxuOR6Ye-IAJ", 71 | "colab_type": "code", 72 | "colab": {} 73 | }, 74 | "cell_type": "code", 75 | "source": [ 76 | "import torch\n", 77 | "import torch.nn as nn\n", 78 | "import torch.nn.functional as F\n", 79 | "import math\n", 80 | "\n", 81 | "class GPN(torch.nn.Module):\n", 82 | " def __init__(self, n_feature, n_hidden, n_class):\n", 83 | " super(GPN, self).__init__()\n", 84 | " self.size = 0\n", 85 | " self.batch_size = 0\n", 86 | " self.dim = n_hidden\n", 87 | " \n", 88 | " v = torch.FloatTensor(n_hidden).cuda()\n", 89 | " h0 = torch.FloatTensor(n_hidden).cuda()\n", 90 | " c0 = torch.FloatTensor(n_hidden).cuda()\n", 91 | " r1 = torch.ones(1).cuda()\n", 92 | " r2 = torch.ones(1).cuda()\n", 93 | " r3 = torch.ones(1).cuda()\n", 94 | " gamma = torch.ones(1).cuda()\n", 95 | " self.v = nn.Parameter(v)\n", 96 | " self.h0 = nn.Parameter(h0)\n", 97 | " self.c0 = nn.Parameter(c0)\n", 98 | " self.r1 = nn.Parameter(r1)\n", 99 | " self.r2 = nn.Parameter(r2)\n", 100 | " self.r3 = nn.Parameter(r3)\n", 101 | " self.gamma = nn.Parameter(gamma)\n", 102 | " self.v.data.uniform_(-1/math.sqrt(n_hidden), 1/math.sqrt(n_hidden))\n", 103 | " self.h0.data.uniform_(-1/math.sqrt(n_hidden), 1/math.sqrt(n_hidden))\n", 104 | " self.c0.data.uniform_(-1/math.sqrt(n_hidden), 1/math.sqrt(n_hidden))\n", 105 | " \n", 106 | " # embedding\n", 107 | " self.embedding_x = nn.Linear(n_feature, n_hidden)\n", 108 | " self.embedding_all = nn.Linear(n_feature, n_hidden)\n", 109 | " # self.encoder_ori = GCNConv(n_hidden, n_hidden) # use GCN as encoder\n", 110 | " self.encoder_1 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 111 | " self.encoder_2 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder \n", 112 | " self.encoder_3 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 113 | " \n", 114 | " self.enc_1 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 115 | " self.enc_2 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder \n", 116 | " self.enc_3 = 
nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 117 | " \n", 118 | " # parameters for input gate\n", 119 | " self.Wxi = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 120 | " self.Whi = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 121 | " self.wci = nn.Linear(n_hidden, n_hidden) # w(ct)\n", 122 | " \n", 123 | " # parameters for forget gate\n", 124 | " self.Wxf = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 125 | " self.Whf = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 126 | " self.wcf = nn.Linear(n_hidden, n_hidden) # w(ct)\n", 127 | " \n", 128 | " # parameters for cell gate\n", 129 | " self.Wxc = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 130 | " self.Whc = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 131 | " \n", 132 | " # parameters for forget gate\n", 133 | " self.Wxo = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 134 | " self.Who = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 135 | " self.wco = nn.Linear(n_hidden, n_hidden) # w(ct)\n", 136 | " \n", 137 | " # parameters for pointer attention\n", 138 | " self.Wref = nn.Linear(n_hidden, n_hidden)\n", 139 | " self.Wq = nn.Linear(n_hidden, n_hidden)\n", 140 | " \n", 141 | " \n", 142 | " def forward(self, x, input, mask, h=None, c=None):\n", 143 | " ''' args\n", 144 | " x: current city (B, 2)\n", 145 | " input: all cities (B*size, 2)\n", 146 | " h: hidden variable (B, dim)\n", 147 | " c: cell gate (B, dim)\n", 148 | " context: encoded context, encoder(input) --> (B*size, dim)\n", 149 | " '''\n", 150 | " self.batch_size = x.size(0)\n", 151 | " self.size = int(input.size(0) / self.batch_size)\n", 152 | " \n", 153 | " if h is None:\n", 154 | " h = self.h0.unsqueeze(0).expand(self.batch_size, self.dim)\n", 155 | " if c is None:\n", 156 | " c = self.c0.unsqueeze(0).expand(self.batch_size, self.dim)\n", 157 | "\n", 158 | "\n", 159 | " x = self.embedding_x(x)\n", 160 | " context = self.embedding_all(input)\n", 161 | " \n", 162 | " context = self.enc_1(context) + F.relu(self.encoder_1(context))\n", 163 | " # context = self.enc_2(context) + 
F.relu(self.encoder_2(context))\n", 164 | " # context = self.enc_3(context) + F.relu(self.encoder_3(context))\n", 165 | " \n", 166 | " # hidden variable does not have graph structure\n", 167 | " # input gate\n", 168 | " i = torch.sigmoid(self.Wxi(x) + self.Whi(h) + self.wci(c))\n", 169 | " # forget gate\n", 170 | " f = torch.sigmoid(self.Wxf(x) + self.Whf(h) + self.wcf(c))\n", 171 | " # cell gate\n", 172 | " c = f*c + i*torch.tanh(self.Wxc(x) + self.Whc(h))\n", 173 | " # output gate\n", 174 | " o = torch.sigmoid(self.Wxo(x) + self.Who(h) + self.wco(c))\n", 175 | " h = o*torch.tanh(c)\n", 176 | " \n", 177 | "\n", 178 | " # query and reference\n", 179 | " q = h\n", 180 | " ref = context\n", 181 | " q = self.Wq(q) # (B, dim)\n", 182 | " ref = self.Wref(ref)\n", 183 | " ref = ref.view(self.batch_size, self.size, self.dim) # (B, size, dim)\n", 184 | " \n", 185 | " q_ex = q.unsqueeze(1).repeat(1, self.size, 1) # (B, size, dim)\n", 186 | " # v_view: (B, dim, 1)\n", 187 | " v_view = self.v.unsqueeze(0).expand(self.batch_size, self.dim).unsqueeze(2)\n", 188 | " \n", 189 | " # (B, size, dim) * (B, dim, 1)\n", 190 | " u = torch.bmm(torch.tanh(q_ex + ref), v_view).squeeze(2)\n", 191 | " hidden_u = u.clone()\n", 192 | " u = 10*torch.tanh(u) + mask\n", 193 | " return F.softmax(u, dim=1), h, c, hidden_u" 194 | ], 195 | "execution_count": 0, 196 | "outputs": [] 197 | }, 198 | { 199 | "metadata": { 200 | "id": "cict1wwjSRQp", 201 | "colab_type": "text" 202 | }, 203 | "cell_type": "markdown", 204 | "source": [ 205 | "## high layer" 206 | ] 207 | }, 208 | { 209 | "metadata": { 210 | "id": "8fm1WeuQtTx4", 211 | "colab_type": "code", 212 | "colab": {} 213 | }, 214 | "cell_type": "code", 215 | "source": [ 216 | "import torch\n", 217 | "import torch.nn as nn\n", 218 | "import torch.nn.functional as F\n", 219 | "import math\n", 220 | "\n", 221 | "class GPN_High(torch.nn.Module):\n", 222 | " def __init__(self, n_feature, n_hidden, n_class):\n", 223 | " super(GPN_High, self).__init__()\n", 
224 | " self.size = 0\n", 225 | " self.batch_size = 0\n", 226 | " self.dim = n_hidden\n", 227 | " \n", 228 | " v = torch.FloatTensor(n_hidden).cuda()\n", 229 | " h0 = torch.FloatTensor(n_hidden).cuda()\n", 230 | " c0 = torch.FloatTensor(n_hidden).cuda()\n", 231 | " r1 = torch.ones(1).cuda()\n", 232 | " r2 = torch.ones(1).cuda()\n", 233 | " r3 = torch.ones(1).cuda()\n", 234 | " gamma = torch.ones(1).cuda()\n", 235 | " self.v = nn.Parameter(v)\n", 236 | " self.h0 = nn.Parameter(h0)\n", 237 | " self.c0 = nn.Parameter(c0)\n", 238 | " self.r1 = nn.Parameter(r1)\n", 239 | " self.r2 = nn.Parameter(r2)\n", 240 | " self.r3 = nn.Parameter(r3)\n", 241 | " self.gamma = nn.Parameter(gamma)\n", 242 | " self.v.data.uniform_(-1/math.sqrt(n_hidden), 1/math.sqrt(n_hidden))\n", 243 | " self.h0.data.uniform_(-1/math.sqrt(n_hidden), 1/math.sqrt(n_hidden))\n", 244 | " self.c0.data.uniform_(-1/math.sqrt(n_hidden), 1/math.sqrt(n_hidden))\n", 245 | " \n", 246 | " # embedding\n", 247 | " self.embedding_x = nn.Linear(n_feature, n_hidden)\n", 248 | " self.embedding_all = nn.Linear(n_feature, n_hidden)\n", 249 | " # self.encoder_ori = GCNConv(n_hidden, n_hidden) # use GCN as encoder\n", 250 | " self.encoder_1 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 251 | " self.encoder_2 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder \n", 252 | " self.encoder_3 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 253 | " \n", 254 | " self.enc_1 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 255 | " self.enc_2 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder \n", 256 | " self.enc_3 = nn.Linear(n_hidden, n_hidden) # use GCN as encoder\n", 257 | " \n", 258 | " # parameters for input gate\n", 259 | " self.Wxi = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 260 | " self.Whi = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 261 | " self.wci = nn.Linear(n_hidden, n_hidden) # w(ct)\n", 262 | " \n", 263 | " # parameters for forget gate\n", 264 | " self.Wxf = nn.Linear(n_hidden, n_hidden) 
# W(xt)\n", 265 | " self.Whf = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 266 | " self.wcf = nn.Linear(n_hidden, n_hidden) # w(ct)\n", 267 | " \n", 268 | " # parameters for cell gate\n", 269 | " self.Wxc = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 270 | " self.Whc = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 271 | " \n", 272 | " # parameters for forget gate\n", 273 | " self.Wxo = nn.Linear(n_hidden, n_hidden) # W(xt)\n", 274 | " self.Who = nn.Linear(n_hidden, n_hidden) # W(ht)\n", 275 | " self.wco = nn.Linear(n_hidden, n_hidden) # w(ct)\n", 276 | " \n", 277 | " # parameters for pointer attention\n", 278 | " self.Wref = nn.Linear(n_hidden, n_hidden)\n", 279 | " self.Wq = nn.Linear(n_hidden, n_hidden)\n", 280 | " \n", 281 | " \n", 282 | " def forward(self, x, input, mask, latent, h=None, c=None):\n", 283 | " ''' args\n", 284 | " x: current city (B, 2)\n", 285 | " input: all cities (B*size, 2)\n", 286 | " h: hidden variable (B, dim)\n", 287 | " c: cell gate (B, dim)\n", 288 | " context: encoded context, encoder(input) --> (B*size, dim)\n", 289 | " '''\n", 290 | " self.batch_size = x.size(0)\n", 291 | " self.size = int(input.size(0) / self.batch_size)\n", 292 | " # print(self.size)\n", 293 | " \n", 294 | " if h is None:\n", 295 | " h = self.h0.unsqueeze(0).expand(self.batch_size, self.dim)\n", 296 | " if c is None:\n", 297 | " c = self.c0.unsqueeze(0).expand(self.batch_size, self.dim)\n", 298 | "\n", 299 | "\n", 300 | " x = self.embedding_x(x)\n", 301 | " context = self.embedding_all(input)\n", 302 | " \n", 303 | " context = self.enc_1(context) + F.relu(self.encoder_1(context))\n", 304 | " # context = self.enc_2(context) + F.relu(self.encoder_2(context))\n", 305 | " # context = self.enc_3(context) + F.relu(self.encoder_3(context))\n", 306 | " \n", 307 | " # hidden variable does not have graph structure\n", 308 | " # input gate\n", 309 | " i = torch.sigmoid(self.Wxi(x) + self.Whi(h) + self.wci(c))\n", 310 | " # forget gate\n", 311 | " f = torch.sigmoid(self.Wxf(x) + 
self.Whf(h) + self.wcf(c))\n", 312 | " # cell gate\n", 313 | " c = f*c + i*torch.tanh(self.Wxc(x) + self.Whc(h))\n", 314 | " # output gate\n", 315 | " o = torch.sigmoid(self.Wxo(x) + self.Who(h) + self.wco(c))\n", 316 | " h = o*torch.tanh(c)\n", 317 | " \n", 318 | "\n", 319 | " # query and reference\n", 320 | " q = h\n", 321 | " ref = context\n", 322 | " q = self.Wq(q) # (B, dim)\n", 323 | " ref = self.Wref(ref)\n", 324 | " ref = ref.view(self.batch_size, self.size, self.dim) # (B, size, dim)\n", 325 | " \n", 326 | " q_ex = q.unsqueeze(1).repeat(1, self.size, 1) # (B, size, dim)\n", 327 | " # v_view: (B, dim, 1)\n", 328 | " v_view = self.v.unsqueeze(0).expand(self.batch_size, self.dim).unsqueeze(2)\n", 329 | " \n", 330 | " # (B, size, dim) * (B, dim, 1)\n", 331 | " u = torch.bmm(torch.tanh(q_ex + ref), v_view).squeeze(2)\n", 332 | " u = 10*torch.tanh(u) + mask + self.gamma * latent\n", 333 | " return F.softmax(u, dim=1), h, c" 334 | ], 335 | "execution_count": 0, 336 | "outputs": [] 337 | }, 338 | { 339 | "metadata": { 340 | "id": "AK0m9iSCQyN9", 341 | "colab_type": "text" 342 | }, 343 | "cell_type": "markdown", 344 | "source": [ 345 | "# Training" 346 | ] 347 | }, 348 | { 349 | "metadata": { 350 | "id": "tIwLQCrQQxwv", 351 | "colab_type": "code", 352 | "colab": {} 353 | }, 354 | "cell_type": "code", 355 | "source": [ 356 | "import numpy as np\n", 357 | "\n", 358 | "import torch.nn.functional as F\n", 359 | "import torch.optim as optim\n", 360 | "from torch.autograd import Variable\n", 361 | "from torch.optim import lr_scheduler\n", 362 | "from scipy.spatial.distance import squareform, pdist\n", 363 | "from sklearn.neighbors import NearestNeighbors\n", 364 | "import matplotlib.pyplot as plt" 365 | ], 366 | "execution_count": 0, 367 | "outputs": [] 368 | }, 369 | { 370 | "metadata": { 371 | "id": "J9R2F6MotL2O", 372 | "colab_type": "code", 373 | "colab": {} 374 | }, 375 | "cell_type": "code", 376 | "source": [ 377 | "model_low = torch.load('drive/My 
Drive/GCN-CO/model/0420/TSP50.pt')" 378 | ], 379 | "execution_count": 0, 380 | "outputs": [] 381 | }, 382 | { 383 | "metadata": { 384 | "id": "l889RjHhUlU4", 385 | "colab_type": "code", 386 | "colab": {} 387 | }, 388 | "cell_type": "code", 389 | "source": [ 390 | "size = 50\n", 391 | "\n", 392 | "learn_rate = 1e-3\n", 393 | "beta = 0.8\n", 394 | "B = 128" 395 | ], 396 | "execution_count": 0, 397 | "outputs": [] 398 | }, 399 | { 400 | "metadata": { 401 | "id": "Y1CxhELRUk1u", 402 | "colab_type": "code", 403 | "colab": {} 404 | }, 405 | "cell_type": "code", 406 | "source": [ 407 | "model_high = GPN_High(n_feature=3,\n", 408 | " n_hidden=128,\n", 409 | " n_class=1)" 410 | ], 411 | "execution_count": 0, 412 | "outputs": [] 413 | }, 414 | { 415 | "metadata": { 416 | "id": "h3mvCONdJ1mI", 417 | "colab_type": "code", 418 | "colab": {} 419 | }, 420 | "cell_type": "code", 421 | "source": [ 422 | "model_high.cuda()\n", 423 | "model_low.cuda()\n", 424 | "\n", 425 | "learn_rate = 1e-3\n", 426 | "lr_decay_step = 5000\n", 427 | "lr_decay_rate = 0.96\n", 428 | "\n", 429 | "\n", 430 | "optimizer = optim.Adam(model_high.parameters(), lr=learn_rate)\n", 431 | "\n", 432 | "opt_scheduler = lr_scheduler.MultiStepLR(optimizer, range(lr_decay_step, lr_decay_step*1000,\n", 433 | " lr_decay_step), gamma=lr_decay_rate)" 434 | ], 435 | "execution_count": 0, 436 | "outputs": [] 437 | }, 438 | { 439 | "metadata": { 440 | "id": "RvqZdJMFrvJ0", 441 | "colab_type": "text" 442 | }, 443 | "cell_type": "markdown", 444 | "source": [ 445 | "# TSPTW" 446 | ] 447 | }, 448 | { 449 | "metadata": { 450 | "id": "gMbBd5IY7qdb", 451 | "colab_type": "code", 452 | "outputId": "24d53c15-22fb-4ae1-b9ce-7725d2b04174", 453 | "colab": { 454 | "base_uri": "https://localhost:8080/", 455 | "height": 51 456 | } 457 | }, 458 | "cell_type": "code", 459 | "source": [ 460 | "X = np.random.rand(B*size, 2)\n", 461 | "Time = 5*np.random.rand(B*size,1)\n", 462 | "print(X.shape,Time.shape)\n", 463 | "Z = np.concatenate((X, 
Time), axis=1)\n", 464 | "print(Z.shape)" 465 | ], 466 | "execution_count": 0, 467 | "outputs": [ 468 | { 469 | "output_type": "stream", 470 | "text": [ 471 | "(6400, 2) (6400, 1)\n", 472 | "(6400, 3)\n" 473 | ], 474 | "name": "stdout" 475 | } 476 | ] 477 | }, 478 | { 479 | "metadata": { 480 | "id": "2WDytFpNuCzX", 481 | "colab_type": "code", 482 | "outputId": "59846671-7d46-4e3a-dff3-f2f502ee0329", 483 | "colab": { 484 | "base_uri": "https://localhost:8080/", 485 | "height": 1717 486 | } 487 | }, 488 | "cell_type": "code", 489 | "source": [ 490 | "# consider time information\n", 491 | "\n", 492 | "B = 128 # batch_size\n", 493 | "C = 0 # baseline\n", 494 | "R = 0 # reward\n", 495 | "\n", 496 | "model_low.eval()\n", 497 | "model_high.train()\n", 498 | "\n", 499 | "reward = 0\n", 500 | "for i in range(5000):\n", 501 | " optimizer.zero_grad()\n", 502 | "\n", 503 | " X = np.random.rand(B*size, 2)\n", 504 | " Time = 5*np.random.rand(B*size,1)\n", 505 | " Time = Time.reshape(B,size,1)\n", 506 | " Time[:,0] = 0\n", 507 | " Time = Time.reshape(B*size,1)\n", 508 | "\n", 509 | " X = np.concatenate((X, Time), axis=1)\n", 510 | " \n", 511 | " X = torch.Tensor(X).cuda()\n", 512 | " Time = torch.Tensor(Time).cuda()\n", 513 | " mask = torch.zeros(B,size).cuda()\n", 514 | "\n", 515 | " reward = 0\n", 516 | " R = torch.zeros(B).cuda()\n", 517 | " logprobs = 0\n", 518 | " time_cost = torch.zeros(B).cuda()\n", 519 | " total_time_cost = torch.zeros(B).cuda()\n", 520 | " T = torch.zeros(B).cuda()\n", 521 | " \n", 522 | " \n", 523 | " Y = X.view(B,size,3) # to the same batch size\n", 524 | " Time = Time.view(B,size)\n", 525 | " x = Y[:,0,:]\n", 526 | " h = None\n", 527 | " c = None\n", 528 | "\n", 529 | " # print(Y.size())\n", 530 | " \n", 531 | " for k in range(size):\n", 532 | " \n", 533 | " output, h, c, hidden_u = model_low(x=x[:,:2], input=X[:,:2], h=h, c=c, mask=mask)\n", 534 | " # hidden_u = 0\n", 535 | " output, h, c = model_high(x=x, input=X, h=h, c=c, mask=mask, 
latent=hidden_u)\n", 536 | " \n", 537 | " sampler = torch.distributions.Categorical(output)\n", 538 | " # idx = torch.argmax(output, dim=1)\n", 539 | " idx = sampler.sample() # now the idx has B elements\n", 540 | "\n", 541 | " # print(output)\n", 542 | " \n", 543 | " Y1 = Y[[i for i in range(B)], idx.data]\n", 544 | "\n", 545 | " \n", 546 | " if k == 0:\n", 547 | " Y_ini = Y1.clone()\n", 548 | " if k > 0:\n", 549 | " reward = torch.norm(Y1[:,:2]-Y0[:,:2], dim=1)\n", 550 | "\n", 551 | " # print(\"test\")\n", 552 | " Y0 = Y1.clone()\n", 553 | " x = Y[[i for i in range(B)], idx.data]\n", 554 | " \n", 555 | " R += reward\n", 556 | " total_time_cost += reward\n", 557 | " \n", 558 | " # enter time\n", 559 | " time = Time[[i for i in range(B)], idx.data]\n", 560 | " # determine the total reward and current enter time\n", 561 | " time_cost = torch.lt(total_time_cost, time).float()*(time - total_time_cost) \n", 562 | " \n", 563 | " total_time_cost += time_cost\n", 564 | " T += time_cost # total time cost\n", 565 | "\n", 566 | " TINY = 1e-15\n", 567 | " logprobs += torch.log(output[[i for i in range(B)], idx.data]+TINY) \n", 568 | " \n", 569 | " mask[[i for i in range(B)], idx.data] += -np.inf \n", 570 | " \n", 571 | "\n", 572 | " R += torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 573 | " \n", 574 | " if i == 0:\n", 575 | " C = total_time_cost.mean()\n", 576 | " else:\n", 577 | " C = (total_time_cost * beta) + ((1. 
- beta) * total_time_cost.mean())\n", 578 | " \n", 579 | " loss = ((total_time_cost-C)*logprobs).mean()\n", 580 | "\n", 581 | " loss.backward()\n", 582 | " \n", 583 | " max_grad_norm = 1.0\n", 584 | " torch.nn.utils.clip_grad_norm_(model_low.parameters(),\n", 585 | " max_grad_norm, norm_type=2)\n", 586 | " optimizer.step()\n", 587 | " opt_scheduler.step()\n", 588 | " \n", 589 | " if i % 50 == 0:\n", 590 | " print(\"epoch:{}, loss:{}, reward:{}, time:{}\"\n", 591 | " .format(i,loss.item(),R.mean().item(), T.mean().item()))" 592 | ], 593 | "execution_count": 0, 594 | "outputs": [ 595 | { 596 | "output_type": "stream", 597 | "text": [ 598 | "epoch:0, loss:-0.5635337829589844, reward:9.112625122070312, time:0.6236771941184998\n", 599 | "epoch:50, loss:-0.03489159047603607, reward:8.654573440551758, time:1.0510509014129639\n", 600 | "epoch:100, loss:-0.12127609550952911, reward:8.743059158325195, time:0.8178374767303467\n", 601 | "epoch:150, loss:-0.04674021154642105, reward:8.853475570678711, time:0.733428955078125\n", 602 | "epoch:200, loss:-0.10001125931739807, reward:9.05912971496582, time:0.5542683005332947\n", 603 | "epoch:250, loss:-0.042808376252651215, reward:9.033817291259766, time:0.6222018003463745\n", 604 | "epoch:300, loss:-0.04344319552183151, reward:8.961321830749512, time:0.8123394250869751\n", 605 | "epoch:350, loss:-0.1761457920074463, reward:8.967403411865234, time:0.7493728399276733\n", 606 | "epoch:400, loss:-0.0811735987663269, reward:8.890156745910645, time:0.7346435785293579\n", 607 | "epoch:450, loss:-0.0596654936671257, reward:8.875138282775879, time:0.6490026116371155\n", 608 | "epoch:500, loss:-0.11659561097621918, reward:9.208836555480957, time:0.5808483362197876\n", 609 | "epoch:550, loss:-0.0928509309887886, reward:8.932666778564453, time:0.8298534154891968\n", 610 | "epoch:600, loss:-0.0524151436984539, reward:8.510120391845703, time:1.162402868270874\n", 611 | "epoch:650, loss:-0.12722988426685333, reward:8.916303634643555, 
time:0.8190017938613892\n", 612 | "epoch:700, loss:-0.08490787446498871, reward:8.387847900390625, time:1.2225828170776367\n", 613 | "epoch:750, loss:-0.14278855919837952, reward:8.963871002197266, time:0.7153630256652832\n", 614 | "epoch:800, loss:-0.03347434103488922, reward:8.791842460632324, time:0.7384847402572632\n", 615 | "epoch:850, loss:-0.03585170954465866, reward:8.806673049926758, time:0.7105007171630859\n", 616 | "epoch:900, loss:-0.15803056955337524, reward:8.782690048217773, time:0.7297488451004028\n", 617 | "epoch:950, loss:-0.10664328187704086, reward:8.677019119262695, time:0.9932083487510681\n", 618 | "epoch:1000, loss:-0.1031683087348938, reward:8.747486114501953, time:0.9692719578742981\n", 619 | "epoch:1050, loss:-0.09759443998336792, reward:9.063064575195312, time:0.580978512763977\n", 620 | "epoch:1100, loss:-0.1600848138332367, reward:8.765963554382324, time:0.9560766816139221\n", 621 | "epoch:1150, loss:-0.09278318285942078, reward:8.682710647583008, time:0.8357343077659607\n", 622 | "epoch:1200, loss:-0.0013828426599502563, reward:8.632381439208984, time:0.8592376708984375\n", 623 | "epoch:1250, loss:-0.10523693263530731, reward:8.658557891845703, time:0.8926814794540405\n", 624 | "epoch:1300, loss:-0.12389063090085983, reward:9.003210067749023, time:0.6148272156715393\n", 625 | "epoch:1350, loss:-0.06243058666586876, reward:9.04667854309082, time:0.5830898284912109\n", 626 | "epoch:1400, loss:-0.04717152193188667, reward:8.849627494812012, time:0.805357038974762\n", 627 | "epoch:1450, loss:-0.20492687821388245, reward:8.89620590209961, time:0.8552446365356445\n", 628 | "epoch:1500, loss:-0.03358153998851776, reward:8.945140838623047, time:0.7700551748275757\n", 629 | "epoch:1550, loss:-0.09499255567789078, reward:8.832075119018555, time:0.8205121755599976\n", 630 | "epoch:1600, loss:-0.09116586297750473, reward:8.795154571533203, time:0.6554267406463623\n", 631 | "epoch:1650, loss:-0.11398519575595856, reward:9.048820495605469, 
time:0.5155866742134094\n", 632 | "epoch:1700, loss:-0.15059001743793488, reward:9.052688598632812, time:0.5349957942962646\n", 633 | "epoch:1750, loss:-0.1372184455394745, reward:9.049918174743652, time:0.6176662445068359\n", 634 | "epoch:1800, loss:-0.13550613820552826, reward:8.8087797164917, time:0.8061460256576538\n", 635 | "epoch:1850, loss:-0.0213179811835289, reward:8.891910552978516, time:0.6810145378112793\n", 636 | "epoch:1900, loss:-0.06060586869716644, reward:8.918651580810547, time:0.6411097049713135\n", 637 | "epoch:1950, loss:-0.09891600906848907, reward:8.923828125, time:0.69578617811203\n", 638 | "epoch:2000, loss:-0.06275218725204468, reward:8.95689582824707, time:0.5658798813819885\n", 639 | "epoch:2050, loss:-0.11416131258010864, reward:8.734716415405273, time:0.8323589563369751\n", 640 | "epoch:2100, loss:-0.08405698090791702, reward:8.993157386779785, time:0.6838529109954834\n", 641 | "epoch:2150, loss:-0.14514678716659546, reward:8.941850662231445, time:0.6441011428833008\n", 642 | "epoch:2200, loss:-0.16426804661750793, reward:9.05589485168457, time:0.5920640230178833\n", 643 | "epoch:2250, loss:-0.06752412021160126, reward:8.866586685180664, time:0.6008732914924622\n", 644 | "epoch:2300, loss:-0.06918899714946747, reward:8.704383850097656, time:0.9676716923713684\n", 645 | "epoch:2350, loss:-0.07058773934841156, reward:8.82143497467041, time:0.7670353651046753\n", 646 | "epoch:2400, loss:-0.05848565697669983, reward:8.87424373626709, time:0.7740436792373657\n", 647 | "epoch:2450, loss:-0.11992520093917847, reward:8.900223731994629, time:0.7352457046508789\n", 648 | "epoch:2500, loss:-0.1749691218137741, reward:8.902121543884277, time:0.683015763759613\n", 649 | "epoch:2550, loss:-0.06190435588359833, reward:8.967233657836914, time:0.6267620325088501\n", 650 | "epoch:2600, loss:-0.08369515836238861, reward:8.875661849975586, time:0.7675507068634033\n", 651 | "epoch:2650, loss:-0.1630706787109375, reward:8.84705924987793, 
time:0.8345162868499756\n", 652 | "epoch:2700, loss:-0.0092984139919281, reward:9.111949920654297, time:0.6135114431381226\n", 653 | "epoch:2750, loss:-0.16354578733444214, reward:9.081850051879883, time:0.5794411897659302\n", 654 | "epoch:2800, loss:-0.04701709374785423, reward:8.847454071044922, time:0.6980524659156799\n", 655 | "epoch:2850, loss:-0.013745903968811035, reward:8.6673583984375, time:0.9215983748435974\n", 656 | "epoch:2900, loss:-0.10398997366428375, reward:8.6686429977417, time:0.8005019426345825\n", 657 | "epoch:2950, loss:-0.11781647056341171, reward:8.65434455871582, time:0.936253547668457\n", 658 | "epoch:3000, loss:-0.04914351552724838, reward:8.650622367858887, time:0.8790426254272461\n", 659 | "epoch:3050, loss:-0.10199323296546936, reward:8.98037338256836, time:0.5582394599914551\n", 660 | "epoch:3100, loss:-0.07593823224306107, reward:8.503681182861328, time:1.1256253719329834\n", 661 | "epoch:3150, loss:-0.05250772088766098, reward:8.837794303894043, time:0.7143948078155518\n", 662 | "epoch:3200, loss:-0.14625421166419983, reward:8.828633308410645, time:0.7558512687683105\n", 663 | "epoch:3250, loss:-0.0792405903339386, reward:8.720843315124512, time:0.864219069480896\n", 664 | "epoch:3300, loss:-0.1072240099310875, reward:8.633138656616211, time:1.0284255743026733\n", 665 | "epoch:3350, loss:-0.1131829172372818, reward:8.647789001464844, time:0.9115370512008667\n", 666 | "epoch:3400, loss:-0.08459718525409698, reward:8.832234382629395, time:0.6702319383621216\n", 667 | "epoch:3450, loss:-0.10764221101999283, reward:9.106353759765625, time:0.5510483384132385\n", 668 | "epoch:3500, loss:-0.11048790067434311, reward:8.788463592529297, time:0.8448485136032104\n", 669 | "epoch:3550, loss:-0.06024862080812454, reward:8.849193572998047, time:0.6575841903686523\n", 670 | "epoch:3600, loss:-0.06686222553253174, reward:8.63388442993164, time:0.9995033740997314\n", 671 | "epoch:3650, loss:-0.1208692342042923, reward:8.857743263244629, 
time:0.7093453407287598\n", 672 | "epoch:3700, loss:-0.11312463134527206, reward:8.86367416381836, time:0.757157564163208\n", 673 | "epoch:3750, loss:-0.07143333554267883, reward:8.732383728027344, time:0.8386166095733643\n", 674 | "epoch:3800, loss:-0.07936899363994598, reward:8.843174934387207, time:0.8409878611564636\n", 675 | "epoch:3850, loss:-0.06484765559434891, reward:8.799470901489258, time:0.8135070204734802\n", 676 | "epoch:3900, loss:-0.09852610528469086, reward:8.895376205444336, time:0.684874415397644\n", 677 | "epoch:3950, loss:-0.048286519944667816, reward:8.960168838500977, time:0.6344684958457947\n", 678 | "epoch:4000, loss:-0.092548668384552, reward:8.955897331237793, time:0.6811671257019043\n", 679 | "epoch:4050, loss:-0.13661867380142212, reward:8.880826950073242, time:0.5931400060653687\n", 680 | "epoch:4100, loss:-0.1092352569103241, reward:8.643815040588379, time:0.9844122529029846\n", 681 | "epoch:4150, loss:-0.07912304997444153, reward:8.694469451904297, time:0.9451181888580322\n", 682 | "epoch:4200, loss:-0.15413469076156616, reward:8.660812377929688, time:0.8905932307243347\n", 683 | "epoch:4250, loss:-0.06282613426446915, reward:8.746149063110352, time:0.7942360639572144\n", 684 | "epoch:4300, loss:-0.026885904371738434, reward:8.911535263061523, time:0.6589419841766357\n", 685 | "epoch:4350, loss:0.014763601124286652, reward:8.568220138549805, time:0.9578315019607544\n", 686 | "epoch:4400, loss:-0.14085274934768677, reward:8.870918273925781, time:0.7608169317245483\n", 687 | "epoch:4450, loss:-0.07291917502880096, reward:8.83354377746582, time:0.7281978130340576\n", 688 | "epoch:4500, loss:-0.0881900042295456, reward:8.901152610778809, time:0.6890379190444946\n", 689 | "epoch:4550, loss:-0.019869375973939896, reward:8.630209922790527, time:0.9335203170776367\n", 690 | "epoch:4600, loss:-0.0390123575925827, reward:8.653127670288086, time:0.8833146095275879\n", 691 | "epoch:4650, loss:0.03238852322101593, reward:8.788601875305176, 
time:0.6800191402435303\n", 692 | "epoch:4700, loss:-0.03223256766796112, reward:8.739492416381836, time:0.8673750758171082\n", 693 | "epoch:4750, loss:-0.09341800212860107, reward:8.981427192687988, time:0.6944162845611572\n", 694 | "epoch:4800, loss:-0.1373763382434845, reward:8.950793266296387, time:0.647646963596344\n", 695 | "epoch:4850, loss:-0.153321772813797, reward:9.146354675292969, time:0.4629411995410919\n", 696 | "epoch:4900, loss:-0.087690070271492, reward:8.997803688049316, time:0.6091476678848267\n", 697 | "epoch:4950, loss:-0.09135127067565918, reward:8.798588752746582, time:0.7804281711578369\n" 698 | ], 699 | "name": "stdout" 700 | } 701 | ] 702 | }, 703 | { 704 | "metadata": { 705 | "id": "NU8ziGXUFqks", 706 | "colab_type": "code", 707 | "colab": {} 708 | }, 709 | "cell_type": "code", 710 | "source": [ 711 | "" 712 | ], 713 | "execution_count": 0, 714 | "outputs": [] 715 | }, 716 | { 717 | "metadata": { 718 | "id": "sHHdTXTSS_MY", 719 | "colab_type": "text" 720 | }, 721 | "cell_type": "markdown", 722 | "source": [ 723 | "# Inference" 724 | ] 725 | }, 726 | { 727 | "metadata": { 728 | "id": "a-XCHqoELuNF", 729 | "colab_type": "code", 730 | "outputId": "e671a0bb-8f16-4cb2-e22a-375d25e9a63d", 731 | "colab": { 732 | "base_uri": "https://localhost:8080/", 733 | "height": 918 734 | } 735 | }, 736 | "cell_type": "code", 737 | "source": [ 738 | "B = 1\n", 739 | "size = 50\n", 740 | "\n", 741 | "X = np.random.rand(B*size, 2)\n", 742 | "Time = 5*np.random.rand(B*size,1)\n", 743 | "Time = Time.reshape(B,size,1)\n", 744 | "Time[:,0] = 0\n", 745 | "Time = Time.reshape(B*size,1)\n", 746 | "X = np.concatenate((X,Time), axis=1)\n", 747 | "\n", 748 | "time_cost = 0\n", 749 | "total_time_cost = torch.zeros(B).cuda()\n", 750 | "X = torch.Tensor(X).cuda()\n", 751 | "Time = torch.Tensor(Time).cuda()\n", 752 | "mask = torch.zeros(B,size).cuda()\n", 753 | " \n", 754 | "R = torch.zeros(B).cuda()\n", 755 | "logprobs = 0\n", 756 | "reward = 0\n", 757 | "\n", 758 | 
"solution = []\n", 759 | "T = 0\n", 760 | "Y = X.view(B,size,3) # to the same batch size\n", 761 | "Y_Time = Time\n", 762 | "Time = Time.view(B,size)\n", 763 | "x = Y[:,0,:]\n", 764 | "h = None\n", 765 | "c = None\n", 766 | "divide = 0\n", 767 | "\n", 768 | "\n", 769 | "enter_time = []\n", 770 | "pass_time = []\n", 771 | "\n", 772 | "# print(Y.size())\n", 773 | "\n", 774 | "for k in range(size):\n", 775 | " \n", 776 | " output, h, c, hidden_u = model_low(x=x[:,:2], input=X[:,:2], h=h, c=c, mask=mask)\n", 777 | " output, h, c = model_high(x=x, input=X, h=h, c=c, mask=mask, latent=hidden_u)\n", 778 | " \n", 779 | " sampler = torch.distributions.Categorical(output)\n", 780 | " # idx = sampler.sample() # now the idx has B elements\n", 781 | " idx = torch.argmax(output, dim=1)\n", 782 | "\n", 783 | " Y1 = Y[[i for i in range(B)], idx.data]\n", 784 | " \n", 785 | " if k == 0:\n", 786 | " Y_ini = Y1.clone()\n", 787 | " if k > 0:\n", 788 | " reward = torch.norm(Y1[:,:2]-Y0[:,:2], dim=1)\n", 789 | "\n", 790 | " Y0 = Y1.clone()\n", 791 | " x = Y[[i for i in range(B)], idx.data]\n", 792 | "\n", 793 | " solution.append(x[0,:2].cpu().numpy())\n", 794 | " \n", 795 | " R += reward\n", 796 | " total_time_cost += reward\n", 797 | " \n", 798 | " # enter time\n", 799 | " time = Time[[i for i in range(B)], idx.data]\n", 800 | "\n", 801 | " # determine the total reward and current enter time\n", 802 | " time_cost = torch.lt(total_time_cost, time).float()*(time - total_time_cost) \n", 803 | " \n", 804 | " if time_cost.item()>0:\n", 805 | " divide = k\n", 806 | " \n", 807 | " total_time_cost += time_cost\n", 808 | " T += time_cost # total time cost\n", 809 | "\n", 810 | " \n", 811 | " mask[[i for i in range(B)], idx.data] += -np.inf \n", 812 | " \n", 813 | " print(\"time:{}, distance:{}\".format(time.item(), total_time_cost.item()))\n", 814 | " enter_time.append(time.item())\n", 815 | " pass_time.append(total_time_cost.item())\n", 816 | " \n", 817 | " \n", 818 | "R += 
torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 819 | "total_time_cost += torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 820 | "\n", 821 | "\n", 822 | "print(\"total length:\", total_time_cost.item())\n", 823 | "print(\"time cost:\", T.item())\n", 824 | "print(\"tour lenght:\", R.item())" 825 | ], 826 | "execution_count": 0, 827 | "outputs": [ 828 | { 829 | "output_type": "stream", 830 | "text": [ 831 | "time:0.0, distance:0.0\n", 832 | "time:0.295327365398407, distance:0.33913496136665344\n", 833 | "time:0.9984871745109558, distance:0.9984871745109558\n", 834 | "time:0.763039231300354, distance:1.266855001449585\n", 835 | "time:1.268875002861023, distance:1.6089143753051758\n", 836 | "time:1.353973150253296, distance:1.8912614583969116\n", 837 | "time:0.6640166640281677, distance:1.9896411895751953\n", 838 | "time:2.023508310317993, distance:2.313350200653076\n", 839 | "time:0.36442306637763977, distance:2.387784481048584\n", 840 | "time:0.6906937956809998, distance:2.5786213874816895\n", 841 | "time:1.539695382118225, distance:2.708146810531616\n", 842 | "time:1.0814710855484009, distance:2.90852952003479\n", 843 | "time:2.9543755054473877, distance:2.9543755054473877\n", 844 | "time:0.6871660351753235, distance:3.2436091899871826\n", 845 | "time:3.2388479709625244, distance:3.348568916320801\n", 846 | "time:1.2242628335952759, distance:3.43845534324646\n", 847 | "time:2.0252959728240967, distance:3.499624729156494\n", 848 | "time:0.47900083661079407, distance:3.6414108276367188\n", 849 | "time:4.094509601593018, distance:4.094509601593018\n", 850 | "time:3.3222713470458984, distance:4.301178455352783\n", 851 | "time:4.239171028137207, distance:4.616779327392578\n", 852 | "time:4.632911682128906, distance:4.669863700866699\n", 853 | "time:1.4778172969818115, distance:4.786561489105225\n", 854 | "time:3.1822924613952637, distance:4.907071590423584\n", 855 | "time:1.8424592018127441, distance:5.078830718994141\n", 856 | "time:2.237530469894409, 
distance:5.232535362243652\n", 857 | "time:4.7146735191345215, distance:5.250123023986816\n", 858 | "time:1.2632544040679932, distance:5.3340044021606445\n", 859 | "time:3.7245700359344482, distance:5.512524604797363\n", 860 | "time:0.2505350708961487, distance:5.909144401550293\n", 861 | "time:4.074026107788086, distance:6.052345275878906\n", 862 | "time:3.401338815689087, distance:6.129876136779785\n", 863 | "time:4.433657646179199, distance:6.205013275146484\n", 864 | "time:2.8145365715026855, distance:6.405936241149902\n", 865 | "time:4.148168087005615, distance:6.736670017242432\n", 866 | "time:2.8954386711120605, distance:6.880939960479736\n", 867 | "time:3.4101805686950684, distance:7.030532360076904\n", 868 | "time:2.882981061935425, distance:7.106689453125\n", 869 | "time:0.8663321733474731, distance:7.244050025939941\n", 870 | "time:3.4774677753448486, distance:7.372501850128174\n", 871 | "time:2.2678780555725098, distance:7.738094806671143\n", 872 | "time:4.937931537628174, distance:7.905282974243164\n", 873 | "time:4.587130546569824, distance:8.011631965637207\n", 874 | "time:2.737105369567871, distance:8.071555137634277\n", 875 | "time:3.6433651447296143, distance:8.127467155456543\n", 876 | "time:3.9233431816101074, distance:8.235284805297852\n", 877 | "time:4.2677836418151855, distance:8.320414543151855\n", 878 | "time:4.9783854484558105, distance:8.710134506225586\n", 879 | "time:4.086057186126709, distance:8.806817054748535\n", 880 | "time:3.7738893032073975, distance:8.927824020385742\n", 881 | "total length: 9.331299781799316\n", 882 | "time cost: 0.4616178870201111\n", 883 | "tour lenght: 8.869680404663086\n" 884 | ], 885 | "name": "stdout" 886 | } 887 | ] 888 | }, 889 | { 890 | "metadata": { 891 | "id": "Bi3TPVRegfQU", 892 | "colab_type": "code", 893 | "outputId": "12381ff6-8d94-437c-8f02-3db6daef9887", 894 | "colab": { 895 | "base_uri": "https://localhost:8080/", 896 | "height": 357 897 | } 898 | }, 899 | "cell_type": "code", 900 | "source": [ 
901 | "plt.figure(figsize=(7,5))\n", 902 | "t = range(50)\n", 903 | "plt.plot(t, enter_time, label='enter time')\n", 904 | "plt.plot(t, pass_time, label='pass time')\n", 905 | "\n", 906 | "plt.xlabel('node', fontsize=12)\n", 907 | "plt.ylabel('time', fontsize=12)\n", 908 | "plt.legend(loc='upper right', fontsize=12)" 909 | ], 910 | "execution_count": 0, 911 | "outputs": [ 912 | { 913 | "output_type": "execute_result", 914 | "data": { 915 | "text/plain": [ 916 | "" 917 | ] 918 | }, 919 | "metadata": { 920 | "tags": [] 921 | }, 922 | "execution_count": 530 923 | }, 924 | { 925 | "output_type": "display_data", 926 | "data": { 927 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbYAAAFDCAYAAABWVivaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3Xd4XNW18OHfmZFG0ozKqNqWJVuS\nbVxxAWPABgwE02JIoSUBU0KA5AYCufd+pJCEFEICIYWENCCEBEgogVxCCabZlLjghnsvqlax2qhN\n398fR2esMlVlRpLX+zx6sKSZOduyOGvW3muvrSmlEEIIIcYKU6IHIIQQQgwlCWxCCCHGFAlsQggh\nxhQJbEIIIcYUCWxCCCHGFAlsQgghxhQJbEIIIcYUCWxCCCHGFAlsQgghxpSkRA8glLy8PFVSUpLo\nYQghhBghNm3adEwplR/pcSM2sJWUlLBx48ZED0MIIcQIoWlaeTSPk6lIIYQQY4oENiGEEGOKBDYh\nhBBjigQ2IYQQY8qILR4RQoih5vF4qKqqwul0JnooIoTU1FSKiopITk4e8GtIYBNCnDCqqqrIyMig\npKQETdMSPRzRh1KKxsZGqqqqKC0tHfDryFSkEOKE4XQ6yc3NlaA2QmmaRm5u7qAzaglsQogTigS1\nkW0o/n0ksAkhhBhTJLAJIYSIqKKigvT0dHw+X2xP9HmgvQ6UGp6BBSGBTQghRrmSkhLefvvtYX3N\nSZMm0d7ejtlsju4FvC5oqYS6neCoAU/nkI4vHKmKFEKIE5hSCqUUJtMQ5TnuTj1Dc7YAGlhzwFYA\nyalD8/pRkIxNCCFGgJqaGq644gry8/MpLS3l17/+deB73//+97n66qu5/vrrycjIYPbs2YEm8StW\nrKCiooLLLruM9PR0HnzwQQDWrVvH4sWLsdvtzJs3j9WrVwde79xzz+Wee+5hyZIlWK1WDh061Gss\nwV7zyJEjaJqG1+sNvMZ3vvMdFi9eTHp6Opd98hIa933EtZ+7kszJJ3Pa8ps40mUD+yRITmXPnj0s\nW7aMnJwcpk+fzvPPPz98P0wjWo+0j1NPPVUJIcRQ2rVrV6KHEJTP51OnnHKK+sEPfqBcLpc6ePCg\nKi0tVW+88YZSSql7771XpaSkqNdee015vV71zW9+U51++umB50+ePFm99dZbgc+rqqpUTk6Oeu21\n15TP51NvvvmmysnJUfX19UoppZYuXaqKi4vVjh07lMfjUW63u9+Y+r7m4cOHFaA8Hk/gNaZMKVMH\nPl6rWvatUzOnlappZZPVW/9
6XnlcTrVixQp14403KqWUam9vV0VFReqJJ55QHo9Hbd68WeXm5qqd\nO3cG/XmE+ncCNqoo4odMRQohTlg/eGUnu2ocw3qNWYWZ3HvZ7LCP2bBhAw0NDXzve98DoKysjFtu\nuYVnn32Wiy66CICzzjqLSy+9FNAzql/96lchX+/pp5/m0ksvDTx+2bJlLFy4kNdff50bbrgBgBtv\nvJHZs8OPKyi/DzpbwdPFTVdczJT8FLCkc8nFF7Nr/yEuuOwqAK666iq++93vAvDqq69SUlLCTTfd\nBMCCBQu44ooreOGFF7j33ntjH0MEEtiEECLBysvLqampwW63B77m8/k4++yzA5+PHz8+8Ger1YrT\n6cTr9ZKU1P82Xl5ezgsvvMArr7wS+JrH4+G8884LfF5cXBzbIN1d+n/rdoDZBMrPuOIyKJgFSSmk\nZeYwbtzxApG0tDTa29sD41m/fn2vv5/X62XFihWxjSFKEtiEECesSJlUvBQXF1NaWsr+/fsH9Py+\nm5qLi4tZsWIFjz32WNTPCft9rxtaDut/TsuGjAKw2CA1C5JSIo6vuLiYpUuX8tZbb0V87FCQ4hEh\nhEiwRYsWkZGRwQMPPEBXVxc+n48dO3awYcOGqJ4/bty4XgUg1113Ha+88gorV67E5/PhdDpZvXo1\nVVVVUY8p8JpKQUvF8W/YJ0FKetSvA7B8+XL27dvHU089hcfjwePxsGHDBnbv3h3T60RLApsQQiSY\n2Wzm1Vdf5eOPP6a0tJS8vDy+9KUv0draGtXzv/Wtb3Hfffdht9t56KGHKC4u5uWXX+b+++8nPz+f\n4uJifvazn+H3+6MeU+A1s+089OvfQvq4gf71yMjI4M033+TZZ5+lsLCQ8ePH841vfAOXyzXg1wxH\nU3HcDR6LhQsXKqOcVQghhsLu3buZOXNmoocxenhd0LAHkm2QOwXi1Gcz1L+TpmmblFILIz1fMjYh\nhBD99ZyCtBfHLagNBQlsQggh+us8Bu52yJwYVYHISCKBTQghRG9el97fMSUDrLmJHk3MJLAJIYQ4\nTiloKQc0yJo0qqYgDRLYhBBCHNfRAO4OyJoISZZEj2ZAJLAJIYTQeZzgOAopmZCWk+jRDJh0HhFC\niBOdUnqhiKNGn3ocZVWQfUlgE0KIE5Hyg6tdPzfN2Qp+L6BB9mQwj84pSINMRQohxIlCKT2INZdD\n7Q5oOghdzWBJh+wSGH+y3gsSeOaZZ7jwwgsTO94BkoxNCCHGOp8bOhuhoxH8HtDMkJoJqXZIyeRI\nRQWluTl4PB6SutOda6+9lmuvvTax4x4gCWxCCDEWGetmHQ16lgbd+9KK9KCmjd0Ju7H7NxNCiFGk\npKSEn/zkJ8yaNYvs7GxuuukmnE4nAM3NzSxfvpz8/Hyys7NZvnx5r079Tz75JGVlZWRkZFBaWsIz\nf/od1O/mwMZ3WfrJq8mauZS8ucu45r/ugTR7v6B2zjnnAGC320lPT2ft2rU8+eSTnHXWWYHHaJrG\n7373O6ZNm0ZGRgbf/e53OXjwIIsXLyYzM5Orr74at9sdePyrr77K/PnzsdvtLF68mG3btg3fD68P\nCWxCCDFCPPPMM6xcuZKDBw+yb98+7rvvPgD8fj833XQT5eXlVFRUkJaWxu233w5+Lx2NR/na1+7g\n3397lLb9a1jz0qPMLysAUxLf/dVfufCTn6a5uYWqqiruuOOOoNd9//33AWhpaaG9vZ0zzzwz6ONW\nrlzJpk2bWLduHQ8++CC33norTz/9NJWVlezYsYO///3vAGzZsoUvfvGL/PGPf6SxsZHbbruNyy+/\nfNi6+fcVt6lITdO+DnwJUMB24CallDNe1xdCiH7+/U2o3T681xh/Mlzy06geevvttwdOtr7nnnu4\n4447uO+++8jNzeWKz35Wb3WlOrnnjps47/LP62Pv7MKkwY6dO5g0qZgJJy1ggiUdktNITrNRX
lFB\nTU0NRUVFvTKwgbj77rvJzMxk9uzZzJkzhwsvvJCysjIALrnkErZs2cINN9zAo48+ym233cbpp58O\nwA033MD999/PunXrWLp06aDGEI24ZGyapk0EvgYsVErNAczA5+JxbSGEGC2MoAYwubiImpoaaKul\ns3IHt11/FZNLS8kcX8I5n7yallYHPmsBtqI5PPfsc/zh2deYMOt0Pnn1jew5WA7Agw8+iFKKRYsW\nMXv2bJ544olBjW/cuONnsqWlpfX7vL29HYDy8nJ+/vOfY7fbAx+VlZX63ycO4lk8kgSkaZrmAaxA\nfP6GQggRSpSZVLxUVlTo7aw6jlGx9QMKx+VB21F+/ts/s/dwNevff4fxxWV8vGM3C045BZU+DpKS\nuOiSS7nokkvp6uriO9/5DrfccgsffPAB48eP57HHHgPgww8/5IILLuCcc85h6tSpva6rDfFm7OLi\nYu655x7uueeeIX3daMUlY1NKVQMPARXAUaBVKfVm38dpmnarpmkbNU3b2NDQEI+hCSFE4vl9oPz8\n9te/pGrbBzQdLefHjzzJNVdfDeNOps2fSlqGHXthGU1tnfzghz8MPLWuro6XX36Zjo4OUlJSSE9P\nx2TSb+0vvPBCoMgkOzsbTdMC3+spPz8fk8nEoUOHhuSvc8stt/CHP/yB9evXo5Sio6OD1157jba2\ntiF5/UjiNRWZDXwKKAUKAZumadf1fZxS6lGl1EKl1ML8/Px4DE0IIRLH0wUtlVC3A/xevvDZT3Lh\ndXdStvhypkyfxXe+/yMwJ3HXXXfR1dVFXl4eZ5xxBhdffHHgJfx+P7/4xS8oLCwkJyeH9957j9//\n/vcAbNiwgdNPP5309HQuv/xyHn744cCaWE9Wq5V77rmHJUuWYLfbWbdu3aD+WgsXLuSxxx7j9ttv\nJzs7m6lTp/Lkk08O6jVjoSmlhv8imnYVcLFS6ubuz68HzlBK/Veo5yxcuFBt3Lhx2McmhDhx7N69\nm5kzZyZ6GODzgqMaupoADdKyKVlwDo8/9jgXLFuW6NElXKh/J03TNimlFkZ6frzW2CqAMzRNswJd\nwCcAiVpCiBOLUnoLK0e1Pv2YPg5sBWBOArRR3Xh4JIlLYFNKrdc07R/AZsALbAEejce1hRBiRPC6\noLUSXG2QbIXcSZCcluhRjUlxq4pUSt0L3Buv6wkhxIiglN7Wqu2o/nlmEdjy+mVnR44cif/Yxijp\nFSmEEMPF44SWcvB06od3ZhWP2lOpRxMJbEKIE4pSasj3bQXl90HTIf2cs+wSvZO+rKFFNBQFjdIr\nUghxwjCbzXg8nvhczFENPhfklOpnnElQi4rH4yEpaXA5lwQ2IcQJw263U1dXh9/vH94LdbXo55+l\nF+hHxYio+P1+6urqyMrKGtTryFSkEOKEkZeXR1VVFXv37h2+i/i90FYLpiRITwHNMXzXGoNsNht5\neXmDeg0JbEKIE4bJZGLSpEnDdwG/D/76KajeBLd9AHlTIz9HDDkJbEIIMVTW/BqOfACXPyJBLYFk\njU0IIYZC9WZ49z6Y9SlY0K8VrogjCWxCCDFYrnZ48Ut6i6zLHpYKyASTqUghhBisN76h71m78VW9\ntF8klGRsQggxGNtegC1Pw9n/DSVnJXo0AsnYhBBiYMrXwHsPwqFVMHEhnPutRI9IdJPAJoQQ0VJK\nr3p870H9v7Z8WPZDWHgzmJMTPTrRTQKbEEJEohQcfFcPaJXr9CKRi34Cp94IFmuiRyf6kMAmhBDh\nHFoN7/wIqjdC5kS45Gdwygo5S20Ek8AmhBDBVG6Ad38Ih9/Xz1Bb/kuYfy0kpSR6ZCICCWxCCNFT\n3U5498ew9zWw5sHFP4VTb4Lk1ESPTERJApsQQoC+D23VT2D7C3pH/vO/A6d/BVLSEz0yESMJbEKI\nE5NS0LAH9r8J+9/Sy/fNFlhyp/5hzUn0CMUASWATQpw43
J16mf6+lXowa63Qvz5uDpx1Fyy6FTLG\nJ3aMYtAksAkhxr72Blj1Y9j6d/A6IdkGZefCOf8DU5dB1sREj1AMIQlsQoixy+uC9X+E938Gnk69\n6/6sT8HkJVLdOIZJYBNCjD1Kwd7XYeU90HwYpl0EF94H+SclemQiDiSwCSHGltodsPJb+v6zvOlw\n3Ysw9YJEj0rEkQQ2IcTY0FYHq++HzX+F1Cy49CF9/5lZbnMnGvkXF0KMbq52WPMb/cPngkW3wdK7\npVz/BCaBTQgxOvm8sOUpWP0TaK/Ti0I+cS/kTkn0yESCSWATQowuSun70N6+V99gXXw6XPM0FC9K\n9MjECCGBTQgxerg74bnr4OA7kFMGVz8FMy8DTUv0yMQIIoFNCDE6KAWv3Kmfi3bR/XDaLZBkSfSo\nxAgkgU0IMTqs/yNsfx7OuwfO/GqiRyNGMFOiByCEEBEd+Q+s/DZMvxTO/t9Ej0aMcBLYhBAjm6MG\nXrgBckrhM38Ak9y2RHgyFSmEGLm8LnhuBXi64IZX9Y3XQkQggU0IMXL9+26o3ghX/xUKZiR6NGKU\nkJxeCDEybfoLbHoSzvq6vvlaiChJYBNCjDxVm+D1/4Up58P53030aMQoI4FNCDGyNJfD8yv0k6yv\n+BOYzIkekRhlZI1NCDFy7FsJL90Kyg83viqNjMWASMYmhEg8vw/e+RH87WqwF8Nt78GEeYkelRil\nJGMTQiRWez28eLN+MOiCFXDpzyA5LdGjEqOYBDYhROKUr4V/3ARdzfCp38KC6xI9IjEGSGATQsSf\nUrD2EXjrXsieDNe+AONPTvSoxBghgU0IER/uTjjyIRx4C/a/Cc1H9CNnPvVb6SgihpQENiHE8Gk8\nCPvf0oPZkQ/B64SkNChbCku/CfM+J2epiSEngU0IMXS8bqhYA/vehH1vQNNB/eu5U2HhF2HqBTB5\nCSSnJnacYkyLW2DTNM0OPA7MARTwRaXU2nhdXwgxTNob9Ixs3xtw4F1wt4E5BUrPhtO/DNMu0E+7\nFiJO4pmxPQy8oZS6UtM0C2CN47WFEEPtyIew6n4oXwMoSB8Pcz4LJ12sTzVabIkeoThBxSWwaZqW\nBZwD3AiglHID7nhcWwgxxOp3w9vf1zO0zIlw7rfgpIv0DdWyXiZGgHhlbKVAA/BnTdPmAZuAO5VS\nHT0fpGnarcCtAJMmTYrT0IQQUXEchdX3w5anwZIBF3xfn2qUzdRihNGUUsN/EU1bCKwDliil1mua\n9jDgUEqFbNu9cOFCtXHjxmEfmxAiAqcD1vwa1jwCfi8suhXO+V/p4yjiTtO0TUqphZEeF6+MrQqo\nUkqt7/78H8A343RtIcRA1e2Cv14OHQ0w50o4/zuQU5roUQkRVlwCm1KqVtO0Sk3Tpiul9gKfAHbF\n49pCiAHyeeH/vqJ3CbnlXZh4aqJHJERU4lkVeQfwTHdF5CHgpjheWwgRqzUPw9GP4aq/SFATo0rc\nAptS6mMg4tyoEGIEqN8Dq38Ksz4Fsz+d6NEIERM5j00I0ZvfBy9/FSzpcOnPEz0aIWImLbWEEL2t\n/S1Ub4Qr/gTp+YkejRAxk4xNCHHcsQOw6scw/ZMw54pEj0aIAZHAJoTQGVOQSamw/BfSRUSMWjIV\nKYTQffQoVK6DT/8BMsYnejRCDJhkbEIIaDoEb/8Apl2on5EmxCgmgU2IE53fDy/fAeZkWP4rmYIU\no55MRQpxIvO64K17ofxDuPw3kDUx0SMSYtAksAlxojq6Ff75ZajfBad9CRasSPSIhBgSEtiEONH4\nvPDhL+G9n4I1D77wApx0YaJHJcSQkcAmxImkYR/88zao2ax367/0Z3L8jBhzJLAJcSLw+2H97+Gd\nH0KyFa56EmZ/JtGjEmJYSGATYqxSCup3w+5XYOc/oWE3nHQJXPYwZIxL9OiEGDYS2IQYS/x+qNkC\nu/+lB7Smg4AGk86Ez
zwKc6+Wcn4x5klgE2I0cRyF9jpwtkBXC3Q1H/9zxzE4tAoc1WBKgtJzYPHt\nMGM5pBckeuRCxI0ENiFGg5ZKeOu7+pRiMKZkSLND8enwie/BSRdBWnZ8xyjECCGBTYiRzOOENb+G\nD34BKDj7f6FwgR7E0rIh1a7/OdkqU4xCdJPAJsRIpBTseQ1WfhtayvWTrC+8D+yTEj0yIUY8CWxC\njDQNe+Hf39DXy/JnwvX/grKliR6VEKOGBDYhRgqPU+8GsuY3kGyDix+A027WmxMLIaImgU2IkaBi\nvX7IZ+N+mH8tXPADSM9P9KiEGJUksAmRSO4OePc+WPd7yCqC616CqZ9I9KiE6Mfr82PSNEymkV+k\nJIFNiEQ5/D786w5oPqJ317/g+5CSkeBBCRHcNY+uY2p+Og9cOTfRQ4lIDhoVIt6cDnj16/CXywAN\nbnwNPvlzCWonmD21Ds55cBXbq1oTPZSIdtU42FTezOvbj+Lx+RM9nIgksAkRLx3HYNX98PA82Phn\nOPN2+MoaKDkr0SMTCbCtspWKpk6++rfNOJyeRA8nrJc2VwHQ5vKy8UhzgkcTmQQ2IYZb0yF47X/g\nl7PhvQdg0hlwyztw0Y/BYk306ESC1Lc5Aahu6eJbL25HKZXgEQXn9fn5v49rOGtqHslmjVV76xM9\npIhiWmPTNG0GcBUwXin11e7PLUqpbcMyOiFGs+rNeteQXS/rvRvnXgOL74D86YkemRgB6ttcZKYm\n8ZVzp/LAG3s4Y30uK86YnOhh9fPB/mMca3ex4sw5ALy7p55vXzozwaMKL+qMTdO0q4D3gYmAcYZ8\nOvCLYRiXEKOT3w/7VsKTy+Gx8+DAO7D4a3DnNvjUIxLUREC9w0VBZiq3nVPGudPz+dGru9hZM/LW\n217cXEW2NZnzphdw3owCDtS3U9nUmehhhRXLVOQPgWVKqS8Dvu6vbQXmDfmohBht3J2w8Qn47SL4\n29X69OOyH8HXd8KyH0DmhESPUIww9W1OCjJSMJk0fn7VPLKtydz+ty20u7yJHlpAa5eHN3fVcfm8\nQixJJs6foZ8S8e6ekT0dGUtgKwCMKUfV478jc2JYiHhoq9P3of1ytl7paLHBFX+CO7fCkq9BamZc\nhqGU4p5/bmfjkaa4XE8MXn2bi4KMFABy01P49ecWUN7YwbdfGjnrba9vP4rb6+ezpxQBUJpnozTP\nNuIDWyxrbJvQpyD/2uNrnwM+GtIRCTGS+X3QeEA/zPPQatjxIvg8MP1SOPOrMHlxQrrst3Z5eGZ9\nBempSSwsyYn79UVslFLUt7kYl5ka+NrpZbn897KTeOjNfZw5JZfPL0p8w+uXNlcxtSCduUVZga+d\nN72Ap9eX0+n2YrWMzK3QsYzqa8CbmqbdDNg0TVsJnARcOCwjE2IkaDyoBzHj4+hWcLcD4DFbMS24\nHvOZ/wW5UxI6zFqHXmHX3OFO6DhEdFq7PLi9fvK7MzbDf507lfWHm/j+v3Yyv9jOzAnxyfiDKW/s\nYMORZu6+eDpajzdr588o4In/HGbNgUYumDUuYeMLJ+qpSKXUHmAG8FvgO8CfgZOVUvuHaWxCJNYb\n34bfnAIv3gwbHgefG+Z/AT79ex6Z8RTTOx7lg2nfSHhQA6ht7Q5snSN7P5TQ1be5ACjokbEBmEwa\nv7xmPllpyXz1b5vpSOB620ubq9E0+MyCib2+vqg0B5vFzDsjeDoypjxSKdUJPD9MYxFi5Nj4BKz7\nLZxyAyy6Va9m7O6yv/ZgIw99vA6AxvaRkSHVScY2qtQ7ugNbn4wNIC89hV99bj5feGw9z22o5Itn\nlcZ7eCileGlLFUum5DEhK63X9yxJJs6els/qvfUopXplcyNFLOX+kzRN+5OmaZs1TdvX82M4ByhE\n3B1+H17/fzDtQlj+Sxg/JxDUOt1e7n5xK4VZ+jvtxg5XIkcaUNuqj6O5UwLbaGBszg4
W2AAWT8nD\nbk3mQEN7PIcVsOFIM5VNXXz2lIlBv3/+jAKOtjrZfbQtziOLTiwZ2wvAHuB7QNfwDEeIBGs6BM9f\nD7lT9epGk7nXtx98Yy9VzV08d+uZXPf4ehpHSIYUWGOTqchRIdRUZE8luTbKGzviNaReXtpchdVi\n5uI544N+/9wZ+pFKq/bWM6swceuAocQS2GYAZyqlRn4HTCEGwtkKf/uc/ufP/71fqf66Q408ueYI\nNy4uYVFpDjk2C00jbCqypdON369GxdEiJ7J6hwurxUx6SuhbcEmulQ0J6Mvo9Ph4bdtRLpkzIWTV\nY0FGKidPzOLdPfV89bypcR5hZLHsY3sFkPPpxdjk98E/boamg3D1U5BT1uvbnW4vd/9jG5Nzrdx9\nsd49JMdmoWmkZGzdxSN+BW3OkbPBN5S/f1TBpQ9/MGL2a8WbsTk7nMm5Nmpau3B6fGEfN9Te3FVH\nm8vLFSGmIQ3nzShgS0XziFzXjSWwfQ34vaZpr2ma9kTPj+EanBBx89b34MBbcOlDUHp2v28/+MZe\nKpo6efCKuYF3sbnpFo6NkP+p6xxOrBZ92rRpFKyz/evjGnYddVDnGBlrlPGmb84OPQ0JUJJnRSmo\nao5v+6qXNldRmJXKGWW5YR93/owC/Are29cQp5FFL5bA9mf0Vlq7geo+H0KMXpufgrWPwKLbYOFN\n/b69vscU5Ok9/mfPtVloGgHFIy6vj8YONzPG6+e5jfQCEqfHx6YKfYptf/3ILD4Ybg1tLgoyw2ds\nJbk2AI4ci19gq29z8v6+Bj5zysSI09lzJ2aRl24ZkV1IYlljOx8oVEqdmL+JYmwqX6O3wio7Dy66\nv9+39SrIbUzKOT4FacixpYyINTajdHzGhEw2V7SMyKmhnrZUtOD26kv1B+rbOXtafoJHFH/1Difn\nTS8I+5hAYItjAcnLW2rwKwIttMIxmTSWnlTA27vr8Pr8JJlHzilosYxkGxA+NxViNNn6HDz1Wcie\nDFf9Gcz93+f9bOVeyhs7eaDHFKQhN91Ch9sX9zWQvozCEaNLxUivjFx7qBGTBjaLmf31iSlnT6R2\nl5cOty9ixma3JpOZmhS3wKaU4sXNVcwvtjMlPz2q55w/o4DWLg9bKluGeXSxiSVjexe9pdafgbqe\n31BKyTqbGD18Hlh5D3z0R5i8BK56EtKy+z1sU3kTT645wg1nTubMKf3f0+XaLAA0driZaE/r9/14\nMUr9Z3ZPRbaM8KnIdQcbOXliFslmEwdOwMBW7wi/h82gaRoleTbKG+MzFfnshkr21Lbxk8+eHPVz\nzj4pjySTxrt76jltBPUojSVjOwt9Pe1C9GbIxsd1wzAuIYZHWx385TI9qJ3xVbj+ZUgPPiX0zPoK\nstKSufviGUG/n9Md2BI9HWlURE4tSCfJpI2YSs1gutw+tlQ2c0ZZLlML0jl4IgY2Yw9bhOIR0Kcj\n45Gx7ahu5d5/7eTsaXlcs7A46udlpiazsCSbVSNsnS3qjE0pdd5wDkSIYVf5ETy3Qt+vdsWf4OQr\nQz7U71e8v6+Bc6blYwux1yg33cjYEltAUudwkppsIistGbs1eURPRW4qb8bjU5wxJZeD9e08u6GS\npg534E1CNNxeP8lmbUS2corG8c3Z4TM20PeyvbqtBrfXjyVpeNawHE4PX/3bZnKsFn51zfyY90Ce\nP6OA+1/fQ3VLV0JnLnoK+5PSevzmaJpmCvUx/MMUYhCU0psY//lSSE6FL70dNqgB7KxxcKzdzbnT\nQxc25Nj0G1OiM6SjrU7GZ6aiaRrZVsuILh5Ze+gYZpPGaSU5TC3Q13FimY50enyc8ZN3eG5D5XAN\ncdhFOxUJ+l42v4LKYSr5V0px9wvbqGru4pEvLCA3PfKY+jp/ht7hfyRVR0YKSj3PKfcCnj4fxtei\nommaWdO0LZqmvRrrQIUYEKXY+/gX4bX/gSnnwa2
r9d6PEazeq/9Pes5JoQNbIGNL8FRkncMZONcr\n22qJa7l/rBus1x5sZG5RFukl3djqAAAgAElEQVQpSYHAFkvJ/+6jDpo63Hyw/1hM1x1JGtpcWJL0\nDDuSkjwrwLC11nriP0d4Y2ct37x4xoDP8ZuSb2NSjnVETUdGCmyze/z5G0BZkI+7Y7jenej74ISI\nC9+uV5le/RIrs66Czz8XtEgkmNX7GphblEVemHewGSlJJJu1hPeLrHU4Gd/dlDnblhy3wObzK856\nYBVPryuP6vEdLi/bqlo5s3svYGFWGmnJ5pgyth01DgA+HmFVeLGob3ORn54S1VTq5GHcy7a5opmf\nvL6bZbPG8aWzB36CgKZpXDxnPKv31rNq78gIbmEDm1KqZ77/XaVUeZ+PI8A90VxI07Qi4JPA4wMe\nrRCx8DjhzXvY6y/irmOfxuGOriy/pdPNlopmzg2TrYH+P3ROgjdpK6Woc7gY3ytji88aW2uXh+qW\nLp748HBUmduGI014/SpQYWoyaUwtSI8tsFXpk0jVLV0ca0/85viBqG9zRrW+BnrlbUZK0pBnbM0d\nbm5/ZjMT7Kk8dOW8Qa9X3vmJacwYn8kdf9vCvrrEb3WOuD6madr5mqadDyRpmnae8Xn3x5eAaP8W\nv0LP7qSJsoiPdb/D3FrOD7zX0+XTeGtnXeTnAB/sP4ZfwdIIG2ihe5N2AjO25k79JObAVKTNQkun\nOy49GFu79AB66FhHVM161x5qJNmssXDy8SmvmANbTSuZqXoxz7aq0Zm11TtcjIuiIhL0N0+T86wc\nGcKSf79f8fXnP+ZYu5vffeFUsqyRp0QjsaUk8fgNC0mzmLn5LxtoTPCbjmgKP/7U/ZECPNHj88eB\nLwJ3RHoBTdOWA/VKqU0RHnerpmkbNU3b2NAw8vqPiVHEcRTef4jmSReyxq+vqb26rSaqp67e24Dd\nmsz8YnvEx+alWziWwDU2o9Q/MBVpTcbjU7TH4eTlnvvlnt1QEfHx6w42Mr/YTprl+FFAUwvSOdrq\npM0ZOct0eX3sq2vjMwsmYtJga2VrxOeMRPVRtNPqafIQl/z//r2DrN7bwHcvm8XJRVlD9rqF9jQe\nu34h9Q4XX356Ey5v4hoXRAxsSqlSpVQp8Izx5+6PMqXUYqXUv6K4zhLgck3TjgDPAudrmvZ0kGs9\nqpRaqJRamJ9/4rXZEUPonR+C38OuOfoS8BllOXyw/1jEzct+v+K9ffWcPS0fcxRlz4nu8G90HTEy\nNrtVL2hpicN0pJGxzZyQyevbj+IIE5wcTg/bq4+vrxmMApKDDZFv3Ptq2/H4FItKc5lWkMHWUZix\nOT0+Wrs8UVVEGkpyrVQ1d+HxDX6ya9Xeen7+5l4um1fIdadPGvTr9TW/2M5DV81jw5Fmvv3SjoSd\n3hB1qb5S6vqBXkQp9S2lVJFSqgT4HPCuUko2dovhUbUJtv4NzvwqR80TAFhxRglev2LlztqwTzXK\n/M8LU+bfU6IDm9F1xMjYcroDWzzGZAS2284pw+nx86+PQ2fEGw434VdwxpTggS2a6cgdNXqGNmdi\nJnOLsthW1Trqjr1piGFztqEk14bPr6huHtz5zh8dbuIrT29i5oRMfvLZk4dtH+Bl8wq564JpvLi5\nij+8d2hYrhGJ7EETY4vfD/++G9LHwdn/E8jQzpqWx6QcK69uOxr26dGU+feUa7PQ7vImrF9kbasT\nTTu+Jyrbpq+XxKMy0ghsZ03LY+aEzLB7y9YebMSSZOKUSb2rUifnWEk2a9EFtupWMlKTmJRjZV6x\nnaYON1WDvNnHW32b/kYkP4apyJK8wTdD3lHdys1PbqDQnsZfvrgo7AGnQ+HOT0zjsnmFPLhyT8Q3\nk8Mh7oFNKbVaKbU83tcVJ4jtL0D1Rrjg+5CSQXOnG7NJIzM1ieVzJ7DmYGPYhe1oyvx7Mja0Jipr\nq3M4ybWlkNz
dWT27O2OLR2Azpjuz0pK5ZmER26tb2VkTfN1r7aFGTplkJzXZ3OvrSWYTpXk2DkSx\nl21HjYM5hVlomsa8In39c7RNRxonMcQyFTk5V9/LduTYwALbwYZ2bnjiIzLTknn65tOj/t0eDE3T\n+NmVc5lbZOeuZz9mR3V810MlYxNjh6sd3r4XCk+BuZ8D9JuvPS0ZTdNYPrcQn1/xRoh3kNGW+fcU\n6BeZoMCm72E7fqMKBLaO4V9ja+n0kJ6SRLLZxKcXTMSSZOL5IFlbS6ebXUcdIQ+unFaQETFj8/j8\n7D7qYM5E/QSD6eMzsCSZ2DrK9rPF0ifSkJ+egs1iHlBlZHVLFyseX4+mwVM3L6Iwji2vUpPNPLbi\nVOzWZG7568ZAx5V4kMAmxo4PfwltR+GSB8Ck/2q3dHoC5cwzJ2RQlmfj1a3BpyNjKfM39Ozwnwi1\n3e20DJlpyWhabB3+Wzs9A9rw3NrlCXTPsFstXDJnPP/cUt1vWvajw00oRb/CEcOUgnQqmjrDTuce\nqG/H7fUzZ6JexWdJMjFrQiZbq0ZXZWR9mxOzSQv83kRD0zQm59pi3svW0OZixePraXN5+esXT6cs\nyqNohlJBZiqP37CQvPQUuuI4XS+BTYwNzUdgzW9g7jVQvCjw5ZYudyCL0bO2Caw/3BhY6+gpljJ/\nw/GMLTH7dnq20wIwmzTsack0xRDYHv/wEFf/cS0+f2yFGK1d7l5toa5ZWIzD6eWNHb0z4rWHGklJ\nMjF/UvCf69SCdPwKDoeZajOmsozABnoF3o7q1pjHnUj1Dhd56ZaYGw2X5FljOr6mtcvD9U98RE1r\nF3++8TRmFWbGOtQhM7swi3/dviTQRSUeJLCJseHN74LJrK+t9dDc4SG7xwbU5fMK8Sv63Xz1Mv+G\nqMv8DcYaWyL6RTo9Ppo7PUzI6j2tFWv3kcqmTtxef6AYJFo9MzaAM8pymZRj7VdEsvZgIwtLsklJ\nMvd9CQCmRVEZubPGgc1iprTHzXFuURadbt+oOtOtvs3V641ItCbn2qhs7sQbRcl/l9vHzU9u4EB9\nG39csXDAPSCHUrxPYpDAJka35nJ46VbY/S84+78hs7DXt/Wb7/Fpn5PGZXDSuPR+05G7jjo41u6K\naX0NIDM1cf0ijUKEvjfKbFtsHf6NLQOxFpy0dHqw93jTYDJpXHNaMWsPNQYKHZo63OypbQs5DQlQ\nmmfDpBH2NO3t1a3MKszslenM686sR9M6W32bK6bCEUNJrhWPT1HTEnmd6pdv72NzRTMPf24BS2P8\nfR4rJLCJ0amjEd74FjyyEHa9DEvugsV39ntYc6e7V8YGsHxuIRvKmwJdOyD2Mn+DcVRMIg4b7buH\nzZAd45lsxs8h1pO3+2ZsAFecUoRJg+c36lnb+kONAEFPIDekJpuZlGMNeeioz6/YVePoNQ0JUJpr\nIyMlaVRVRja0OcmPoXDEUJIbfcn/mztrWXpSPpeePCHm64wVEtjE6OLuhA9+Dr+eD+v/oK+p3bEZ\nlv0AknovyLu8Pjrdvl5ZBcAn505AKXht+/GsbfXeBk6emEX+AN5N59gsCcnYAoGtb8YWw5lsSqnA\n6zTFWEnZ0uXp12dwfFYq500v4B+bqvD6/Kw91IjVYmZuUfh1y6kF6SGPrzl8rJ0uj485hb0Dm8mk\nMbc4a9QENq/PT2OHe2AZW/detkgFJIePdXCksZPzZkRfADUWSWATo4PHCZv+Ar85RW+XVXIWfGUN\nfOoRyJoY9Cmt3VmL0WbKMCU/nZkTMgO9I1s63WyuaI6620hfeekpCTlFu6470xrXN2OzRX8mm6PL\ni9Ojr9vEMhXp9Phwe/1BzxS75rRi6ttcrN7b0L2+lhPYZxfKlIJ0Dh/rCLqGtD1I4YhhbpGdPUfb\nErZBPhbH2t0oFd3J2X0VZKSQmmyKWPJvnIl27kkndmAb3u3nQgxUZxNUrIOKt
VC5Hmq2gM8NRYvg\nyidg8uKIL9HSZQS2/jff5XMn8LOVe6lq7mRLRUvMZf495dgsw3bCcThHW51YLWYy+nSRsFuTcXn9\ndLl9vRoOB1PbY29RLFORxuZse1r/svXzZhSQl57C71YfYH99O589pSji600ryMDjU1Q0dfYrS99R\n7SA12cSU/P5VdfOK7Hj9il1HHf26msTLfw4cI81ijnj9usDJ2bFPRWqaRkkUJf+r9tbrB392b+o+\nUUlgEyODuxP2r4SDq/SAdmyv/nVTMhQugNO/DGXnwpTzIcoKK2M6Ltva/+Z72dxCfrZyL69vP8re\n2vaYy/x7yrElZo2tzqHvYetbcRboF9npZqIl/IbcnoEtlqlIo4IyWMaWbDZx5alF/OG9g0D49TXD\n8dO024MEtlZmTsgkKUjWN69Yz+K2VbYkLLD94JWdpCWbefn2s8I+7vjm7IF1/ijJtYU9bbzT7WX9\noSauP3PygF5/LJHAJhLH64KD78KOF2HP6+DpgJQsmHQ6zLsGis+AiadA8sC6JbSEuflOyrUytyiL\nV7Ye5WirM+Yy/55ybRbaXF5cXl/IkvbhUNtnD5vBHug+4mZihE4Tta16r8VYN3Ubjw2WDQNcvVAP\nbOkpScyJYg9Vz2bIF80+/nV/d+HIpxcEn24en5lKQUZKVBu11x9q5D8HjvH1ZScNafl5bauTLo8P\np8fXr2VYT8beyYFMRQJMzrPy7p56fH4V9Hd17cFG3D7/Cb++BhLYRLz5vHD4PdjxEux+BVytkJYN\nc6+COVfA5CX6frQhYNx8s0N0eVg+dwL3v74HIOYy/5569ouckBW/lkW1rU4Wlfbfo2RsGo9mzay2\nVc8iSnJtMa2xhcvYAMry0/nEjAKy0pKDZlp9packMSErtd+etPKmTtpc3kArrb40TWNukT1iAYnX\n5+dbL23n0LEOFpXmcta0vIhjiobT48Ph1M++izQdWu9woWkMuFdjSa4Nt8/P0dYuirL7TzWu2luP\nzWJmYUliMteRRAKbiI+mw7D5L7DlGeioB0sGzFyuB7Oyc8E8+FN8+2oOrAMFf+1LTz4e2GIt8+/J\nCCSN7fELbH6/or4teMZmbG+IpuS/1uEk12YhPyMlpv6S4bJhw+M3LIwpMwp2mnawjiN9zS/O4u3d\ndUG3Hxj+tbWGQ8c6SEky8fA7+1gyNXdIsjZjLyHAxxXhp0Pr21zk2iwRC2lCMZohlzd29gtsSilW\n7WlgydS8uM4ajFQS2MTw8Xlg7+uw8c9waBVoJjjpYpj3eZh2ISTHvogei5ZODxazCWuIAoqibCuL\nSnPw+vwDKvM35KbHvxFyU6cbj08xPsi0lpGhRjO1WOdwMj4rlWxrMoeiOOzT4DACW4ipSIi928TU\ngnSe21CJ368CG7F31LRiMZuYVpAR8nnGVoLtVa1BMzGvz8/D7+xn1oRMrl5YxPdf2cXag40snjr4\nrK2uR2u2LRE2ig90D5uhtLvk//CxDpb0GfuB+naqW7q4/fypA379sUQCmxh6TYdh819hy9N6dpZZ\nBOd+G05Z0a8zyHBq6XSTZU0Oe4N9dMWpDPasykR0+Dc2VffdnA3HM9RoxnO01UlhVmp3G67o94O1\ndHowm7R+FZmDMbUgnU63j5oeU207qlsDnfxDmVukZ3Nbq1qCBraXtlRT3tjJY9cv5Oxpefz+vYP8\n6p39nDll8FmbUelYlm/j48rmsI8daNcRw7iMVFKSTEErI1d1Nxg4d4BbVsYa2ccmhtbO/4PfnAr/\n+RUULYQvPA93bYNzvwGZhdzwxEe8/HF1XIbS0unp13WkL7vVEnINLlp5Nv1mdSzMOW9DzbihBpuK\nTDKbyEhNCpTkR3qdcVmpZNsstHS6oz6RuqXLTWZq0pAWYRhZmTEdqZRiR7Uj5PqawW61UJJrDdpa\ny+Pz8+t39jO3KIsLZhaQmmzmK0un8NHhJ
tZ2d0UZDGMq8qLZ46ls6gr7O1DvGFxgM5k0Judag+5l\nW7WngRnjM+K6xjuSSWATQ+fw+/DSLVB0Gty1Az7/dzjpokAxSIfLy3v7GnhvX0NchtPc6Q66z2qo\nZaYlkWTSos7Yyhs7Yj6CpK9Q7bQMOTZLxPG4vD6aOtyMz9SnIr1+RZvLG9X1W7u8/Ta+D9bUPs2Q\nq5q7aO3yhF1fM8wrtrMtSGXkPzZVUdXc1asS8nOLJlGQkcLDb+8f9Jjr2pxYzKZA8dHHFcGzXr9f\ncazdNeCKSEOw42vanB42HGmSasgeJLCJoXF0K/z9C5AzBb7wbNBuIMY+nsqm+Gxmbu3yhCxHH0qa\nppEdRSAx/O8LW7nz2Y8Hdc26VicmTT+EMhi7NXL3ESPbGJ+VGghSLVEWkLR0usMWjgxEjs1Cjs0S\nCGzGadx9W2kFM7fITq3DGchkQQ/cv3lnPwsm2XtVvaYmm/ny0imsP9zEukFmbfUOF/kZKcwtsmM2\naSHPtWvqdOP1qwFtzu6pNM9GeWMn/h5H9fznwDG8fsV5A2wwMBZJYBOD13QYnr4SUrPguhf18v0g\njJtOZVNXXIalN0Ae/owN9L1s0fSLVEqxt7aNXTUOPFEcQRJKrcNJXnpKyFL6HGtyxMDWs9eksak7\n+lZcoSsQB6NnZeT26lbMJo3p40MXjhjmd2/U7jkd+fyGSmpanfx3kH1rXzh9EvlDkLXplakppFnM\nzBifwZYQ62zHu44MNmOz4vL6e22sX7WngYzUJE4Jcd7diUgCmxic9np46jPg98CKl0L2bYTj/3PX\ntTlxeYe/t1/fY1WGU266hcYo1tgaO9w4nF7cPj/76wZ+jlitwxVyGhKMRsjhs6+jPQpQsm3dBSdR\nBraWYcqG9WbI7YH1tWkF6WE3PRtmTcjCbNIC+9mcHh+PrDrAopIczgpS/ZiabOa2c8pYe6gxcALB\nQNQ5XIEsbMEkO1srgx98Gug6MsipyL5d/pVSrNpbzznT8qPaL3iikJ+EGDhXGzxzJbTV6kUi+dPD\nPtwIbEpBdfPwZm1dbh8ur3/I14FCybGlRDUV2bOkfkdN5G4ZodS1Bt/DZrBbLRHL/QNNlDN7TEVG\nGdjC7RkbjKn56bR2eWhod7GjupWTo1hfA0izmJk+LiOwzvb3jyqoc7jCdhm59vTJ5KWn8PA7A8/a\n9BPM9WA1vzibdpeXgw3937A0OIx2WoObiuy5lw30TeH1bS6phuxDApsYGK8bnrsOanfA1X+B4kUR\nn1LXYzNrxTCvs7V0hW/5NNSinYo0bnomDXZWDzyw1Xb3iQwlx5ZMh9sXNjOudThJSzaTmZp0fCoy\nijU2v1/p65fDENimjdMLSP5z4BiNHe6oCkcM84qz2FrZQqfby+9WH+TMstywfSrTLGa+vLSMNQcb\n2XCkKeaxdrl9tDm9FHT/Oxi9RoMVkBjttAazXxKgMCsNS5IpkLGt3qsXYi2VwNaLBDYRO78f/u/L\ncGg1XP4bvfIxCrUOJ+nd+54qhzljM27Qkcr9h0qOzUKb04vbG37d7FBDO5YkEwsmZbOjxjGgazk9\nPlq7PGGnIo9nYKEDVa3DyYQsvYlyZlpy1P0i21xelILMYVpjA/i/LfqRQpFK/XuaV2TH4fTy49d2\n09Dm4r8vPCnic/SszTKgtbZA78fuYFWWZyMzNSnoOlt9m4ustOSoplXDMZk0JuVYAyeUr9pTz8kT\nswadCY41EthEeJ1NcPgDWP8ovHInPL4Mz/1FsONFvOffCwuujfql6h1OZhVmYkkyURWnjC0rDuX+\nEH33kUMNHZTl2ZhblMWuGkfQ9ZhIaltD72EzRLNpvLbHdKbZpJGVlhzVGluoc+6GwvjMVNJTkvhg\nfwMmDWZOiD6wGR1InllfwdnT8jitpH8fzb7SLGZuPaeMDw8cY2OMWZsxA2H8DE0mjXnFdrYEy9gG\nuYetp
5JcK+WNnYM+R3Ask8Am+vO64I1vw0PT4cFS+Mty+Pf/0zdfm5PZZL+Ir7q/RtXMW2N62TqH\niwlZqRTZ04b9/DIjUzGKIoZbrtEvMsKBowcb2inLtzGnMIsuj4/Dx2IvIAl1cnZP9kC/yPCBrWfW\np3cfiTwVGakB8mBomsaUgnT8Sj8Q1mqJvrPJSePSSU3Wb2lfXxY5WzNcd8Zkcm2WmNfajIyt5xuM\nBZOy2VfXRkef/YD1bc5BF44YJufaONLYwXv7GvArOFf2r/UjLbVEb83l8MIN+sGesz4FE0+Fgtkw\nbhZkTABN47d/Ws8H/mNc63BS0ufsrFCUUt0L7am05HiGfY3NuKHHY4M26MUjED5Dcnv9VDZ3sXxu\nYWDtaEe1g6lh+iAGE66d1vHxhF8zC9ZEOduaHNVU5HCvX07NT2drZUtM62ugd1zRqwO1mM5ms1qS\nuOWcMn767z0cqG+L+t+jztH/fLUFxXb8CrZVtfZa36tvc0WVQUajJM+G0+Pn+Y2VZFuTmVckZf59\nSWATx+1bCS/dCsoP1zyjd98PwqjIqmlxBv1+MK1dHlxeP+MyU+l0e0NuZB0qgROe47jGBuEDW0VT\nBz6/YkqBjSn5NlKSTOyobg151lgokbqOwPHDVUNlbEYT5Ql9Mraa1sj/psOZscHxApLZUZzj1tcf\nV5w6oGsaWwL217VHHdjqHXrXkZ6/Y/OMApLKlkBgU0oNuk9kTyXdlZH/OdDIp+cXDvgcwbFMApvQ\nz0hb9WP48Bcw/mS4+q+QUxb0oW6vn6ruacSjLdEXgBxfj0jB6/PT2uUZtpJx0IsgUpNNg16sj1Ze\n+vGja0I5UK8v+JflpZNkNjFzQuaASv5rW/UinPQwDYiNm22oDCzYOp3damHX0cgFLS0RjgMaLCOg\nnTI59nPFBtq7sjhHDxaxTJHXt+ktsnpeM8em963cUnG8gMTRpRcVDbYi0mDsZQOkjVYIssZ2omur\ng6c+rQe1U26Am98KGdQAqlu6MOodalpjCWzHb6SBm8gwTkfqDZDjMw0JkJmajNmkhV1jO9S9nlaW\nr9+Y5kzMZGe1o1d7pGj03DsVSkqSGZvFTFOIqchg05k5tsjdSuB4xjYcVZGgZ0+vfe2smKYTBysr\nLZnM1KSYpsjrQpxgPr/YzseVLYGG0sdPzh6aysUJWakkmzU0Dc6ZJoUjwUhgO5Ed/gD+eA5UbYRP\n/x4u/zUkh+8ObuyfSTJpMU1F1vUoeJjUHdiqhrGApLlz+LLBYEwmjWxr+H6Rhxo6KMhIISNVH9ec\nwizaXN6YC2lqHc6w05CGcJu0gxWg2K0WnB4/Xe7wXWFauzzDmg1rmsbsKPpDDrVJudaY2r3VOZxB\npxcXTMqmvs0V6OwSbC1uMJLMJkpybSwotg/6ZIqxSqYiT0RVm+C9B2D/Sr1p8YqXYNzsqJ5a0b2+\nNq/YTk1MU5HHN6hmdt/Yh7NnZGtX/PpEGnJtlrBTkYe6KyINPQtIJveYXoqkrtXJGWE2HhtybJaQ\n5ft1Dr2JsjGFCr3X5dIsod/gDEcD5JGgONvK3rq2qB9f3+bi7CAZk7FRe0tFC4X2tH773YbCL6+Z\nT1qIA3SFZGwnlsqP4Okr4PHzoWoDfOJ7cNv7UQc10DM2q8XMyROzqGnpivr8rjqHC7tV36CaZU0m\nIzVpWEv+m+PYJ9KQmx46Y1NKcbChg7IeVaTTxqWTbNZiWmfTqxldYUv9DXZrcsjy/dpWJwUZqb36\nC+bYIm8RgO5TE+JUbRpPxTlWqpq7opoa7nR7u7uO9A9WMyfoezWNg0eP94kcuk3UcyZmMSXKiuQT\nkWRsJ4KKdbD6p3BoFVhz4YLvw2lfgpTYysxBr4icnGtjoj2NDrcPh9Mb1bv3OoeTcT26I0zKsQ5r\nyb/eADm+N98cm4WdIbqJNHW4ae3y9LoZpSSZOWlcBjtiaK11rMOF16+
imorMtloCFax91XYfMNpT\nNN1KjO+PzYwtDbfXr79xiPDzrQ/T+9GSZGJOYWZgo3a9w4XNYg5b7COGlmRsY1njQfjL5fDERVC3\nA5b9CO7aDmd9fUBBDfSMrSTXSqFdn6qKdjqyrs+NtDjbOmzFI0opWjrdcWunZdCnIoMXjxzqboHU\ncyoS9HW2nTWO6DPf1t7dLsLJsYU+k6221cn4PtmGMRUZqXtKa5eHrDj/bOMhlsrI48VQwacX5xdn\ns726FY/P3705W1pexZMEtrFq63N6YcjRj+HCH8Od22DJ18AS/VpOXz6/orJJz9gm2PX/UY9GWRlZ\n53AxrscaQ3FOGlXN0U9lxqLD7cPrV3GfisyxpeBweoOes3aw+4yxKXm9p4/mTMykqcMdKDSIJJqu\nIwa7NZm2EOMJ1kQ5O8IWAcNwbtNIpFiqdY3pxVBvMOZPsuPy+tlb20Z9m2vISv1FdCSwjTWuNvjn\nl+Gft+p70r78H1h8O1isg37po61deHyKklwrE7sztuooKiN9fkVDu6vXTaA4Rz8wsaEt8hlmsWru\nMDpjxLl4JN3o9tE/MBw61oElycTE7N5FGbMDBSTRTUdGsznbYGwa7zu12OHS14fGZ/Ueiz1QPBJ+\nKnK4OvsnmvE7HU1RU6SDQxcECkiaaRjCzdkiOhLYxpKaLfDHpbDtOVj6TbjhVbAXD9nLG+s1k3Kt\n+unNJi2qTdqNHS58ftVr2sZ4dzwc62zGPqt433yNfpHHglRGHmpopzTX1q9LxMzxmZg0ou70X9fq\nxGzSyEuPfKMMdcba8eDY+zUsSSbSU5LCFo+4vX463b64Z8PxkJpsZlxmSlS/k/VtLixJppCZa1F2\nGnnpFrZUtlDvcEr3/TiTwDYW+P2w5hF4fBl4nXpAO+9bYB7axWpjD1tJ9w16fFZqVGtswdaFirNj\n7/QQLePGHO89PuHaaukVkf2ngdMsZqYWpEd9Nlutw0l+ekpUbZRyQqyZ1YU5HcBuTQ6acRqGu51W\nohVnW6P6nazv3iQfqtOJpmnML85mzYFGOty+IWuALKIjgW008/uh5mP429Xw5j0w7UL48odQsmRY\nLlfe2IklyRRYmynMSouqt2DPriOGouzop31iNdwtn0IxpiL7dh9xe/1UNHUGDWygF5BsjzKw9S3C\nCed4h//eU4vh1un0gpPQU5GtxnFAcZ7mjZdJOdaojlSqc7giZmELJtkDP2uZiowvqT8dbTqb4OC7\ncOAdOPA2dNSDOQUufbdtqVMAACAASURBVEgv4R9gr7xolDd2MDnHiqk7Wyi0p7KxvP+hin3VBTne\nIzXZTEFGdNM+sTKm3uK+xhaiw39FU6fe/DjEvqPZE7N4aUu1PmUVoSikttUZMkD2ZWSsfacWw63T\nhetWAmM/YyvKsXL042rcXj+WpNDv++vanMwYH76y2NioDcG3BYjhI4FtNGiphK1/h/1vQfVGvft+\nWjZMOR+mLoOpF0D68PeM0/ewHS9CKbSnUbvtKD6/Cjs1Vtfav8sF6O+Oh6Pk38g44n3zzUrr7hfZ\nZ43tYIPRIzJ4YJvT3fR3Z40jcmBzOFkcRdcROD4V2S+wtTrJTE0KetZZtjU57BlxicqG46U4Ow2l\n9G0sJXmh30A0OFwR+zTOLcpC00Cp0NsCxPCQwDbStdfr+9AcNVC4AM7+X5i2TD8nzRS/ljpKKY40\ndrCk+3gPgAn2NLx+xbE+FY991TlcerGJufc74OIcKx8dju3U4mi0dHpIT0kK+457OOj9IpNp7JOx\nHWoIvofNMKs7sO2obg3brd3odhHtVGSaxUxKkqlfVWTfA0Z7yrZaaAnROBnGfsbWs6gpVGDrcHlp\ncwXvOtJTRmoy0wrS2VfXLhlbnElgG8m8bnhuhT79eOsqPbAlSH2bC6fHHzgLCmBi91626pau8IGt\nLXgX9OLsNF7+uCvitE+sEtnLMMd
moanPGtuhhvZePTL7ykhNpjTPFrG1VqAjfwybffXx9CkeCdGV\nHvTA1ubS974lm/v/m8T7nLt4mxTFJu3AHrYogtWpk7Opau4iM01utfEkxSMj2b/vhsp18KlHEhrU\nAI50d87o2ax3Qvc+qKMR9rLVOVxBp2KKcqz4VfTdS6LV0uUh25aYG2+uLaVfIDnY0E5ZmGkt0M8g\n21EdvuR/zcFGAIqyo9+TGGzNLNjmbIPxcwvVVsvI2DJCBOnRblymfiRMuKKm+iDFUKF8fdlJ/PWL\niwZ8TpwYGAlsI9XGJ2DTn2HJXXDylYkeDeXda2E9DzmMtq1WXYiiiGjeHQ9Ec6c7YU16c9L7d/g/\ndKwj5PqaYc7ELKpbukKW2te0dPHAv/dwZlkuC2M4gDPHltwr0Hp9+qb4CWGmIiF0I+TWLg+ZqUlj\n9tRms0ljoj0t7O9kXaCpceR1s4KMVBaW5AzZ+ER0JLCNROVr4fX/pxeGfOJ7iR4NoFdEJpk0Cu3H\nb4iZqUnYLOawB466vD6aOtxBM4TjLYyGNmNrTUBnf0OuzdJrja2pw01Lp4cpESoZ53SfPxasibJS\nim+9tB2vX/HAFXMDVanR0DO249lXQ7sLvyLkOl0gsIUIsC2d7jHZJ7Kn4ghFTYGMTdbNRiwJbCNN\naxU8vwLsk+GKx+NaIBLOkcZOirLTehWAaJpGoT0tbMbWEOip1//d7Xhj2mc4MrYE3XxzbBZauzyB\n/oxGRWSkI0ZmGwUkQdbZXthUxXv7GvjmJTOYlBtba7Rsa+9TsSOt04Xa+2YYq0fW9BQxsLW5SEky\nybrZCBaXwKZpWrGmaas0TduladpOTdPujMd1Rx1PFzx7LXic8Pm/Q5o98nPipLyxI+hhmIX2tLAN\nfAM99YLcSM0mPTAO5V42v1/R2uWJ+yGjhtzuVldGMDkUKPUPn7Fl2yxMtKf16xlZ2+rkR6/uYlFp\nDivOmBzzeHKsFlq6PPi6zxgLtlm+7zh6jr+vljHaALmn4mwrzZ0e2l3eoN/Xp9ZDdx0RiRevjM0L\n/I9SahZwBvBVTdNmxenao4NS8Mqdejf+zz4K+dMD3zrW7hryAovYhqYoP9bZqyLSUGgP31arzhG+\ngizaTg+dbi9bK1siPq7N6cWv4r8522D0izTW2Q41dGAxm6Iq+JgzMbPXVKRSim//czsen5+fXRnb\nFKTBbrWgFDi6iz4CGVuIqchQe98MY/XImp6Kc4yuOMF/L/ueLShGnrgENqXUUaXU5u4/twG7gYnx\nuPao4PPA+w/pzYvPuwdmXNrr2998cRu3PbUpQYPTp6XaXN7gGVtWGsfa3Tg9vqDPrYvQjb4o20pl\nc+Sg/fDb+7ni92twOCMcgtnd8ilRG4j79os82NBOSZ41qmKLOYVZHD7WQVv33/GlzdW8u6eeuy+a\nEfRnH9N4ugPVUYcTi9kUCGB9hdr7Zmgdo4eM9mT0MQ01k1DfFn7fpki8uK+xaZpWAiwA1gf53q2a\npm3UNG1jQ0NDvIcWf7U74I1vw89nwKr7YNan9A3YPSil2FLRwt7atsB0UrwZzY8nB8nYJnRXRtaG\nmI6sc7hINmshD/0szkmjqcMdctoH9J/Bq9uO4vUr9teF7ooBx9eGElfub/SLPJ6xleWFX18zzCnS\nC0h21Tioczj5wSs7Oa0kmxsXlwx4PPY+Z6zVterTaOGyv2yrJWjxiFJqzB5Z09OkCOey1TvkfLWR\nLq6BTdO0dOBF4C6lVL/yL6XUo0qphUqphfn5w98iKiE6GmHdH+APZ8MflsBHj8LkxfD55+CKJ8DU\n+5+kzuGiscON2+cfthOnIylv7L+HzWBUSYaajqzrPrIj1HpEpJsIwPbqVqq7X39fXVvYsRo38KwE\nFTgYa2xN7S48Pr358ZSC6LItozJye3Ur/7+9O4+PsywXPv67MpnJNlmbpUmT7imlZW/ZWkUoghQR\
nPEgRUUBRlsPR43J8fdVXX4+ejwt6XI7bUY6guIGyKcgiZRNxQShb25TSvU2aTJpmXyeZuc8fz/NM\nJslkMmkmM5OZ6/v58CGzZObp085zzX3f131d/++BrQyNBPn6FScf0xSkY7Qrtj0VGWUPW+h3Jum8\n7TRwTfcRW0m+G29ONo0RZhL6hkboHRrREVuKS1haj4i4sYLar4wx9yfqfVNGfzv84ePw+sMQHIbq\nU2DjN6w9avmT73MJTyawprWOvQP2sdrf1o/I6NpDuBp7k/ZkVf59dnuPyYTa17T3c3x1UcTnPLqt\nBVeWkJ0lMQQ2e8SWpHWgkjw3WWKN2A629zMSNDGP2CoKc6gqyuH25/bR3DXI595+PEtm+PddNi4Z\nxNc9FCrhNRkrk3LiVGSoz12ar7GJCLWleRG/bLVGyfJVqSMhgU2sr+u3AzuMMd9KxHumlKEe+NUV\n1tTjmTfBKVdD1eqYfjU8mWDPkV7OP75qto5yUgfb+6kpziMne+LWA2ftLNqI7bgoVdBDe9kmWWcz\nxvDo1mbWLZtH98BwDFORyans77DqRVp72fa0xpYRGe6EmmKefL2V0xaW8IH1S2Z8POFTkcYYWroG\nOT9KPUqwRnk7WibupxsdDad3YAPr36UzUxFutHO2jthSWaKmItcD1wAbROQV+7+Lp/qltDAyZKXw\nH34FNv0M3vblmIMawPbDXSwtL6Dcm8Pu1ugX9dmy/2gfi8sjZ/Xlul2Ue3NonmSTdusUfatK890U\neFyTTkXuaO5h/9F+Np5QTX1VITtjHLEl8+JbVuChvdfP3jan+HFsIzaANYtLyXVn8Y1NJ8eluoc3\nJxu3S2jvG6Z7YISB4cCkiTyO0oLIzUZHCyCn9z42cDpPDGDM2HXt0e0SOmJLZQkZsRljngMyb9NH\nYATu+yDs+xO880cTsh1jsf1wN6ctKqW1e5A9RyZ+g0yEA0f7edvq+ZM+XlOSS1OEepFOFfRo6xEi\nEnVD7GPbmskSuHB1FX1DI9y7pZGOPv+k3bE7+/1JL/k0z2sVHt57pJdyb860guwNb17KlWvrKPfG\n58IpIqF6kS0x1jgszbc2mQeDZsz6XleaF0AOV1eax8BwgLZe/5hEkSOhclo6YktlWnlkthgDf/go\n7HgILvoanPKeab9ER5+fps4BVtcUsbzSy+7W3gnfIGdb18Aw7X3+iHvYHDXFkauPjKb6R79I15Xl\nT1p95JFtLZy5ZB7l3hzqq6yRT7R1NqsAcnJHFPMKcmjrG2LPkb5pTUMCuF1ZcQtqjrJ8K9BGazAa\nriTfQ9AwYWtFuresCVc3SR1TX/egVXUkV6uOpDINbLPBGNj8eXj5l/CW/wtn/fMxvUxDs7XOsbqm\niGUVXroGhif0+pptB49aH+xo+6iqS3Jp7ow0bRNbe4+60sjTPrt8Pexu7eXiE63R4ooqa63ujShT\nsh39yU9Hd1rF7D3SO2UprUQoyXfT2T+ML8a2N6WTlNXqzJDkEQivYzo+sFl72LTqSGrTwDYbnvs2\n/PV7cMaNcO5njvllttt1A1fXFLO80rpAJnqd7UC7Nf052RobwIKSPPr8AboHxu5Fi1ZOK1xd2ei0\nT7hHt7UgQmgatLo4l8KcbHZFGbF19fuTljjiKCuwCg93xFD8OBFK8z209/tDpc+mqkpfOm6TuaNr\nYBi3S8hzp0b90tlUWxq5+khrT/QsX5UaNLDF24t3wJNfhBM3wUW3wgy+2W1r6qamOJeyAg/L7MDm\nFNVNlAP2iM3ZbxZJdSjlf+x0ZKwL7ZO1r3lkazNrF5WGAqOIUF/lZWfL5IGtI4mV/R3l3tHAOt2p\nyNlQWjC6xjavwBMxu3XM8+0vBuP7uHX2D1Oc58mI0Uq+J5tyb86EzhNTJUOp1KCBLZ5efxj+8Amo\nfxu8878nbLaeru2Hu1hlb9qtLsolz+1iT2tiE0j2t/VRWZhDv
mfyNYXJNmn7uoco8LimbEoZadpn\n75FeXm/pYeMJ1WOeu6KqkF1RRq2d/f6kFUB2lBWMBvJUmIp09qW1dEXvdB7+fJg4Fdk14Kc4gyra\n15VN7MvmFEBWqU0DW7y074UHbrY6XW/6GbhmNmro94+wt60v1M4kK0tYVlnA7iSM2BZPUadwQUnk\nTdq+nsGYLqSRpn0e3dYCwEUnjM3GXFFVSHufn7beoQmvMxII0j04kvTkBmdTdKzFj2dbWYGHQNCw\nq7V3ysQRCKvwH2EqMtnTvIlUVzo2qal3aIQ+f0CrjswBGtjiYXgAfnstSBZceSd4Zn4x29HcjTFW\nZ2XHsgpvaNPvTBljuOb25/nl3w9Efd6B9r6INSLDlXtzcLtk4oitK7Zvt9a0j2fMtM9j21o4pa4k\n1KXbEUogiTAd2T1orfElq+qIY549FbloXmzFj2ebE4waOwZiCmyFOdlkZ8mEslqdGVAAOVxdWR6H\nOwcZsXvrtYY2Z+uILdVpYIuHRz8FLVvh8v+BkoVxeUmn4sjqsPJHyyu8NHUO0O+fvGDwdF7/z7va\n+OojO0If2PH6/SP4uoemLOOVlSXML7YyI8PFOmKDsSn/h9r72drUFcqGDLciSsq/cyFOfrq/9f6p\nMA0JUBZWEHqqjEhw9r5NLKuVCQWQwy0syycQNKGkm1CWr47YUp4Gtpl6+Vfw0s+tqvwrLozby25v\n6qY030112DdsJ4Fkbxw2aj/e4CNLwB8I8p+P74z4HKdtR7TEEUd1sfXt1mGMwdc9FNOFFMZO+zy6\nrRlgwvoaWPUUS/LdEVP+U6HqCFgjJG9O9qS1LxMtfPow1r+PUntTd7iu/mGKMiiwhdcxBSsjErTq\nyFyggW0mWrbCw5+AJefAeZ+N60tvb+7ihAXFYzLQnBFAPDIjNzf4WLuojPevW8w9WxondG4Gq/gx\nMOUaG1jrbE1hI7bO/mH8I8GYKzSET/s8uq2FExYUhZJKwokIKyoLI05FOhfiZCePuLKEP3zkTdx4\nztKkHocj/HxUxTAV6fxOeLr/SCBIz9BI0jNOE2n8Ju1We8RWoVmRKU8D27Ea7LLW1XJL4F23Q1b8\n9vb4R4LsbOmZUIV9cXk+WcKM19kOtfezo7mbC1ZV8ZHz6ynL9/ClhxombJB2isAunGKNDaw9Zr7u\nwVDPON80v93WlVrTPi8d7OTlg50RR2uO+iovb/h6JhxvRwqVfFpcXkCeJzX2e4U3Fa2OMbA5m7od\nzvplskfDiVRdnIsrS0IzF77uQXLdWnVkLtDAdiyMgd9/GDoOwKafgjd6tfQ9R3r5wdO7uf5nL4Qq\neUSzq7WH4YBhdU3xmPtzsl0sLMufcc3IJ3b4ALhgVRVFuW4+ceEK/rG/PZSJ6DjQ3k9ZgSemi1lN\nSR4jQROqpTfd9QhnuvO2Z/cCsPGEyWtTHje/kO7BkdB7ODqTXNk/VRXmZuPksMT691GaP7YnW6a0\nrAmX7cqipiQ3lNTkdM7OhH18c51+9TgWf/9v2PEgXPAfVpPQcYwx7PT18OjWFh7b1hKqSC9iXcD/\n/dLo1f2dxJETIvTNcmpGzsTmBh/1ld5QUsi719bxi78d4CuP7GDDykpy7coSB45OnRHpGE35tzLv\nQnUip5E8AlbQXTm/MGpF/PpKOzPS1zMmy6+zf5gssbL61CinlU6/PxDzaMNpNmqMQUQyqmVNuPC1\nX6tprq6vzQU6Ypuug89bdSBXXgLrPjLmoe7BYb7+2Ots+OafuOg7f+a7T+2iOM/NF96xir9+egMX\nn1jN719pwj8SjPoW25u6KPC4Iq5tLavwsq+tLzTlN11d/cM8v6+dC1aN9nXLdmXx/y9ZRWPHAHf8\nZV/o/v1t/SyKIXEErHqRMLpJ26lLWBHjhcCZ9oHISSPhJsuM7BywymnNpON0uirJdzO/OPbRRmm+\nm+GAoc8fADKrZU04p44pW
CM2reo/N+hX2+lo2QZ3vRuKa+GyH0wol/UfDzVw30uNrFtWzgfftIQL\nV1eNKb+zaU0tD7/WzJM7fGw8cfKL9/bD3RxfXRTxAr2swos/EORQe/8xddN+emcrgaDhravGNixd\nt7ycC1ZV8YOndnPFabUU57s53DXAonm1Mb2us9+s2c6M9PUMUpLvDo3+phI+7RMpzT/cPG8O5V7P\nhMCWCgWQU9WieQVkTWMKzUk46ejz483JzqjK/uHqyvJo6x1iwB/A1z3IecdFX3ZQqUFHbLFqfR1+\nfhlk58E1D0BeyZiH9xzp5b6XGrl+/RJ++aEzed9ZiybUlHtzfQXzi3K5Z0vjpG8TDBoamrvH7F8L\nN9OakZsbfFQU5nBKbcmExz578fGh9H+r2n704sfhinLdeHOyQ5mRvu6hKav6j7e8wsuKKi/1VZN3\n3HbUVxbyxrhu2l0pUCcyVX3nqlP41rtPjvn5zl5AJ4EkE9fYYHSKfEdLN/3+gJbTmiM0sMWibTf8\n/FIr8/G6h6BsYhr3d57YRa7bxc3nLpv0ZVxZwuWnLeCZna2hNajx9h3to98fYPWC4oiPL59Byv/Q\nSIBndrby1uMrI44Gl5QX8IH1S7hnSyOPbLX2kkVrVzNeTUluaCqytXsw5tRyx61XnMSd158R03NX\nVHnZNS4zsiMFKvunqqJcN0VT1OwM51RvabfX1lJlj2CiOYFty/4OQPewzRUa2KbSvhfufAcEA3Dt\ng1C+fMJTdjR389Crh7l+/ZIpm0ResaaWoIH7X2qK+HikiiPhivPdlHtzjimB5G97jtLnD4xZXxvv\nwxuWU5bv4btP7gJi28PmqC7OC1VpaOkepGqaC+2VhbmhTgFTWTG/kD5/YMLeuUwbUcyWknEV/rsG\nhinwuHC7MuuS4WzS3nLADmy6h21OyKx/pdPVcQDuvBRGBuG6B6FyZcSnfWvzGxTmZnPDm6fekLu0\nwsvaRaXcs+VQxG7Y2w934XZJKPMvkmUVBceU8r+5wUe+x8W6ZeWTPsdJ/x8JGgpzsqdVd7GmxOqk\nHbDT/mez9JBTM3JX2HRkZ7+fkgxLbpgtZeMKIVtfGjLv3JZ7PeS5XbxoBzZNHpkbNLBNpqvRGqkN\ndcO1v4OqyCn6rxzqZHODj5vOWUpxjEHgyrV17D3Sx0sHOyc81nC4m+PmF+LJnvyvZpmd8h8pME4m\nGDQ8scPHOfUVUyZ0XHX6Qo6vLmLF/MJp7dlZUJLL0T4/TR0DBM3sTtussAO/s5XCPxKkzx9IegHk\ndFGc50YE2kNrbP6MKqflEBFqS/NC3SR0jW1u0MAWSXezNVIb6LASRaonX3T/5uM7KSvw8P71S2J+\n+YtPqibP7eLeLYfG3G+MYVtTF6urI6+vOZZXeOkaGObouLYi0Wxt6sLXPRR1GtLhyhLuuuFMfvS+\nNTG/Pow2HH2l0QrYszliK853U1WUE8qM7BxwNmdn3sV3NriyhKJc95ipyEzNOHWKB+S5XbpHco7Q\nwDaOCQY48L23M9TZTODqe2DB5Bf35/ce5c+72rjl3GV4p/EP3puTzcUnVvPQq80M2PuEAJq7Buno\nH2b1gujFc0OZkdNYZ9vc4MOVJWxYGVu6ckm+J+Y9aA4n5f/lg85C++xO26yoKgxNRXaFymll3nTZ\nbCkr8ITKlGVay5pwTgJJVVGOVh2ZIzSwjbPz74+waHgPnx28lluezWZwOBDxecYYvvn4G1QV5fC+\nsxZN+302ra2ld2iEx7Y3h+4bTRyZYsRmB7bpNB21ih6XzmpLF6eT9ssHZ3/EBlbK/67WHoJBE7oA\nJ7sAcjqx6kWGjdgydDTsNMIdv31HpS4NbOMMPX8HXaaA486/jscbfFxz+/Oh0UC4Z3e18Y/97Xx4\nQ33Mm5DDnbmkjIVl+fz2hdE9bduauhCB46uj7+OqLsolz+1iT2tsCSQHj/az09cT0zTkTDj
lrRoO\nd5Ml1sL7bDpuvpfB4SCHOvpDdQ0z9eI7G8Ir/HcO6IhN19fmDg1sYQY6fKzq/BOvztvIjeev4rtX\nncqrh7rY9OO/0tw1mlZujdZ2Uluax7vX1h3Te4kIV6yp5W97j4b6PW0/3M3S8gLyPdGnNbOyhKUV\nBTGP2B5vsIobX7gqekWPmcrJdlFRmIM/EKTcm0P2LKeGOxu53/D1hk1FZubFdzY4Ff4HhwP4R4Ix\nJ0elm4WhqUgdsc0VGtjC7HniNtwSoGj9hwB4x8k1/OwDp3O4c5DLf/hXdtmJCo83+HitsYuPnl8f\nNXtxKu9aU4sI3GtXImk4bPVgi8XySm/Ma2ybG3wcV1UYU/uZmaqxR22JuAjUV47WjOzQyv5xV2ZX\n+M/UzdmOhWX5FHhcKdMRXU1NA5vDGObtvJvXslZy0qlnhe5et7yc39x0FiNBwxU/+hv/2NfOtx5/\ng6UVBfzTqQtm9JYLSvJ40/Jy7t3SSFvvEIe7BifdmD3esgovTZ0DY5JPIuno8/PC/vZZn4Z0OAkk\niajQUJjrZkFJHm/4eugcGMbtEgpSpAdaOigtsDoCOJ2jM3WPYEFONs/8n/O4cm1sdVNV8mlgsx1t\neIrqkUYOL3v3hHJTq2uKuf+f11FW4OGq2/7GTl8PH3/rirhMtV2xppamzgHueG5f6L1iEWs37ade\nbyVoSFhgc1L+E7WR1Wo62ktnv5/iPI9mrcWRM6273+4hmKkjNrC6VMz21LqKH/2bsh390210m3xW\nnX9NxMfryvK59+azOXVhKactLOHtUarzT8fbVs+nMDebn4QCW2wjtuUxFkPe3OCjqiiHE2Oc4pwp\nJzMy1j5sM3VcVSF7Wntp6/Xr5uw4czpv72+zkpR0/VLNFRrYANN3lMWtT/KXgreycH7FpM+b583h\n3pvP5p6b18Wt51eu28WlJ9fgHwmyoCQv5jWixeX5ZEn0vWyDwwGe3XWEtx5flbAeZYmcigQrgcQf\nCPLqoU698MZZybjAlskjNjW3aGADmp79KR6GkTXXTvlcEQk1xIyXTXZmZayjNbAyEBeW5UetGfmf\nf9xJvz8Qt9FlLFZUFZIlxNR6Jj7vZ41cW3uGNHEkzkoLrEC276gd2PSLg5ojtD6MMXhe+QWvmOWs\nX39uUg7h5Npirjq9LuaqII5lFd5Jq/w/8HIjP3luH9edvYh1yycvehxvyyu9vPT5CxIWZJZXehEB\nY8jYkk+zxZmKPHC0nywB7xTbUJRKFRk/Yhva91cqh/azo/pyCqfRryqeRISvveskLlw9vX1myyq9\n7GvrIxAcWwx5W1MXn75vK2csKeNzl6yK56HGJJEjp3xPdqi1yGxWVclEzt9je5+f4jx3wqazlZqp\njA9sR575MT0mj4VveV+yD2Xalld48QeCNHb0h+472jvETb/YwrwCDz9872kZ0T/LmY7UNaD48mRn\nhbZP6LlVc0n6X/WiGeig8tCjbHadw1nHLUz20UzbskqrCagzHTkcCPIvv36Jtt4hfnzN2imbnqYL\npzeb1omMP2fUVqznVs0hGR3Yel74NR7jp3v1e+OeEJII4/eyfeWRHfx9bztfvfxETqxNTHp/KnAC\nm2ZFxp/TcFRHbGouydzAZgz+53/Ka8ElvPmc85N9NMekJN9DudfD7tZe7tvSyE//sp/r1y/h8tMy\nq0LCmUvLWDm/MGF79TKJ82VBE3PUXJKxaU6m8QXm9e3iN0Uf4ZY5XANuaYWX53a18btXDrNu2Tw+\ne/HKZB9SwlUX5/HYx85J9mGkJWd6V0fDai7J2BFb559/Qp/Joeysq5N9KDOyvNLL4a5BKrw5fP/q\n07Tsj4ornYpUc1FmXgX72ynY/XseNuvZeFp9so9mRk5bWEqBx8WPr1kTuggpFS/OSE0Dm5pLMm8q\nMhgkcN8NSHCYXYvfy5VzfIrlijW1XHJS9TE1O1VqKs5
UpAY2NZdk3IjNPPsNXHue4IvD13LuOecm\n+3DiQoOami3OpnctV6bmkswKbHuegme+ygOB9VSedwvrE1hqSqm5qK7UKmrtdG1Qai7InKnIrkYG\n7/4AB4ILePnkL/DF8+f22ppSiXDqwlKe/uS5LCkvSPahKBWzzAhsI34677wal3+IO+u+zZf+6XRt\nSKlUjDSoqbkmI6YiW+/7JCXtr/LDoo/xuesu1ZR4pZRKYwm7wovIRSKyU0R2i8inE/W+vr/8gsod\nd/Kb7Ev54E0fJ19bbyilVFpLSGATERfwA2AjsAp4j4jMej+V9n2vUrT533iZlZx5w/cypiiwUkpl\nskSN2M4Adhtj9hpj/MDdwGWz+YZ93R30/+Jqek0u2VfdyeKqktl8O6WUUikiUYFtAXAo7Hajfd8Y\nInKjiLwoIi8eOXJkRm/42v23Uh1o4uCG73Piysyrn6iUUpkqpbIojDG3GWPWGmPWVlRUzOi1Tn/v\nl9h50V2seculyyee0wAABkhJREFUcTo6pZRSc0GiAlsTUBd2u9a+b9Zkuz2sOnvjbL6FUkqpFJSo\nwPYCUC8iS0TEA1wFPJig91ZKKZVBEpL7bowZEZEPA38EXMAdxpjtiXhvpZRSmSVhm7qMMY8AjyTq\n/ZRSSmWmlEoeUUoppWZKA5tSSqm0ooFNKaVUWtHAppRSKq1oYFNKKZVWNLAppZRKKxrYlFJKpRUx\nxiT7GCISkSPAgRm+TDnQFofDSWd6jqLT8zM1PUfR6fmZWqznaJExZspCwikb2OJBRF40xqxN9nGk\nMj1H0en5mZqeo+j0/Ewt3udIpyKVUkqlFQ1sSiml0kq6B7bbkn0Ac4Ceo+j0/ExNz1F0en6mFtdz\nlNZrbEoppTJPuo/YlFJKZZi0DWwicpGI7BSR3SLy6WQfTyoQkTtEpFVEtoXdVyYim0Vkl/3/0mQe\nYzKJSJ2IPC0iDSKyXUQ+at+v5wgQkVwR+YeIvGqfny/a9y8Rkeftz9pv7GbCGUtEXCLysoj8wb6t\n5yeMiOwXka0i8oqIvGjfF9fPWFoGNhFxAT8ANgKrgPeIyKrkHlVK+Blw0bj7Pg08aYypB560b2eq\nEeDfjDGrgLOAf7H/3eg5sgwBG4wxJwOnABeJyFnArcC3jTHLgQ7gg0k8xlTwUWBH2G09PxOdZ4w5\nJSzFP66fsbQMbMAZwG5jzF5jjB+4G7gsyceUdMaYZ4H2cXdfBtxp/3wn8M6EHlQKMcY0G2Nesn/u\nwbo4LUDPEQDG0mvfdNv/GWADcK99f8aeHwARqQXeDvzEvi3o+YlFXD9j6RrYFgCHwm432vepiaqM\nMc32zy1AVTIPJlWIyGLgVOB59ByF2NNsrwCtwGZgD9BpjBmxn5Lpn7XvAJ8Cgvbteej5Gc8Aj4vI\nFhG50b4vrp+x7Jn8skovxhgjIhmfJisiXuA+4GPGmG7rS7cl08+RMSYAnCIiJcADwMokH1LKEJFL\ngFZjzBYROTfZx5PC3mSMaRKRSmCziLwe/mA8PmPpOmJrAurCbtfa96mJfCJSDWD/vzXJx5NUIuLG\nCmq/Msbcb9+t52gcY0wn8DRwNlAiIs6X5Ez+rK0HLhWR/VjLHxuA/0LPzxjGmCb7/61YX47OIM6f\nsXQNbC8A9XY2kge4CngwyceUqh4ErrN/vg74fRKPJans9ZDbgR3GmG+FPaTnCBCRCnukhojkARdg\nrUM+DVxhPy1jz48x5jPGmFpjzGKsa85Txpj3oucnREQKRKTQ+Rm4ENhGnD9jabtBW0QuxprvdgF3\nGGO+nORDSjoRuQs4F6uStg/4AvA74LfAQqxuClcaY8YnmGQEEXkT8GdgK6NrJJ/FWmfL+HMkIidh\nLey7sL4U/9YY8yURWYo1QikDXgbeZ4wZSt6RJp89FflJY8wlen5G2efiAftmNvBrY8yXRWQecfyM\npW1gU0oplZnSdSp
SKaVUhtLAppRSKq1oYFNKKZVWNLAppZRKKxrYlFJKpRUNbErNMSLyjIh8KNnH\noVSq0sCmlFIqrWhgU0oplVY0sCmVIHaDxU+KyGsi0mU3ncy1H7vBbkTZLiIPikhN2O9dICKv27/z\nfUDGve71IrJDRDpE5I8isijBfzSlUooGNqUS60qsZq9LgJOA94vIBuCr9mPVWCWF7gYQkXLgfuBz\nWKXQ9mAV28V+/DKssl+XAxVYJcHuStCfRamUpCW1lEoQu+r754wxv7Rvfx0owmrYedQY8yn7fi9W\np+V64BzgFmPMWfZjgtVr8N+NMT8RkUeBe40xt9uPZwG9wPHGmAOJ/PMplSp0xKZUYrWE/dwPeIEa\nrFEaAHaX6qNYDSlrCGuaa6xvouFNdBcB/yUinSLSidUhXdBmliqDaaNRpZLvMFaAAkLtPOZh9e1q\nJqy3oD1iC+81eAj4sjHmV4k5VKVSn47YlEq+u4APiMgpIpIDfAV43hizH3gYWC0il9vNKv8VmB/2\nuz8CPiMiqwFEpFhENiX28JVKLRrYlEoyY8wTwOexOnc3A8uwGlVijGkDNgFfw5qerAf+Eva7DwC3\nAneLSDdW08aNiTx+pVKNJo8opZRKKzpiU0oplVY0sCmllEorGtiUUkqlFQ1sSiml0ooGNqWUUmlF\nA5tSSqm0ooFNKaVUWtHAppRSKq1oYFNKKZVW/heu7atRFuppSwAAAABJRU5ErkJggg==\n", 928 | "text/plain": [ 929 | "
" 930 | ] 931 | }, 932 | "metadata": { 933 | "tags": [] 934 | } 935 | } 936 | ] 937 | }, 938 | { 939 | "metadata": { 940 | "id": "RcyAawnDsT1v", 941 | "colab_type": "code", 942 | "colab": {} 943 | }, 944 | "cell_type": "code", 945 | "source": [ 946 | "solution.append(solution[0])\n", 947 | "route = np.array(solution)" 948 | ], 949 | "execution_count": 0, 950 | "outputs": [] 951 | }, 952 | { 953 | "metadata": { 954 | "id": "jb6e9xHEsZMh", 955 | "colab_type": "code", 956 | "colab": {} 957 | }, 958 | "cell_type": "code", 959 | "source": [ 960 | "import matplotlib.pyplot as plt" 961 | ], 962 | "execution_count": 0, 963 | "outputs": [] 964 | }, 965 | { 966 | "metadata": { 967 | "id": "7eBq9vj1sZEg", 968 | "colab_type": "code", 969 | "outputId": "500d011b-3bce-4871-e403-290d48fb3f12", 970 | "colab": { 971 | "base_uri": "https://localhost:8080/", 972 | "height": 354 973 | } 974 | }, 975 | "cell_type": "code", 976 | "source": [ 977 | "plt.figure(figsize=(5,5))\n", 978 | "plt.plot(route[:divide,0],route[:divide,1], marker='s')\n", 979 | "plt.plot(route[divide-1:,0],route[divide-1:,1], marker='s')\n", 980 | "plt.title('cost: 9.175', fontsize=14)" 981 | ], 982 | "execution_count": 0, 983 | "outputs": [ 984 | { 985 | "output_type": "execute_result", 986 | "data": { 987 | "text/plain": [ 988 | "Text(0.5, 1.0, 'cost: 9.175')" 989 | ] 990 | }, 991 | "metadata": { 992 | "tags": [] 993 | }, 994 | "execution_count": 521 995 | }, 996 | { 997 | "output_type": "display_data", 998 | "data": { 999 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAT8AAAFACAYAAADK0nu/AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzsnXd4lFX2xz93Jr2HFEoCJIQOIiAd\nFZAi2FCxd+xl/bnqWtZ1XesWXd1d6wqirLt2bIhIVUQFpIj0HlqAdALpyczc3x93EoaQSSbJO/1+\nnud9krz1JJn5zjn3nnuOkFKi0Wg0wYbJ2wZoNBqNN9Dip9FoghItfhqNJijR4qfRaIISLX4ajSYo\n0eKn0WiCEi1+Go0mKNHip/EIQoixQggphEhu5fX3CCG2CSEqhRA7hBA3uHDNH4QQPwkhyoUQpyS0\nCiFustvU2DbU4bzGjt/Zmt9D4zuEeNsAjaY5hBB3AX8DbgN+BoYBM4UQR6WUXzVxaTjwGbAMeKyR\n4x8BCxrsewEYBaxtsP82YJ7Dz8dctV/jm2jPT1OPUDwohNglhKgWQuQIIf7icPw0IcQSu/dVLISY\nLYSIb3B8qRDiuBCiTAixQQgxTgiRAXxnP63A7jnNboFp1wMzpZQfSCmzpZQfAjOAR5q6SEr5hJTy\nRWC9k+OVUsrcug04DlwIzJKnLn0qcTxXSlnZAvs1PogWP40jfwb+CPwF6AdcDhwEEEJEAwuBMpTn\ndQnKQ3rb4fr3gSP24wOBJ4Eq+z2m2c/pB3QE7rPfty70zGjCrnD7fRypBIYJIUJb/Fs65wogmpN/\npzr+JYQoFEKsEULcKYTQ7x0/R4e9GgCEEDHA/cBvpZR1b/7dwEr799eghOF6KWWp/Zrbge+EEN2l\nlLuBrsDfpZTbHa6vu3+x/dt8KWWhw6OPATuA2ibMWwjcIoT4DBWOngHcCoQCySjBNYLbgXl2L9CR\nJ1CeaxkwHnjR/txnDXquxgto8dPU0RflYS11crwPsLFO+OysAGz2a3cDLwFvCSFutN/nUwchbBQp\n5efA583Y9gzQwf48AeQB/wEetj+/zQgh+gEjgfMbsfEZhx9/FUKYgT+gxc+v0a67xggkgJTySZQQ\nfoEKiTcKIW5u883V2NzNQBSQAXQB9gGlQEFb72/ndlR43nACpDF+BuKEEO0NerbGC2jx09SxDahG\nhXXOjp8mhIh12DcK9RraVrdDSrlLSvmylPJ8YBYqPAWosX81t9ZAKWWtlDJHSmkFrkKFqG32/IQQ\nEahJlbddvN9A1BhkSVufrfEeWvw0ANjD2X8BfxFCTBdCZAkhhtnTTADeAyqAd+2zumcDbwKfSSl3\nCyEihRCv2fP5MoQQw4Ezga326/ejPMTzhRAp9jFGhBCXCCG2CyHSnNkmhOgphLheCNHDbtOHQH8c\n0lcau48QoosQYiDKW0QIMdC+xTR4xGVAPI1MdAghLhRC3CaE6G//m9wKPA3MkFJWu/TH1fgmUkq9\n6Q0pJagPw0eBbJSndhB4zuH4aaixvErgKDAbiLcfC0PN9u5DeZCHUekocQ7X/xE1OWEDZtv33YQS\nxYwm7OqDSlepQE2QfAH0anDOKfex2ycb2cY2uPZ7YL6TZ0+2P7sUKAc2oWaqQ7z9/9Jb2zZh/wdr\nNBpNUKHDXo1GE5Ro8dNoNEGJFj+NRhOUaPHTaDRBSbPiJ4R4WwiRL4TY7OS4EEK8LITYLYTYKIQY\nbLyZGo1GYyyuLG+bDbwKvOvk+BSgh30bDrxh/9okycnJMiMjwyUjNRqNxlXWrVtXKKVMae68ZsVP\nSrm8mYobU4F3pcqZWSWESBBCdJRSNrnYPCMjg7VrG5ZM02g0mrYhhNjvynlGjPmlYS97ZCfHvk+j\n0Wh8Fo9OeAghbhdCrBVCrC0oMGo9ukaj0bQcI8TvENDZ4ed0+
75TkFLOkFIOkVIOSUlpNiTXaDQa\nt2GE+M0FbrDP+o4AjjU33qfRaDTeptkJDyHEB8BYIFkIkQP8CVVBFynlv4H5wHmoYpYVwHR3GavR\naDRG4cps79XNHJfAPYZZpNFoNB5Ar/DQaDRBie7hodH4IEOeXUxhWc0p+5Njwlj7+EQvWBR4aM9P\no/FBGhO+pvZrWo4WP41GE5TosFej8SEOFlewZFuet80ICrT4aTRexGqTrD9wlCXb8vl2ex4788q8\nbVLQoMVPo/EwpVW1/LCrkCXb8li2o4Di8hpCTIJhme24cmgXxvdOZezfl3nbzIBHi59G4wEOFlew\ndFseS7fnsyq7iFqrJD4ylHG9Uhjfpz1n90whPjK0/vzkmDCns70aY9Dip9G4AatN8utBFc4u3XYi\nnM1Kiebm0Zmc0zuVM7omEmJufM6xLp3l8n+vIMRk4oPbR3jM9mBBi59GYxBl1RZ+2FnAkm35fLcj\nn+LyGswmwbCMdjx+fmcm9GlPRnJ0i+4ZEWqmtMriJouDGy1+Gk0bcAxnf84upsZqIz4ylLH2cHZM\ng3C2pUSEmikorTbQYk0dWvw0mhagwtkSJXjb8tmRVwpAt+RobhzVlfF92jOkiXC2pUSGmqmqtRpy\nL83JaPHTaJqhrNrCj7vs4ez2fIrs4ezQjEQeP78P5/ROpVtKjFueHRFqolKLn1vQ4qfRNELO0QqW\nbstnyba8+nA2LiKEsb1SGd8nlbE9U4mPan046yrK87O5/TnBiBY/jQaw2SS/5pwIZ7fnqnA2Mzma\nG0baw9mMREINCmddJSLMrD0/N6HFz1u80APK80/dH50KD+3yvD1BSHm1hR92FbJ0Wx7f7cinsEyF\ns0O6JvKH8/owvo/7wllXiQgxU2OxYbNJTCbhVVsCDS1+3qIx4Wtqv8YQDpVUsnRbHku25bNqTxE1\nVhux9nB2Qp9UxvRMISHKdxKJI8PMAFRZrESF6berkei/piagsdkkG3JK6sfv6sLZjKQorh/ZlfF9\nUhma0c7j4ayrRIYq8aus0eJnNPqvaRC6+KTncfY3T4oJ48+XnMbSbXl8u72AwrJqTAKGZLTjsfN6\nM75Pe7K8HM66SkSoEuUqi570MBotfgZhaPHJmefAwGuh/zSITGijZYGLs79tUVkNd/x3HbERIYzp\nmcKEPu0Z28u3wllXiXDw/DTGosXP01hqYNHjTZ9TWwlfPwALH4PeF8Cg6yBzDJh8MzTzBqVVtU0e\nf//W4QzN9N1w1lXqxE8nOhuPFj8D2FdY7tqJxw/DxzdCzmoIjVQi15DoVLhrBRxeD7++B5s+gc1z\nIL4znH41DLwG2mUa+wv4AVW1VtbtP8qKPYWs2FPExpxjTZ4/qnuyhyxzL5Fa/NyGFr82YLHamPXj\nXl5avLP5k/cuhzk3Q00FXPa2CmmbIm2w2iY9B9vnKSFc/gIsfx66ngmDroW+UyGsZQvl/YVaq40N\nB0tYsaeIFXsK+WV/CTVWG2aT4PT0eO4ak8Wr3+32tplup262V+f6GY8Wv1ay9fBxHvl0I5sONe2B\ngIQf/wlLn4J2WXDjPEjt7fqDQiPgtMvUdiwHNnwAv74PX9wF8x+CfhfDwOugywgQ/psHZrVJth05\nXu/Zrd5bTEWNFSGgb8c4bhzVlVFZyQzNbEdMuHrZBoP4RYToMT93ocWvhVTVWnnl2128+X02FpsE\n4PzTOvLz3qJTBuBjqODlyJmw5GfocxFMfQ0i4lr/8Ph0OPshOOt3cGAlrH8PNn8O6/+nhHXQtSo0\njuvUll/RI0gp2Z1fVu/Zrcou5lilGsfLSonmsjPSGZWVxPDMJBKjG5+oCIaCn5FherbXXWjxcxFn\naRURISZevnoQ5obZ9/nb4KProHgvTHoWRv7GOM9MCOg6Sm1T/gZbv1Rh8dKn4dtnIcs+W9z7fAgJ\nN+aZBnCgqKLes1uxp4jCM
lWqKS0hknP7tWdUVjIjs5JoHxfh0v2CIYWofsJDe36Go8XPBcqqLU7T\nKqosNob/eclJxy8yreCvoTOpEJEk3/QVZIx2n3HhMcrjG3QtFO1RIfGGD2DOdIhIgNMuV8c6DvR4\nWJx3vIqVds9uxZ4ico6qCZ6U2HBGd09iVFYSo7KS6dwuyqN2+RP1qS56zM9wtPg1w3fb8/nD55ua\nPKdO+EKx8FjIe0wPWcgaW0/uqb6P1e4UvoYkZcH4P8K4xyB7mfIGf3kX1syE1H4qZWbAFRDtnpnQ\no+U1rMouqg9l9xSoWfD4yFBGdGvHbWd1Y1RWEt1TYxB+PD7pSfRsr/vQ4ueE4vIanv5qC1/8epge\nqc2vBmhPMa+FvcwQ005mWabwF8vVWAihpKLG88m1JjN0H6+2yqOw+VM1Prjw97D4j9BzshLC7hPB\n3PqXQFm1hTV7i/lpt/LstuUeR0qICjPbO5F1ZlRWMn06xp06LKBxCe35uQ8tfg2QUjJ3w2Ge+mor\npVW13De+B3ePy6LX4wucXjPCtJVXQl8mimp+U3Mv82wj648NfHox3ZKjGdg5gUFdEhjYOZHeHWM9\nl3wbmQhDb1Vb3lblDW78SKXPRKfC6Veq2WIXZqCraq38sv9ovWe3IecYVpskzGxicNcEHpjQk1Hd\nkxiQnuD3ycW+gtkkCDPrgqbuQIufA0eOVfL455tZuj2f0zsn8Py0AfTqENvEFZLbzfN4OOQj9sv2\nXF37OLtl+klnPHRuL349WMLyXYV8tv4QAOEhJk5Li68Xw0FdEugYH+H+ULB9Xzj3OZjwJOxarGaJ\nV70BK16BtDNOWVJXa7WxMaeEFbtVKLvuwFFqLCrXbkB6PHeO6caorGTO6JpY76FojCci1ES1Lmhq\nOFr8UJU/3l99gL9+sx2rTfL4+X2YPjrzpFCtYVpFDBW8EPomU8xrmG8dxsO1t1PGqQP394zrDiiP\n8lBJJesPlPDrwRLWHzjKf1buZ+YPewFIjQ0/SQwHpMe7r4qHORR6n6e2sgLlCf76Hnz9ALYFvyc7\neRyfy3H8J7cLZTUqnadPxzhuGNGVUd2TGJrRjtgI91cx1igiw8w6z88NBL34ZReU8ehnm1i9t5jR\n3ZP4yyUD6JJ0qoidlFbhmMYy8VmeWNqLstpT15o65psJIUhPjCI9MYoLT1d5eDUWG9uOHK8Xw18P\nlrBwSx4AJgG9OsTVh8uDOieQlRJjaEFLKSV7KiJYIS5gRcwIjhesYXL1UqbmLuchsYDbwttT0Hca\nKWdNJyGtp2HP1bSMiFAzVRYtfkYTtOJXa7Ux84ds/rlkFxEhJp6/bACXn5HefOi5aQ7MvRfCYuDG\nuZBxJmtHtc6GsBATp3dO4PTOCdw4KgNQEy0b7GK4/mAJ8zYe5oPVBwCIDQ/h9PqxQ7UlxZzI43Ol\nrNbB4pNz7eraIqYlRDKq7yhiu19IZZcY4o8sJWH9eyRsfwO2vx4US+p8lchQ7fm5g6AUv82HjvHI\npxvZcvg4k/t14Omp/UhtLrHWUqNmSn/+N3QeAZfPhriOhtvWLjqMcb1TGdc7FVAheXZheb1nuP5A\nCa8v24PVvrqkS7uoejFsqqzWI3M2siK7kIPFKtcuOSaMkVnJjK7PtYs8WfiTpqnxv2OH7Evq3gu4\nJXX+QkSo7uPhDoSU0isPHjJkiFy7dq1Hn1lVa+VfS3cxY3k2iVFhPDO1H1NOc0HAjh+BT26Egz/D\n8Ltg0jNq3MxLVNRY2JRzrF4M1x88St7xphtbx0WEMKKbPbG4ezI9WpprJ+WJJXVbPofacr9bUuev\nXDVjJTYbfHznyOZP1iCEWCelHNLceUHj+f2cXcSjn21ib2E5VwxJ5w/n9XWt9eC+H+GT6VBTDtNm\nqQIDXiYqLITh3ZIY3i2pft+RY5WM/Mu3Tq9Z/8SktuXa+emSukAgMtTcuqK4miYJKPFzuv4
21ERV\nrY3O7SL53y3DObOHCyscpFQpIEuehHbd1Pheah/jjTaIjvGRTR43NMnYcUldcbZaUverbyypC0Qi\ndftKtxBQ4ud0/W2tjVvOzOTBST1dSx+pOg5f3gPb5hpTjSWQadcNznkcxv4e9n6vwmIPLqkLBiJC\nzHp5mxsIKPFrij9e0Ne1E91ZjcXNeLXEk8msQt+sc+xL6j5TYbHBS+qCkYgwLX7uQL8KHWkkjcWf\n8JkST5GJMPQWteVvUytJWrmkTqM8P53qYjxa/MBjaSxBSWqfk5fU/fqe8yV1L/RovGl7dCo8tMvT\nlvsMkWEmqiw2pJS6Go6BaPHzsTSWgKXhkrpNH6vxQccudY0JHzjfHyREhpqx2iS1VklYiBY/o3Cp\n9IYQYrIQYocQYrcQ4tFGjncRQnwnhFgvhNgohDjPeFObx9nYltMxr30/wptnQ+5mlcYy5a9a+DxB\nTAqMvAfu+gluXwaDrofdS7xtlc+iy1q5h2Y9PyGEGXgNmAjkAGuEEHOllFsdTnsc+FhK+YYQoi8w\nH8hwg71NUjfmVV5tod+fFvL7Kb25Y0zWqSf6WRpLwCIEdBqktknPwnPtvW2RT1InftW1VojUH85G\n4UrYOwzYLaXMBhBCfAhMBRzFTwJ1uSDxwGEjjWwpUWFmQs2CkspGGluflMZyIUx9Xaex+AKhrvXt\nCEYitefnFlwRvzTgoMPPOcDwBuc8CSwSQtwLRAMTDLGulQghiI8Mo6Sigfjlb4OPrleJuROfgVH3\n+k0aiyZ40WGvezCq3O7VwGwpZTpwHvBfIcQp9xZC3C6EWCuEWFtQUGDQoxsnISqUY5UOOW+bP4WZ\n46GqRIW5o/9PC5+v4axaTHSqZ+3wMerbV+qCpobiiud3COjs8HO6fZ8jtwCTAaSUK4UQEUAycNI0\nnZRyBjADVGGDVtrsEgmRoRwtrwVrLSz6I/z8hk5j8WUsNRAeB2lD1IeTpp56z0/n+hmKK+K3Bugh\nhMhEid5VwDUNzjkAjAdmCyH6ABGAe127xnDIE5tTt+8Z+1edxuLbbPkMSo/ARa942xKfI0J3cHML\nzYqflNIihPgNsBAwA29LKbcIIZ4G1kop5wIPAjOFEPejJj9ukt6oldVUPtiUv3rODk3LkBJWvAop\nvaG7V4eLfRLdvtI9uJTkLKWcj0pfcdz3hMP3WwEPNqjVBBR7v4e8TXDhy3octhH0bK970P0FNd5n\n5WsQnQIDrvS2JT7JibBXT3gYSfCIX2Hwrg31aQp2wK5FMPQ2nevnBO35uYfgEb/XhsPXv4PyQm9b\nonFk5asQEqEqwGgaJaI+1UWLn5EElvg5yweLSoYzboK1b8O/BsIPL0FtpUdN0zRCWT5s+AhOv0oX\nO22CMLMJIbT4GU1gVXWxlz06pZx9FfAjnBE9gE8zF8LSp2DNLBj/hCq5bgqszwC/Yc0ssFbDiHu8\nbYlPI4TQ7SvdQEC+652Vs19XngJXfwA3zlOexue3w8yxsHe5Zw3UKM97zUxV4TlFN0RvDt2+0ngC\nUvyaJfMsuO07uHQmVBTDfy6E969Sg+8az7DhQ6goUm0CNM0SGWrWs70GE5ziByrUHXAF/GaNqjK8\n/yd4fSTMu1+NRWnch82m0ls6DPC7VgHeQnUg1J6fkQSv+NURGgln3g//t17NOP7yLrw8CJa/ADUV\n3rYuMNm9GIp26ao6LUCHvcajxa+O6GQ47wW4exV0G6sacb9yhupJa9PhhqGseAXi0qDfJd62xG9Q\nYa8WPyMJSPFLiGq8eIFLLRyTe8BV78H0byC2A3xxF8w4G7KXGWtksHJkA+z7AYbfoYtMtADduNx4\nAivVxc60wem8u3Ifqx+bQGJ0K3vWdh0Fty5V1UaWPAXvToUek2Di07rkfVtY+ZpqDTr4Rm9b4leE\nh5gpKK32thkBRcCI3ym5fcCgZxaTHBPW+n62JhOcdpn
qLLZ6Biz/O7wxSjXcGfcHiNU9J1rEsUOq\nqOzQ21SrygCgsdcd0LbXXSNEhpmptujhFyMJGPFzltvnbH+LCI1QlZ8HXQffP6/y0zbNoaLWShRV\npz6TeJKfPND25wYaq98EaYMRd3rbEsNw6+uOU8U149GvAbu4ht+t+xy3gYARv6a44e3VhIeYCAsx\nER5iIjzEbP9qcthvJjzURJjZRHjoiXPCQhy/DyF8yONE9b6exBV/JmrXvEafl8wxD/+GfkB1Kayd\nDX0ugsQMb1vjEb7bnk+IWWA2CULNJswmQYhJEGIyEWI+9XuzSRBiNqn9ZnWsSXG16D7HbSEoxO9Y\nZS01FhvVFivVtTZqrDaqa61UW2xtCCWuYV9E4+KnaYT170H1saBKap4+e423TdA0QVCI35f3OK+z\nKqWk1iqptljtAlm3Ofxca6PGqoSz2mKrF1IWevCX8GdsVlj1OnQeDp2Hetsaj/H53aOw2tTry2qT\n1NpsWK0Si82GxSYdjtlOnGO1YbVJLDaJxSr5x5KdjdxZMkBke/z3CTSCQvyaQghBWIggLKQVWT9a\n/Fxj21dQsl81Jg8grLamOzUM6pLY5mc4il9nkcfFpp+42PwTWaYjbb53sBMw4pccE+Z01k3jZVa+\nqsb5ep/vbUsM5Z2f9jo9ZtTrLoFSLjCv4mLzTwwx7cQmBatsffh37YW8EDrD+YXVpRAea4gNgUrA\niJ+RaQWuUkh8o5Mbar8GgIOrIWcNTHkeTGZvW2MYewvLeWHhDib0SWXmDUMQRi7Tq62EnQtg48es\niVhEKBZ22NL5a+1VfGkdzRGSlLiGf+F8cuO/l8C1cwImpcgdBIz4eQNn6Sxa+BxY8QpExMPAa71t\niWFYbZKHPtlAeIiJ5y45zRjhs9lUcY2NH8HWL6H6OMR2JHTUXTDgSm5+J59Dx6pY+uAYslJi7Bc5\nSWfZNg8+uUkl5l//OUS1a7t9AYgWP437KN4L2+fB6PsgPKb58/2E/6zYx9r9R3nx8tNpH9fGviN5\nW5XgbfoEjh9Sq1/6TlUVhzLOqveWhelbwMXG5X0uUEs0P7pelWu74UtdKbsRtPhpjMWhcXw9P/5D\npboEQOLtvsJynl+4nXN6p3Lp4LTW3eT4Ydg0BzZ+rFp2mkJUv+JJz0DPKRAWdcolJrt36XJxg57n\nwjUfwgfXwOzz4Ya5ekVSA7T4aYzF2RhUeX7jwgjNr0ho7XUGY7NJHv50I6FmE39uabhbdVzNem/8\nyF45XELaEJjyAvS/tFnPrO5RLSpomnUOXPsJvH8lzD4PbvwK4jq5fn2Ao8VP4zmaEsbGkFIN/rf0\nOjfx31X7Wb23mOcvG0CHeBfCXWst7PlWCd72+WCphMRMGPOICmuTslx+dp3n1+LKLplnwfWfwf8u\ng3emKAFM6NKyewQoWvw0vsHnd0HlUagqUV/rNqsxa2Rbg7OiBc8v2M4VQzo3fpGUcGidErzNn6pS\n/ZHt1LrwAVdC+pBWFXCtu6JVZa26jFDjfv+7BN45D26cC+26tfw+AYYWP41vsO8HlZYRkQDJPSEy\n8cS25E9eMalFRQuK9qhJi40fQXG26kXcawoMuEqFnyFty/s7Efa2sqZf+hlq3O+/F8M75ysBTO7R\nJpv8HS1+Gt/g/s3Oj3lJ/JqlvEjVe9z4kcplRKgw86wHoc+FKsXHIFo84dEYnQbCTV/Dfy464QEG\ncW1KLX4aY4lOdT45EQDVRsKpYYLpF3Jen0Fa4Y8ImwXa91dFbvtfBvGtnAFuhjrPr829e9v3g+nz\nlQDOPl+Fwx1Oa7uBfogWP42xtHbWtimaEs4n408+zw2zvyZsDDdt4xLTj0wxryZWVHIkrx1vWqfw\nY8Q4UtqdwYiwdoy0JNJZSmNXe9gR1Hl+BhQ0TellF8ALYfYFKhE6bXDb7+tnaPHTeI7WClPD6550\nEk4a7Fn2Fge42Pw
jU80r6CiKKZWRfGMdxue2M3n2vjuI2VtCfHYRP+wq4PP1hwDoFB/BiKwkRnRL\nYmS3JDq3OzVnrzVY7E20DOvjkZSlBHD2hTDzHKCRIg0BXhRVi5/Gt5FSrXwo2AGFu6CwsRJPBnLs\nkH3i4mMWhG+hVppZZjudZ63XscQ2mGrCSI4JI6t9PFnt47luRFeklOzOL2NVdhErs4tYtqOAz35R\nYpiWEMnIOjHMSiItIbJVZtXVnTS0g1tihhLAf/Zv/HgADFM0hRY/jW9gqYHiPUrcCnaqr4U7leDV\nlp84L7z1kwiFT3ZptBBFEXEkTf2LPQH5B0BC+jA47++E9ruUidFJNFU2QwhBj/ax9Ggfy/UjM7DZ\nJLvqxHBPEUu35TFnXQ4AXdpFMaJbu3pB7BjvmhjWhbuGt69McJKyEwRo8dN4lsoSuwe342ShO7oP\npMMbO76zSsUYfL1KfanbYlLhqdZVKnHWXiCJ4/DlPSr3bezvVdOqFiQgN8RkEvTqEEuvDrHcOEqJ\n4Y68UlbuKWJVdhELt+Tx8VolhhlJUfVe4YhuSU7XCldb1N9Gt680Di1+GuOREo7lnPDcCu0ha8GO\nk0Mpcxi0y4IO/dUSrzqBS+ru+UIIty6FtDNalYDcHCaToE/HOPp0jOPmMzOx2STbco/bxbCYrzcd\n4cM1BwHolhzN8HoxbEdqbARDnl1MaZUFgC9/PcyXvx4GjO8QF2xo8dO0Hku1Suh1HI8r3AGFu08O\nVSPiIbmX6nuc4uDFJXQFcytegk2l07SW9CGtv7aFmEyCfp3i6dcpnlvP6obVJtl25Hi9Zzhvw2E+\nWK3KpWWlRLu9Q1ywosVP0zyVJSfG4ByFrtFQtScMHnWyyEWnGOtRBdgMpNkk6J8WT/+0eG47uxsW\nq42tdjFcmV3EnoLy5m/SFtzxYeIHaPELFpqrjGKzqVnVQofJhrrxuIahalJ3lRjbf5oStxR7qBoW\n7bnfJ4AJMZsYkJ7AgPQE7hiTVd+r1208tAsO/QIzx8GV76l6gEGAFr9goanKKG+e3UiomqCSYXtO\nUiFrncgldPXbcvS67UATHFMTMO5aoeKLaPHTqLC062g1u1ondNHJbhn89yZ1bQce+mQDy3cV8PNj\nE9R+bxrVDEOeXeyZBx1XeYnEB0/qixY/DVz3qbct8ChpiZHkl1ZTY7G1rmWpB2lqUsPQzoTHclQl\nmqgk4+7p4/j2f16jcQNpCZFICUeOVXrblDZhaJrL8UOqynOAeftN4ZL4CSEmCyF2CCF2CyEedXLO\nFUKIrUKILUKI9401U6MxjrREtaoi56h/i5+hHDsEccEz3gcuiJ8Qwgy8BkwB+gJXCyH6NjinB/B7\nYLSUsh/wWzfYqmkLztIWzGEZ4vdpAAAgAElEQVQqKTmISE9QxQYOafE7wbEciE/3thUexZUxv2HA\nbillNoAQ4kNgKrDV4ZzbgNeklEcBpJSBvSLaH2ksN27Vv2HBI7DqDRh5t+dt8hId4iMwCcgp8W3x\ns9k89KFktUBZbtCJnythbxpw0OHnHPs+R3oCPYUQPwkhVgkhJhtloMaNDL8Dep0Pi59QfSeChLAQ\nE+3jInze8/tq42Gnxwyd7Cg9AtIWdGGvUbO9IUAPYCyQDiwXQpwmpSxxPEkIcTtwO0CXLrqDlNcR\nAqa+Cv8+Cz6ZDnf+YGjpdV8mLSGSQyUV3jbDKTUWGy8u2kmfjnF8fe+ZmExunIioT3PRnl9DDgGO\nyT/p9n2O5ABzpZS1Usq9wE6UGJ6ElHKGlHKIlHJISkpKa23WGElUO7jsbTXmM/f/gmb8Ly0x0qcn\nPD5YfYADxRU8PLmXe4UPTiQ4B5nn54r4rQF6CCEyhRBhwFXA3AbnfIHy+hBCJKPC4GwD7dS4ky7D\n4ZzHYesXsO4db1vjEdITI8k9VoXVU+NqLaC82sIr3+5ieGY7xvb0gJMQhKs7wAXxk
1JagN8AC4Ft\nwMdSyi1CiKeFEBfZT1sIFAkhtgLfAQ9JKYvcZbTGDYz+rWqx+M2jkNtEJ7UAIS0hCotNkne8ytum\nnMJbP+ylsKyGR6f0dks/kFM4fgjC44JmyKMOl/L8pJTzpZQ9pZRZUsrn7PuekFLOtX8vpZQPSCn7\nSilPk1J+6E6jNW7AZIJLZqjeuZ/cBNVl3rbIrdTl+h3ysRnfwrJqZizfw+R+HRjUJdEzDw3CHD/Q\nKzw0jsSkwLS3oGg3zH/I29a4lbpeGjlHfWvS49Vvd1NZa+V35/by3EOP5wRdyAta/DQNyTwbxjwM\nG96HXwN3oU6d+PlSusuBogre+3k/Vw7tTPdUD1ay1p6fRmNnzCPQ9Uz4+kFV0y8AiQwzkxwT5lNh\n70uLd2ASgvvG9/TcQ2sroaIw6NJcQIufpjFMZpg2E0Ij1fhfre8IhJGkJfhOusuWw8f4csNhpo/O\npEN8402M3MJxeyK1Fj+Nxk5cJ7jkTcjfAgsf87Y1biEtMdJnwt7nF+wgLiKUu8a0vmtci3ihh2r+\n/spg9fMXd6mfXzglPTdg0eKncU6PiTDq/2Dt27D5M29bYzhqlUcl0suJ3Sv3FPH9zgLuHptFfFSo\nZx7aVGXvIEGLn6Zpxj8B6UPhq/tUp7YAIj0ximqLzatd0KSU/HXBdjrGR3DjqAyv2XES1lpvW+AR\ntPhpmsYcCtNmqXXAc24GS+C0S6yf8fXipMeCzblsOFjC/RN6EhHqI71R/pYB71+pqv4U7AjYJY9a\n/DTNk9gVLnoVDq+HJU962xrDqE909tK4n8Vq44VFO+ieGsOlg30o1WTAFUr0FjwCrw2Dl/rC53fB\nxo+hNM/b1hmG7uGhcY2+F8Gw22HVa5BxJvQ+z9sWtZkTFZ29k+j8ybocsgvKmXH9GYSYfcgPueAf\n6uvRfZC9DPZ8Bzu/UbmfAKn9IGscdBsLXUf5bctSLX4a15n4DBxYBV/eDR1/9Pv0iLiIUOIiQrwS\n9lbWWPnH4p2c0TWRiX3be/z5LjUqT8yAM25Sm80GuRuUEGYvg9UzYeWrqhJ45+HQbQx0Owc6DfSb\n1qZa/DSuExoBl89WfX7n3AI3fQ1m/34JpSVGeSXsfWfFXvJLq3n1msGeKV7QkMYqezeFyQSdBqnt\nrAegpgIOrFRCmP0dfPus2iLi1SqhbuOUd5iY6bNNkfz7lavxPElZcME/4bNbYdmf1WywH6MSnT0b\n9pZU1PDGsj2M753KsMx2Hn22YYRFQffxagMoK4C93ysh3LMMtn2l9id0OSGEX/9OrSZpSHRqy8XY\nALT4aVrOgMvVC/2Hl1Sz87o3gB+SnhjJquwipJQe88BeX7aHsmoLD032YPECdxOTAqddpjYpoWiP\nXQi/gy2fwy//cX6tl3ILfWiUVeNXTHkeUnrB53f49QxgWkIkZdUWjldaPPK8wyWVzF6xj0sGpdG7\nQ5xHnulxhIDk7jDsNrj6fXh4L9yy2NtWnYIWP03rCItS43/VZSoEtlm9bVGrSK+b8fVQP49/LN4J\nEh6Y6MHiBd7GHAKdh3nbilPQ4qdpPal94LznYe9yFQL7IZ7M9duVV8qnv+Rw/ciupCdGuf15mqbR\n4qdpG4Ouh9MuV5Mf+37ytjUt5kRRU/eL3/MLdxAdFsI947q7/Vma5tHip2kbQqik2MQM+PRWKPev\n1i3tosOICDW5Pddv7b5iFm/N444x3WgXbWDPXX9h0xznxxxzCz2Inu3VtJ3wWLjsHZg1UZVGuvpD\nlRfmBwghSHdzrp+Ukr8t2E5KbDg3n5nptuf4LEf3w7z7IX0YTP/GZ3JD/eMVqvF9Og2ESc/BroVq\nCZwfUVfayl18uz2fNfuOct/4HkSF+cYb32NYLfDZber7aTN9RvhAi5/GSIbdBr0vUMUPctZ62xqX\nSUt0n/hZbcrry0yO5sqhnd3yDJ9m+Qtw8OcTQ
yM+hBY/jXEIAVNfhdhOMGc6VJZ42yKXSEuIpLi8\nhooa43P9Pl9/iJ15ZTw4qSehvlS8wBPsXwnLn4cBV6nkZx8jyP4bGrcTmQiXva16Q8y91y9qwaW7\nKd2lqlYVLxiQHs95/Tsaem+fp7JEhbsJXeC8F7xtTaNo8dMYT+ehas3vtrmw5i1vW9MsJxKdjRW/\n/63az6GSSh6Z3BuTyTcX97sFKdUEx/HDqhBuhG+uZNHip3EPI++F7hNV86MjG71tTZOkJaiEYyM9\nv+NVtbz63W7O6pHM6O7Jht3XL9jwAWz5DMY9BulDvG2NU7T4adyDyQSX/BuiktT4X3Wpty1ySmps\nOKFmYWii85vf76GkopZHJvc27J5+QdEeVb2l65lw5v3etqZJtPhp3Ed0Mkx7SzU+mveAz47/mUyC\njvHGzfjmH69i1o97ufD0TvRPizfknn6BpQY+vUX1fbn0TZ8vaqrFT+NeMs6EMY/Cpo/h1/e8bY1T\n0hMjOWRQXb9/Ld2FxSp5MJiKF4Ba4nh4PVz0sl9U+dbip3E/Z/8OMs6C+Q9B/nZvW9MoRiU6ZxeU\n8eGag1wzvAsZyf7Z26JVZH8PP/4TBt8Afad62xqX0OKncT8mM1w6E0Kj1PhfjXcaBjVFWmIk+aXV\nVFvaVprrxUU7CQ8xce85PQyyzA+oKFZ1HZOyYPJfvW2Ny2jx03iGuI5qHCh/Kyx41NvWnEJaQiRS\nwpGSqlbfY8PBEr7edIRbz+pGSmy4gdb5MFKqfM7yQpXW4ked3LT4aTxH9wkw+reqpHlTVT68QF19\nvdaGvnXFC9pFh3HbWUFUvGDdO7B9Hkz4k1rf7Uf4zipjTXBwzuOq5eGnt6jNES81soG2r/L4YVch\nK/YU8cQFfYmNCDXSNN+lYAcseEw1KBpxj7etaTHa89N4FnMo2JysofVSIxuADvERmETrVnnY7MUL\n0hMjuXZEFzdY54NYqlX70rAolc/pJyXMHPE/izUaNxBqNtE+LqJVbSy/2niYLYeP8+CknoSH+HZu\nm2EseQryNsHU1yG2g7etaRVa/DSe5eg+b1vglLSEyBaHvTUWGy8u2knvDrFMPT3NTZb5GLuWqJqN\nQ2+DXpO9bU2r0eKn8Qw2G/z8Jrw+0tuWOCW9FXX9Plh9gAPFFTwyJUiKF5Tlwxd3QkofmPSMt61p\nE1r8NO6ncBe8MwW+eVg1OfdR0hIjyT1WhcVqc+n88moLr3y7i+GZ7RjbM8XN1vkAUsIXd0PVcbhs\nFoRGetuiNqHFT+M+rBb48R/wxmgo2A6XvAnXfuK8YY2XGtnUkZYQhcUmySutdun8t37YS2FZDY9M\n6Y0QQeD1/fwm7F4Mk56F9v28bU2b0akuGveQt0V5CUd+hT4XwnkvQmx7dcxL6SzN4djDt66lpTMK\ny6qZsXwPk/t1YHCXRE+Y511yN8PiJ6DHuapdQQCgxU9jLJYa+OFFtUXEw+X/gX4Xe9sql6jP9Sup\nANo1ee6r3+6mstbK787t5QHLvExtpcrJjEyAi19X7QoCAC1+GuM49At8+RvI3wKnXaHWeUYnedsq\nl6nz9pqb8T1YXMF7P+/nyqGd6Z4a4wnTvMuix9WwxXWfqTJlAYJLY35CiMlCiB1CiN1CCKcLM4UQ\n04QQUgjhu+VbNcZTW6lCorfGQ2UxXP2RalPoR8IHEBFqJjkmrNkZ3xcX7cAkBPeND4KSVdvnq1YE\nI38D3cd72xpDadbzE0KYgdeAiUAOsEYIMVdKubXBebHAfcDP7jBU46McWAVf3gNFu1U5o4nPqPDI\nT0lLiGyyovPWw8f5csNh7jg7iw7xER60zEO80KPxlTYbP4Jzn/O8PW7EFc9vGLBbSpktpawBPgQa\nK9j1DPA3oPVlMTT+Q3UZfPMIvD0ZrDVw/Rdw0St+LXygChw0FfY+v3A7cRGh3DUmy4NWeRBnSwzL\nCzxrhwdwR
fzSgIMOP+fY99UjhBgMdJZSfm2gbRpfJXsZvDFSpT4Mux3uWglZ47xtlSHUNTCXjZTc\nX7mniGU7Crh7bBbxUQFYvMBH2wy4izZPeAghTMBLwE0unHs7cDtAly5BsgA8kKg6pga/f3kXkrrD\n9G+gq++u2GgNaQmRVFtsFJbVnFSTT0rJXxdsp2N8BDeOyvCegUZTdVx9mO1erJatBRGuiN8hoLPD\nz+n2fXXEAv2BZfZEzw7AXCHERVLKtY43klLOAGYADBkyJLg+ZvydHQtUL9ayXBh9H4z9vd9n+DdG\n3YxvztGKk8Rv4ZZcNhws4flpA4gI9ePiBVJC/jbYtQh2L4EDK1WVnfA46DZW9VoOElwRvzVADyFE\nJkr0rgKuqTsopTwG1M9/CyGWAb9rKHwaP6WiWI3tbfoYUvvCVf+DtDO8bZXbqE90LqlkkD152WK1\n8fzCHXRPjeHSwX5YvKC6VPXYqBO843bfpX1/NYvbYxJ0HqbKjT0ZPN3mmhU/KaVFCPEbYCFgBt6W\nUm4RQjwNrJVSBs9HRbCx5QuY/zuoPKo6sJ31IISEedsqt5LWSFHTT9blkF1QzpvXn0GI2Q9WhEqp\n8vJ2LVbh7P6VYKuFsFjIGgtjHlFVteMbEfLo1MYnPby89NAduDTmJ6WcD8xvsO8JJ+eObbtZGq9S\nmgfzH4RtX0HHgWomt0N/b1vlEeIiQomLCKnP9aussfKPxTsZ3CWBSX3be9m6Jqgug73LT3h3x+xz\nlKl9YeTd0H0idB7e/IeXjy49dAd6hYfmBFLChg9Vg6HaSpjwJIy8F8zB9TJJc0h3eWfFXvJLq3n1\nmsG+VbxASijcqby7XYvU2J21BsJi1Njd2b+ze3e+3z/XWwTXq1rjnGM58NVvVZjUeThMfQ2Sg6j9\nogNpCZEcLK6gpKKGN5btYXzvVIZlNr3W1yPUlNu9u8VqO3ZA7U/pA8PvsI/djQj4oQmj0OIX7EgJ\n62bDoj+CtMLkv6mqHSY/ntFsI+mJkazKLuL1ZXsoq7bw0GQvFS+QUq2c2bVIid3+n5R3FxqtvLuz\n7lfhbELn5u6kaQQtfsFM8V7Vc3XfD5B5Nlz4MrQLoraLTkhPjKSs2sKM5dlcOjiN3h3iPPfwmgr1\n/6gLZ0v2q/3JvVRCeY+J0GUkhARJX2A3osUvGLFZYfUMWPo0CDNc+C8YfGPAlCpqK461/B6YaGDx\nAmfrZqOS4OyHlODt+xGs1RAaBZljYPT/Ke8usatxdmgALX7BR8FOVYggZ7UaI7rgn42nPAQZQ55d\nTGFZzSn7L37tJ9Y+PtGYhzhbN1tRpCaZknrA0FuhxwToMgpCA7Bwgg+hxS9YsFpgxb9g2d/svVZn\nwIArtLdnpzHha2q/4dy3ARIzPPMsDaDFLzjI3aS8vSMboO9UOO/vEBN4Sat+jRY+j6PFL5CxVMPy\nv8OPL0FkIlzxrhI/jUajxS9gyVmnvL2CbTDgKpj8F4jygVw1jcZH8IOFipoWUVupcvZmTVAlqK75\nGC59UwufL9DU+tjD6z1nhwbQnl9gsX+FaiBUvEelrkx6RnVQ0zRLckxYo5MbSTEGrpZobN1sRTH8\n+0z4ZDrcsRwiPJhTGORozy8QqC6D+Q/BO1NUbbYbvoSLXtbC1wJWPzaB1NhwJvfrwL6/ns+/rhoI\nwIe3jXDvg6PawbS3VDLz1w8EXTVlb6I9P3/CWZKsMKk3zfC7YPwfISza87b5OesPlpBfWs3k/h2A\nEz18c0oq6dE+1r0P7zpKFYf97jm1bG3Qde59ngbQ4udfOEuSlTa4eSF0cbOXEsAs2pJLiEkwrrca\nl0tLiAJospOboZz1oCpaMP8hSB8KKUHQDN3L6LA3UNDC12qklCzYksuo7snER6rGRKmx4YSaRbMN\nzA3DZIZLZ6rWAJ9MVxNXGreixU8T9GzPLWV/UQWT+3Wo32cyCTolRDbbwNx
Q4jrCJW9C/hZY+AfP\nPTdI0eLnL+iBcLexYHMuQsDEBpWa0xIiOXS0wrPG9JgIo+6FtbNg65eefXaQocXPH5ASlvzJ21YE\nLAu35DK0a7uTurWBEj+Pjfk5cs4TqknUl/fC0f2ef36QoMXP17HZVArET/+CECetIgOwuYyn2FdY\nzvbcUib1O7U/R1piJPml1VRbrJ41KiQMps0CJHx6C1hrPfv8IEHP9voy1lr44m7VNnL0b1VPDV2F\nxVAWbskF4FyH8b460hPVjO+Rkioykj2cPtQuEy78J8y5Gb59FiY+5dnnBwHa8/NVaqvg4xuV8I1/\nQr34tfAZzoItufRPi6Nzu6hTjtUVNfXopIcj/aeplTo//RN2L/WODQGMFj9fpKYcPrgSdnytyk+d\n9aC3LQpIco9Vsf5AyUmzvI6kN9LD1+NM/qtqUPT5HaqlqMYwtPj5GpUl8O7FKuH14jdUMyGNW1i8\n1XnIC9AhPgKTgBxPz/g6EhYFl7+jljB+frsaA9YYghY/X6KsAGZfoCp8XP4fGHiNty0KaBZsyaVb\nSjTdU2MaPR5qNtEhLoIcb4W9daT2gSl/g+xl8NM/vGtLAKHFz1c4lqMKExTthms+hL4XeduigOZo\neQ2rsouZ3K9Dk83I0xIjvRv21jH4Buh3KXz7HBxY5W1rAgItfr5A0R54ewqU5cH1n0P3Cd62KOBZ\nsi0Pq03WFzJwRpqnV3k4Qwg1+5vQGT69VZXC0rQJLX7eJm+r8vhqyuDGr6DrSG9bFBQs3JJHp/gI\nTktruuxXWmIkR45VYbH6wFhbRDxc9jaUHlH9lvWqnzahxc+bHFoHs89TJammfwOdBnrboqCgvNrC\n8l0FTGom5AWV62e1SfJKqz1kXTOknaHyPbfPgzVvedsav0aLn7fY+wP85yIIj4ObF0Bqb29bFDQs\n21FAjcXWbMgLDrl+vjDuV8eIe1Qj84WPwZGN3rbGb9Hi5w12LoL3LoO4NCV8um2hR1mwJZek6DCG\nZjTf1yStLtevxIvpLg0xmeCSf0NkO7UCpLrM2xb5JVr8PM3mz+DDqyGltwp14zp526Kgotpi5bvt\n+Uzs2x6zqfkVM3WeX06xD3l+ANHJMG2myg6Y/5C3rfFLtPh5kl/eVQvV04fCjXMhOsnbFgUdK3YX\nUVZtcZrY3JCIUDPJMWG+MePbkMyzYczDsOF92PCht63xO7T4eYqVr6sZum7j4LrPdHMhL7Fgcy4x\n4SGM6u76B09aYpRvih/A2Q9Dl1Ew7wEo3O1ta/wKLX7uRkpY9jdY+HvocxFc/YFasqTxOBarjcXb\n8jindyrhIWaXr0tP8JFE58Ywh6jubyFhMOcmsPjIrLQfoMXPnUgJix6HZX+GgdfCZe9ASHjz12nc\nwtr9Rykur3FplteRtESV6Cx9Na8uPk2tA8/dBIuf8LY1foMWP3dhs8JX/wcrX4Vhd8BFr6pPaY3X\nWLA5l7AQE2N6prTourSESKotNgrKfNir6jVFtS79+d+w/WtvW+MXaPFzB9ZatQTpl3fhrN+pRekm\n/af2JlJKFm7J5eweKUSHt+xDyCdKW7nCxKeg4+mqAO6xHG9b4/Pod6TR1FbCh9fCls9gwlOqibgu\nQup1NuYc48ixqhaHvOCY6+fj4hcSroZWbBaYcwtYLd62yKfR4mck1aXw3uWwaxGc/xKc+VtvW6Sx\ns3BLLmaTYEKflvc78clVHs5IyoIL/gEHV8H3f/W2NT6NFj+jqCiGd6fC/hVw6QwYeou3LdLYkVKy\nYHMuI7q1IyEqrMXXx0aEEhcR4p1Obq1hwBUw8DpY/nfI/t7b1vgsegTeCErz4L+XQNEuuPK/0Pt8\nb1ukcWB3fhnZheVMH53R6nuk+2qu3ws9oDz/1P3RKZDUHT67De78CWJaNskTDGjPr62UHIB3JsPR\nvXDNx1r4fJAFm1W5+kkurupoDJ8patq
QxoQPoLwALp+t2iJ8cacuf98ILomfEGKyEGKHEGK3EOLR\nRo4/IITYKoTYKIRYKoToarypPkjhblWEtLwIrv8CssZ52yJNIyzcmsvgLgm0j4to9T3qipr6VK5f\ndWnTx0PCYdKzsHuJSrnSnESzYa8Qwgy8BkwEcoA1Qoi5UsqtDqetB4ZIKSuEEHcBzwNXusNgnyF3\nkwp1pYSb5kHHAd62SNMIB4sr2HzoOI+d17aSYemJkZRVWzhWWduqcUPDqC6DnQtgy+dK1Jri1SEQ\nal9NtPiPULBdtcJs3w/C7X1LnIbNqfDQLmNt9zFcGfMbBuyWUmYDCCE+BKYC9eInpfzO4fxVwHVG\nGulzHFwD702DsBi44UtI7uFtizROaKopeUuor+5ytNLz4lcneFu/gF2LwVIFMR2UkK1+0/l1U1+D\n3M2w70fI2wS/vqc2hGqK3r5/E2Gzk/0BhCvilwYcdPg5BxjexPm3AN+0xSifJnsZfHANxKSqyiwJ\nXbxtkaYJFm7JpXeHWLomRbfpPumJyoM6VFJJ/2ZK3xuCU8G7AfpdAp1HqMT5psRvkIMPcnA1zJoI\nIRGqD3TuJsjb7P7fw4cxdLZXCHEdMAQY4+T47cDtAF26+KFo7PgGPr5R5VJd/znEts2b0LiXgtJq\n1u4/yn3j2+6Zp3lilUd1GexaqELaesFrrwSv78XQZQSYGhRkiE51HrY60nkYjP8TLH1KzQRf+V+1\n/8ngrS7kivgdAjo7/Jxu33cSQogJwB+AMVLKRhdBSilnADMAhgwZ4kMjxy6w8RP4/A61fOi6TyGq\n+SrAGu+yeGseUtKqVR0NSYwKJTLU3Pp0l6ZSUqb8DbbUeXiVzQueIy0Zlxv9W9i7HBY8Cp2HQ/u+\nLf89AghXxG8N0EMIkYkSvauAk7ppCyEGAW8Ck6WUgTdYsPYdmHc/dB2teuqGx3rbIo0LLNiSS9ek\nKHq1b/v/SwhBWmIkOUdbWc6+qZSUOTcrwRt0nQppmxO81mIyqQT8N0bDnOlw23fNXxPANCt+UkqL\nEOI3wELADLwtpdwihHgaWCulnAu8AMQAn9i7YR2QUvpn121nn9DmMLhuDoRGet4mTYs5VlnLit2F\n3HJmZrMd2lwlPdFNPXxv+hq6jHSP4DUkJlUJ4H8vgQWPuB42ByAujflJKecD8xvse8Lh+8Dpsu3s\nE9pa45rwBXHqgC/x7fY8LDbJuQaEvHWkJUSy4WCJYferJ+NM4+/ZFFnj4Mz74ceXYNosOO0ytX/7\nfNVf5to50GOiZ23yAnqFh9EEceqAL7Fwcx7t48IZmJ5g2D3TEiM5WlFLeXUAVEsZ9xikD4OvfgvF\n2Wpf9wkQkQCbPvGubR5Ci58m4KissbJsZz6T+nbA5EKHNlepr+7ii2t8W4o5FC6bpcYB59wMlhpV\nCr/vVNg2D2p8qFWnm9Dipwk4vt9ZQFWta03JW0Kri5rabCCcjOd5c2wtoYuqMH54vUqBATjtcqgt\nh52Bm6pbh67q4kmsteoTV+NWFm7JJSEqlGGZxqYj1SU657TU89v5DUgrXDpTlZvyJfpeBENvU2t/\nM89WoW9sJ9g0B/pP87Z1bkV7fo7kbXV+zIhP6DfPhn0/tf0+GqfUWGws3ZbHhD7tCTUb+/JOiQkn\nzGxqmecnJfzwIiR0hX6XGmqPYUx6Vi11++IuKMuD/peqnMOKYm9b5la051eHzQpf3gNRSXDPaohO\nbt19wmMbr7YREa8y+GefBwOuhIlP6xUiBjLk2cUUltXU/zxnXQ5z1uWQHBPG2seNmbk0mQQdEyJa\nNua3dzkcWqcqe/tqA6vQCFX+fsYY+Ox21Qtk5auwbS6ccZO3rXMbPvrf8AKrXofDv6ip/9YKn5SQ\nmKmE9K6fTu3dUVOh0gt++pdKKxj3GAy73XffFH6Eo/C5sr+1pCW0MNH5hxdVAvPAaw21w3BSesL5\nLyr
vb9diSO6pVjUFsPjpsBegaA98+xz0Oq9t4xyH1kHuRlXCvrHE2rAoOOdxuHsVdBmuGpm/eZYO\nhf2I9JYUNc1ZB3u/h5G/Ud6Vr3P61Soq+f5vENsR9v8U0F3gtPjZbPDVfWoi4vwX29Zpbc1bqsxV\nc4PaSVkqkfTK91SIPPs8FW6U5rX+2RqnXPHvlTwyZyNvLNvDgs257MwrparW2qp7pSVEkV9aTbXF\nhet/fEnlzQ2Z3qpneRwh1HsgMVOJNhI2f+Ztq9yGjrd+mQ37foALX4a4Tq2/T0WxeqEMvt61tb9C\nQJ8LIOscFRqteFlVjRn7ex0KG4xEsnR7PoUOTceFgE7xkWQmR5OZHE1GcjTd7F/TEyOdTpbUVXc5\nUlJFRnITZbLyt8H2eTDmEf9aCx4eC5e9rcpfWWtg08cw+v+8bZVbCO532LFDsOgJNcU/+Ia23Wv9\n/8BaDUNa2LUtLEr19h14Dcx/SIXC6/8H5/8duo5qm00aAD65U/0dj1fVsq+wnL32re77L349RGmV\nhTXhd5Eijp1yfWVYEr9csZqM5Gg6xavwNedoZdPi9+M/ITQaht/plt/JrXQaCBOfUWt/czdBwQ5I\n6eVtqwwneMVPSlWpRUI8AQAAABNXSURBVFqV19eWcNdmg7WzVNWX1pYJSspSpbK2z4MFv4d3psCA\nq+yzwu1bb1uQkBwT1ujkRnLMiarLcRGhDEhPYECDJW9SSorLa0j6+6nCBxBZU8S1b/180r7rZv3M\nnWOy6r3FzORokmPCVBGFo/vUErERd/lv6bPhdyiv79A6WPQ4XBt4S96CV/w2zVGFI8/9iyrp3Rb2\nfKte8Of8sW33EQL6XAhZ4x1CYfus8NDbdCjcBM2ms9hsqnxU6WE4fgSOH4LSI3D8CKL0MEnHjzR5\n+fu3DWdvYTm78sqYvWIfALN+zKbWeqIsZUx4CJnJ0TxsncEoBItjp9HhYAmZSdHER/lZcrsQalz6\n+UzYtQiqjkNEnLetMpTgfDeVFcA3D0P6UPUJ11bWvKWKUvYxqIpXw1B4waPwy391KOyM2ioHUTt8\n4nvHfWW5YGtQkECYVa5lbEeV6lG4w+kjRmUlMypLpUAt2pLLiKwknp82gMMlVWQXltWH0EV5Bxl2\neD4fW8/i93OPAEpU20WHqbHFpGi6paivaqwxiqgwH30bRrVTzc9//Z/KAbz3l7ZFSD6Gj/7V3cyC\nR6CmTK1rfLF320pQlRxQHuSZD6iF4UYS7KGwlFB5VImXM1ErPazOaUhYjBK1uI6QeZb9+04n9sWl\nqQ8sxxp6TZV0r62sL2mmippWEmI20SUpii5JUVA3JLb4Yzhi5dJ7X2AoHcguKGdf0Ylxxh93F/Dp\nLyenj3SIizhl0iUzOZou7aIIC/FyQsbkPyvxK85WzY8GBU5vsuATv+3zYfOnMO4PkOpE+MD1ElTr\nZquv7koGdRoK/wGG3uq/obC1Fkpzmxa10lzVx+IkhBKtuI6Q2FVVPY7rqNaj1olabEfjQ7RZk+CK\nd6FdJmkJkazZ14jgVh6FNbOg3yWEp/agO9A99dSZ3vJqS70g7issJ9v+dcHmIxytqK0/zyTUeuJ6\nUUyKIjMlhsykaNISIzEbWLHGKRHx0Ot82PE1zHtARUsBMvnhp++cVlJZoiY52vdX/QzaiqUafnkX\nek6GhM7Nn98W6kLh069WIfuCR2D9f+G8v0PXke59dkupOm4fTzvUuKgdP6LG32jQxsUcfkLI0oac\nLGqxneyeWwf3FYdwVtU4Ih5K9qvQ75IZpCdm8tXGI1isNkIcU2JWv6UiijMfaPox4SH06xRPv06n\nepolFTVKFIvK2VtQzt6iCvYWlvHL/qOUOdQRDDOb6NwukszkmJPC6MzkaNrHhRtWvRqA069U4met\nhk+mw21LA6KieXCJ3+I/qhf31R8YE6Ju+0q9iYe2ML2lLSR3bxAKT
1aCOPFpVaLcndisUJbv4Kkd\naSQkPaIEoCGRiScErOPpp4paXCd1jjfHlJoa5ji6Dz66Hj64kilZt/G6bQx5pdX1Nf6oKVdLJHtO\nhg79W21CQlQYg7qEMahL4kn7pZQUlFWzr1CJYbZDqs7yXQXUWGz150aFmemaVBdCR5GZHEOm/Wti\nVGjLhbHHJAiPU20v87fAwj/ABS+1+nf0FYJH/LKXKS9t9H2QNtiYe66ZpbLhu51jzP1cpT4UrkuQ\nfgW2fw3S1rjwuDJ+WVNxQsyciVpprkoNcsQUovrJxnVUaT7dJzQQtY4qDPV3TyExA25ZDN88RL9f\nZvJu6AryDvciLaGnOr7uP1BZrHriugEhBKmxEaTGRpxSqstmkxw+VlkvjHvtX7ceOc6CLblYbSc8\n7LiIEDJTYuxhdDSZKdFkJimRjI1w4lGHRqrX29a5KgF/9QzoNkYVPvVjhJTe6SA5ZMgQuXbtWs88\nrKYcXh+p3qh3/XTijbj2bRUGO+PJxvO+AMjbAm+MUuWARt1rrL0tpXA3fPOQSrlxxh0/NBC3Qyd7\nb1WN9KYIi7WPo3Vy8NQ6nvDUYjvZJw2Ca5Vk/vdvEffto4QLKwLbqSf4UL+WWquNg8UV7CsqP2ny\nZV9hxSnVaZJjwhv1FrsmRRFx4HvV9GjaLOXhFu6GO39Q464+hhBinZRySHPnBYfn9+2zaszmpvkn\nhG/nIvj6QdWVzdpI5Y/m6vetmaXGqHyhWkdyd7juM3iqiX4Vb57l8INQIXJcJ+W5dh11qqjFdfSv\nZVkeJG7UdKYtqObr8McaP8GH+rWEmk10S4mhW0oM5/Q++VhVrZX9RSd7i3sLy/l2ewGFZSdmpIWA\nznFhzDUlcmjJu2zp9xSX5l+F5cPpmG/5htCwcA//VsYQ+OJ3cDWsekPNjGaMVvsOr4dPboIOA1Tb\nwPAYtb+8CP55GvSaovobOKO6FDZ+pCrA+EoGf3PjOFe8e0LUYtrritJtICLUTF50L/DzPkYRoWZ6\ndYilV4dTP+RKq2rZV1hhz2FUwrh8/9mce2w+V397hO9NN/Na7cu8+cytfBh/i5qJdvAWM1Oi6RgX\nYWgPFaMJbPGzVMOXv1HpD+P/pPYd3Q/vXaGKll7z8QnhA4hOguG3q3WZYx52PqW/8SM1tjb0Vvf/\nDkbh5+MzvkZaYiQUeNsK9xEbEcpp6fGclu4wI51zH7z1JSsurmBHx/vJXnqEOw58Qk3caOYf78eq\n7GIqHarlhIeYyEg6NYzOSI76//bOPTiq+orjnwMBkmwgidmlQsIjEVDxiQZb1JHaYgfQglZUEK1Y\n1FFbasfWGa22RWnHV61jLdVSsNWqII1VUVEpiDAqz6kgyKgTEt4oCSY88uCR/PrH7+ZJNrlJNnvv\n7p7PzE5u7l52z+Fuvvt7nAehtPA70s0L09YRycK0EO/it/JxG7U/tcDGfVWVwUuT7Jb9tLdaDhQe\nNQPWzIEVj7U8+jPGTnn7nRO5jRMl5sjJiG/xa5H5kwFIe/cuzueu+tMz9tzLjF9+gUnry9cHjzQZ\nLRaXVlK47zDvf77vhFTAelHMSiXXCdfJC6ZFrTBt/IrfV5vgwydtRsTQy+wocMFUG7Jw4+vhR3WB\nLLjgVlttuaXR345VsG8LTHjaf6k+4eLUvOwQFqfUdXJLKMKuZRr4z63Ija9zcnoyJ6cnc+EpTa84\nXlN7Qipg8f5KNuws4+1P91Drwb5rfIpfzXE73U3JhLEP26T21++wlWmvntew9heOC2fA2r+3PPpb\nNxd6pcOZk7rO/o7ikx3GRCA7M4USk95iCayE/LIpXgEfPRk21Kd5KuCR4zXs2F9JUWkFn+89xIov\n9/HJznKiGXwSn+K36mnYuwGued5uSCydaVPaxsyEs1yI1l9H2d6lmwvso47UIFQfsCPDnqldZLwS\nC2RnpDDyyDO8dueFJwQkJyRn/
Mi2ghh0kU05xMYf7j1YTXFJBUWlhykqachx3lVW2WS0F0zrxcjB\nJ5EXDLBg3c6omBx/4ldaCMsfhtOusIv86+bZ6W/+dPcpbeGG95Wl9mf+TyJjqxKz1FV03l1epeIH\nbBgxkyHF66h58SZm5TzL5m+6s21/BdXHmmae5AYDnDMggytHZJMXdCrcBAP0aRRgreLXEWprYZHT\nLObyJ+DL92Dxr2zK0bjHIrNGlzsagkM7/zpKTFOX1rarPT18Y5jqYzW01oLpyrmbOFtuo6DnTCZs\nf4SynFlcPCRIbshuYuSFAvTt7S7n2E1h2kgQX+K3fp7dkJg422YtFNxsY/kmPRe56iexFN6idBm9\nk3uQntKjfQ3MfU5trWF3eVX91LSoxOYQF5dWsLu8irU9w6xxAqtOfYWqcU/RfatwyX8f4JIzNtrl\noQ4QyXCW1ogf8SvfYdf28i6FwRfD3DG2/+71C6FnK70W2sup4yP3WkpMk52R0r4G5j6hrOJokzW4\n+rW4/RVNCiTUVaY+b2Amk87P4ePgKvKcAOa0Xo50GAMr/0i/5b+HJQdtMP22lfDer2HAt6Hf2R55\n2TbxIX7GwJu/sD8vexBeusbWi5v2duSLfs7K8lXupuId2Zkp7NjfjgbmUaT6WE39CK5O4Iqc9LXy\nRjUDk7oJA7NSyQsGGH1qqL4sVl4o0Gogcj0iMPoemxb55s/hH+PhqmdtqFnBzXDbiqaJBD4iPsRv\n4wLYuszpOHVf27F8bREuXq4OH+VuKt6RnZHCx4WlGGMiWz/PJTW1hj3lVWwtOdxE5OqmqY2pqxY9\n/qx+9RsNucE0BmSmNK1J2FFGTLUDjYU3wfwpNuRl8T32cdUznX/9LiD2xe/Q17bHRc4FsOd/7mP5\nWqNuVNdaWXMl4cnJTKHiaA0Hqo6RkRrhFgYOdZ3lGkZvFbaeX0kF2/dXcrSmYZrau1cSeaEAIwdn\ncl1oQP0oLjcYINArCn/qQ8bY2dbL18L7s+zy08aXbfmrcyZ3/fu3k9gXv3fusf0VAiH47DUY86C7\nWD5F6SR1WR67yqo6LX6VR487KWF2o6HYKW9fVHKYg9UNFRR6dBcGOVWbv3da3/oRXJPWmV7S/1xb\n9/DFq2H7x/bcW3fbytzBId7a1ozYFr8ti2DLG9Anx5bZzp9ui5UqShfTOPn+iqc/rD/fWvL98Zpa\ndpdXOaJWUV9Cqqikgr0HmvYq6ZeeTF4owIRz+9eXqs8LBsjOiNA0tSvJHATTl9hc4J1rbMJAwTS4\nZRkk+af8VeyKX+U3th4fwMFdMGxc5GL5FKUNWku+Lzl0xFmDO9xoqlrB9v0VTZL7+yQnkRdKY1Re\nlp2eOjFxvm5n6ZbUk+DHb8Crt9iWC19tgiW/gfGPeW1ZPbH7P7zkgYaNh/4jbA5upDuZaaEApQOM\n/MPS+uOe3bsxKCuVU0IBxpz+rUabDQFOCvhgmtqV9EixoS/v3gdr/9bwaIyHkROxKX6FS20PUYCM\ngZGP5atDw1mUDvC7Hw4nNxjglFAa/TOi1GLSr3TrDuMePVH06vAwciL2xO/IIRvTB5CcAVNf7fqu\nZYrSDm6+KNdrE/yFT0e3Pl85bYFlD8GBnbb3xpT5EBrmtUWKosQg/h/5PT605aFxUrJtvKMoHhCt\n5Hul63AlfiIyFngK6A7MNcY80uz5XsALwPnAfuA6Y8y2iFgYbk3gyMGIvLyidIRoJd8rXUeb014R\n6Q7MBsYBw4EpIjK82WXTgTJjzBDgSeDRSBuqKEoMEy5CwsPICTcjvwuAQmNMEYCILAAmAlsaXTMR\nmOkcFwB/ERExXnVEVxTFX/gwcsLNhkc20Li06i7nXIvXGGOOAweArEgYqCiK0hVEdbdXRG4TkfUi\nsr6kJNH6/imK4ifciN9uYECj33Occy1eIyJJQDp246MJxpg5xph8Y0x+KBRyZ6EP1woURYl93Kz
5\nrQOGikguVuQmA9c3u2YRcBOwCpgEvB+x9T4frhUoihL7tCl+xpjjIvIz4D1sqMtzxpjPROQhYL0x\nZhEwD/iXiBQC32AFUlEUxbe4ivMzxiwGFjc799tGx9XANZE1TVEUpeuIvfQ2RVGUCKDipyhKQqLi\npyhKQqLipyhKQqLipyhKQqLipyhKQiJe1R4QkRJgezv+SRAo7SJzok08+QLx5Y/64k/a48sgY0yb\nKWSeiV97EZH1xph8r+2IBPHkC8SXP+qLP+kKX3TaqyhKQqLipyhKQhJL4jfHawMiSDz5AvHlj/ri\nTyLuS8ys+SmKokSSWBr5KYqiRAzfiZ+IjBWRL0SkUETubeH5XiLyivP8GhEZHH0r3eHCl7tFZIuI\nfCoiy0RkkBd2uqEtXxpdd7WIGBHx7S6jG19E5Frn3nwmIi9H28b24OJzNlBElovIJ85nbbwXdraF\niDwnIvtEZHOY50VE/uz4+amInNepNzTG+OaBrRe4FcgDegIbgeHNrrkTeNY5ngy84rXdnfDlUiDV\nOb4jln1xrusNrARWA/le292J+zIU+ATIdH7v67XdnfRnDnCHczwc2Oa13WF8uQQ4D9gc5vnxwDuA\nAN8B1nTm/fw28qvvFGeMOQrUdYprzETgeee4APi+iEgUbXRLm74YY5YbYyqdX1djWwT4ETf3BWAW\ntm1pdTSNaydufLkVmG2MKQMwxoRpHu0L3PhjgD7OcTqwJ4r2ucYYsxJbDDkcE4EXjGU1kCEi/Tr6\nfn4Tv3jqFOfGl8ZMx36r+ZE2fXGmIAOMMW9H07AO4Oa+DAOGichHIrJaRMZGzbr248afmcANIrIL\nW5R4RnRMizjt/ZtqFVeVnJWuRURuAPKB0V7b0hFEpBvwJ2Cax6ZEiiTs1Pe72NH4ShE5yxhT7qlV\nHWcK8E9jzBMiMgrbcuJMY0yt14Z5id9GfhHrFOcD3PiCiIwB7gcmGGOORMm29tKWL72BM4EPRGQb\ndj1mkU83Pdzcl13AImPMMWNMMfAlVgz9iBt/pgMLAYwxq4BkbK5srOHqb8o1Xi9yNlvQTAKKgFwa\nFm/PaHbNT2m64bHQa7s74csI7GL1UK/t7awvza7/AP9ueLi5L2OB553jIHaqleW17Z3w5x1gmnN8\nOnbNT7y2PYw/gwm/4XE5TTc81nbqvbx2tgUHx2O/abcC9zvnHsKOjMB+a/0bKATWAnle29wJX5YC\nXwMbnMcir23uqC/NrvWt+Lm8L4Kdxm8BNgGTvba5k/4MBz5yhHED8AOvbQ7jx3xgL3AMO/qeDtwO\n3N7ovsx2/NzU2c+YZngoipKQ+G3NT1EUJSqo+CmKkpCo+CmKkpCo+CmKkpCo+CmKkpCo+CmKkpCo\n+CmKkpCo+CmKkpD8H0fbKpFmdlXHAAAAAElFTkSuQmCC\n", 1000 | "text/plain": [ 1001 | "
" 1002 | ] 1003 | }, 1004 | "metadata": { 1005 | "tags": [] 1006 | } 1007 | } 1008 | ] 1009 | }, 1010 | { 1011 | "metadata": { 1012 | "id": "0j5vcFepLuHa", 1013 | "colab_type": "code", 1014 | "colab": {} 1015 | }, 1016 | "cell_type": "code", 1017 | "source": [ 1018 | "\n" 1019 | ], 1020 | "execution_count": 0, 1021 | "outputs": [] 1022 | }, 1023 | { 1024 | "metadata": { 1025 | "id": "Rem5LA9hSm2R", 1026 | "colab_type": "text" 1027 | }, 1028 | "cell_type": "markdown", 1029 | "source": [ 1030 | "# Test" 1031 | ] 1032 | }, 1033 | { 1034 | "metadata": { 1035 | "id": "Yi2tuUdRS0k0", 1036 | "colab_type": "text" 1037 | }, 1038 | "cell_type": "markdown", 1039 | "source": [ 1040 | "## random test" 1041 | ] 1042 | }, 1043 | { 1044 | "metadata": { 1045 | "id": "0OD_zKSpSz7S", 1046 | "colab_type": "code", 1047 | "colab": {} 1048 | }, 1049 | "cell_type": "code", 1050 | "source": [ 1051 | "B = 1000\n", 1052 | "size = 50\n", 1053 | "\n", 1054 | "X = np.random.rand(B*size, 2)\n", 1055 | "Time = 5*np.random.rand(B*size,1)\n", 1056 | "Time = Time.reshape(B,size,1)\n", 1057 | "Time[:,0] = 0\n", 1058 | "Time = Time.reshape(B*size,1)\n", 1059 | "X = np.concatenate((X,Time), axis=1)\n", 1060 | "\n", 1061 | "\n", 1062 | "X = torch.Tensor(X).cuda()\n", 1063 | "Time = torch.Tensor(Time).cuda()\n", 1064 | "mask = torch.zeros(B,size).cuda()\n", 1065 | " \n", 1066 | " \n", 1067 | "time_cost = 0\n", 1068 | "total_time_cost = torch.zeros(B).cuda()\n", 1069 | "T = torch.zeros(B).cuda()\n", 1070 | "R = torch.zeros(B).cuda()\n", 1071 | "reward = 0\n", 1072 | "\n", 1073 | "Y = X.view(B,size,3) # to the same batch size\n", 1074 | "Y_Time = Time\n", 1075 | "Time = Time.view(B,size)\n", 1076 | "x = Y[:,0,:]\n", 1077 | "h = None\n", 1078 | "c = None\n", 1079 | "\n", 1080 | "# print(Y.size())\n", 1081 | "\n", 1082 | "for k in range(size):\n", 1083 | " \n", 1084 | " output, h, c = model_high(x=x, input=X, h=h, c=c, mask=mask, latent=0)\n", 1085 | " \n", 1086 | " sampler = 
torch.distributions.Categorical(output)\n", 1087 | " # idx = sampler.sample() # now the idx has B elements\n", 1088 | " idx = torch.argmax(output, dim=1)\n", 1089 | "\n", 1090 | " Y1 = Y[[i for i in range(B)], idx.data]\n", 1091 | " \n", 1092 | " if k == 0:\n", 1093 | " Y_ini = Y1.clone()\n", 1094 | " if k > 0:\n", 1095 | " reward = torch.norm(Y1[:,:2]-Y0[:,:2], dim=1)\n", 1096 | "\n", 1097 | " Y0 = Y1.clone()\n", 1098 | " x = Y[[i for i in range(B)], idx.data]\n", 1099 | " \n", 1100 | " R += reward\n", 1101 | " total_time_cost += reward\n", 1102 | " \n", 1103 | " # enter time\n", 1104 | " time = Time[[i for i in range(B)], idx.data]\n", 1105 | "\n", 1106 | " # determine the total reward and current enter time\n", 1107 | " time_cost = torch.lt(total_time_cost, time).float()*(time - total_time_cost) \n", 1108 | "\n", 1109 | " \n", 1110 | " total_time_cost += time_cost\n", 1111 | " T += time_cost # total time cost\n", 1112 | "\n", 1113 | " \n", 1114 | " mask[[i for i in range(B)], idx.data] += -np.inf \n", 1115 | "\n", 1116 | " \n", 1117 | "R += torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 1118 | "total_time_cost += torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 1119 | "\n", 1120 | "\n", 1121 | "print(\"total time cost:\", total_time_cost.mean().item())\n", 1122 | "print(\"time cost:\", T.mean().item())\n", 1123 | "print(\"tour lenght:\", R.mean().item())" 1124 | ], 1125 | "execution_count": 0, 1126 | "outputs": [] 1127 | }, 1128 | { 1129 | "metadata": { 1130 | "id": "CA3yEdi4S4v_", 1131 | "colab_type": "text" 1132 | }, 1133 | "cell_type": "markdown", 1134 | "source": [ 1135 | "## held-out data test" 1136 | ] 1137 | }, 1138 | { 1139 | "metadata": { 1140 | "id": "5xfOqkVziW6_", 1141 | "colab_type": "code", 1142 | "colab": {} 1143 | }, 1144 | "cell_type": "code", 1145 | "source": [ 1146 | "from tqdm import tqdm\n", 1147 | "import torch\n", 1148 | "from torch.utils.data import Dataset\n", 1149 | "import numpy as np\n", 1150 | "import time as tm\n", 1151 | "from 
scipy.spatial import distance" 1152 | ], 1153 | "execution_count": 0, 1154 | "outputs": [] 1155 | }, 1156 | { 1157 | "metadata": { 1158 | "id": "hSjb19rLLuEz", 1159 | "colab_type": "code", 1160 | "colab": {} 1161 | }, 1162 | "cell_type": "code", 1163 | "source": [ 1164 | "# Training Data\n", 1165 | "class TSPDataset(Dataset):\n", 1166 | " \n", 1167 | " def __init__(self, dataset_fname=None, train=False, size=50, num_samples=100000, random_seed=1111):\n", 1168 | " super(TSPDataset, self).__init__()\n", 1169 | " \n", 1170 | " torch.manual_seed(random_seed)\n", 1171 | "\n", 1172 | " self.data_set = {\"x\":[], \"time\":[]}\n", 1173 | " \n", 1174 | " # randomly sample points uniformly from [0, 1]\n", 1175 | " for l in tqdm(range(num_samples)):\n", 1176 | " x = torch.FloatTensor(2, size).uniform_(0, 1)\n", 1177 | "# time = torch.cat((torch.FloatTensor(1, size).uniform_(0, 5), 1000 * torch.ones(1, size)), 0)\n", 1178 | " time = torch.cat((torch.FloatTensor(1, size).uniform_(0, 5), torch.FloatTensor(1, size).uniform_(1000, 1300)), 0)\n", 1179 | " time[0][0] = 0\n", 1180 | " \n", 1181 | " #x = torch.cat([start, x], 1)\n", 1182 | " self.data_set[\"x\"].append(x)\n", 1183 | " self.data_set[\"time\"].append(time)\n", 1184 | "\n", 1185 | " self.size = len(self.data_set)\n", 1186 | "\n", 1187 | " def __len__(self):\n", 1188 | " return self.size\n", 1189 | "\n", 1190 | " def __getitem__(self, idx):\n", 1191 | "\n", 1192 | " return self.data_set[\"x\"][idx], self.data_set[\"time\"][idx]" 1193 | ], 1194 | "execution_count": 0, 1195 | "outputs": [] 1196 | }, 1197 | { 1198 | "metadata": { 1199 | "id": "ws02ctL0uCw0", 1200 | "colab_type": "code", 1201 | "colab": {} 1202 | }, 1203 | "cell_type": "code", 1204 | "source": [ 1205 | "dataset = torch.load('drive/My Drive/GCN-CO/TSPTW50_in_U05_1000.pt')" 1206 | ], 1207 | "execution_count": 0, 1208 | "outputs": [] 1209 | }, 1210 | { 1211 | "metadata": { 1212 | "id": "j397WS0HiSwi", 1213 | "colab_type": "code", 1214 | "outputId": 
"fcf7c616-f95a-4727-9bd9-252045d3e99a", 1215 | "colab": { 1216 | "base_uri": "https://localhost:8080/", 1217 | "height": 68 1218 | } 1219 | }, 1220 | "cell_type": "code", 1221 | "source": [ 1222 | "B = 1000\n", 1223 | "size = 50\n", 1224 | "\n", 1225 | "X = torch.zeros(B, size, 3)\n", 1226 | "Time = torch.zeros(B, size)\n", 1227 | "for i in range(B):\n", 1228 | " X[i, :,:2] = dataset[i][0].transpose(1,0)\n", 1229 | " X[i, :,2:] = dataset[i][1].transpose(1,0)[:,0:1]\n", 1230 | " Time[i, :] = dataset[i][1].transpose(1,0)[:,0]\n", 1231 | "\n", 1232 | "X = X.view(B*size,3).cuda()\n", 1233 | "Time = Time.view(B*size).cuda()\n", 1234 | "mask = torch.zeros(B,size).cuda()\n", 1235 | " \n", 1236 | " \n", 1237 | "time_cost = 0\n", 1238 | "total_time_cost = torch.zeros(B).cuda()\n", 1239 | "T = torch.zeros(B).cuda()\n", 1240 | "R = torch.zeros(B).cuda()\n", 1241 | "reward = 0\n", 1242 | "\n", 1243 | "Y = X.view(B,size,3) # to the same batch size\n", 1244 | "Y_Time = Time\n", 1245 | "Time = Time.view(B,size)\n", 1246 | "x = Y[:,0,:]\n", 1247 | "h = None\n", 1248 | "c = None\n", 1249 | "\n", 1250 | "# print(Y.size())\n", 1251 | "\n", 1252 | "for k in range(size):\n", 1253 | " \n", 1254 | " output, h, c = model_high(x=x, input=X, h=h, c=c, mask=mask, latent=0)\n", 1255 | " \n", 1256 | " sampler = torch.distributions.Categorical(output)\n", 1257 | " # idx = sampler.sample() # now the idx has B elements\n", 1258 | " idx = torch.argmax(output, dim=1)\n", 1259 | "\n", 1260 | " Y1 = Y[[i for i in range(B)], idx.data]\n", 1261 | " \n", 1262 | " if k == 0:\n", 1263 | " Y_ini = Y1.clone()\n", 1264 | " if k > 0:\n", 1265 | " reward = torch.norm(Y1[:,:2]-Y0[:,:2], dim=1)\n", 1266 | "\n", 1267 | " Y0 = Y1.clone()\n", 1268 | " x = Y[[i for i in range(B)], idx.data]\n", 1269 | " \n", 1270 | " R += reward\n", 1271 | " total_time_cost += reward\n", 1272 | " \n", 1273 | " # enter time\n", 1274 | " time = Time[[i for i in range(B)], idx.data]\n", 1275 | "\n", 1276 | " # determine the total 
reward and current enter time\n", 1277 | " time_cost = torch.lt(total_time_cost, time).float()*(time - total_time_cost) \n", 1278 | "\n", 1279 | " \n", 1280 | " total_time_cost += time_cost\n", 1281 | " T += time_cost # total time cost\n", 1282 | "\n", 1283 | " \n", 1284 | " mask[[i for i in range(B)], idx.data] += -np.inf \n", 1285 | "\n", 1286 | " \n", 1287 | "R += torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 1288 | "total_time_cost += torch.norm(Y1[:,:2]-Y_ini[:,:2], dim=1)\n", 1289 | "\n", 1290 | "\n", 1291 | "print(\"total time cost:\", total_time_cost.mean().item())\n", 1292 | "print(\"time cost:\", T.mean().item())\n", 1293 | "print(\"tour lenght:\", R.mean().item())" 1294 | ], 1295 | "execution_count": 0, 1296 | "outputs": [ 1297 | { 1298 | "output_type": "stream", 1299 | "text": [ 1300 | "total time cost: 9.619950294494629\n", 1301 | "time cost: 0.9608845710754395\n", 1302 | "tour lenght: 8.659066200256348\n" 1303 | ], 1304 | "name": "stdout" 1305 | } 1306 | ] 1307 | } 1308 | ] 1309 | } -------------------------------------------------------------------------------- /Pointer-Net-Reproduce.ipynb: -------------------------------------------------------------------------------- 1 | {"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"reproduce_CO_v3.ipynb","version":"0.3.2","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"metadata":{"id":"FJONlHfivgMb","colab_type":"code","outputId":"d2813a56-b4a0-4874-9aa5-441dcd67c18f","executionInfo":{"status":"ok","timestamp":1554073489808,"user_tz":240,"elapsed":498,"user":{"displayName":"Qiang Ma","photoUrl":"https://lh6.googleusercontent.com/-xvqSKhxJSkY/AAAAAAAAAAI/AAAAAAAAAAs/aWtprnac56k/s64/photo.jpg","userId":"05776109497485911566"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"cell_type":"code","source":["from google.colab import 
drive\n","drive.mount('/content/drive/')"],"execution_count":0,"outputs":[{"output_type":"stream","text":["Drive already mounted at /content/drive/; to attempt to forcibly remount, call drive.mount(\"/content/drive/\", force_remount=True).\n"],"name":"stdout"}]},{"metadata":{"id":"d-Wlgv92vuWl","colab_type":"text"},"cell_type":"markdown","source":["# Generate Data"]},{"metadata":{"id":"trnXgxO8vt-7","colab_type":"code","colab":{}},"cell_type":"code","source":["import requests\n","from tqdm import tqdm\n","from torch.utils.data import Dataset\n","from torch.autograd import Variable\n","import torch\n","import os\n","import numpy as np\n","import re\n","import zipfile\n","import itertools\n","from collections import namedtuple\n","import matplotlib.pyplot as plt"],"execution_count":0,"outputs":[]},{"metadata":{"id":"XR6Myzfqv3q7","colab_type":"code","colab":{}},"cell_type":"code","source":["# Training Data\n","class TSPDataset(Dataset):\n"," \n"," def __init__(self, dataset_fname=None, train=False, size=50, num_samples=100000, random_seed=1111):\n"," super(TSPDataset, self).__init__()\n"," \n"," torch.manual_seed(random_seed)\n","\n"," self.data_set = []\n"," \n"," # randomly sample points uniformly from [0, 1]\n"," for l in tqdm(range(num_samples)):\n"," x = torch.FloatTensor(2, size).uniform_(0, 1)\n"," #x = torch.cat([start, x], 1)\n"," self.data_set.append(x)\n","\n"," self.size = len(self.data_set)\n","\n"," def __len__(self):\n"," return self.size\n","\n"," def __getitem__(self, idx):\n"," return self.data_set[idx]"],"execution_count":0,"outputs":[]},{"metadata":{"id":"iE1zTJ6Cv8uj","colab_type":"text"},"cell_type":"markdown","source":["## DataLoader"]},{"metadata":{"id":"QRn9F8c0v5cj","colab_type":"code","colab":{}},"cell_type":"code","source":["import pprint as pp\n","import numpy as np\n","\n","import torch\n","from torch.utils.data import 
DataLoader"],"execution_count":0,"outputs":[]},{"metadata":{"id":"EKO_ilm9v8B_","colab_type":"code","colab":{}},"cell_type":"code","source":["input_dim = 2\n","size = 50\n","train_size = 1280000\n","val_size = 1000\n","batch_size = 128"],"execution_count":0,"outputs":[]},{"metadata":{"id":"OnZurVCDwAkf","colab_type":"code","outputId":"04da8921-cd3e-4d3d-817c-e13266f8daed","executionInfo":{"status":"ok","timestamp":1554073514218,"user_tz":240,"elapsed":20025,"user":{"displayName":"Qiang Ma","photoUrl":"https://lh6.googleusercontent.com/-xvqSKhxJSkY/AAAAAAAAAAI/AAAAAAAAAAs/aWtprnac56k/s64/photo.jpg","userId":"05776109497485911566"}},"colab":{"base_uri":"https://localhost:8080/","height":51}},"cell_type":"code","source":["training_dataset = TSPDataset(train=True, size=size,\n"," num_samples=train_size)\n","\n","val_dataset = TSPDataset(train=True, size=size,\n"," num_samples=val_size)\n","\n","train_dataloader = DataLoader(training_dataset, batch_size=batch_size,\n"," shuffle=True, num_workers=4)\n","\n","val_dataloader = DataLoader(val_dataset, batch_size=int(batch_size/2), shuffle=True, num_workers=1)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["100%|██████████| 1280000/1280000 [00:08<00:00, 158112.40it/s]\n","100%|██████████| 1000/1000 [00:00<00:00, 127808.88it/s]\n"],"name":"stderr"}]},{"metadata":{"id":"EGdCTITmwsMs","colab_type":"text"},"cell_type":"markdown","source":["# Neural Net Model"]},{"metadata":{"id":"HcbzYnHNws5t","colab_type":"text"},"cell_type":"markdown","source":["## Encoder"]},{"metadata":{"id":"gzMgAKZpwB9y","colab_type":"code","colab":{}},"cell_type":"code","source":["import torch\n","import torch.nn as nn\n","import torch.autograd as autograd\n","from torch.autograd import Variable\n","import torch.nn.functional as F\n","import math\n","import numpy as np"],"execution_count":0,"outputs":[]},{"metadata":{"id":"uoejC9qZwwNT","colab_type":"code","colab":{}},"cell_type":"code","source":["class Encoder(nn.Module):\n"," 
\"\"\"Maps a graph represented as an input sequence\n"," to a hidden vector\"\"\"\n"," def __init__(self, input_dim, hidden_dim, use_cuda):\n"," super(Encoder, self).__init__()\n"," self.hidden_dim = hidden_dim\n"," self.lstm = nn.LSTM(input_dim, hidden_dim)\n"," self.use_cuda = use_cuda\n"," self.enc_init_state = self.init_hidden(hidden_dim)\n","\n"," def forward(self, x, hidden):\n"," output, hidden = self.lstm(x, hidden)\n"," return output, hidden\n"," \n"," def init_hidden(self, hidden_dim):\n"," \"\"\"Trainable initial hidden state\"\"\"\n"," enc_init_hx = Variable(torch.zeros(hidden_dim), requires_grad=False)\n"," if self.use_cuda:\n"," enc_init_hx = enc_init_hx.cuda()\n","\n"," #enc_init_hx.data.uniform_(-(1. / math.sqrt(hidden_dim)),\n"," # 1. / math.sqrt(hidden_dim))\n","\n"," enc_init_cx = Variable(torch.zeros(hidden_dim), requires_grad=False)\n"," if self.use_cuda:\n"," enc_init_cx = enc_init_cx.cuda()\n","\n"," #enc_init_cx = nn.Parameter(enc_init_cx)\n"," #enc_init_cx.data.uniform_(-(1. / math.sqrt(hidden_dim)),\n"," # 1. / math.sqrt(hidden_dim))\n"," return (enc_init_hx, enc_init_cx)"],"execution_count":0,"outputs":[]},{"metadata":{"id":"8a2Tgp9GwycP","colab_type":"text"},"cell_type":"markdown","source":["## Attention"]},{"metadata":{"id":"E7SbZzRswx_N","colab_type":"code","colab":{}},"cell_type":"code","source":["class Attention(nn.Module):\n"," \"\"\"A generic attention module for a decoder in seq2seq\"\"\"\n"," def __init__(self, dim, use_tanh=False, C=10, use_cuda=True):\n"," super(Attention, self).__init__()\n"," self.use_tanh = use_tanh\n"," # query matrix W_q\n"," self.project_query = nn.Linear(dim, dim)\n"," # refence matrix W_ref\n"," self.project_ref = nn.Conv1d(dim, dim, 1, 1)\n"," self.C = C # tanh exploration\n"," self.tanh = nn.Tanh()\n"," \n"," v = torch.FloatTensor(dim)\n"," if use_cuda:\n"," v = v.cuda() \n"," self.v = nn.Parameter(v)\n"," self.v.data.uniform_(-(1. / math.sqrt(dim)) , 1. 
/ math.sqrt(dim))\n"," \n"," def forward(self, query, ref):\n"," \"\"\"\n"," Args: \n"," query: is the hidden state of the decoder at the current\n"," time step. batch x dim\n"," ref: the set of hidden states from the encoder. \n"," sourceL x batch x hidden_dim\n"," \"\"\"\n"," # ref is now [batch_size x hidden_dim x sourceL]\n"," ref = ref.permute(1, 2, 0)\n"," q = self.project_query(query).unsqueeze(2) # batch x dim x 1\n"," e = self.project_ref(ref) # batch_size x hidden_dim x sourceL \n"," # expand the query by sourceL\n"," # batch x dim x sourceL\n"," expanded_q = q.repeat(1, 1, e.size(2)) \n"," # batch x 1 x hidden_dim\n"," v_view = self.v.unsqueeze(0).expand(\n"," expanded_q.size(0), len(self.v)).unsqueeze(1)\n"," # [batch_size x 1 x hidden_dim] * [batch_size x hidden_dim x sourceL]\n"," u = torch.bmm(v_view, self.tanh(expanded_q + e)).squeeze(1)\n"," if self.use_tanh:\n"," logits = self.C * self.tanh(u)\n"," else:\n"," logits = u \n"," \n"," # e is projected reference, logits is u\n"," # ref is used to compute glimpse: G = ref * softmax(u)\n"," \n"," # the logits has the same length with the number of cities\n"," # so softmax(logits) is exact the probability of each city\n"," return e, logits\n"],"execution_count":0,"outputs":[]},{"metadata":{"id":"sVm9-az2w5-k","colab_type":"text"},"cell_type":"markdown","source":["## Decoder"]},{"metadata":{"id":"X4zooEF2w3s-","colab_type":"code","colab":{}},"cell_type":"code","source":["class Decoder(nn.Module):\n"," def __init__(self, \n"," embedding_dim,\n"," hidden_dim,\n"," max_length,\n"," tanh_exploration,\n"," terminating_symbol,\n"," use_tanh,\n"," decode_type,\n"," n_glimpses=1,\n"," beam_size=0,\n"," use_cuda=True):\n"," super(Decoder, self).__init__()\n"," \n"," self.embedding_dim = embedding_dim\n"," self.hidden_dim = hidden_dim\n"," self.n_glimpses = n_glimpses\n"," self.max_length = max_length\n"," self.terminating_symbol = terminating_symbol \n"," self.decode_type = decode_type\n"," self.beam_size = 
beam_size\n"," self.use_cuda = use_cuda\n","\n"," self.input_weights = nn.Linear(embedding_dim, 4 * hidden_dim)\n"," self.hidden_weights = nn.Linear(hidden_dim, 4 * hidden_dim)\n","\n"," self.pointer = Attention(hidden_dim, use_tanh=use_tanh, C=tanh_exploration, use_cuda=self.use_cuda)\n"," self.glimpse = Attention(hidden_dim, use_tanh=False, use_cuda=self.use_cuda)\n"," self.sm = nn.Softmax(dim=-1)\n","\n"," def apply_mask_to_logits(self, step, logits, mask, prev_idxs): \n"," if mask is None:\n"," mask = torch.zeros(logits.size()).byte()\n"," if self.use_cuda:\n"," mask = mask.cuda()\n"," \n"," maskk = mask.clone()\n","\n"," # to prevent them from being reselected. \n"," # Or, allow re-selection and penalize in the objective function\n"," if prev_idxs is not None:\n"," # set most recently selected idx values to 1\n"," maskk[[x for x in range(logits.size(0))],\n"," prev_idxs.data] = 1\n"," # just implement the equation (8) in the paper\n"," logits[maskk] = -np.inf\n"," return logits, maskk\n","\n"," def forward(self, decoder_input, embedded_inputs, hidden, context):\n"," \"\"\"\n"," Args:\n"," decoder_input: The initial input to the decoder\n"," size is [batch_size x embedding_dim]. Trainable parameter.\n"," embedded_inputs: [sourceL x batch_size x embedding_dim]\n"," hidden: the prev hidden state, size is [batch_size x hidden_dim]. 
\n"," Initially this is set to (enc_h[-1], enc_c[-1])\n"," context: encoder outputs, [sourceL x batch_size x hidden_dim] \n"," \"\"\"\n"," def recurrence(x, hidden, logit_mask, prev_idxs, step):\n"," \n"," hx, cx = hidden # batch_size x hidden_dim\n"," \n"," gates = self.input_weights(x) + self.hidden_weights(hx)\n"," ingate, forgetgate, cellgate, outgate = gates.chunk(4, 1)\n","\n"," ingate = torch.sigmoid(ingate)\n"," forgetgate = torch.sigmoid(forgetgate)\n"," cellgate = torch.tanh(cellgate)\n"," outgate = torch.sigmoid(outgate)\n","\n"," cy = (forgetgate * cx) + (ingate * cellgate)\n"," hy = outgate * torch.tanh(cy) # batch_size x hidden_dim\n"," \n"," g_l = hy\n"," for i in range(self.n_glimpses):\n"," ref, logits = self.glimpse(g_l, context)\n"," logits, logit_mask = self.apply_mask_to_logits(step, logits, logit_mask, prev_idxs)\n"," # [batch_size x h_dim x sourceL] * [batch_size x sourceL x 1] = \n"," # [batch_size x h_dim x 1]\n"," g_l = torch.bmm(ref, self.sm(logits).unsqueeze(2)).squeeze(2) \n"," _, logits = self.pointer(g_l, context)\n"," \n"," # update the mask\n"," logits, logit_mask = self.apply_mask_to_logits(step, logits, logit_mask, prev_idxs)\n"," probs = self.sm(logits)\n"," return hy, cy, probs, logit_mask\n"," \n"," batch_size = context.size(1)\n"," outputs = []\n"," selections = []\n"," steps = range(self.max_length) # or until terminating symbol ?\n"," inps = []\n"," idxs = None\n"," mask = None\n"," \n"," if self.decode_type == \"stochastic\":\n"," # for loop to get all city coordinates\n"," for i in steps:\n"," # the first decoder_input is just a trainable parameters\n"," # the next decoder_input is the embedded_inputs[idxs]\n"," hx, cx, probs, mask = recurrence(decoder_input, hidden, mask, idxs, i)\n"," # hidden contains two tensors with size [batch_size x hidden_dim]\n"," # and the (hy, cy) is the next (hx, cx)\n"," # the first hidden comes from the encoder\n"," hidden = (hx, cx)\n"," # select the next inputs for the decoder [batch_size x 
hidden_dim]\n"," decoder_input, idxs = self.decode_stochastic(\n"," probs,\n"," embedded_inputs,\n"," selections)\n"," inps.append(decoder_input) \n"," # use outs to point to next object\n"," outputs.append(probs)\n"," selections.append(idxs)\n"," return (outputs, selections), hidden\n"," \n"," elif self.decode_type == \"greedy\":\n"," for i in steps:\n"," hx, cx, probs, mask = recurrence(decoder_input, hidden, mask, idxs, i)\n"," hidden = (hx, cx)\n"," decoder_input, idxs = self.decode_greedy(\n"," probs,\n"," embedded_inputs,\n"," selections)\n"," inps.append(decoder_input) \n"," outputs.append(probs)\n"," selections.append(idxs)\n"," \n"," return (outputs, selections), hidden\n"," \n"," def decode_stochastic(self, probs, embedded_inputs, selections):\n"," \"\"\"\n"," Return the next input for the decoder by selecting the \n"," input corresponding to the max output\n"," Args: \n"," probs: [batch_size x sourceL]\n"," embedded_inputs: [sourceL x batch_size x embedding_dim]\n"," selections: list of all of the previously selected indices during decoding\n"," Returns:\n"," Tensor of size [batch_size x sourceL] containing the embeddings\n"," from the inputs corresponding to the [batch_size] indices\n"," selected for this iteration of the decoding, as well as the \n"," corresponding indicies\n"," \"\"\"\n"," batch_size = probs.size(0)\n"," # idxs is [batch_size]\n"," # idxs = probs.multinomial().squeeze(1)\n"," # idxs is the sampled city index\n"," c = torch.distributions.Categorical(probs)\n"," \n"," idxs = c.sample()\n"," \n"," # due to race conditions, might need to resample here\n"," for old_idxs in selections:\n"," # compare new idxs\n"," # elementwise with the previous idxs. If any matches,\n"," # then need to resample\n"," if old_idxs.eq(idxs).data.any():\n"," print(' [!] 
resampling due to race condition')\n"," idxs = probs.multinomial().squeeze(1)\n"," break\n"," \n"," # embedded inputs are the embedded city coordinates, the next index is idxs,\n"," # so the next embedded input is embedded_inputs[idex]\n"," sels = embedded_inputs[idxs.data, [i for i in range(batch_size)], :] \n"," return sels, idxs\n"," \n"," def decode_greedy(self, probs, embedded_inputs, selections):\n","\n"," batch_size = probs.size(0)\n"," # idxs is [batch_size]\n"," # idxs is the greedy city index\n"," _, idxs = torch.max(probs,1)\n"," \n"," # embedded inputs are the embedded city coordinates, the next index is idxs,\n"," # so the next embedded input is embedded_inputs[idex]\n"," sels = embedded_inputs[idxs.data, [i for i in range(batch_size)], :] \n"," return sels, idxs"],"execution_count":0,"outputs":[]},{"metadata":{"id":"_Mp6qyxhxIR2","colab_type":"text"},"cell_type":"markdown","source":["## Pointer Network"]},{"metadata":{"id":"QGRfBUq0w8SR","colab_type":"code","colab":{}},"cell_type":"code","source":["class PointerNetwork(nn.Module):\n"," \"\"\"\n"," The pointer network, which is the core seq2seq model\n"," \"\"\"\n"," def __init__(self, \n"," embedding_dim,\n"," hidden_dim,\n"," max_decoding_len,\n"," terminating_symbol,\n"," n_glimpses,\n"," tanh_exploration,\n"," use_tanh,\n"," beam_size,\n"," use_cuda):\n"," super(PointerNetwork, self).__init__()\n","\n"," self.encoder = Encoder(\n"," embedding_dim,\n"," hidden_dim,\n"," use_cuda)\n","\n"," self.decoder = Decoder(\n"," embedding_dim,\n"," hidden_dim,\n"," max_length=max_decoding_len,\n"," tanh_exploration=tanh_exploration,\n"," use_tanh=use_tanh,\n"," terminating_symbol=terminating_symbol,\n"," decode_type=\"stochastic\",\n"," n_glimpses=n_glimpses,\n"," beam_size=beam_size,\n"," use_cuda=use_cuda)\n","\n"," # Trainable initial hidden states\n"," dec_in_0 = torch.FloatTensor(embedding_dim)\n"," if use_cuda:\n"," dec_in_0 = dec_in_0.cuda()\n","\n"," self.decoder_in_0 = nn.Parameter(dec_in_0)\n"," 
self.decoder_in_0.data.uniform_(-(1. / math.sqrt(embedding_dim)),\n"," 1. / math.sqrt(embedding_dim))\n"," \n"," def forward(self, inputs):\n"," \"\"\" Propagate inputs through the network\n"," Args: \n"," inputs: [sourceL x batch_size x embedding_dim]\n"," \"\"\"\n"," \n"," (encoder_hx, encoder_cx) = self.encoder.enc_init_state\n"," encoder_hx = encoder_hx.unsqueeze(0).repeat(inputs.size(1), 1).unsqueeze(0) \n"," encoder_cx = encoder_cx.unsqueeze(0).repeat(inputs.size(1), 1).unsqueeze(0) \n"," \n"," # encoder forward pass\n"," enc_h, (enc_h_t, enc_c_t) = self.encoder(inputs, (encoder_hx, encoder_cx))\n","\n"," dec_init_state = (enc_h_t[-1], enc_c_t[-1])\n"," \n"," '''\n"," The first decoder_input is just the random trainable parameters\n"," \n"," The inputs are the embedded x (city coordinates)\n"," \n"," The initial decoder hidden states are the first encoded hidden variable\n"," \n"," The encoded latent variable is passed in decoder as the context, also\n"," as the reference in the attention\n"," '''\n"," \n"," # repeat decoder_in_0 across batch\n"," decoder_input = self.decoder_in_0.unsqueeze(0).repeat(inputs.size(1), 1)\n"," \n"," (pointer_probs, input_idxs), dec_hidden_t = self.decoder(decoder_input,\n"," inputs,\n"," dec_init_state,\n"," enc_h)\n"," \n"," return pointer_probs, input_idxs"],"execution_count":0,"outputs":[]},{"metadata":{"id":"BUXUKmjGxLlZ","colab_type":"text"},"cell_type":"markdown","source":["## Critic Network"]},{"metadata":{"id":"GN9PmTA8xJ0N","colab_type":"code","colab":{}},"cell_type":"code","source":["class CriticNetwork(nn.Module):\n"," \"\"\"Useful as a baseline in REINFORCE updates\"\"\"\n"," def __init__(self,\n"," embedding_dim,\n"," hidden_dim,\n"," n_process_block_iters,\n"," tanh_exploration,\n"," use_tanh,\n"," use_cuda):\n"," super(CriticNetwork, self).__init__()\n"," \n"," self.hidden_dim = hidden_dim\n"," self.n_process_block_iters = n_process_block_iters\n","\n"," self.encoder = Encoder(\n"," embedding_dim,\n"," 
hidden_dim,\n"," use_cuda)\n"," \n"," self.process_block = Attention(hidden_dim,\n"," use_tanh=use_tanh, C=tanh_exploration, use_cuda=use_cuda)\n"," self.sm = nn.Softmax(dim=-1)\n"," self.decoder = nn.Sequential(\n"," nn.Linear(hidden_dim, hidden_dim),\n"," nn.ReLU(),\n"," nn.Linear(hidden_dim, 1)\n"," )\n","\n"," def forward(self, inputs):\n"," \"\"\"\n"," Args:\n"," inputs: [embedding_dim x batch_size x sourceL] of embedded inputs\n"," \"\"\"\n"," \n"," (encoder_hx, encoder_cx) = self.encoder.enc_init_state\n"," encoder_hx = encoder_hx.unsqueeze(0).repeat(inputs.size(1), 1).unsqueeze(0)\n"," encoder_cx = encoder_cx.unsqueeze(0).repeat(inputs.size(1), 1).unsqueeze(0) \n"," \n"," # encoder forward pass\n"," enc_outputs, (enc_h_t, enc_c_t) = self.encoder(inputs, (encoder_hx, encoder_cx))\n"," \n"," # grab the hidden state and process it via the process block \n"," process_block_state = enc_h_t[-1]\n"," for i in range(self.n_process_block_iters):\n"," ref, logits = self.process_block(process_block_state, enc_outputs)\n"," process_block_state = torch.bmm(ref, self.sm(logits).unsqueeze(2)).squeeze(2)\n"," # produce the final scalar output\n"," out = self.decoder(process_block_state)\n"," return out"],"execution_count":0,"outputs":[]},{"metadata":{"id":"SG_tYpghxPep","colab_type":"text"},"cell_type":"markdown","source":["## Final Model"]},{"metadata":{"id":"T5Em4wX8xNBf","colab_type":"code","colab":{}},"cell_type":"code","source":["class NeuralCombOptRL(nn.Module):\n"," \"\"\"\n"," This module contains the PointerNetwork (actor) and\n"," CriticNetwork (critic). 
It requires\n"," an application-specific reward function\n"," \"\"\"\n"," def __init__(self, \n"," input_dim,\n"," embedding_dim,\n"," hidden_dim,\n"," max_decoding_len,\n"," terminating_symbol,\n"," n_glimpses,\n"," n_process_block_iters,\n"," tanh_exploration,\n"," use_tanh,\n"," beam_size,\n"," objective_fn,\n"," is_train,\n"," use_cuda):\n"," super(NeuralCombOptRL, self).__init__()\n"," self.objective_fn = objective_fn\n"," self.input_dim = input_dim\n"," self.is_train = is_train\n"," self.use_cuda = use_cuda\n","\n"," \n"," self.actor_net = PointerNetwork(\n"," embedding_dim,\n"," hidden_dim,\n"," max_decoding_len,\n"," terminating_symbol,\n"," n_glimpses,\n"," tanh_exploration,\n"," use_tanh,\n"," beam_size,\n"," use_cuda)\n"," \n"," #self.critic_net = CriticNetwork(\n"," # embedding_dim,\n"," # hidden_dim,\n"," # n_process_block_iters,\n"," # tanh_exploration,\n"," # False,\n"," # use_cuda)\n"," \n"," embedding_ = torch.FloatTensor(input_dim,\n"," embedding_dim)\n"," if self.use_cuda: \n"," embedding_ = embedding_.cuda()\n"," self.embedding = nn.Parameter(embedding_) \n"," self.embedding.data.uniform_(-(1. / math.sqrt(embedding_dim)),\n"," 1. 
/ math.sqrt(embedding_dim))\n","\n"," def forward(self, inputs):\n"," \"\"\"\n"," Args:\n"," inputs: [batch_size, input_dim, sourceL]\n"," \"\"\"\n"," batch_size = inputs.size(0)\n"," input_dim = inputs.size(1)\n"," sourceL = inputs.size(2)\n","\n"," # repeat embeddings across batch_size\n"," # result is [batch_size x input_dim x embedding_dim]\n"," embedding = self.embedding.repeat(batch_size, 1, 1) \n"," embedded_inputs = []\n"," # result is [batch_size, 1, input_dim, sourceL] \n"," ips = inputs.unsqueeze(1)\n"," \n"," for i in range(sourceL):\n"," # [batch_size x 1 x input_dim] * [batch_size x input_dim x embedding_dim]\n"," # result is [batch_size, embedding_dim]\n"," embedded_inputs.append(torch.bmm(\n"," ips[:, :, :, i].float(),\n"," embedding).squeeze(1))\n","\n"," # Result is [sourceL x batch_size x embedding_dim]\n"," embedded_inputs = torch.cat(embedded_inputs).view(\n"," sourceL,\n"," batch_size,\n"," embedding.size(2))\n","\n"," # query the actor net for the input indices \n"," # making up the output, and the pointer attn \n"," probs_, action_idxs = self.actor_net(embedded_inputs)\n"," \n"," # Select the actions (inputs pointed to \n"," # by the pointer net) and the corresponding\n"," # logits\n"," # should be size [batch_size x \n"," actions = []\n"," # inputs is [batch_size, input_dim, sourceL]\n"," inputs_ = inputs.transpose(1, 2)\n"," # inputs_ is [batch_size, sourceL, input_dim]\n"," for action_id in action_idxs:\n"," actions.append(inputs_[[x for x in range(batch_size)], action_id.data, :])\n","\n"," if self.is_train:\n"," # probs_ is a list of len sourceL of [batch_size x sourceL]\n"," probs = []\n"," for prob, action_id in zip(probs_, action_idxs):\n"," probs.append(prob[[x for x in range(batch_size)], action_id.data])\n"," else:\n"," # return the list of len sourceL of [batch_size x sourceL]\n"," probs = probs_\n","\n"," # get the critic value fn estimates for the baseline\n"," # [batch_size]\n"," #v = self.critic_net(embedded_inputs)\n"," 
\n","    # [batch_size]\n","    R = self.objective_fn(actions, self.use_cuda)\n","    \n","    #return R, v, probs, actions, action_idxs\n","    return R, probs, actions, action_idxs"],"execution_count":0,"outputs":[]},{"metadata":{"id":"uTsf6pGJxSAe","colab_type":"code","colab":{}},"cell_type":"code","source":["def reward(sample_solution, USE_CUDA=False):\n","    \"\"\"\n","    Args:\n","        List of length sourceL of [batch_size] Tensors\n","    Returns:\n","        Tensor of shape [batch_size] contains rewards\n","    \"\"\"\n","    batch_size = sample_solution[0].size(0)\n","    n = len(sample_solution)\n","    tour_len = Variable(torch.zeros([batch_size]))\n","    \n","    if USE_CUDA:\n","        tour_len = tour_len.cuda()\n","\n","    for i in range(n-1):\n","        tour_len += torch.norm(sample_solution[i] - sample_solution[i+1], dim=1)\n","    \n","    tour_len += torch.norm(sample_solution[n-1] - sample_solution[0], dim=1)\n","\n","    # For TSP_20 - map to a number between 0 and 1\n","    # min_len = 3.5\n","    # max_len = 10.\n","    # TODO: generalize this for any TSP size\n","    #tour_len = -0.1538*tour_len + 1.538 \n","    #tour_len[tour_len < 0.] 
= 0.\n"," return tour_len"],"execution_count":0,"outputs":[]},{"metadata":{"id":"k_VQmPNFxVxP","colab_type":"code","colab":{}},"cell_type":"code","source":[""],"execution_count":0,"outputs":[]},{"metadata":{"id":"boxbNGBxxXSU","colab_type":"text"},"cell_type":"markdown","source":["# Training"]},{"metadata":{"id":"P4xJkWPxxVu6","colab_type":"code","outputId":"60028e16-f649-4635-d053-336300043b32","executionInfo":{"status":"ok","timestamp":1554073534320,"user_tz":240,"elapsed":972,"user":{"displayName":"Qiang Ma","photoUrl":"https://lh6.googleusercontent.com/-xvqSKhxJSkY/AAAAAAAAAAI/AAAAAAAAAAs/aWtprnac56k/s64/photo.jpg","userId":"05776109497485911566"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"cell_type":"code","source":["import os\n","from tqdm import tqdm \n","\n","import pprint as pp\n","import numpy as np\n","\n","import torch\n","print(torch.__version__)\n","import torch.optim as optim\n","import torch.autograd as autograd\n","from torch.optim import lr_scheduler\n","from torch.autograd import Variable\n","from torch.utils.data import DataLoader\n","from tqdm import tqdm_notebook"],"execution_count":0,"outputs":[{"output_type":"stream","text":["1.0.1.post2\n"],"name":"stdout"}]},{"metadata":{"id":"MYWYhTqVxVsk","colab_type":"code","colab":{}},"cell_type":"code","source":["input_dim = 2\n","embedding_dim = 128\n","hidden_dim = 128\n","max_decoding_len = 50\n","terminating_symbol = ''\n","n_glimpses = 1\n","n_process_block_iters = 3\n","tanh_exploration = 10 \n","use_tanh = True\n","beam_size = 1\n","objective_fn = reward\n","is_train = True\n","use_cuda = True"],"execution_count":0,"outputs":[]},{"metadata":{"id":"NzD1ULP3xVqF","colab_type":"code","colab":{}},"cell_type":"code","source":["# build model\n","model = NeuralCombOptRL(input_dim, embedding_dim, hidden_dim,\n"," max_decoding_len, terminating_symbol,\n"," n_glimpses, n_process_block_iters, \n"," tanh_exploration, use_tanh, beam_size,\n"," objective_fn, is_train, 
use_cuda)"],"execution_count":0,"outputs":[]},{"metadata":{"id":"8VBssLWXxVnj","colab_type":"code","colab":{}},"cell_type":"code","source":["# the critic baseline\n","critic_exp_mvg_avg = torch.zeros(1)\n","beta = 0.8\n","\n","if use_cuda:\n","    model = model.cuda()\n","    #critic_mse = critic_mse.cuda()\n","    critic_exp_mvg_avg = critic_exp_mvg_avg.cuda()"],"execution_count":0,"outputs":[]},{"metadata":{"id":"bLv1ugjgxT4O","colab_type":"code","colab":{}},"cell_type":"code","source":["# The low learning rate doesn't work\n","actor_net_lr = 1e-3\n","critic_net_lr = 1e-3\n","actor_lr_decay_step = 5000\n","critic_lr_decay_step = 5000\n","actor_lr_decay_rate = 0.96\n","critic_lr_decay_rate = 0.96"],"execution_count":0,"outputs":[]},{"metadata":{"id":"32DqDIRLxiJ3","colab_type":"code","colab":{}},"cell_type":"code","source":["#critic_mse = torch.nn.MSELoss()\n","#critic_optim = optim.Adam(model.critic_net.parameters(), lr=float(args['critic_net_lr']))\n","actor_optim = optim.Adam(model.actor_net.parameters(), lr=actor_net_lr)\n","\n","actor_scheduler = lr_scheduler.MultiStepLR(actor_optim,\n","                        range(actor_lr_decay_step, actor_lr_decay_step * 1000,actor_lr_decay_step),\n","                        gamma=actor_lr_decay_rate)\n","\n","#critic_scheduler = lr_scheduler.MultiStepLR(critic_optim,\n","#                        range(int(args['critic_lr_decay_step']), int(args['critic_lr_decay_step']) * 1000,\n","#                        int(args['critic_lr_decay_step'])), gamma=float(args['critic_lr_decay_rate']))\n"],"execution_count":0,"outputs":[]},{"metadata":{"id":"DLfFI8aRxjgR","colab_type":"code","outputId":"448bf5a5-6f96-4c27-acaf-8c5880bda83a","executionInfo":{"status":"ok","timestamp":1554079501208,"user_tz":240,"elapsed":5957384,"user":{"displayName":"Qiang Ma","photoUrl":"https://lh6.googleusercontent.com/-xvqSKhxJSkY/AAAAAAAAAAI/AAAAAAAAAAs/aWtprnac56k/s64/photo.jpg","userId":"05776109497485911566"}},"colab":{"base_uri":"https://localhost:8080/","height":3505}},"cell_type":"code","source":["step = 0\n","log_step = 100\n","epoch = 
0\n","n_epoch = 1\n","Reward = []\n","for i in range(epoch, epoch + n_epoch):\n","\n"," # put in train mode!\n"," model.train()\n"," # model.actor_net.decoder.decode_type = \"greedy\"\n"," # sample_batch is [batch_size x input_dim x sourceL]\n"," for batch_id, sample_batch in enumerate(train_dataloader):\n","\n"," bat = Variable(sample_batch)\n"," if use_cuda:\n"," bat = bat.cuda()\n","\n"," R, probs, actions, actions_idxs = model(bat)\n"," \n"," if batch_id == 0:\n"," critic_exp_mvg_avg = R.mean()\n"," else:\n"," # update critic baseline\n"," critic_exp_mvg_avg = (critic_exp_mvg_avg * beta) + ((1. - beta) * R.mean())\n","\n"," advantage = R - critic_exp_mvg_avg\n","\n"," # log probability\n"," logprobs = 0\n"," nll = 0\n"," entropy = 0\n"," # the size of probs (50 * batchsize)\n"," for prob in probs: \n"," # compute the sum of the log probability\n"," # for each tour in the batch\n"," logprob = torch.log(prob)\n"," nll += -logprob\n"," logprobs += logprob\n"," \n"," # guard against nan\n"," nll[(nll != nll).detach()] = 0.\n"," # clamp any -inf's to 0 to throw away this tour\n"," logprobs[(logprobs < -1000).detach()] = 0.\n","\n"," # multiply each time step by the advantage\n"," # advantage = Reward-Critic\n"," # reinforce = (R-b)*log(p)\n"," # R-b does not have gradient\n"," # gradient = (R-b)*d log(p) \n"," \n"," reinforce = advantage * logprobs\n"," actor_loss = reinforce.mean()\n","\n"," actor_optim.zero_grad()\n"," \n"," actor_loss.backward()\n","\n"," max_grad_norm = 1.0\n"," # clip gradient norms\n"," # to avoid too large gradient\n"," torch.nn.utils.clip_grad_norm_(model.actor_net.parameters(),\n"," max_grad_norm, norm_type=2)\n","\n"," actor_optim.step()\n"," actor_scheduler.step()\n","\n"," critic_exp_mvg_avg = critic_exp_mvg_avg.detach()\n","\n"," #critic_scheduler.step()\n","\n"," #R = R.detach()\n"," #critic_loss = critic_mse(v.squeeze(1), R)\n"," #critic_optim.zero_grad()\n"," #critic_loss.backward()\n"," \n"," 
#torch.nn.utils.clip_grad_norm_(model.critic_net.parameters(),\n"," # float(args['max_grad_norm']), norm_type=2)\n","\n"," #critic_optim.step()\n"," \n"," step += 1\n","\n"," if step % log_step == 0:\n"," print('epoch: {}, train_batch_id: {}, avg_reward: {}'.format(\n"," i, batch_id, R.mean().item()))\n"," example_output = []\n"," example_input = []\n"," for idx, action in enumerate(actions):\n"," example_output.append(actions_idxs[idx][0].item())\n"," example_input.append(sample_batch[0, :, idx][0])\n"," # print('Example train input: {}'.format(example_input))\n"," print('Example train output: {}'.format(example_output))\n"," Reward.append(R.mean().item())\n","\n"," # validation\n"," avg_reward = []\n"," val_step = 0\n"," model.eval()\n"," model.actor_net.decoder.decode_type = \"greedy\"\n"," \n"," for batch_id, val_batch in enumerate(tqdm_notebook(val_dataloader)):\n"," bat = Variable(val_batch)\n"," \n"," if use_cuda:\n"," bat = bat.cuda()\n"," \n"," R, probs, actions, action_idxs = model(bat)\n"," avg_reward.append(R.mean().cpu().numpy())\n"," val_step += 1.\n"," \n"," print('Validation overall avg_reward: {}'.format(np.mean(avg_reward)))\n"," print('Validation overall reward var: {}'.format(np.var(avg_reward)))\n"," \n"," # before next training loop \n"," model.actor_net.decoder.decode_type = \"stochastic\"\n"," # generate new data\n"," training_dataset = TSPDataset(train=True, size=size,\n"," num_samples=train_size)\n"," train_dataloader = DataLoader(training_dataset, batch_size=batch_size,\n"," shuffle=True, num_workers=1)"],"execution_count":0,"outputs":[{"output_type":"stream","text":["epoch: 0, train_batch_id: 99, avg_reward: 17.68767547607422\n","Example train output: [15, 20, 30, 32, 37, 34, 23, 33, 45, 2, 7, 13, 1, 49, 40, 43, 36, 28, 26, 8, 38, 21, 39, 18, 10, 27, 4, 48, 11, 29, 47, 5, 22, 46, 3, 24, 41, 14, 35, 0, 9, 19, 17, 42, 31, 12, 16, 6, 44, 25]\n","epoch: 0, train_batch_id: 199, avg_reward: 17.257694244384766\n","Example train output: [20, 14, 
43, 41, 32, 12, 38, 48, 21, 0, 44, 28, 11, 40, 24, 35, 42, 25, 22, 7, 45, 33, 49, 34, 3, 16, 4, 26, 30, 47, 37, 10, 18, 6, 8, 29, 19, 39, 31, 5, 9, 27, 13, 1, 15, 17, 23, 46, 2, 36]\n","epoch: 0, train_batch_id: 299, avg_reward: 17.48166275024414\n","Example train output: [42, 20, 11, 46, 14, 43, 13, 39, 38, 29, 44, 26, 19, 22, 17, 37, 15, 2, 16, 34, 7, 47, 8, 5, 24, 30, 12, 32, 40, 33, 21, 45, 23, 18, 10, 28, 48, 36, 9, 0, 49, 3, 4, 6, 35, 1, 31, 25, 27, 41]\n","epoch: 0, train_batch_id: 399, avg_reward: 17.0690860748291\n","Example train output: [33, 44, 17, 9, 49, 46, 47, 45, 39, 41, 38, 37, 3, 7, 20, 16, 25, 12, 14, 32, 13, 15, 21, 29, 31, 28, 19, 23, 1, 27, 18, 24, 5, 6, 26, 0, 36, 22, 8, 40, 10, 42, 30, 43, 2, 4, 48, 35, 11, 34]\n","epoch: 0, train_batch_id: 499, avg_reward: 16.97426986694336\n","Example train output: [31, 49, 21, 25, 38, 11, 41, 48, 37, 23, 27, 35, 30, 19, 15, 16, 46, 43, 26, 8, 20, 17, 29, 10, 40, 28, 0, 9, 33, 5, 1, 34, 12, 13, 39, 18, 24, 4, 22, 42, 7, 47, 44, 2, 32, 45, 14, 6, 3, 36]\n","epoch: 0, train_batch_id: 599, avg_reward: 17.267196655273438\n","Example train output: [8, 38, 7, 44, 27, 3, 45, 41, 16, 29, 15, 36, 39, 20, 49, 5, 24, 22, 18, 11, 17, 13, 46, 32, 40, 21, 6, 30, 4, 25, 28, 12, 42, 1, 31, 35, 0, 23, 19, 26, 9, 33, 2, 43, 48, 47, 10, 34, 14, 37]\n","epoch: 0, train_batch_id: 699, avg_reward: 17.793434143066406\n","Example train output: [46, 20, 11, 36, 17, 18, 43, 35, 40, 1, 44, 16, 13, 23, 33, 12, 29, 49, 30, 38, 45, 28, 21, 22, 27, 39, 10, 25, 47, 41, 19, 34, 6, 32, 7, 14, 24, 5, 0, 15, 4, 3, 2, 9, 31, 37, 8, 48, 26, 42]\n","epoch: 0, train_batch_id: 799, avg_reward: 17.213123321533203\n","Example train output: [43, 18, 1, 23, 2, 32, 34, 7, 35, 19, 14, 6, 13, 29, 16, 26, 4, 44, 41, 30, 46, 0, 5, 45, 8, 20, 25, 24, 12, 36, 39, 15, 49, 17, 37, 28, 9, 47, 3, 40, 38, 27, 22, 10, 48, 21, 33, 42, 11, 31]\n","epoch: 0, train_batch_id: 899, avg_reward: 17.36566162109375\n","Example train output: [32, 37, 20, 25, 4, 29, 48, 3, 
9, 49, 18, 47, 17, 36, 14, 2, 8, 31, 21, 33, 30, 44, 22, 38, 16, 13, 41, 10, 23, 1, 35, 46, 7, 15, 39, 0, 34, 43, 5, 12, 24, 45, 26, 42, 27, 28, 6, 11, 19, 40]\n","epoch: 0, train_batch_id: 999, avg_reward: 17.21158218383789\n","Example train output: [49, 34, 14, 9, 25, 28, 48, 32, 19, 38, 17, 41, 0, 6, 40, 5, 36, 42, 27, 12, 8, 22, 13, 29, 1, 3, 44, 47, 20, 15, 33, 2, 16, 46, 11, 23, 43, 35, 37, 4, 31, 39, 18, 21, 7, 10, 30, 26, 24, 45]\n","epoch: 0, train_batch_id: 1099, avg_reward: 17.273561477661133\n","Example train output: [47, 13, 1, 27, 35, 30, 39, 12, 34, 17, 14, 36, 3, 33, 22, 2, 31, 45, 42, 44, 28, 19, 25, 4, 26, 18, 5, 6, 15, 43, 23, 9, 32, 16, 21, 0, 40, 24, 37, 41, 49, 48, 7, 10, 8, 11, 38, 20, 29, 46]\n","epoch: 0, train_batch_id: 1199, avg_reward: 17.135452270507812\n","Example train output: [0, 14, 35, 13, 2, 1, 23, 19, 10, 27, 46, 8, 26, 38, 9, 24, 48, 20, 4, 12, 3, 34, 42, 15, 40, 39, 30, 6, 45, 31, 17, 28, 44, 32, 36, 29, 18, 47, 25, 43, 16, 11, 49, 33, 37, 5, 7, 22, 41, 21]\n","epoch: 0, train_batch_id: 1299, avg_reward: 16.924787521362305\n","Example train output: [30, 22, 32, 31, 3, 24, 10, 34, 17, 5, 33, 7, 12, 27, 6, 40, 11, 36, 8, 29, 19, 23, 1, 46, 15, 13, 26, 39, 2, 16, 49, 41, 28, 47, 21, 4, 25, 48, 9, 38, 43, 14, 20, 42, 18, 35, 0, 44, 45, 37]\n","epoch: 0, train_batch_id: 1399, avg_reward: 17.108524322509766\n","Example train output: [18, 43, 37, 31, 47, 28, 41, 20, 33, 19, 42, 44, 34, 17, 8, 1, 29, 15, 40, 48, 13, 49, 14, 9, 6, 39, 11, 22, 12, 10, 45, 36, 26, 30, 16, 21, 5, 3, 4, 24, 2, 46, 32, 0, 35, 25, 23, 38, 7, 27]\n","epoch: 0, train_batch_id: 1499, avg_reward: 17.303258895874023\n","Example train output: [10, 32, 16, 44, 3, 30, 11, 39, 6, 47, 31, 18, 19, 25, 4, 40, 35, 28, 26, 34, 48, 27, 17, 24, 42, 7, 38, 43, 45, 21, 49, 13, 9, 1, 22, 15, 5, 33, 14, 23, 0, 12, 20, 8, 37, 2, 41, 46, 36, 29]\n","epoch: 0, train_batch_id: 1599, avg_reward: 17.257083892822266\n","Example train output: [8, 38, 9, 25, 27, 6, 40, 15, 36, 22, 24, 3, 
49, 10, 46, 20, 42, 2, 30, 37, 18, 5, 28, 41, 21, 7, 34, 47, 43, 29, 12, 17, 23, 16, 48, 13, 31, 0, 14, 33, 35, 11, 19, 26, 45, 44, 39, 1, 4, 32]\n","epoch: 0, train_batch_id: 1699, avg_reward: 16.959808349609375\n","Example train output: [38, 34, 15, 9, 4, 24, 0, 43, 13, 25, 42, 44, 48, 35, 7, 46, 26, 33, 22, 6, 10, 49, 28, 20, 12, 29, 27, 32, 14, 39, 45, 3, 18, 19, 8, 36, 21, 30, 31, 41, 47, 2, 1, 16, 23, 5, 11, 17, 40, 37]\n","epoch: 0, train_batch_id: 1799, avg_reward: 17.068336486816406\n","Example train output: [29, 47, 41, 45, 32, 14, 20, 4, 26, 23, 44, 7, 34, 31, 3, 22, 27, 16, 39, 30, 49, 15, 43, 35, 18, 33, 46, 17, 37, 21, 38, 0, 48, 12, 13, 5, 36, 1, 6, 11, 2, 19, 10, 8, 40, 28, 42, 25, 9, 24]\n","epoch: 0, train_batch_id: 1899, avg_reward: 17.474180221557617\n","Example train output: [39, 0, 29, 30, 47, 36, 25, 1, 2, 24, 44, 16, 5, 23, 40, 3, 19, 31, 49, 26, 28, 20, 4, 6, 27, 15, 9, 10, 48, 37, 8, 33, 46, 17, 7, 11, 14, 18, 12, 38, 21, 41, 42, 13, 35, 34, 43, 22, 32, 45]\n","epoch: 0, train_batch_id: 1999, avg_reward: 18.319530487060547\n","Example train output: [21, 24, 36, 1, 46, 32, 49, 8, 40, 17, 43, 4, 42, 31, 26, 28, 38, 44, 9, 16, 30, 47, 19, 6, 33, 15, 22, 29, 34, 7, 41, 5, 37, 35, 25, 3, 10, 48, 11, 12, 27, 45, 2, 0, 14, 18, 13, 23, 39, 20]\n","epoch: 0, train_batch_id: 2099, avg_reward: 16.942581176757812\n","Example train output: [20, 47, 25, 19, 24, 1, 16, 26, 21, 48, 7, 30, 4, 9, 2, 8, 33, 46, 14, 18, 0, 42, 23, 11, 28, 45, 29, 6, 34, 13, 41, 5, 10, 12, 40, 49, 31, 32, 3, 17, 37, 36, 22, 43, 44, 35, 15, 39, 38, 27]\n","epoch: 0, train_batch_id: 2199, avg_reward: 17.265579223632812\n","Example train output: [14, 49, 10, 0, 38, 8, 9, 39, 11, 3, 28, 41, 27, 33, 36, 5, 24, 23, 6, 37, 45, 20, 31, 4, 7, 15, 48, 34, 22, 16, 1, 29, 21, 25, 35, 12, 30, 42, 13, 17, 32, 2, 47, 40, 18, 26, 46, 19, 43, 44]\n","epoch: 0, train_batch_id: 2299, avg_reward: 17.329673767089844\n","Example train output: [36, 34, 16, 14, 38, 30, 43, 4, 2, 6, 49, 24, 7, 18, 26, 
40, 32, 27, 10, 47, 9, 19, 23, 11, 5, 12, 37, 45, 20, 8, 39, 1, 48, 22, 28, 29, 0, 31, 33, 13, 44, 41, 15, 42, 3, 21, 46, 25, 35, 17]\n","epoch: 0, train_batch_id: 2399, avg_reward: 17.97271728515625\n","Example train output: [15, 7, 27, 5, 1, 26, 46, 44, 2, 6, 49, 43, 45, 16, 18, 12, 37, 11, 36, 41, 25, 20, 31, 4, 24, 29, 0, 3, 17, 14, 34, 13, 22, 33, 30, 32, 8, 48, 9, 19, 47, 21, 35, 42, 28, 40, 39, 10, 38, 23]\n","epoch: 0, train_batch_id: 2499, avg_reward: 16.934507369995117\n","Example train output: [14, 19, 12, 28, 11, 6, 9, 10, 7, 41, 37, 22, 18, 29, 32, 5, 35, 4, 23, 44, 17, 45, 3, 26, 16, 1, 39, 34, 15, 48, 31, 38, 49, 42, 21, 46, 40, 0, 47, 20, 33, 36, 13, 2, 24, 43, 8, 27, 25, 30]\n","epoch: 0, train_batch_id: 2599, avg_reward: 16.64907455444336\n","Example train output: [48, 18, 43, 22, 30, 16, 3, 32, 23, 1, 0, 47, 14, 49, 27, 34, 11, 10, 17, 21, 2, 42, 20, 45, 13, 35, 6, 37, 8, 19, 24, 39, 36, 28, 15, 29, 41, 38, 26, 5, 9, 33, 7, 46, 12, 40, 44, 4, 25, 31]\n","epoch: 0, train_batch_id: 2699, avg_reward: 16.849361419677734\n","Example train output: [21, 27, 4, 38, 17, 6, 14, 2, 32, 39, 11, 28, 19, 12, 5, 31, 10, 7, 42, 9, 16, 49, 15, 41, 44, 47, 20, 26, 0, 36, 37, 13, 46, 34, 18, 35, 48, 1, 24, 33, 25, 29, 8, 30, 40, 23, 22, 43, 3, 45]\n","epoch: 0, train_batch_id: 2799, avg_reward: 18.169694900512695\n","Example train output: [29, 0, 41, 26, 22, 19, 40, 44, 33, 20, 46, 48, 39, 38, 1, 18, 6, 15, 42, 7, 14, 49, 16, 28, 5, 11, 32, 13, 25, 17, 47, 30, 23, 37, 45, 8, 21, 34, 12, 31, 36, 2, 43, 10, 27, 24, 35, 4, 9, 3]\n","epoch: 0, train_batch_id: 2899, avg_reward: 17.602397918701172\n","Example train output: [46, 44, 24, 29, 22, 9, 13, 17, 34, 8, 43, 25, 5, 42, 40, 12, 30, 36, 37, 49, 41, 32, 10, 18, 35, 2, 4, 28, 3, 38, 27, 48, 6, 21, 0, 14, 31, 33, 39, 47, 15, 1, 23, 20, 26, 11, 45, 16, 7, 19]\n","epoch: 0, train_batch_id: 2999, avg_reward: 17.306486129760742\n","Example train output: [8, 34, 3, 38, 39, 20, 2, 43, 4, 48, 31, 42, 1, 5, 30, 46, 15, 22, 47, 
33, 32, 29, 26, 23, 6, 27, 44, 37, 28, 49, 9, 25, 45, 16, 40, 13, 18, 17, 0, 36, 12, 35, 41, 24, 11, 10, 19, 14, 7, 21]\n","epoch: 0, train_batch_id: 3099, avg_reward: 17.244874954223633\n","Example train output: [28, 25, 38, 8, 14, 42, 16, 21, 23, 37, 36, 11, 3, 20, 19, 47, 34, 49, 6, 17, 24, 44, 46, 35, 40, 29, 5, 39, 1, 45, 22, 33, 48, 43, 9, 4, 0, 41, 30, 32, 15, 27, 12, 18, 26, 13, 7, 2, 31, 10]\n","epoch: 0, train_batch_id: 3199, avg_reward: 16.562347412109375\n","Example train output: [33, 16, 1, 19, 11, 3, 20, 34, 17, 28, 26, 2, 30, 23, 5, 39, 32, 24, 22, 41, 9, 48, 13, 37, 12, 6, 31, 21, 42, 25, 36, 35, 49, 8, 43, 38, 29, 40, 7, 15, 44, 27, 10, 0, 14, 47, 18, 4, 46, 45]\n","epoch: 0, train_batch_id: 3299, avg_reward: 16.905227661132812\n","Example train output: [28, 18, 36, 0, 1, 31, 32, 23, 17, 38, 3, 27, 44, 19, 29, 20, 7, 9, 2, 15, 5, 6, 33, 16, 25, 47, 30, 21, 46, 41, 37, 8, 35, 12, 39, 48, 34, 22, 13, 14, 42, 45, 49, 10, 4, 40, 24, 26, 43, 11]\n","epoch: 0, train_batch_id: 3399, avg_reward: 17.229862213134766\n","Example train output: [28, 27, 37, 36, 19, 26, 21, 41, 48, 33, 40, 24, 15, 46, 4, 10, 39, 25, 17, 0, 38, 49, 14, 22, 34, 7, 20, 44, 5, 23, 42, 1, 30, 3, 9, 16, 45, 12, 11, 31, 29, 2, 35, 6, 13, 18, 43, 32, 47, 8]\n","epoch: 0, train_batch_id: 3499, avg_reward: 16.743146896362305\n","Example train output: [3, 35, 24, 1, 41, 31, 17, 18, 6, 45, 16, 13, 9, 49, 4, 25, 0, 7, 8, 23, 29, 20, 34, 43, 46, 39, 28, 32, 5, 10, 37, 38, 36, 47, 30, 12, 11, 15, 48, 22, 26, 21, 40, 42, 2, 33, 27, 14, 44, 19]\n","epoch: 0, train_batch_id: 3599, avg_reward: 16.75096893310547\n","Example train output: [2, 21, 42, 3, 18, 0, 49, 32, 35, 46, 48, 37, 41, 10, 33, 44, 8, 20, 13, 9, 45, 28, 16, 15, 23, 6, 39, 27, 29, 40, 38, 14, 11, 25, 31, 22, 43, 36, 34, 4, 26, 17, 24, 30, 12, 1, 5, 7, 47, 19]\n","epoch: 0, train_batch_id: 3699, avg_reward: 17.437156677246094\n","Example train output: [24, 35, 18, 6, 28, 14, 40, 37, 49, 31, 3, 26, 47, 45, 4, 46, 1, 7, 10, 17, 44, 34, 
25, 13, 16, 48, 32, 9, 19, 29, 30, 21, 41, 0, 33, 39, 5, 15, 12, 11, 22, 43, 38, 2, 36, 23, 27, 8, 42, 20]\n","epoch: 0, train_batch_id: 3799, avg_reward: 16.777637481689453\n","Example train output: [25, 42, 9, 30, 4, 3, 32, 38, 39, 12, 23, 49, 16, 20, 43, 35, 44, 19, 0, 14, 15, 22, 37, 28, 34, 11, 10, 8, 33, 24, 5, 27, 17, 2, 46, 31, 36, 41, 6, 48, 18, 26, 13, 21, 29, 1, 7, 45, 40, 47]\n","epoch: 0, train_batch_id: 3899, avg_reward: 16.97039031982422\n","Example train output: [32, 48, 30, 39, 19, 29, 10, 14, 11, 43, 27, 23, 21, 2, 34, 18, 40, 22, 6, 3, 41, 36, 4, 42, 28, 47, 8, 31, 49, 26, 24, 0, 20, 38, 25, 35, 17, 15, 46, 5, 16, 1, 13, 7, 33, 45, 12, 9, 37, 44]\n","epoch: 0, train_batch_id: 3999, avg_reward: 16.9527587890625\n","Example train output: [49, 19, 14, 39, 24, 15, 9, 0, 11, 36, 17, 40, 45, 43, 1, 16, 27, 6, 7, 33, 37, 13, 4, 23, 2, 35, 20, 44, 12, 29, 8, 32, 5, 10, 38, 47, 26, 46, 41, 30, 48, 28, 34, 3, 18, 21, 42, 31, 25, 22]\n","epoch: 0, train_batch_id: 4099, avg_reward: 16.580066680908203\n","Example train output: [6, 21, 23, 24, 4, 30, 22, 20, 16, 31, 9, 0, 14, 10, 25, 37, 42, 17, 40, 26, 27, 33, 1, 19, 12, 46, 36, 29, 5, 39, 13, 34, 32, 49, 11, 8, 7, 3, 18, 44, 28, 15, 41, 38, 47, 48, 2, 35, 45, 43]\n","epoch: 0, train_batch_id: 4199, avg_reward: 16.807767868041992\n","Example train output: [27, 28, 47, 48, 35, 23, 3, 1, 33, 38, 36, 41, 17, 11, 31, 8, 9, 32, 49, 26, 37, 34, 43, 13, 29, 10, 39, 42, 5, 4, 6, 25, 30, 2, 40, 44, 19, 14, 0, 22, 15, 24, 46, 16, 45, 12, 20, 18, 21, 7]\n","epoch: 0, train_batch_id: 4299, avg_reward: 16.7429256439209\n","Example train output: [28, 16, 13, 7, 46, 32, 38, 12, 39, 42, 31, 35, 43, 34, 9, 40, 21, 23, 27, 4, 22, 17, 37, 2, 26, 14, 1, 44, 24, 36, 49, 41, 20, 18, 33, 30, 19, 48, 0, 29, 47, 15, 8, 45, 25, 11, 6, 10, 3, 5]\n","epoch: 0, train_batch_id: 4399, avg_reward: 16.89980697631836\n","Example train output: [47, 35, 37, 48, 27, 34, 30, 26, 44, 2, 8, 43, 49, 46, 6, 38, 25, 13, 19, 24, 5, 16, 17, 40, 15, 21, 
32, 10, 29, 12, 7, 18, 42, 3, 23, 0, 4, 22, 11, 31, 9, 14, 36, 39, 28, 33, 41, 45, 20, 1]\n","epoch: 0, train_batch_id: 4499, avg_reward: 16.86855697631836\n","Example train output: [22, 23, 48, 7, 43, 17, 10, 27, 19, 24, 29, 20, 3, 14, 15, 1, 40, 44, 21, 5, 38, 46, 49, 0, 11, 26, 33, 42, 34, 41, 37, 47, 35, 2, 4, 13, 31, 45, 6, 12, 8, 25, 30, 18, 9, 32, 36, 28, 39, 16]\n","epoch: 0, train_batch_id: 4599, avg_reward: 16.626972198486328\n","Example train output: [6, 8, 37, 47, 23, 36, 0, 31, 49, 25, 48, 24, 34, 5, 13, 45, 20, 12, 40, 4, 10, 30, 39, 18, 9, 44, 42, 33, 22, 35, 28, 21, 16, 46, 27, 7, 38, 3, 14, 19, 43, 1, 2, 41, 11, 32, 17, 29, 15, 26]\n","epoch: 0, train_batch_id: 4699, avg_reward: 16.84837532043457\n","Example train output: [2, 43, 35, 12, 14, 0, 20, 38, 11, 8, 31, 39, 4, 30, 26, 41, 1, 23, 27, 37, 13, 44, 40, 3, 36, 5, 34, 17, 15, 47, 24, 7, 22, 45, 33, 9, 10, 28, 29, 16, 19, 49, 48, 32, 46, 25, 42, 6, 21, 18]\n","epoch: 0, train_batch_id: 4799, avg_reward: 16.959041595458984\n","Example train output: [6, 11, 23, 9, 24, 22, 14, 26, 25, 45, 15, 28, 31, 1, 36, 0, 16, 4, 2, 41, 8, 7, 10, 21, 49, 38, 39, 47, 20, 34, 43, 44, 46, 29, 17, 32, 12, 18, 40, 3, 5, 33, 27, 19, 30, 48, 35, 42, 37, 13]\n","epoch: 0, train_batch_id: 4899, avg_reward: 17.138431549072266\n","Example train output: [36, 1, 45, 18, 41, 39, 7, 19, 38, 17, 0, 44, 13, 32, 29, 20, 35, 21, 24, 12, 48, 42, 10, 33, 27, 34, 25, 37, 5, 2, 49, 23, 43, 47, 30, 9, 46, 16, 40, 26, 4, 15, 8, 11, 14, 28, 22, 6, 31, 3]\n","epoch: 0, train_batch_id: 4999, avg_reward: 17.029644012451172\n","Example train output: [6, 49, 3, 0, 22, 16, 43, 5, 45, 15, 29, 12, 34, 11, 19, 39, 46, 28, 20, 26, 35, 41, 38, 31, 48, 24, 8, 9, 33, 25, 23, 30, 7, 10, 27, 2, 47, 37, 14, 44, 17, 4, 32, 13, 36, 40, 1, 18, 42, 21]\n","epoch: 0, train_batch_id: 5099, avg_reward: 17.085834503173828\n","Example train output: [37, 7, 33, 39, 0, 10, 45, 41, 44, 35, 28, 13, 14, 20, 17, 6, 8, 1, 29, 46, 26, 2, 34, 43, 30, 49, 18, 3, 32, 19, 
48, 22, 38, 47, 21, 24, 12, 23, 42, 36, 16, 9, 4, 40, 25, 5, 31, 15, 11, 27]\n","epoch: 0, train_batch_id: 5199, avg_reward: 16.761516571044922\n","Example train output: [42, 9, 10, 4, 28, 30, 11, 48, 29, 47, 27, 37, 3, 40, 17, 2, 21, 8, 13, 38, 18, 19, 15, 46, 16, 43, 23, 12, 1, 49, 31, 34, 24, 32, 22, 6, 33, 7, 25, 20, 5, 39, 41, 36, 44, 35, 14, 0, 26, 45]\n","epoch: 0, train_batch_id: 5299, avg_reward: 16.657907485961914\n","Example train output: [25, 34, 8, 39, 5, 47, 14, 20, 40, 43, 23, 41, 18, 22, 10, 31, 45, 29, 46, 12, 21, 0, 36, 35, 9, 4, 37, 7, 3, 42, 49, 17, 6, 27, 15, 48, 19, 38, 16, 30, 24, 13, 33, 28, 26, 32, 2, 11, 44, 1]\n","epoch: 0, train_batch_id: 5399, avg_reward: 16.97037124633789\n","Example train output: [47, 39, 26, 36, 0, 17, 34, 6, 44, 1, 29, 28, 22, 10, 33, 16, 3, 21, 11, 41, 14, 38, 20, 25, 23, 37, 13, 18, 24, 2, 8, 9, 31, 46, 35, 43, 7, 40, 30, 12, 42, 32, 45, 4, 48, 27, 5, 19, 15, 49]\n","epoch: 0, train_batch_id: 5499, avg_reward: 17.729955673217773\n","Example train output: [31, 47, 45, 44, 5, 10, 19, 35, 16, 22, 36, 25, 4, 27, 30, 2, 1, 37, 15, 41, 49, 28, 12, 42, 40, 33, 32, 38, 43, 48, 23, 0, 29, 21, 9, 7, 11, 20, 24, 3, 39, 14, 8, 17, 46, 13, 18, 34, 6, 26]\n","epoch: 0, train_batch_id: 5599, avg_reward: 16.60460090637207\n","Example train output: [25, 6, 40, 34, 28, 30, 3, 9, 24, 14, 36, 41, 4, 19, 16, 42, 0, 43, 1, 7, 29, 37, 44, 47, 17, 45, 21, 48, 22, 38, 20, 18, 13, 33, 27, 11, 10, 31, 8, 49, 39, 23, 32, 5, 46, 35, 15, 26, 12, 2]\n","epoch: 0, train_batch_id: 5699, avg_reward: 16.78877830505371\n","Example train output: [12, 19, 32, 13, 16, 22, 2, 6, 39, 27, 29, 10, 48, 47, 46, 38, 14, 7, 33, 31, 36, 25, 9, 0, 21, 44, 4, 45, 37, 5, 11, 30, 43, 40, 34, 18, 20, 8, 17, 49, 23, 41, 42, 28, 26, 3, 35, 1, 24, 15]\n","epoch: 0, train_batch_id: 5799, avg_reward: 16.640872955322266\n","Example train output: [0, 20, 1, 11, 6, 42, 2, 47, 38, 43, 16, 19, 23, 32, 22, 28, 30, 17, 26, 44, 33, 12, 39, 7, 8, 34, 40, 27, 10, 36, 21, 31, 18, 
35, 49, 3, 9, 4, 29, 41, 37, 48, 25, 46, 5, 13, 45, 14, 15, 24]\n","epoch: 0, train_batch_id: 5899, avg_reward: 16.577537536621094\n","Example train output: [39, 41, 31, 35, 2, 20, 6, 49, 33, 21, 22, 3, 19, 32, 28, 26, 30, 27, 34, 10, 5, 14, 18, 24, 9, 42, 23, 7, 11, 47, 16, 38, 17, 4, 36, 13, 37, 29, 40, 0, 43, 12, 15, 44, 8, 45, 48, 46, 1, 25]\n","epoch: 0, train_batch_id: 5999, avg_reward: 16.936908721923828\n","Example train output: [42, 47, 29, 39, 46, 13, 22, 2, 18, 5, 36, 31, 30, 21, 15, 17, 45, 9, 40, 35, 41, 7, 8, 25, 33, 11, 34, 23, 44, 37, 6, 27, 32, 10, 16, 12, 38, 0, 24, 48, 28, 14, 49, 26, 1, 3, 20, 43, 4, 19]\n","epoch: 0, train_batch_id: 6099, avg_reward: 16.68242645263672\n","Example train output: [4, 33, 16, 42, 12, 38, 21, 6, 2, 7, 5, 19, 1, 46, 23, 43, 22, 32, 27, 39, 29, 40, 44, 18, 35, 14, 10, 30, 0, 37, 25, 48, 8, 24, 26, 28, 45, 20, 36, 47, 49, 13, 17, 3, 9, 41, 34, 15, 31, 11]\n","epoch: 0, train_batch_id: 6199, avg_reward: 17.255672454833984\n","Example train output: [16, 30, 28, 26, 27, 7, 21, 19, 32, 45, 38, 42, 1, 13, 46, 37, 5, 43, 36, 0, 6, 11, 35, 41, 2, 14, 22, 23, 44, 8, 3, 17, 10, 31, 25, 9, 20, 48, 29, 39, 12, 24, 15, 33, 18, 4, 34, 47, 40, 49]\n","epoch: 0, train_batch_id: 6299, avg_reward: 16.934633255004883\n","Example train output: [44, 49, 29, 45, 0, 1, 38, 41, 13, 28, 39, 35, 34, 26, 21, 16, 23, 6, 10, 37, 24, 2, 4, 5, 42, 43, 9, 36, 15, 31, 18, 25, 33, 40, 12, 32, 20, 14, 19, 3, 46, 11, 48, 27, 22, 8, 30, 7, 47, 17]\n","epoch: 0, train_batch_id: 6399, avg_reward: 16.437273025512695\n","Example train output: [22, 49, 36, 43, 28, 38, 47, 24, 34, 44, 11, 15, 26, 13, 46, 39, 8, 18, 40, 29, 17, 35, 25, 4, 48, 19, 37, 30, 3, 45, 32, 20, 2, 31, 27, 16, 1, 21, 6, 12, 0, 5, 10, 7, 14, 23, 41, 9, 33, 42]\n","epoch: 0, train_batch_id: 6499, avg_reward: 17.040401458740234\n","Example train output: [28, 33, 17, 38, 12, 16, 7, 5, 13, 45, 39, 23, 3, 9, 34, 4, 36, 1, 48, 18, 47, 46, 19, 14, 10, 24, 35, 32, 22, 11, 27, 30, 31, 0, 15, 41, 
8, 44, 2, 20, 37, 40, 6, 21, 26, 43, 42, 25, 29, 49]\n","epoch: 0, train_batch_id: 6599, avg_reward: 12.356466293334961\n","Example train output: [18, 20, 42, 15, 28, 43, 3, 31, 11, 38, 35, 24, 14, 40, 5, 49, 30, 9, 10, 33, 8, 4, 16, 37, 26, 45, 19, 12, 22, 34, 41, 17, 29, 0, 48, 44, 23, 27, 46, 21, 2, 6, 1, 36, 25, 32, 7, 39, 13, 47]\n","epoch: 0, train_batch_id: 6699, avg_reward: 10.586816787719727\n","Example train output: [22, 28, 35, 21, 30, 12, 0, 19, 47, 16, 46, 41, 37, 14, 3, 44, 31, 43, 45, 48, 20, 36, 27, 42, 4, 7, 24, 26, 23, 34, 33, 38, 40, 10, 11, 32, 13, 49, 25, 29, 5, 39, 8, 6, 15, 1, 9, 18, 2, 17]\n","epoch: 0, train_batch_id: 6799, avg_reward: 9.73804759979248\n","Example train output: [42, 14, 17, 0, 7, 9, 21, 47, 34, 26, 38, 30, 4, 16, 12, 41, 36, 23, 49, 18, 37, 20, 5, 40, 43, 8, 10, 3, 1, 13, 28, 44, 6, 31, 2, 33, 29, 11, 45, 35, 15, 24, 39, 48, 19, 25, 32, 22, 46, 27]\n","epoch: 0, train_batch_id: 6899, avg_reward: 9.136409759521484\n","Example train output: [26, 11, 40, 18, 46, 3, 41, 16, 34, 5, 17, 31, 24, 30, 6, 21, 19, 27, 7, 37, 39, 2, 22, 10, 8, 35, 44, 47, 15, 38, 14, 1, 33, 49, 45, 28, 9, 0, 25, 12, 20, 48, 23, 29, 4, 13, 32, 36, 43, 42]\n","epoch: 0, train_batch_id: 6999, avg_reward: 8.926204681396484\n","Example train output: [42, 49, 7, 20, 25, 32, 2, 47, 11, 10, 43, 15, 29, 16, 39, 9, 3, 5, 34, 18, 23, 4, 12, 24, 33, 19, 1, 17, 8, 38, 46, 13, 36, 26, 31, 35, 27, 45, 0, 14, 22, 30, 44, 21, 6, 40, 28, 37, 48, 41]\n","epoch: 0, train_batch_id: 7099, avg_reward: 8.45364761352539\n","Example train output: [26, 42, 19, 30, 29, 43, 40, 27, 46, 12, 24, 1, 28, 47, 20, 16, 17, 13, 2, 0, 45, 10, 33, 4, 21, 22, 3, 38, 11, 48, 41, 36, 25, 15, 44, 9, 23, 5, 6, 14, 35, 37, 18, 32, 8, 31, 49, 7, 39, 34]\n","epoch: 0, train_batch_id: 7199, avg_reward: 8.215137481689453\n","Example train output: [14, 24, 49, 40, 45, 38, 31, 20, 13, 17, 29, 21, 3, 36, 30, 9, 22, 1, 32, 15, 43, 2, 37, 18, 23, 27, 41, 5, 7, 8, 39, 4, 33, 16, 26, 34, 12, 28, 35, 44, 10, 
11, 6, 25, 47, 46, 0, 42, 19, 48]\n","epoch: 0, train_batch_id: 7299, avg_reward: 7.934445381164551\n","Example train output: [48, 45, 11, 30, 1, 12, 46, 4, 33, 23, 9, 24, 7, 49, 37, 36, 3, 5, 0, 32, 42, 39, 25, 43, 29, 6, 20, 21, 26, 15, 8, 13, 34, 44, 31, 38, 16, 10, 19, 17, 41, 18, 35, 27, 40, 2, 47, 22, 14, 28]\n","epoch: 0, train_batch_id: 7399, avg_reward: 7.791155815124512\n","Example train output: [45, 21, 39, 6, 47, 13, 1, 41, 8, 15, 48, 10, 23, 22, 36, 9, 49, 11, 31, 3, 7, 28, 17, 32, 27, 2, 46, 44, 37, 12, 29, 35, 42, 19, 34, 30, 16, 24, 0, 25, 40, 26, 20, 14, 43, 5, 18, 4, 38, 33]\n","epoch: 0, train_batch_id: 7499, avg_reward: 7.767943859100342\n","Example train output: [31, 3, 23, 32, 14, 20, 26, 11, 18, 46, 44, 22, 8, 29, 2, 39, 17, 0, 19, 37, 28, 45, 27, 40, 34, 21, 10, 43, 4, 33, 12, 16, 42, 24, 25, 49, 35, 41, 13, 38, 47, 36, 5, 6, 48, 7, 15, 30, 9, 1]\n","epoch: 0, train_batch_id: 7599, avg_reward: 7.5126824378967285\n","Example train output: [47, 26, 15, 18, 17, 35, 34, 5, 12, 11, 16, 14, 13, 41, 6, 29, 8, 45, 38, 39, 43, 21, 42, 32, 31, 40, 25, 37, 4, 20, 0, 36, 48, 22, 2, 46, 3, 44, 28, 24, 27, 30, 19, 49, 1, 9, 23, 10, 7, 33]\n","epoch: 0, train_batch_id: 7699, avg_reward: 7.330972671508789\n","Example train output: [3, 40, 6, 42, 26, 1, 45, 25, 18, 32, 13, 35, 33, 29, 20, 44, 49, 37, 24, 15, 48, 23, 30, 12, 4, 17, 8, 27, 47, 41, 22, 28, 38, 10, 14, 31, 43, 5, 34, 9, 21, 2, 39, 11, 46, 36, 7, 16, 19, 0]\n","epoch: 0, train_batch_id: 7799, avg_reward: 7.54127311706543\n","Example train output: [34, 29, 12, 20, 32, 43, 27, 26, 9, 45, 41, 36, 3, 14, 31, 4, 37, 35, 40, 10, 33, 44, 1, 7, 46, 22, 48, 39, 13, 18, 24, 47, 0, 2, 28, 30, 49, 11, 16, 42, 38, 23, 21, 5, 25, 6, 19, 8, 17, 15]\n","epoch: 0, train_batch_id: 7899, avg_reward: 7.393662452697754\n","Example train output: [31, 3, 16, 38, 11, 18, 9, 39, 32, 43, 26, 36, 25, 44, 35, 41, 46, 15, 33, 47, 27, 1, 48, 12, 8, 22, 7, 30, 40, 37, 45, 0, 28, 23, 29, 2, 21, 13, 24, 34, 5, 14, 4, 42, 6, 49, 
17, 20, 19, 10]\n","epoch: 0, train_batch_id: 7999, avg_reward: 7.208779335021973\n","Example train output: [22, 42, 31, 48, 44, 26, 13, 2, 16, 4, 27, 15, 36, 18, 12, 29, 49, 33, 19, 0, 10, 14, 43, 24, 41, 8, 20, 9, 34, 28, 3, 45, 47, 7, 35, 17, 23, 25, 37, 11, 1, 5, 30, 21, 6, 38, 32, 46, 40, 39]\n","epoch: 0, train_batch_id: 8099, avg_reward: 7.679110050201416\n","Example train output: [39, 13, 2, 43, 28, 32, 18, 23, 27, 21, 49, 37, 15, 16, 5, 29, 6, 0, 45, 35, 11, 44, 20, 8, 22, 47, 34, 25, 9, 26, 30, 31, 7, 41, 10, 14, 33, 19, 3, 36, 38, 1, 24, 12, 40, 4, 17, 42, 46, 48]\n","epoch: 0, train_batch_id: 8199, avg_reward: 7.246831893920898\n","Example train output: [46, 19, 28, 14, 6, 41, 2, 20, 33, 25, 42, 34, 18, 29, 26, 23, 47, 35, 37, 17, 9, 13, 31, 43, 16, 12, 5, 24, 22, 4, 40, 10, 49, 39, 3, 8, 48, 38, 27, 7, 32, 15, 21, 30, 0, 36, 11, 45, 44, 1]\n","epoch: 0, train_batch_id: 8299, avg_reward: 7.119370460510254\n","Example train output: [36, 34, 2, 28, 15, 13, 17, 7, 42, 21, 33, 14, 26, 5, 9, 24, 46, 12, 32, 29, 18, 0, 30, 20, 43, 37, 35, 8, 41, 19, 22, 38, 6, 11, 48, 4, 31, 40, 39, 47, 49, 1, 3, 25, 44, 27, 16, 10, 23, 45]\n","epoch: 0, train_batch_id: 8399, avg_reward: 7.252811431884766\n","Example train output: [39, 17, 5, 37, 42, 20, 18, 44, 11, 47, 34, 8, 35, 32, 10, 3, 45, 31, 7, 24, 2, 19, 9, 38, 43, 1, 49, 22, 16, 12, 21, 0, 46, 25, 13, 29, 28, 33, 30, 41, 15, 27, 23, 26, 4, 6, 40, 48, 36, 14]\n","epoch: 0, train_batch_id: 8499, avg_reward: 7.244790077209473\n","Example train output: [9, 23, 18, 42, 34, 3, 17, 14, 15, 1, 27, 41, 35, 45, 5, 44, 32, 31, 8, 48, 29, 49, 0, 47, 26, 19, 40, 16, 13, 38, 7, 37, 12, 24, 30, 11, 46, 43, 25, 22, 28, 20, 33, 21, 6, 39, 36, 10, 2, 4]\n","epoch: 0, train_batch_id: 8599, avg_reward: 7.3148908615112305\n","Example train output: [42, 9, 48, 33, 49, 37, 36, 26, 2, 43, 7, 20, 32, 29, 16, 3, 22, 45, 6, 28, 24, 47, 46, 39, 40, 38, 0, 35, 27, 13, 34, 17, 44, 15, 21, 4, 8, 11, 41, 10, 18, 30, 5, 23, 1, 14, 12, 31, 19, 
25]\n","epoch: 0, train_batch_id: 8699, avg_reward: 7.144269943237305\n","Example train output: [17, 8, 49, 13, 31, 25, 11, 1, 36, 42, 0, 30, 27, 46, 16, 3, 26, 44, 39, 40, 21, 23, 28, 41, 33, 35, 43, 10, 38, 7, 29, 6, 37, 19, 22, 14, 20, 4, 45, 15, 12, 47, 2, 18, 24, 9, 32, 5, 34, 48]\n","epoch: 0, train_batch_id: 8799, avg_reward: 7.22873067855835\n","Example train output: [29, 26, 48, 42, 1, 27, 36, 40, 14, 24, 34, 4, 15, 32, 20, 10, 6, 3, 37, 0, 16, 43, 22, 33, 28, 13, 7, 5, 45, 21, 12, 25, 47, 41, 31, 30, 2, 9, 23, 19, 35, 18, 44, 49, 39, 17, 46, 38, 8, 11]\n","epoch: 0, train_batch_id: 8899, avg_reward: 7.157253265380859\n","Example train output: [30, 17, 38, 5, 27, 16, 21, 0, 22, 25, 20, 7, 8, 33, 13, 12, 40, 45, 2, 19, 18, 6, 26, 42, 46, 31, 24, 1, 43, 3, 10, 47, 23, 15, 29, 48, 4, 44, 14, 35, 39, 41, 11, 49, 32, 36, 9, 34, 28, 37]\n","epoch: 0, train_batch_id: 8999, avg_reward: 7.119638442993164\n","Example train output: [45, 43, 8, 3, 0, 46, 42, 2, 9, 21, 37, 7, 25, 19, 17, 36, 32, 48, 38, 35, 27, 14, 33, 28, 47, 1, 22, 10, 41, 30, 6, 4, 31, 5, 26, 11, 15, 44, 23, 24, 39, 29, 49, 16, 12, 13, 20, 40, 18, 34]\n","epoch: 0, train_batch_id: 9099, avg_reward: 7.086285591125488\n","Example train output: [4, 39, 34, 21, 12, 8, 26, 1, 40, 29, 18, 48, 31, 33, 22, 35, 42, 37, 45, 0, 13, 9, 20, 25, 6, 5, 38, 7, 3, 15, 27, 16, 32, 23, 41, 14, 10, 24, 2, 19, 46, 30, 28, 36, 43, 44, 11, 47, 17, 49]\n","epoch: 0, train_batch_id: 9199, avg_reward: 7.019648551940918\n","Example train output: [9, 30, 24, 16, 17, 4, 40, 38, 37, 19, 44, 28, 43, 36, 7, 47, 8, 35, 42, 26, 34, 12, 2, 25, 13, 11, 32, 39, 31, 23, 18, 20, 22, 15, 41, 10, 27, 29, 5, 49, 48, 0, 3, 45, 1, 6, 21, 46, 33, 14]\n","epoch: 0, train_batch_id: 9299, avg_reward: 7.084596157073975\n","Example train output: [21, 23, 49, 0, 37, 29, 25, 8, 34, 12, 2, 18, 45, 38, 19, 14, 4, 44, 46, 1, 36, 42, 5, 15, 40, 20, 22, 43, 35, 26, 48, 6, 27, 28, 33, 3, 24, 32, 47, 7, 13, 16, 9, 41, 30, 17, 39, 31, 10, 11]\n","epoch: 0, 
train_batch_id: 9399, avg_reward: 7.050015449523926\n","Example train output: [41, 25, 14, 32, 20, 16, 45, 49, 15, 43, 38, 39, 12, 34, 44, 10, 1, 8, 36, 0, 47, 19, 21, 42, 22, 3, 40, 33, 13, 6, 26, 18, 24, 28, 29, 46, 7, 35, 11, 5, 37, 23, 4, 2, 9, 48, 31, 30, 17, 27]\n","epoch: 0, train_batch_id: 9499, avg_reward: 7.024662494659424\n","Example train output: [32, 19, 2, 15, 1, 37, 31, 36, 3, 18, 26, 43, 33, 17, 39, 4, 47, 24, 28, 42, 12, 22, 10, 41, 30, 38, 45, 0, 7, 48, 20, 40, 13, 11, 29, 25, 49, 9, 23, 8, 34, 6, 35, 16, 44, 21, 46, 27, 14, 5]\n","epoch: 0, train_batch_id: 9599, avg_reward: 7.188841342926025\n","Example train output: [7, 3, 4, 30, 27, 25, 20, 12, 16, 42, 31, 23, 17, 39, 8, 22, 45, 1, 2, 15, 14, 47, 44, 29, 11, 18, 41, 6, 24, 48, 49, 40, 36, 9, 32, 26, 10, 33, 0, 28, 37, 35, 34, 46, 5, 21, 38, 43, 19, 13]\n","epoch: 0, train_batch_id: 9699, avg_reward: 7.023584842681885\n","Example train output: [4, 44, 1, 49, 16, 41, 39, 7, 25, 14, 38, 5, 34, 20, 15, 26, 8, 23, 9, 32, 36, 10, 42, 11, 22, 48, 3, 6, 19, 24, 29, 45, 33, 17, 12, 43, 46, 18, 31, 30, 27, 13, 40, 37, 47, 0, 35, 2, 28, 21]\n","epoch: 0, train_batch_id: 9799, avg_reward: 7.027076721191406\n","Example train output: [15, 14, 16, 43, 7, 8, 36, 37, 30, 20, 40, 6, 10, 49, 4, 28, 22, 17, 2, 24, 12, 45, 25, 9, 47, 32, 44, 42, 41, 48, 34, 0, 1, 19, 29, 33, 18, 13, 38, 3, 11, 21, 23, 27, 35, 31, 5, 39, 46, 26]\n","epoch: 0, train_batch_id: 9899, avg_reward: 7.074728965759277\n","Example train output: [30, 33, 23, 0, 42, 2, 24, 3, 12, 15, 4, 48, 38, 18, 17, 32, 22, 35, 34, 27, 25, 1, 44, 36, 5, 43, 46, 40, 9, 16, 7, 20, 21, 13, 37, 26, 10, 41, 49, 8, 14, 45, 29, 28, 47, 6, 19, 11, 39, 31]\n","epoch: 0, train_batch_id: 9999, avg_reward: 7.008036136627197\n","Example train output: [19, 45, 20, 24, 4, 11, 8, 31, 40, 27, 5, 39, 25, 49, 46, 32, 48, 3, 21, 26, 15, 14, 36, 43, 38, 22, 42, 35, 16, 9, 44, 18, 0, 30, 33, 2, 17, 1, 6, 41, 34, 12, 28, 13, 23, 29, 7, 37, 47, 
10]\n"],"name":"stdout"},{"output_type":"display_data","data":{"application/vnd.jupyter.widget-view+json":{"model_id":"34692e8820a54fc38d314645f4a6537c","version_minor":0,"version_major":2},"text/plain":["HBox(children=(IntProgress(value=0, max=16), HTML(value='')))"]},"metadata":{"tags":[]}},{"output_type":"stream","text":[" 1%|▏ | 18151/1280000 [00:00<00:06, 181505.85it/s]"],"name":"stderr"},{"output_type":"stream","text":["\n","Validation overall avg_reward: 6.663022041320801\n","Validation overall reward var: 0.002205916913226247\n"],"name":"stdout"},{"output_type":"stream","text":["100%|██████████| 1280000/1280000 [00:07<00:00, 165155.42it/s]\n"],"name":"stderr"}]},{"metadata":{"id":"FSl0QQA3j2kj","colab_type":"code","colab":{}},"cell_type":"code","source":["Reward = np.array(Reward)\n","print(Reward.shape)\n","k = np.arange(0, 50*Reward.shape[0], 50)\n","plt.plot(k, Reward)\n","plt.show()"],"execution_count":0,"outputs":[]},{"metadata":{"id":"UgPrb6ALj2h9","colab_type":"code","colab":{}},"cell_type":"code","source":[""],"execution_count":0,"outputs":[]},{"metadata":{"id":"MPhcafUDj2fN","colab_type":"code","colab":{}},"cell_type":"code","source":[""],"execution_count":0,"outputs":[]},{"metadata":{"id":"SeS2Dg2fxx1k","colab_type":"code","colab":{}},"cell_type":"code","source":["torch.save(model,'drive/My Drive/HRL-CO/model/TSP50.pt')"],"execution_count":0,"outputs":[]},{"metadata":{"id":"dIi-nkLC2VmS","colab_type":"code","colab":{}},"cell_type":"code","source":["model = torch.load('drive/My Drive/HRL-CO/model/TSP50.pt')"],"execution_count":0,"outputs":[]},{"metadata":{"id":"iZy0LvRr2lsH","colab_type":"code","colab":{}},"cell_type":"code","source":["# validation\n","example_tour = []\n","avg_reward = []\n","val_step = 0\n","model.eval()\n","model.actor_net.decoder.decode_type = \"greedy\"\n"," \n","for batch_id, val_batch in enumerate(tqdm_notebook(val_dataloader)):\n"," bat = Variable(val_batch)\n","\n"," if use_cuda:\n"," bat = bat.cuda()\n","\n"," R, probs, 
actions, action_idxs = model(bat)\n"," avg_reward.append(R.mean().cpu().numpy())\n"," val_step += 1.\n","\n"," if val_step % 5 == 0:\n"," example_output = []\n"," example_input = []\n"," for idx, action in enumerate(actions):\n","\n"," example_output.append(action_idxs[idx][0].item())\n"," # example_input.append(bat[0, :, idx].item())\n"," # example_tour.append(action.item())\n"," print('Step: {}'.format(batch_id))\n"," #print('Example test input: {}'.format(example_input))\n"," print('Example test output: {}'.format(example_output))\n"," print('Example test reward: {}'.format(R.mean()))\n","\n","print('Validation overall avg_reward: {}'.format(np.mean(avg_reward)))\n","print('Validation overall reward var: {}'.format(np.var(avg_reward)))"],"execution_count":0,"outputs":[]}]} -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Hierarchical Reinforcement Learning for Combinatorial Optimization 2 | 3 | Solve combinatorial optimization problem with hierarchical reinforcement learning (RL) approach. 4 | 5 | ## Pointer-Net-Reproduce 6 | Reproduce the result of pointer network. 7 | 8 | See Pointer-Net-Reproduce.ipynb 9 | 10 | ## HRL-for-constrained-TSP 11 | Solve TSP with constraints by hierarchical RL framework. The implemtation is based on pointer networks. 12 | 13 | See GPN.ipynb, GPN_TSPTW.ipynb 14 | 15 | ## TBD 16 | Full code will be released soon. 17 | 18 | ## Example 19 | ![TSPTW sample tour](figure/tsptw.png) 20 | -------------------------------------------------------------------------------- /figure/tsptw.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/qiang-ma/HRL-for-combinatorial-optimization/ebff0b484fbb3ccd2a0bca9a328d7a3c8d3d3826/figure/tsptw.png --------------------------------------------------------------------------------