├── README.md ├── LICENSE ├── .gitignore └── encoder-decoder-attention.ipynb /README.md: -------------------------------------------------------------------------------- 1 | # seq2seq-attention-model 2 | A Keras implementation of an attention model for sequence-to-sequence learning. 3 | 4 | # Known bugs 5 | - This implementation is wrong! The attention model uses future information to predict the next word, but since we only tested it on known answers, to see how attention can serve as an implicit alignment, the result turns out fine. The bug is fixable. I have other versions of this code, but I have no plans to update this repository in the near future. 6 | 7 | - The dataset I used is not available right now because it wasn't my contribution to this experiment. Maybe I can add it here in the future, but first I have to ask whether I am allowed to. The idea was to have synthetic data in which two languages represent two different syntaxes for sentence generation. You can generate such a dataset on your own; if you want to test on natural language you need a parallel corpus, though the vocabulary size might then be an issue. 8 | 9 | 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Mehdi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | -------------------------------------------------------------------------------- /encoder-decoder-attention.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# GPU selection\n", 12 | "import os\n", 13 | "os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152\n", 14 | "os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"1\"" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "metadata": { 21 | "collapsed": true 22 | }, 23 | "outputs": [], 24 | "source": [ 25 | "import numpy as np\n", 26 | "np.random.seed(int(np.pi*10**5)) # Don't be too random on the test suite!\n"
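,
"\n",
"# Optional extra (a minimal sketch, an assumption beyond the original cell):\n",
"# numpy's seed does not cover Python's stdlib RNG, so seed the random module\n",
"# as well in case any stdlib-based shuffling is used later.\n",
"import random\n",
"random.seed(int(np.pi*10**5))"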
27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 3, 32 | "metadata": {}, 33 | "outputs": [ 34 | { 35 | "name": "stderr", 36 | "output_type": "stream", 37 | "text": [ 38 | "Using TensorFlow backend.\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "from keras.models import Sequential, Model" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 4, 49 | "metadata": { 50 | "collapsed": true 51 | }, 52 | "outputs": [], 53 | "source": [ 54 | "from keras.layers import Dense, GRU, LSTM\n", 55 | "from keras.layers import Input, TimeDistributed, Embedding, RepeatVector, Lambda, Bidirectional\n", 56 | "from keras.layers import Flatten, Reshape, Permute, Activation\n", 57 | "from keras.layers import Dot, Concatenate, Multiply\n", 58 | "from keras.layers import merge\n", 59 | "from keras.callbacks import EarlyStopping\n", 60 | "from keras import backend as K" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "## Preprocessing\n", 68 | "\n", 69 | "Process the text, find the vocabulary size of the source input language, and create one-hot vectors for all words in the target language" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 5, 75 | "metadata": { 76 | "collapsed": true 77 | }, 78 | "outputs": [], 79 | "source": [ 80 | "src_word_chunker = lambda sent: list(sent)\n", 81 | "trg_word_chunker = lambda sent: list(sent)\n", 82 | "\n", 83 | "# depending on the preprocessing we need, we might use different word chunking functions\n", 84 | "#src_word_chunker = lambda sent: sent.split()\n", 85 | "#trg_word_chunker = lambda sent: sent.split()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "markdown", 90 | "metadata": {}, 91 | "source": [ 92 | "#### Read from synthetic tree translation" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": 6, 98 | "metadata": { 99 | "collapsed": true 100 | }, 101 | "outputs": [], 102 | "source": [ 103 | "source, target = [], []\n", 104 | "src_vocab, trg_vocab = set({}), set({})\n", 105 | "src_max_len = 1\n", 106 | "trg_max_len = 1\n", 107 | "with open('synthtrees.txt', 'r') as synthtree:\n", 108 | " for line in synthtree:\n", 109 | " src_sent_raw, trg_sent_raw = line.strip().split('\\t')\n", 110 | "\n", 111 | " src_sent = src_word_chunker(src_sent_raw)\n", 112 | " trg_sent = trg_word_chunker(trg_sent_raw)\n", 113 | "\n", 114 | " # add to the list (chunked)\n", 115 | " source.append(src_sent)\n", 116 | " target.append(trg_sent)\n", 117 | " \n", 118 | " # update vocabulary with new words\n", 119 | " src_vocab.update(src_sent)\n", 120 | " trg_vocab.update(trg_sent)\n", 121 | " \n", 122 | " src_max_len = src_max_len if src_max_len > len(src_sent) else len(src_sent)\n", 123 | " trg_max_len = trg_max_len if trg_max_len > len(trg_sent) else len(trg_sent)\n", 124 | " \n", 125 | "# the first word is the padding symbol:\n", 126 | "src_vocab = [''] + list(src_vocab)\n", 127 | "trg_vocab = [''] + list(trg_vocab)\n", 128 | "\n", 129 | "src_vocab_size = len(src_vocab)\n", 130 | "trg_vocab_size = len(trg_vocab)\n", 131 | "\n", 132 | "# convert words to word_id (a linear scan per word; fine for this tiny vocabulary)\n", 133 | "source_sents = [ \n", 134 | " [src_vocab.index(w) for w in sent]\n", 135 | " for sent in source\n", 136 | "]\n", 137 | "\n", 138 | "target_sents = [ \n", 139 | " [trg_vocab.index(w) for w in sent]\n", 140 | " for sent in target\n", 141 | "]\n" 142 | ] 143 | }, 144 | { 145 | "cell_type": "markdown", 146 | "metadata": {}, 147 | "source": [ 148 | "\n", 149 | "\n", 150 | "\n", 151 | "#### General preprocessing\n"
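,
"\n",
"The next cell pads both sides to fixed lengths and builds the shifted training targets. A toy example (hypothetical numbers, for illustration only): with `T_y = 4`, the target sentence `[5, 7]` is left-padded to `[0, 0, 5, 7]`, so the extra leading pad acts as a start symbol for the decoder input; `trg_end_padding` then shifts it one step to the left, giving `[0, 5, 7, 0]`, so the decoder at position *j* is trained to predict word *j + 1*."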
152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": 7, 157 | "metadata": { 158 | "collapsed": true 159 | }, 160 | "outputs": [], 161 | "source": [ 162 | "# fixed length of source and target inputs after padding\n", 163 | "T_x = src_max_len\n", 164 | "T_y = trg_max_len + 1 # +1 for the initial padding (start) symbol\n", 165 | "\n", 166 | "from keras.preprocessing import sequence\n", 167 | "\n", 168 | "# padded sentences\n", 169 | "# https://keras.io/preprocessing/sequence/\n", 170 | "source_pp = sequence.pad_sequences(source_sents, maxlen=T_x)\n", 171 | "target_pp = sequence.pad_sequences(sequence.pad_sequences(target_sents, maxlen=T_y-1), maxlen=T_y)\n", 172 | "trg_end_padding = np.pad(target_pp[:,1:], [(0,0), (0,1)], 'constant', constant_values=0)\n", 173 | "\n" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 8, 179 | "metadata": {}, 180 | "outputs": [ 181 | { 182 | "name": "stdout", 183 | "output_type": "stream", 184 | "text": [ 185 | "source vocab size: 13\n", 186 | "target vocab size: 13\n", 187 | "source shape: (10000, 21)\n", 188 | "target shape: (10000, 22)\n" 189 | ] 190 | } 191 | ], 192 | "source": [ 193 | "print(\"source vocab size:\", src_vocab_size)\n", 194 | "print(\"target vocab size:\", trg_vocab_size)\n", 195 | "\n", 196 | "print('source shape:', source_pp.shape)\n", 197 | "print('target shape:', target_pp.shape)" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": {}, 203 | "source": [ 204 | "## Models" 205 | ] 206 | }, 207 | { 208 | "cell_type": "markdown", 209 | "metadata": {}, 210 | "source": [ 211 | "#### Useful functions" 212 | ] 213 | }, 214 | { 215 | "cell_type": "code", 216 | "execution_count": 9, 217 | "metadata": { 218 | "collapsed": true 219 | }, 220 | "outputs": [], 221 | "source": [ 222 | "def one_hot_initializer(shape, dtype=None):\n", 223 | " \"\"\"Keras-friendly initializer for one-hot encodings as embeddings\"\"\"\n", 224 | " output = K.eye(shape[0], dtype=dtype)\n", 225 | " output = K.concatenate([K.zeros_like(output[:1,:]), output[1:,:]], 0)\n", 226 | " output = K.concatenate([K.zeros_like(output[:,:1]), output[:,1:]], 1)\n", 227 | " return output\n", 228 | "\n", 229 | "def sequential_layer_composition(input_tensor, layers):\n", 230 | " \"\"\"Compose a list of layers, like Sequential, but in a way that works with the Keras functional Model API\"\"\"\n", 231 | " if len(layers) > 1:\n", 232 | " return sequential_layer_composition(layers[0](input_tensor), layers[1:])\n", 233 | " else:\n", 234 | " return layers[0](input_tensor)\n" 235 | ] 236 | }, 237 | { 238 | "cell_type": "markdown", 239 | "metadata": {}, 240 | "source": [ 241 | "#### Encoder" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": 10, 247 | "metadata": { 248 | "collapsed": true 249 | }, 250 | "outputs": [], 251 | "source": [ 252 | "encoder_unit_size = 32\n", 253 | "encoder_embedding_size = 8\n", 254 | "#encoder_embedding_size = src_vocab_size" 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "execution_count": 11, 260 | "metadata": { 261 | "collapsed": true 262 | }, 263 | "outputs": [], 264 | "source": [ 265 | "# the source and target inputs have different fixed lengths after padding: T_x and T_y\n", 266 | "src_input = Input(shape=(T_x,))\n", 267 | "trg_input = Input(shape=(T_y,)) # +1 for initial padding" 268 | ] 269 | }, 270 | { 271 | "cell_type": "code", 272 | "execution_count": 12, 273 | "metadata": { 274 | "collapsed": true 275 | }, 276 | "outputs": [], 277 | "source": [ 278 | "source_embeddings = Embedding(\n", 
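" # word-id -> dense vector lookup: src_vocab_size rows, encoder_embedding_size columns\n",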
279 | " src_vocab_size,\n", 280 | " encoder_embedding_size,\n", 281 | " input_shape=(T_x,),\n", 282 | " #embeddings_initializer=one_hot_initializer,\n", 283 | " #trainable=False,\n", 284 | ")\n", 285 | "\n", 286 | "encoder_model = Sequential([\n", 287 | " source_embeddings, \n", 288 | " Bidirectional(LSTM(int(encoder_unit_size/2), return_sequences=True)),\n", 289 | " Bidirectional(LSTM(int(encoder_unit_size/2), return_sequences=True)),\n", 290 | "])" 291 | ] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": 13, 296 | "metadata": {}, 297 | "outputs": [ 298 | { 299 | "name": "stdout", 300 | "output_type": "stream", 301 | "text": [ 302 | "_________________________________________________________________\n", 303 | "Layer (type) Output Shape Param # \n", 304 | "=================================================================\n", 305 | "embedding_1 (Embedding) (None, 21, 8) 104 \n", 306 | "_________________________________________________________________\n", 307 | "bidirectional_1 (Bidirection (None, 21, 32) 3200 \n", 308 | "_________________________________________________________________\n", 309 | "bidirectional_2 (Bidirection (None, 21, 32) 6272 \n", 310 | "=================================================================\n", 311 | "Total params: 9,576\n", 312 | "Trainable params: 9,576\n", 313 | "Non-trainable params: 0\n", 314 | "_________________________________________________________________\n" 315 | ] 316 | } 317 | ], 318 | "source": [ 319 | "encoder_model.summary()" 320 | ] 321 | }, 322 | { 323 | "cell_type": "code", 324 | "execution_count": 14, 325 | "metadata": { 326 | "collapsed": true 327 | }, 328 | "outputs": [], 329 | "source": [ 330 | "encoder_output = encoder_model(src_input)\n" 331 | ] 332 | }, 333 | { 334 | "cell_type": "markdown", 335 | "metadata": {}, 336 | "source": [ 337 | "### Decoder + Attention" 338 | ] 339 | }, 340 | { 341 | "cell_type": "code", 342 | "execution_count": 15, 343 | "metadata": { 344 | "collapsed": true 345 | }, 346 | "outputs": [], 347 | "source": [ 348 | "decoder_unit_size = 32\n", 349 | "decoder_embedding_size = 8\n", 350 | "#decoder_embedding_size = trg_vocab_size" 351 | ] 352 | }, 353 | { 354 | "cell_type": "markdown", 355 | "metadata": {}, 356 | "source": [ 357 | "#### Attention Model " 358 | ] 359 | }, 360 | { 361 | "cell_type": "code", 362 | "execution_count": 16, 363 | "metadata": { 364 | "collapsed": true 365 | }, 366 | "outputs": [], 367 | "source": [ 368 | "# Understanding the following code requires understanding the design, some math behind it, and a few Keras tricks.\n", 369 | "m = T_x # source length\n", 370 | "n = T_y # target length\n", 371 | "d1 = encoder_unit_size\n", 372 | "d2 = decoder_unit_size\n", 373 | "\n", 374 | "# concatenate all H = (h_i) to all S = (s_j) ===> H_S = [s_(j-1) h_i] (S lags one step behind)\n", 375 | "S = Input((n, d2,))\n", 376 | "S_shift = Lambda(lambda x: K.concatenate([K.zeros_like(K.expand_dims(x[:,0], 1)), x[:,:-1]], 1))(S)\n", 377 | "S_flat = Flatten()(S_shift)\n", 378 | "S_flat_rep = RepeatVector(m)(S_flat)\n", 379 | "S_rep_n = Reshape((m, n, d2))(S_flat_rep)\n", 380 | "# (m, n, d2,)\n", 381 | "\n", 382 | "H = Input((m, d1,))\n", 383 | "H_flat = Flatten()(H)\n", 384 | "H_flat_rep = RepeatVector(n)(H_flat)\n", 385 | "H_flat_rep_ = Reshape((n, m, d1))(H_flat_rep)\n", 386 | "H_rep_m = Permute((2,1,3))(H_flat_rep_) \n", 387 | "# (m, n, d1,)\n", 388 | "\n", 389 | "# concatenate everything with everything:\n", 390 | "S_H_ = Concatenate(-1)([S_rep_n, H_rep_m]) \n", 391 | "# (m, n, d1+d2)\n", 392
| "S_H_flat = Flatten()(S_H_)\n", 393 | "S_H = Reshape((m*n, (d1+d2)))(S_H_flat) \n", 394 | "# (m*n, (d1+d2),)\n", 395 | "\n", 396 | "# make the e_ji\n", 397 | "E_T_1 = TimeDistributed(Dense(d1+d2, activation='tanh'))(S_H)\n", 398 | "E_T_ = TimeDistributed(Dense(1, activation='linear'))(E_T_1)\n", 399 | "E_T = Reshape((m, n))(E_T_) \n", 400 | "E = Permute((2,1))(E_T) # E = {E_j} = {{ e_{ji} }} \n", 401 | "# (n, m,)\n", 402 | "\n", 403 | "# the alignemtns\n", 404 | "alpha = TimeDistributed(Activation('softmax'))(E) # alpha_j = softmax(E_j}\n", 405 | "# (n, m,)\n", 406 | "\n", 407 | "C = Dot((2,1))([alpha, H])\n", 408 | "# (n, d1,)\n", 409 | "\n", 410 | "attention_model = Model([S, H], C)\n" 411 | ] 412 | }, 413 | { 414 | "cell_type": "markdown", 415 | "metadata": {}, 416 | "source": [ 417 | "#### Decoder layers" 418 | ] 419 | }, 420 | { 421 | "cell_type": "code", 422 | "execution_count": 17, 423 | "metadata": { 424 | "collapsed": true 425 | }, 426 | "outputs": [], 427 | "source": [ 428 | "target_embeddings = Embedding(\n", 429 | " trg_vocab_size,\n", 430 | " decoder_embedding_size,\n", 431 | " input_shape=(T_y,),\n", 432 | " #embeddings_initializer=one_hot_initializer,\n", 433 | " #trainable=False,\n", 434 | ")\n", 435 | "decoder_rnn = LSTM(decoder_unit_size, return_sequences=True, input_shape=(T_y, decoder_embedding_size+encoder_unit_size))\n", 436 | "\n", 437 | "decoder_model = Model(\n", 438 | " [src_input, trg_input],\n", 439 | " sequential_layer_composition(trg_input, [\n", 440 | " target_embeddings, \n", 441 | " decoder_rnn, \n", 442 | " lambda S: Concatenate(2)([S, attention_model([S, encoder_output])]),\n", 443 | " TimeDistributed(Dense(trg_vocab_size, activation='softmax')),\n", 444 | " ])\n", 445 | ")\n", 446 | "\n", 447 | "alignments_model = Model(\n", 448 | " [src_input, trg_input],\n", 449 | " sequential_layer_composition(trg_input, [\n", 450 | " target_embeddings,\n", 451 | " decoder_rnn, \n", 452 | " lambda x: Model([S, H], alpha)([x, encoder_output]),\n", 453 | " ])\n", 454 | ")" 455 | ] 456 | }, 457 | { 458 | "cell_type": "markdown", 459 | "metadata": {}, 460 | "source": [ 461 | "#### Decoder summary" 462 | ] 463 | }, 464 | { 465 | "cell_type": "code", 466 | "execution_count": 18, 467 | "metadata": {}, 468 | "outputs": [ 469 | { 470 | "name": "stdout", 471 | "output_type": "stream", 472 | "text": [ 473 | "____________________________________________________________________________________________________\n", 474 | "Layer (type) Output Shape Param # Connected to \n", 475 | "====================================================================================================\n", 476 | "input_2 (InputLayer) (None, 22) 0 \n", 477 | "____________________________________________________________________________________________________\n", 478 | "embedding_2 (Embedding) (None, 22, 8) 104 input_2[0][0] \n", 479 | "____________________________________________________________________________________________________\n", 480 | "input_1 (InputLayer) (None, 21) 0 \n", 481 | "____________________________________________________________________________________________________\n", 482 | "lstm_3 (LSTM) (None, 22, 32) 5248 embedding_2[0][0] \n", 483 | "____________________________________________________________________________________________________\n", 484 | "sequential_1 (Sequential) (None, 21, 32) 9576 input_1[0][0] \n", 485 | "____________________________________________________________________________________________________\n", 486 | "model_1 (Model) (None, 22, 32) 4225 lstm_3[0][0] 
\n", 487 | " sequential_1[1][0] \n", 488 | "____________________________________________________________________________________________________\n", 489 | "concatenate_2 (Concatenate) (None, 22, 64) 0 lstm_3[0][0] \n", 490 | " model_1[1][0] \n", 491 | "____________________________________________________________________________________________________\n", 492 | "time_distributed_4 (TimeDistribu (None, 22, 13) 845 concatenate_2[0][0] \n", 493 | "====================================================================================================\n", 494 | "Total params: 19,998\n", 495 | "Trainable params: 19,998\n", 496 | "Non-trainable params: 0\n", 497 | "____________________________________________________________________________________________________\n" 498 | ] 499 | } 500 | ], 501 | "source": [ 502 | "decoder_model.summary()" 503 | ] 504 | }, 505 | { 506 | "cell_type": "markdown", 507 | "metadata": {}, 508 | "source": [ 509 | "### Finishing structure" 510 | ] 511 | }, 512 | { 513 | "cell_type": "code", 514 | "execution_count": 19, 515 | "metadata": { 516 | "collapsed": true 517 | }, 518 | "outputs": [], 519 | "source": [ 520 | "# input of the encoder-decoder model is a list of two inputs: source, target\n", 521 | "encoder_decoder = Model([src_input, trg_input], decoder_model([src_input, trg_input]))" 522 | ] 523 | }, 524 | { 525 | "cell_type": "code", 526 | "execution_count": 20, 527 | "metadata": { 528 | "collapsed": true 529 | }, 530 | "outputs": [], 531 | "source": [ 532 | "encoder_decoder.compile('adam', 'categorical_crossentropy')" 533 | ] 534 | }, 535 | { 536 | "cell_type": "code", 537 | "execution_count": 21, 538 | "metadata": {}, 539 | "outputs": [ 540 | { 541 | "name": "stdout", 542 | "output_type": "stream", 543 | "text": [ 544 | "____________________________________________________________________________________________________\n", 545 | "Layer (type) Output Shape Param # Connected to \n", 546 | "====================================================================================================\n", 547 | "input_1 (InputLayer) (None, 21) 0 \n", 548 | "____________________________________________________________________________________________________\n", 549 | "input_2 (InputLayer) (None, 22) 0 \n", 550 | "____________________________________________________________________________________________________\n", 551 | "model_2 (Model) (None, 22, 13) 19998 input_1[0][0] \n", 552 | " input_2[0][0] \n", 553 | "====================================================================================================\n", 554 | "Total params: 19,998\n", 555 | "Trainable params: 19,998\n", 556 | "Non-trainable params: 0\n", 557 | "____________________________________________________________________________________________________\n" 558 | ] 559 | } 560 | ], 561 | "source": [ 562 | "encoder_decoder.summary()" 563 | ] 564 | }, 565 | { 566 | "cell_type": "markdown", 567 | "metadata": {}, 568 | "source": [ 569 | "## Training" 570 | ] 571 | }, 572 | { 573 | "cell_type": "code", 574 | "execution_count": 22, 575 | "metadata": { 576 | "scrolled": false 577 | }, 578 | "outputs": [ 579 | { 580 | "name": "stdout", 581 | "output_type": "stream", 582 | "text": [ 583 | "Train on 8000 samples, validate on 2000 samples\n", 584 | "Epoch 1/1000\n", 585 | "8000/8000 [==============================] - 13s - loss: 2.3978 - val_loss: 2.0210\n", 586 | "Epoch 2/1000\n", 587 | "8000/8000 [==============================] - 4s - loss: 1.7828 - val_loss: 1.6616\n", 588 | "Epoch 3/1000\n", 589 | "8000/8000 
[==============================] - 4s - loss: 1.6058 - val_loss: 1.5618\n", 590 | "Epoch 4/1000\n", 591 | "8000/8000 [==============================] - 4s - loss: 1.4731 - val_loss: 1.3558\n", 592 | "Epoch 5/1000\n", 593 | "8000/8000 [==============================] - 4s - loss: 1.2626 - val_loss: 1.1747\n", 594 | "Epoch 6/1000\n", 595 | "8000/8000 [==============================] - 4s - loss: 1.1061 - val_loss: 1.0401\n", 596 | "Epoch 7/1000\n", 597 | "8000/8000 [==============================] - 4s - loss: 0.9918 - val_loss: 0.9424\n", 598 | "Epoch 8/1000\n", 599 | "8000/8000 [==============================] - 4s - loss: 0.9066 - val_loss: 0.8690\n", 600 | "Epoch 9/1000\n", 601 | "8000/8000 [==============================] - 4s - loss: 0.8478 - val_loss: 0.8263\n", 602 | "Epoch 10/1000\n", 603 | "8000/8000 [==============================] - 4s - loss: 0.8078 - val_loss: 0.7900\n", 604 | "Epoch 11/1000\n", 605 | "8000/8000 [==============================] - 4s - loss: 0.7756 - val_loss: 0.7634\n", 606 | "Epoch 12/1000\n", 607 | "8000/8000 [==============================] - 4s - loss: 0.7498 - val_loss: 0.7366\n", 608 | "Epoch 13/1000\n", 609 | "8000/8000 [==============================] - 4s - loss: 0.7236 - val_loss: 0.7129\n", 610 | "Epoch 14/1000\n", 611 | "8000/8000 [==============================] - 4s - loss: 0.7007 - val_loss: 0.6889\n", 612 | "Epoch 15/1000\n", 613 | "8000/8000 [==============================] - 4s - loss: 0.6775 - val_loss: 0.6676\n", 614 | "Epoch 16/1000\n", 615 | "8000/8000 [==============================] - 4s - loss: 0.7251 - val_loss: 0.6998\n", 616 | "Epoch 17/1000\n", 617 | "8000/8000 [==============================] - 4s - loss: 0.6649 - val_loss: 0.6427\n", 618 | "Epoch 18/1000\n", 619 | "8000/8000 [==============================] - 4s - loss: 0.6335 - val_loss: 0.6270\n", 620 | "Epoch 19/1000\n", 621 | "8000/8000 [==============================] - 4s - loss: 0.6190 - val_loss: 0.6145\n", 622 | "Epoch 20/1000\n", 623 | "8000/8000 [==============================] - 4s - loss: 0.6075 - val_loss: 0.6038\n", 624 | "Epoch 21/1000\n", 625 | "8000/8000 [==============================] - 4s - loss: 0.5974 - val_loss: 0.5942\n", 626 | "Epoch 22/1000\n", 627 | "8000/8000 [==============================] - 4s - loss: 0.5883 - val_loss: 0.5856\n", 628 | "Epoch 23/1000\n", 629 | "8000/8000 [==============================] - 4s - loss: 0.5802 - val_loss: 0.5778\n", 630 | "Epoch 24/1000\n", 631 | "8000/8000 [==============================] - 4s - loss: 0.5728 - val_loss: 0.5708\n", 632 | "Epoch 25/1000\n", 633 | "8000/8000 [==============================] - 4s - loss: 0.5660 - val_loss: 0.5643\n", 634 | "Epoch 26/1000\n", 635 | "8000/8000 [==============================] - 4s - loss: 0.5598 - val_loss: 0.5587\n", 636 | "Epoch 27/1000\n", 637 | "8000/8000 [==============================] - 4s - loss: 0.5541 - val_loss: 0.5526\n", 638 | "Epoch 28/1000\n", 639 | "8000/8000 [==============================] - 4s - loss: 0.5487 - val_loss: 0.5475\n", 640 | "Epoch 29/1000\n", 641 | "8000/8000 [==============================] - 4s - loss: 0.5436 - val_loss: 0.5428\n", 642 | "Epoch 30/1000\n", 643 | "8000/8000 [==============================] - 4s - loss: 0.5389 - val_loss: 0.5377\n", 644 | "Epoch 31/1000\n", 645 | "8000/8000 [==============================] - 4s - loss: 0.5343 - val_loss: 0.5326\n", 646 | "Epoch 32/1000\n", 647 | "8000/8000 [==============================] - 4s - loss: 0.5288 - val_loss: 0.5262\n", 648 | "Epoch 33/1000\n", 649 | "8000/8000 
[==============================] - 4s - loss: 0.5226 - val_loss: 0.5191\n", 650 | "Epoch 34/1000\n", 651 | "8000/8000 [==============================] - 4s - loss: 0.5198 - val_loss: 0.5139\n", 652 | "Epoch 35/1000\n", 653 | "8000/8000 [==============================] - 4s - loss: 0.5103 - val_loss: 0.5080\n", 654 | "Epoch 36/1000\n", 655 | "8000/8000 [==============================] - 4s - loss: 0.5049 - val_loss: 0.5028\n", 656 | "Epoch 37/1000\n", 657 | "8000/8000 [==============================] - 4s - loss: 0.5000 - val_loss: 0.4981\n", 658 | "Epoch 38/1000\n", 659 | "8000/8000 [==============================] - 4s - loss: 0.4953 - val_loss: 0.4937\n", 660 | "Epoch 39/1000\n", 661 | "8000/8000 [==============================] - 4s - loss: 0.4914 - val_loss: 0.4901\n", 662 | "Epoch 40/1000\n", 663 | "8000/8000 [==============================] - 4s - loss: 0.4874 - val_loss: 0.4866\n", 664 | "Epoch 41/1000\n", 665 | "8000/8000 [==============================] - 4s - loss: 0.4839 - val_loss: 0.4820\n", 666 | "Epoch 42/1000\n", 667 | "8000/8000 [==============================] - 4s - loss: 0.4796 - val_loss: 0.4778\n", 668 | "Epoch 43/1000\n", 669 | "8000/8000 [==============================] - 4s - loss: 0.4742 - val_loss: 0.4715\n", 670 | "Epoch 44/1000\n", 671 | "8000/8000 [==============================] - 4s - loss: 0.5503 - val_loss: 0.5047\n", 672 | "Epoch 45/1000\n", 673 | "8000/8000 [==============================] - 4s - loss: 0.4870 - val_loss: 0.4747\n", 674 | "Epoch 46/1000\n", 675 | "8000/8000 [==============================] - 4s - loss: 0.4682 - val_loss: 0.4655\n", 676 | "Epoch 47/1000\n", 677 | "8000/8000 [==============================] - 4s - loss: 0.4624 - val_loss: 0.4612\n", 678 | "Epoch 48/1000\n", 679 | "8000/8000 [==============================] - 4s - loss: 0.4585 - val_loss: 0.4578\n", 680 | "Epoch 49/1000\n", 681 | "8000/8000 [==============================] - 4s - loss: 0.4552 - val_loss: 0.4549\n", 682 | "Epoch 50/1000\n", 683 | "8000/8000 [==============================] - 4s - loss: 0.4525 - val_loss: 0.4521\n", 684 | "Epoch 51/1000\n", 685 | "8000/8000 [==============================] - 4s - loss: 0.4500 - val_loss: 0.4499\n", 686 | "Epoch 52/1000\n", 687 | "8000/8000 [==============================] - 4s - loss: 0.4476 - val_loss: 0.4478\n", 688 | "Epoch 53/1000\n", 689 | "8000/8000 [==============================] - 4s - loss: 0.4455 - val_loss: 0.4456\n", 690 | "Epoch 54/1000\n", 691 | "8000/8000 [==============================] - 4s - loss: 0.4435 - val_loss: 0.4436\n", 692 | "Epoch 55/1000\n", 693 | "8000/8000 [==============================] - 4s - loss: 0.4416 - val_loss: 0.4418\n", 694 | "Epoch 56/1000\n", 695 | "8000/8000 [==============================] - 4s - loss: 0.4397 - val_loss: 0.4399\n", 696 | "Epoch 57/1000\n", 697 | "8000/8000 [==============================] - 4s - loss: 0.4378 - val_loss: 0.4383\n", 698 | "Epoch 58/1000\n", 699 | "8000/8000 [==============================] - 4s - loss: 0.4360 - val_loss: 0.4364\n", 700 | "Epoch 59/1000\n", 701 | "8000/8000 [==============================] - 4s - loss: 0.4341 - val_loss: 0.4352\n", 702 | "Epoch 60/1000\n", 703 | "8000/8000 [==============================] - 4s - loss: 0.4321 - val_loss: 0.4327\n", 704 | "Epoch 61/1000\n", 705 | "8000/8000 [==============================] - 4s - loss: 0.4301 - val_loss: 0.4307\n", 706 | "Epoch 62/1000\n", 707 | "8000/8000 [==============================] - 4s - loss: 0.4277 - val_loss: 0.4276\n", 708 | "Epoch 63/1000\n", 709 | "8000/8000 
[==============================] - 4s - loss: 0.4249 - val_loss: 0.4248\n", 710 | "Epoch 64/1000\n", 711 | "8000/8000 [==============================] - 4s - loss: 0.4220 - val_loss: 0.4212\n", 712 | "Epoch 65/1000\n", 713 | "8000/8000 [==============================] - 4s - loss: 0.4188 - val_loss: 0.4177\n", 714 | "Epoch 66/1000\n", 715 | "8000/8000 [==============================] - 4s - loss: 0.4150 - val_loss: 0.4139\n", 716 | "Epoch 67/1000\n", 717 | "8000/8000 [==============================] - 4s - loss: 0.4122 - val_loss: 0.4116\n", 718 | "Epoch 68/1000\n", 719 | "8000/8000 [==============================] - 4s - loss: 0.4097 - val_loss: 0.4087\n", 720 | "Epoch 69/1000\n", 721 | "8000/8000 [==============================] - 4s - loss: 0.4077 - val_loss: 0.4071\n", 722 | "Epoch 70/1000\n", 723 | "8000/8000 [==============================] - 4s - loss: 0.4059 - val_loss: 0.4060\n", 724 | "Epoch 71/1000\n", 725 | "8000/8000 [==============================] - 4s - loss: 0.4042 - val_loss: 0.4041\n", 726 | "Epoch 72/1000\n", 727 | "8000/8000 [==============================] - 4s - loss: 0.4023 - val_loss: 0.4012\n", 728 | "Epoch 73/1000\n", 729 | "8000/8000 [==============================] - 4s - loss: 0.3999 - val_loss: 0.3993\n", 730 | "Epoch 74/1000\n", 731 | "8000/8000 [==============================] - 4s - loss: 0.3982 - val_loss: 0.3966\n", 732 | "Epoch 75/1000\n", 733 | "8000/8000 [==============================] - 4s - loss: 0.3958 - val_loss: 0.3943\n", 734 | "Epoch 76/1000\n", 735 | "8000/8000 [==============================] - 4s - loss: 0.3928 - val_loss: 0.3911\n", 736 | "Epoch 77/1000\n", 737 | "8000/8000 [==============================] - 4s - loss: 0.4202 - val_loss: 0.4779\n", 738 | "Epoch 78/1000\n", 739 | "8000/8000 [==============================] - 4s - loss: 0.4118 - val_loss: 0.3921\n", 740 | "Epoch 79/1000\n", 741 | "8000/8000 [==============================] - 4s - loss: 0.3874 - val_loss: 0.3850\n", 742 | "Epoch 80/1000\n", 743 | "8000/8000 [==============================] - 4s - loss: 0.3825 - val_loss: 0.3815\n", 744 | "Epoch 81/1000\n", 745 | "8000/8000 [==============================] - 4s - loss: 0.3798 - val_loss: 0.3794\n", 746 | "Epoch 82/1000\n", 747 | "8000/8000 [==============================] - 4s - loss: 0.3775 - val_loss: 0.3773\n", 748 | "Epoch 83/1000\n", 749 | "8000/8000 [==============================] - 4s - loss: 0.3753 - val_loss: 0.3753\n", 750 | "Epoch 84/1000\n", 751 | "8000/8000 [==============================] - 4s - loss: 0.3733 - val_loss: 0.3732\n", 752 | "Epoch 85/1000\n" 753 | ] 754 | }, 755 | { 756 | "name": "stdout", 757 | "output_type": "stream", 758 | "text": [ 759 | "8000/8000 [==============================] - 4s - loss: 0.3710 - val_loss: 0.3711\n", 760 | "Epoch 86/1000\n", 761 | "8000/8000 [==============================] - 4s - loss: 0.3688 - val_loss: 0.3689\n", 762 | "Epoch 87/1000\n", 763 | "8000/8000 [==============================] - 4s - loss: 0.3666 - val_loss: 0.3671\n", 764 | "Epoch 88/1000\n", 765 | "8000/8000 [==============================] - 4s - loss: 0.3645 - val_loss: 0.3653\n", 766 | "Epoch 89/1000\n", 767 | "8000/8000 [==============================] - 4s - loss: 0.3622 - val_loss: 0.3628\n", 768 | "Epoch 90/1000\n", 769 | "8000/8000 [==============================] - 4s - loss: 0.3602 - val_loss: 0.3610\n", 770 | "Epoch 91/1000\n", 771 | "8000/8000 [==============================] - 4s - loss: 0.3579 - val_loss: 0.3585\n", 772 | "Epoch 92/1000\n", 773 | "8000/8000 [==============================] - 4s 
- loss: 0.3558 - val_loss: 0.3566\n", 774 | "Epoch 93/1000\n", 775 | "8000/8000 [==============================] - 4s - loss: 0.3538 - val_loss: 0.3542\n", 776 | "Epoch 94/1000\n", 777 | "8000/8000 [==============================] - 4s - loss: 0.3518 - val_loss: 0.3525\n", 778 | "Epoch 95/1000\n", 779 | "8000/8000 [==============================] - 4s - loss: 0.3499 - val_loss: 0.3508\n", 780 | "Epoch 96/1000\n", 781 | "8000/8000 [==============================] - 4s - loss: 0.3483 - val_loss: 0.3491\n", 782 | "Epoch 97/1000\n", 783 | "8000/8000 [==============================] - 4s - loss: 0.3464 - val_loss: 0.3473\n", 784 | "Epoch 98/1000\n", 785 | "8000/8000 [==============================] - 4s - loss: 0.3448 - val_loss: 0.3461\n", 786 | "Epoch 99/1000\n", 787 | "8000/8000 [==============================] - 4s - loss: 0.3432 - val_loss: 0.3437\n", 788 | "Epoch 100/1000\n", 789 | "8000/8000 [==============================] - 4s - loss: 0.3415 - val_loss: 0.3430\n", 790 | "Epoch 101/1000\n", 791 | "8000/8000 [==============================] - 4s - loss: 0.3400 - val_loss: 0.3409\n", 792 | "Epoch 102/1000\n", 793 | "8000/8000 [==============================] - 4s - loss: 0.3382 - val_loss: 0.3392\n", 794 | "Epoch 103/1000\n", 795 | "8000/8000 [==============================] - 4s - loss: 0.3367 - val_loss: 0.3372\n", 796 | "Epoch 104/1000\n", 797 | "8000/8000 [==============================] - 4s - loss: 0.3350 - val_loss: 0.3355\n", 798 | "Epoch 105/1000\n", 799 | "8000/8000 [==============================] - 4s - loss: 0.3334 - val_loss: 0.3335\n", 800 | "Epoch 106/1000\n", 801 | "8000/8000 [==============================] - 4s - loss: 0.3315 - val_loss: 0.3333\n", 802 | "Epoch 107/1000\n", 803 | "8000/8000 [==============================] - 4s - loss: 0.3300 - val_loss: 0.3307\n", 804 | "Epoch 108/1000\n", 805 | "8000/8000 [==============================] - 4s - loss: 0.3280 - val_loss: 0.3292\n", 806 | "Epoch 109/1000\n", 807 | "8000/8000 [==============================] - 4s - loss: 0.3266 - val_loss: 0.3274\n", 808 | "Epoch 110/1000\n", 809 | "8000/8000 [==============================] - 4s - loss: 0.3253 - val_loss: 0.3256\n", 810 | "Epoch 111/1000\n", 811 | "8000/8000 [==============================] - 4s - loss: 0.3239 - val_loss: 0.3252\n", 812 | "Epoch 112/1000\n", 813 | "8000/8000 [==============================] - 4s - loss: 0.3225 - val_loss: 0.3240\n", 814 | "Epoch 113/1000\n", 815 | "8000/8000 [==============================] - 4s - loss: 0.3205 - val_loss: 0.3218\n", 816 | "Epoch 114/1000\n", 817 | "8000/8000 [==============================] - 4s - loss: 0.3184 - val_loss: 0.3200\n", 818 | "Epoch 115/1000\n", 819 | "8000/8000 [==============================] - 4s - loss: 0.3173 - val_loss: 0.3181\n", 820 | "Epoch 116/1000\n", 821 | "8000/8000 [==============================] - 4s - loss: 0.3150 - val_loss: 0.3168\n", 822 | "Epoch 117/1000\n", 823 | "8000/8000 [==============================] - 4s - loss: 0.3136 - val_loss: 0.3146\n", 824 | "Epoch 118/1000\n", 825 | "8000/8000 [==============================] - 4s - loss: 0.3115 - val_loss: 0.3126\n", 826 | "Epoch 119/1000\n", 827 | "8000/8000 [==============================] - 4s - loss: 0.3102 - val_loss: 0.3108\n", 828 | "Epoch 120/1000\n", 829 | "8000/8000 [==============================] - 4s - loss: 0.3085 - val_loss: 0.3105\n", 830 | "Epoch 121/1000\n", 831 | "8000/8000 [==============================] - 4s - loss: 0.3066 - val_loss: 0.3086\n", 832 | "Epoch 122/1000\n", 833 | "8000/8000 
[==============================] - 4s - loss: 0.3047 - val_loss: 0.3068\n", 834 | "Epoch 123/1000\n", 835 | "8000/8000 [==============================] - 4s - loss: 0.3031 - val_loss: 0.3035\n", 836 | "Epoch 124/1000\n", 837 | "8000/8000 [==============================] - 4s - loss: 0.3014 - val_loss: 0.3027\n", 838 | "Epoch 125/1000\n", 839 | "8000/8000 [==============================] - 4s - loss: 0.2993 - val_loss: 0.3019\n", 840 | "Epoch 126/1000\n", 841 | "8000/8000 [==============================] - 4s - loss: 0.2978 - val_loss: 0.2988\n", 842 | "Epoch 127/1000\n", 843 | "8000/8000 [==============================] - 4s - loss: 0.2958 - val_loss: 0.2965\n", 844 | "Epoch 128/1000\n", 845 | "8000/8000 [==============================] - 4s - loss: 0.2941 - val_loss: 0.2949\n", 846 | "Epoch 129/1000\n", 847 | "8000/8000 [==============================] - 4s - loss: 0.2926 - val_loss: 0.2949\n", 848 | "Epoch 130/1000\n", 849 | "8000/8000 [==============================] - 4s - loss: 0.2909 - val_loss: 0.2925\n", 850 | "Epoch 131/1000\n", 851 | "8000/8000 [==============================] - 4s - loss: 0.2890 - val_loss: 0.2918\n", 852 | "Epoch 132/1000\n", 853 | "8000/8000 [==============================] - 4s - loss: 0.2882 - val_loss: 0.2900\n", 854 | "Epoch 133/1000\n", 855 | "8000/8000 [==============================] - 4s - loss: 0.2866 - val_loss: 0.2883\n", 856 | "Epoch 134/1000\n", 857 | "8000/8000 [==============================] - 4s - loss: 0.2850 - val_loss: 0.2870\n", 858 | "Epoch 135/1000\n", 859 | "8000/8000 [==============================] - 4s - loss: 0.2837 - val_loss: 0.2871\n", 860 | "Epoch 136/1000\n", 861 | "8000/8000 [==============================] - 4s - loss: 0.2827 - val_loss: 0.2844\n", 862 | "Epoch 137/1000\n", 863 | "8000/8000 [==============================] - 4s - loss: 0.2811 - val_loss: 0.2833\n", 864 | "Epoch 138/1000\n", 865 | "8000/8000 [==============================] - 4s - loss: 0.2795 - val_loss: 0.2817\n", 866 | "Epoch 139/1000\n", 867 | "8000/8000 [==============================] - 4s - loss: 0.2783 - val_loss: 0.2804\n", 868 | "Epoch 140/1000\n", 869 | "8000/8000 [==============================] - 4s - loss: 0.2766 - val_loss: 0.2789\n", 870 | "Epoch 141/1000\n", 871 | "8000/8000 [==============================] - 4s - loss: 0.2752 - val_loss: 0.2782\n", 872 | "Epoch 142/1000\n", 873 | "8000/8000 [==============================] - 4s - loss: 0.2744 - val_loss: 0.2784\n", 874 | "Epoch 143/1000\n", 875 | "8000/8000 [==============================] - 4s - loss: 0.2733 - val_loss: 0.2746\n", 876 | "Epoch 144/1000\n", 877 | "8000/8000 [==============================] - 4s - loss: 0.2716 - val_loss: 0.2751\n", 878 | "Epoch 145/1000\n", 879 | "8000/8000 [==============================] - 4s - loss: 0.2703 - val_loss: 0.2723\n", 880 | "Epoch 146/1000\n", 881 | "8000/8000 [==============================] - 4s - loss: 0.2683 - val_loss: 0.2707\n", 882 | "Epoch 147/1000\n", 883 | "8000/8000 [==============================] - 4s - loss: 0.2672 - val_loss: 0.2697\n", 884 | "Epoch 148/1000\n", 885 | "8000/8000 [==============================] - 4s - loss: 0.2650 - val_loss: 0.2668\n", 886 | "Epoch 149/1000\n", 887 | "8000/8000 [==============================] - 4s - loss: 0.2629 - val_loss: 0.2651\n", 888 | "Epoch 150/1000\n", 889 | "8000/8000 [==============================] - 4s - loss: 0.2621 - val_loss: 0.2648\n", 890 | "Epoch 151/1000\n", 891 | "8000/8000 [==============================] - 4s - loss: 0.2609 - val_loss: 0.2632\n", 892 | "Epoch 152/1000\n", 
893 | "8000/8000 [==============================] - 4s - loss: 0.2580 - val_loss: 0.2604\n", 894 | "Epoch 153/1000\n", 895 | "8000/8000 [==============================] - 4s - loss: 0.2568 - val_loss: 0.2613\n", 896 | "Epoch 154/1000\n", 897 | "8000/8000 [==============================] - 4s - loss: 0.2548 - val_loss: 0.2569\n", 898 | "Epoch 155/1000\n", 899 | "8000/8000 [==============================] - 4s - loss: 0.2525 - val_loss: 0.2548\n", 900 | "Epoch 156/1000\n", 901 | "8000/8000 [==============================] - 4s - loss: 0.2502 - val_loss: 0.2529\n", 902 | "Epoch 157/1000\n", 903 | "8000/8000 [==============================] - 4s - loss: 0.2487 - val_loss: 0.2514\n", 904 | "Epoch 158/1000\n", 905 | "8000/8000 [==============================] - 4s - loss: 0.2471 - val_loss: 0.2493\n", 906 | "Epoch 159/1000\n", 907 | "8000/8000 [==============================] - 4s - loss: 0.2453 - val_loss: 0.2485\n", 908 | "Epoch 160/1000\n", 909 | "8000/8000 [==============================] - 4s - loss: 0.2435 - val_loss: 0.2462\n", 910 | "Epoch 161/1000\n", 911 | "8000/8000 [==============================] - 4s - loss: 0.2413 - val_loss: 0.2441\n", 912 | "Epoch 162/1000\n", 913 | "8000/8000 [==============================] - 4s - loss: 0.2394 - val_loss: 0.2431\n", 914 | "Epoch 163/1000\n", 915 | "8000/8000 [==============================] - 4s - loss: 0.2380 - val_loss: 0.2413\n", 916 | "Epoch 164/1000\n", 917 | "8000/8000 [==============================] - 4s - loss: 0.2379 - val_loss: 0.2397\n", 918 | "Epoch 165/1000\n", 919 | "8000/8000 [==============================] - 4s - loss: 0.2344 - val_loss: 0.2384\n", 920 | "Epoch 166/1000\n", 921 | "8000/8000 [==============================] - 4s - loss: 0.2322 - val_loss: 0.2348\n", 922 | "Epoch 167/1000\n", 923 | "8000/8000 [==============================] - 4s - loss: 0.2301 - val_loss: 0.2339\n", 924 | "Epoch 168/1000\n", 925 | "8000/8000 [==============================] - 4s - loss: 0.2289 - val_loss: 0.2321\n", 926 | "Epoch 169/1000\n" 927 | ] 928 | }, 929 | { 930 | "name": "stdout", 931 | "output_type": "stream", 932 | "text": [ 933 | "8000/8000 [==============================] - 4s - loss: 0.2280 - val_loss: 0.2378\n", 934 | "Epoch 170/1000\n", 935 | "8000/8000 [==============================] - 4s - loss: 0.2270 - val_loss: 0.2295\n", 936 | "Epoch 171/1000\n", 937 | "8000/8000 [==============================] - 4s - loss: 0.2238 - val_loss: 0.2273\n", 938 | "Epoch 172/1000\n", 939 | "8000/8000 [==============================] - 4s - loss: 0.2227 - val_loss: 0.2265\n", 940 | "Epoch 173/1000\n", 941 | "8000/8000 [==============================] - 4s - loss: 0.2206 - val_loss: 0.2246\n", 942 | "Epoch 174/1000\n", 943 | "8000/8000 [==============================] - 4s - loss: 0.2188 - val_loss: 0.2234\n", 944 | "Epoch 175/1000\n", 945 | "8000/8000 [==============================] - 4s - loss: 0.2173 - val_loss: 0.2210\n", 946 | "Epoch 176/1000\n", 947 | "8000/8000 [==============================] - 4s - loss: 0.2151 - val_loss: 0.2193\n", 948 | "Epoch 177/1000\n", 949 | "8000/8000 [==============================] - 4s - loss: 0.2150 - val_loss: 0.2196\n", 950 | "Epoch 178/1000\n", 951 | "8000/8000 [==============================] - 4s - loss: 0.2127 - val_loss: 0.2165\n", 952 | "Epoch 179/1000\n", 953 | "8000/8000 [==============================] - 4s - loss: 0.2105 - val_loss: 0.2150\n", 954 | "Epoch 180/1000\n", 955 | "8000/8000 [==============================] - 4s - loss: 0.2092 - val_loss: 0.2178\n", 956 | "Epoch 181/1000\n", 957 | 
"8000/8000 [==============================] - 4s - loss: 0.2991 - val_loss: 0.2252\n", 958 | "Epoch 182/1000\n", 959 | "8000/8000 [==============================] - 4s - loss: 0.2133 - val_loss: 0.2132\n", 960 | "Epoch 183/1000\n", 961 | "8000/8000 [==============================] - 4s - loss: 0.2065 - val_loss: 0.2120\n", 962 | "Epoch 184/1000\n", 963 | "8000/8000 [==============================] - 4s - loss: 0.2046 - val_loss: 0.2093\n", 964 | "Epoch 185/1000\n", 965 | "8000/8000 [==============================] - 4s - loss: 0.2027 - val_loss: 0.2078\n", 966 | "Epoch 186/1000\n", 967 | "8000/8000 [==============================] - 4s - loss: 0.2017 - val_loss: 0.2068\n", 968 | "Epoch 187/1000\n", 969 | "8000/8000 [==============================] - 4s - loss: 0.2002 - val_loss: 0.2056\n", 970 | "Epoch 188/1000\n", 971 | "8000/8000 [==============================] - 4s - loss: 0.1987 - val_loss: 0.2047\n", 972 | "Epoch 189/1000\n", 973 | "8000/8000 [==============================] - 4s - loss: 0.1972 - val_loss: 0.2034\n", 974 | "Epoch 190/1000\n", 975 | "8000/8000 [==============================] - 4s - loss: 0.1965 - val_loss: 0.2014\n", 976 | "Epoch 191/1000\n", 977 | "8000/8000 [==============================] - 4s - loss: 0.1943 - val_loss: 0.2001\n", 978 | "Epoch 192/1000\n", 979 | "8000/8000 [==============================] - 4s - loss: 0.1933 - val_loss: 0.2001\n", 980 | "Epoch 193/1000\n", 981 | "8000/8000 [==============================] - 4s - loss: 0.1926 - val_loss: 0.1998\n", 982 | "Epoch 194/1000\n", 983 | "8000/8000 [==============================] - 4s - loss: 0.1913 - val_loss: 0.1975\n", 984 | "Epoch 195/1000\n", 985 | "8000/8000 [==============================] - 4s - loss: 0.1899 - val_loss: 0.1959\n", 986 | "Epoch 196/1000\n", 987 | "8000/8000 [==============================] - 4s - loss: 0.1879 - val_loss: 0.1946\n", 988 | "Epoch 197/1000\n", 989 | "8000/8000 [==============================] - 4s - loss: 0.1870 - val_loss: 0.1936\n", 990 | "Epoch 198/1000\n", 991 | "8000/8000 [==============================] - 4s - loss: 0.1858 - val_loss: 0.1926\n", 992 | "Epoch 199/1000\n", 993 | "8000/8000 [==============================] - 4s - loss: 0.1845 - val_loss: 0.1921\n", 994 | "Epoch 200/1000\n", 995 | "8000/8000 [==============================] - 4s - loss: 0.1836 - val_loss: 0.1893\n", 996 | "Epoch 201/1000\n", 997 | "8000/8000 [==============================] - 4s - loss: 0.1817 - val_loss: 0.1913\n", 998 | "Epoch 202/1000\n", 999 | "8000/8000 [==============================] - 4s - loss: 0.1809 - val_loss: 0.1874\n", 1000 | "Epoch 203/1000\n", 1001 | "8000/8000 [==============================] - 4s - loss: 0.1785 - val_loss: 0.1856\n", 1002 | "Epoch 204/1000\n", 1003 | "8000/8000 [==============================] - 4s - loss: 0.1779 - val_loss: 0.1851\n", 1004 | "Epoch 205/1000\n", 1005 | "8000/8000 [==============================] - 4s - loss: 0.1765 - val_loss: 0.1843\n", 1006 | "Epoch 206/1000\n", 1007 | "8000/8000 [==============================] - 4s - loss: 0.1761 - val_loss: 0.1815\n", 1008 | "Epoch 207/1000\n", 1009 | "8000/8000 [==============================] - 4s - loss: 0.1725 - val_loss: 0.1792\n", 1010 | "Epoch 208/1000\n", 1011 | "8000/8000 [==============================] - 4s - loss: 0.1705 - val_loss: 0.1783\n", 1012 | "Epoch 209/1000\n", 1013 | "8000/8000 [==============================] - 4s - loss: 0.1694 - val_loss: 0.1776\n", 1014 | "Epoch 210/1000\n", 1015 | "8000/8000 [==============================] - 4s - loss: 0.1673 - val_loss: 0.1760\n", 
1016 | "Epoch 211/1000\n", 1017 | "8000/8000 [==============================] - 4s - loss: 0.1660 - val_loss: 0.1743\n", 1018 | "Epoch 212/1000\n", 1019 | "8000/8000 [==============================] - 4s - loss: 0.1646 - val_loss: 0.1760\n", 1020 | "Epoch 213/1000\n", 1021 | "8000/8000 [==============================] - 4s - loss: 0.1637 - val_loss: 0.1709\n", 1022 | "Epoch 214/1000\n", 1023 | "8000/8000 [==============================] - 4s - loss: 0.1618 - val_loss: 0.1693\n", 1024 | "Epoch 215/1000\n", 1025 | "8000/8000 [==============================] - 4s - loss: 0.1605 - val_loss: 0.1695\n", 1026 | "Epoch 216/1000\n", 1027 | "8000/8000 [==============================] - 4s - loss: 0.1595 - val_loss: 0.1687\n", 1028 | "Epoch 217/1000\n", 1029 | "8000/8000 [==============================] - 4s - loss: 0.1591 - val_loss: 0.1703\n", 1030 | "Epoch 218/1000\n", 1031 | "8000/8000 [==============================] - 4s - loss: 0.1573 - val_loss: 0.1645\n", 1032 | "Epoch 219/1000\n", 1033 | "8000/8000 [==============================] - 4s - loss: 0.1550 - val_loss: 0.1635\n", 1034 | "Epoch 220/1000\n", 1035 | "8000/8000 [==============================] - 4s - loss: 0.1536 - val_loss: 0.1628\n", 1036 | "Epoch 221/1000\n", 1037 | "8000/8000 [==============================] - 4s - loss: 0.1523 - val_loss: 0.1614\n", 1038 | "Epoch 222/1000\n", 1039 | "8000/8000 [==============================] - 4s - loss: 0.1519 - val_loss: 0.1600\n", 1040 | "Epoch 223/1000\n", 1041 | "8000/8000 [==============================] - 4s - loss: 0.1503 - val_loss: 0.1586\n", 1042 | "Epoch 224/1000\n", 1043 | "8000/8000 [==============================] - 4s - loss: 0.1498 - val_loss: 0.1584\n", 1044 | "Epoch 225/1000\n", 1045 | "8000/8000 [==============================] - 4s - loss: 0.1479 - val_loss: 0.1571\n", 1046 | "Epoch 226/1000\n", 1047 | "8000/8000 [==============================] - 4s - loss: 0.1462 - val_loss: 0.1556\n", 1048 | "Epoch 227/1000\n", 1049 | "8000/8000 [==============================] - 4s - loss: 0.1449 - val_loss: 0.1542\n", 1050 | "Epoch 228/1000\n", 1051 | "8000/8000 [==============================] - 4s - loss: 0.1443 - val_loss: 0.1541\n", 1052 | "Epoch 229/1000\n", 1053 | "8000/8000 [==============================] - 4s - loss: 0.1428 - val_loss: 0.1516\n", 1054 | "Epoch 230/1000\n", 1055 | "8000/8000 [==============================] - 4s - loss: 0.1415 - val_loss: 0.1508\n", 1056 | "Epoch 231/1000\n", 1057 | "8000/8000 [==============================] - 4s - loss: 0.1405 - val_loss: 0.1510\n", 1058 | "Epoch 232/1000\n", 1059 | "8000/8000 [==============================] - 4s - loss: 0.1403 - val_loss: 0.1482\n", 1060 | "Epoch 233/1000\n", 1061 | "8000/8000 [==============================] - 4s - loss: 0.1385 - val_loss: 0.1486\n", 1062 | "Epoch 234/1000\n", 1063 | "8000/8000 [==============================] - 4s - loss: 0.1373 - val_loss: 0.1456\n", 1064 | "Epoch 235/1000\n", 1065 | "8000/8000 [==============================] - 4s - loss: 0.1359 - val_loss: 0.1456\n", 1066 | "Epoch 236/1000\n", 1067 | "8000/8000 [==============================] - 4s - loss: 0.1349 - val_loss: 0.1442\n", 1068 | "Epoch 237/1000\n", 1069 | "8000/8000 [==============================] - 4s - loss: 0.1332 - val_loss: 0.1423\n", 1070 | "Epoch 238/1000\n", 1071 | "8000/8000 [==============================] - 4s - loss: 0.1324 - val_loss: 0.1423\n", 1072 | "Epoch 239/1000\n", 1073 | "8000/8000 [==============================] - 4s - loss: 0.1313 - val_loss: 0.1423\n", 1074 | "Epoch 240/1000\n", 1075 | "8000/8000 
[==============================] - 4s - loss: 0.1308 - val_loss: 0.1396\n", 1076 | "Epoch 241/1000\n", 1077 | "8000/8000 [==============================] - 4s - loss: 0.1290 - val_loss: 0.1403\n", 1078 | "Epoch 242/1000\n", 1079 | "8000/8000 [==============================] - 4s - loss: 0.1287 - val_loss: 0.1389\n", 1080 | "Epoch 243/1000\n", 1081 | "8000/8000 [==============================] - 4s - loss: 0.1265 - val_loss: 0.1362\n", 1082 | "Epoch 244/1000\n", 1083 | "8000/8000 [==============================] - 4s - loss: 0.1263 - val_loss: 0.1368\n", 1084 | "Epoch 245/1000\n", 1085 | "8000/8000 [==============================] - 4s - loss: 0.1250 - val_loss: 0.1353\n", 1086 | "Epoch 246/1000\n", 1087 | "8000/8000 [==============================] - 4s - loss: 0.1233 - val_loss: 0.1341\n", 1088 | "Epoch 247/1000\n", 1089 | "8000/8000 [==============================] - 4s - loss: 0.1219 - val_loss: 0.1316\n", 1090 | "Epoch 248/1000\n", 1091 | "8000/8000 [==============================] - 4s - loss: 0.1207 - val_loss: 0.1325\n", 1092 | "Epoch 249/1000\n", 1093 | "8000/8000 [==============================] - 4s - loss: 0.1202 - val_loss: 0.1306\n", 1094 | "Epoch 250/1000\n", 1095 | "8000/8000 [==============================] - 4s - loss: 0.1198 - val_loss: 0.1302\n", 1096 | "Epoch 251/1000\n", 1097 | "8000/8000 [==============================] - 4s - loss: 0.1176 - val_loss: 0.1284\n", 1098 | "Epoch 252/1000\n", 1099 | "8000/8000 [==============================] - 4s - loss: 0.1172 - val_loss: 0.1275\n", 1100 | "Epoch 253/1000\n" 1101 | ] 1102 | }, 1103 | { 1104 | "name": "stdout", 1105 | "output_type": "stream", 1106 | "text": [ 1107 | "8000/8000 [==============================] - 4s - loss: 0.1153 - val_loss: 0.1256\n", 1108 | "Epoch 254/1000\n", 1109 | "8000/8000 [==============================] - 4s - loss: 0.1148 - val_loss: 0.1256\n", 1110 | "Epoch 255/1000\n", 1111 | "8000/8000 [==============================] - 4s - loss: 0.1142 - val_loss: 0.1252\n", 1112 | "Epoch 256/1000\n", 1113 | "8000/8000 [==============================] - 4s - loss: 0.1122 - val_loss: 0.1234\n", 1114 | "Epoch 257/1000\n", 1115 | "8000/8000 [==============================] - 4s - loss: 0.1115 - val_loss: 0.1221\n", 1116 | "Epoch 258/1000\n", 1117 | "8000/8000 [==============================] - 4s - loss: 0.1106 - val_loss: 0.1217\n", 1118 | "Epoch 259/1000\n", 1119 | "8000/8000 [==============================] - 4s - loss: 0.1091 - val_loss: 0.1201\n", 1120 | "Epoch 260/1000\n", 1121 | "8000/8000 [==============================] - 4s - loss: 0.1077 - val_loss: 0.1195\n", 1122 | "Epoch 261/1000\n", 1123 | "8000/8000 [==============================] - 4s - loss: 0.1080 - val_loss: 0.1197\n", 1124 | "Epoch 262/1000\n", 1125 | "8000/8000 [==============================] - 4s - loss: 0.1063 - val_loss: 0.1160\n", 1126 | "Epoch 263/1000\n", 1127 | "8000/8000 [==============================] - 4s - loss: 0.1047 - val_loss: 0.1196\n", 1128 | "Epoch 264/1000\n", 1129 | "8000/8000 [==============================] - 4s - loss: 0.1038 - val_loss: 0.1135\n", 1130 | "Epoch 265/1000\n", 1131 | "8000/8000 [==============================] - 4s - loss: 0.1022 - val_loss: 0.1130\n", 1132 | "Epoch 266/1000\n", 1133 | "8000/8000 [==============================] - 4s - loss: 0.1017 - val_loss: 0.1113\n", 1134 | "Epoch 267/1000\n", 1135 | "8000/8000 [==============================] - 4s - loss: 0.0997 - val_loss: 0.1097\n", 1136 | "Epoch 268/1000\n", 1137 | "8000/8000 [==============================] - 4s - loss: 0.0997 - val_loss: 
0.1090\n",
    "Epoch 269/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.0972 - val_loss: 0.1088\n",
    "Epoch 270/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.0969 - val_loss: 0.1067\n",
    "[... epochs 271-284 abridged: loss falls steadily from 0.0964 to 0.0821, val_loss from 0.1082 to 0.0914 ...]\n",
    "Epoch 285/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.4959 - val_loss: 0.5446\n",
    "Epoch 286/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.2484 - val_loss: 0.1325\n",
    "Epoch 287/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.1091 - val_loss: 0.1053\n",
    "[... epochs 288-460 abridged: the spike above washes out within a few epochs and both losses resume a steady decline, reaching loss 0.0082 / val_loss 0.0193 by epoch 460 ...]\n",
    "Epoch 461/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.0098 - val_loss: 0.0266\n",
    "[... epochs 462-492 abridged: a milder blip, then loss declines to ~0.0050 and val_loss to ~0.0164 ...]\n",
    "Epoch 493/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.0050 - val_loss: 0.4631\n",
    "Epoch 494/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.2760 - val_loss: 0.1283\n",
    "Epoch 495/1000\n",
    "8000/8000 [==============================] - 4s - loss: 0.0317 - val_loss: 0.0236\n",
    "[... epochs 496-672 abridged: another quick recovery followed by a long slow decline to loss ~3e-04 and val_loss ~0.0106 ...]\n",
    "Epoch 673/1000\n",
"8000/8000 [==============================] - 4s - loss: 2.9215e-04 - val_loss: 0.0104\n", 1978 | "Epoch 674/1000\n", 1979 | "8000/8000 [==============================] - 4s - loss: 2.8666e-04 - val_loss: 0.0103\n", 1980 | "Epoch 675/1000\n", 1981 | "8000/8000 [==============================] - 4s - loss: 2.8311e-04 - val_loss: 0.0105\n", 1982 | "Epoch 676/1000\n", 1983 | "8000/8000 [==============================] - 4s - loss: 2.8475e-04 - val_loss: 0.0104\n", 1984 | "Epoch 677/1000\n", 1985 | "8000/8000 [==============================] - 4s - loss: 2.8411e-04 - val_loss: 0.0103\n", 1986 | "Epoch 678/1000\n", 1987 | "8000/8000 [==============================] - 4s - loss: 2.8107e-04 - val_loss: 0.0101\n", 1988 | "Epoch 679/1000\n", 1989 | "8000/8000 [==============================] - 4s - loss: 2.6768e-04 - val_loss: 0.0102\n", 1990 | "Epoch 680/1000\n", 1991 | "8000/8000 [==============================] - 4s - loss: 2.6502e-04 - val_loss: 0.0103\n", 1992 | "Epoch 681/1000\n", 1993 | "8000/8000 [==============================] - 4s - loss: 2.5615e-04 - val_loss: 0.0105\n", 1994 | "Epoch 682/1000\n", 1995 | "8000/8000 [==============================] - 4s - loss: 2.5339e-04 - val_loss: 0.0104\n", 1996 | "Epoch 683/1000\n", 1997 | "8000/8000 [==============================] - 4s - loss: 2.6768e-04 - val_loss: 0.0105\n", 1998 | "Epoch 684/1000\n", 1999 | "8000/8000 [==============================] - 4s - loss: 0.0042 - val_loss: 0.2628\n", 2000 | "Epoch 685/1000\n", 2001 | "8000/8000 [==============================] - 4s - loss: 0.5051 - val_loss: 0.0838\n", 2002 | "Epoch 686/1000\n", 2003 | "8000/8000 [==============================] - 4s - loss: 0.0264 - val_loss: 0.0205\n", 2004 | "Epoch 687/1000\n", 2005 | "8000/8000 [==============================] - 4s - loss: 0.0065 - val_loss: 0.0151\n", 2006 | "Epoch 688/1000\n", 2007 | "8000/8000 [==============================] - 4s - loss: 0.0039 - val_loss: 0.0112\n", 2008 | "Epoch 689/1000\n", 2009 | "8000/8000 [==============================] - 4s - loss: 0.0031 - val_loss: 0.0110\n" 2010 | ] 2011 | }, 2012 | { 2013 | "data": { 2014 | "text/plain": [ 2015 | "" 2016 | ] 2017 | }, 2018 | "execution_count": 22, 2019 | "metadata": {}, 2020 | "output_type": "execute_result" 2021 | } 2022 | ], 2023 | "source": [ 2024 | "encoder_decoder.fit(\n", 2025 | " x = [source_pp, target_pp],\n", 2026 | " y = np.eye(trg_vocab_size)[trg_end_padding],\n", 2027 | " batch_size = 300,\n", 2028 | " epochs = 1000,\n", 2029 | " validation_split=0.2,\n", 2030 | " callbacks=[EarlyStopping(patience=10)],\n", 2031 | ")" 2032 | ] 2033 | }, 2034 | { 2035 | "cell_type": "markdown", 2036 | "metadata": {}, 2037 | "source": [ 2038 | "## Investigations" 2039 | ] 2040 | }, 2041 | { 2042 | "cell_type": "code", 2043 | "execution_count": 23, 2044 | "metadata": { 2045 | "collapsed": true 2046 | }, 2047 | "outputs": [], 2048 | "source": [ 2049 | "import matplotlib.pyplot as plt\n", 2050 | "import numpy as np\n", 2051 | "\n", 2052 | "% matplotlib inline" 2053 | ] 2054 | }, 2055 | { 2056 | "cell_type": "markdown", 2057 | "metadata": {}, 2058 | "source": [ 2059 | "#### Visualising the alignments" 2060 | ] 2061 | }, 2062 | { 2063 | "cell_type": "code", 2064 | "execution_count": 24, 2065 | "metadata": { 2066 | "collapsed": true 2067 | }, 2068 | "outputs": [], 2069 | "source": [ 2070 | "#test_srcs = source_pp[929:]\n", 2071 | "#test_trgs = target_pp[929:]\n", 2072 | "test_srcs = source_pp[8000:]\n", 2073 | "test_trgs = target_pp[8000:]\n", 2074 | "#test_srcs = source_pp[7666:]\n", 2075 | 
"#test_trgs = target_pp[7666:]\n", 2076 | "\n", 2077 | "\n", 2078 | "#test_srcs = np.array([[0,0,0,0,0,0,0,0,1,2]])\n", 2079 | "#test_trgs = np.array([[0,0,0,0,0,0,0,0,0,2,1]])\n", 2080 | "\n", 2081 | "alphas = alignments_model.predict([\n", 2082 | " test_srcs,\n", 2083 | " test_trgs,\n", 2084 | "])\n" 2085 | ] 2086 | }, 2087 | { 2088 | "cell_type": "code", 2089 | "execution_count": 25, 2090 | "metadata": {}, 2091 | "outputs": [ 2092 | { 2093 | "data": { 2094 | "text/plain": [ 2095 | "" 2096 | ] 2097 | }, 2098 | "metadata": {}, 2099 | "output_type": "display_data" 2100 | }, 2101 | { 2102 | "data": { 2103 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAnIAAAKBCAYAAADTKEYiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAMTQAADE0B0s6tTgAAIABJREFUeJzs3X+MXOWd7/nPt6rd3Xa5q7qBYEx3xiQTcnfQzO6C2BsW\nCUKuiG7fYYkymuEuICQi0rZxwD/SOKAQQwgmZGEMARpsY7cZvNKQkS5t66IlCcZ7x7tGs14JiBlp\nLwjEJEO6ZyJlLt1Vbtv9s777R1dHPcFtzvPYdbpP+f2SSqJcz1Ofc6pPVX36VFOPubsAAACQPbmF\n3gAAAADEocgBAABkFEUOAAAgoyhyAAAAGUWRAwAAyCiKHAAAQEZR5AAAADKKIgcAAJBRFDkAAICM\nosgBAABkVNNCb8CnaWlp8c985jOpZI2Pj6ulpYUccshZxDlpZpFDDjmLPyfNrLRyhoaGJtw9WZC7\nL+pLZ2enp+XnP/85OeSQs8hz0swihxxyFn9Omllp5Uga9IQ9iY9WAQAAMooiBwAAkFEUOQAAgIyi\nyAEAAGQURQ4AACCjKHIAAAAZRZEDAADIKIocAABARlHkAAAAMooiBwAAkFEUOQAAgIyiyAEAAGRU\n00JvQL2MjY1pYmIiaI67q1KpBM2pVqvK5cL6MDnkLPacsbExSVJra+uiy4nNSuuxc/eonGPHjgXP\nyefzwXOOHz8ePMfM6p4zPT0dNH42Z7EeB+Qs7teENF97mpubg3NCNGSRGxsb00UXXRRc5J599lld\ndNFFQXNCX0gl6ZlnnlFnZ2fQnNAXUkl6+umn1dXVlXi8u8vdg3P6+vp08cUXB+XE6Ovr08qVK4Ny\nYrKee+45rVixou45O3bs0Gc+85mgnBg7d+7UBRdcEDU3jZyYYzv0sZNmXkxDxTxXOzo6gvfp4Ycf\n1h//8R8nHm9m+sIXvhCc841vfENf//rXE493d1Wr1aAMSbrjjjt04403Jh4/PT2tN954I/gY3717\nt9rb2xOPj30O9ff3q1QqRc0lp/45aWbF5DQ3N6tcLtetzDXkR6sTExPBJQ5A+mJK3GKXxj6ZWUM9\ndrG/DAFZUO9O0pBFDgAA4FxAkQMAAMgoihwAAEBGUeQAAAAyiiIHAACQURQ5AACAjKLIAQAAZBRF\nDgAAIKPOqMiZWbuZzbu0gZmdfyb3DwAAgPkFFzkzW2pmN5nZfklHJbWa2RYze9fMjtYuq2rDB8zs\nkJmtMbPzzuqWAwAAnOMSrbVaO+t2vaRbJV0r6XVJfZIOSSpJ2ixppbufNLNlkqqS5O7XmdkVkm6W\ndMTM3pP0kqRX3P3EWd4XAACAc4olWd/OzN6RtELSRkn73H1yzm15SUckfSTpgKRX3X3wFPdhkq6R\ntF3SKs0Uv9FTjOuV1Dt7vVAodA4MDATtlLvrn/7pn4LmSFJ7e7tGRkaC5sSsdxiTE2Ox5sSuqbhY\n94ecdHNis9J6ruZy4X+xUiwWValUguY0NSX6PfxfWbZsmU6cCPsdOub5WigUdPz48aA54+PjwTkd\nHR0aHh4OnkdOY+WkmRWb09nZGfQa1N3dPeTuXUnGJi1yV0q6TdKNkt6S9BNJP3X38drteUlXS7pO\n0lpJt7j74dptOUlf1sxZua9KOqyZs3IHPEF4V1eXDw5+oheeVqVS0UUXXRQ0R5KeffZZ3X333UFz\n8vl5/0RwXs8884w2bNgQNCfmTejpp5/Wxo0bE4+PXbi6r69P69evD8qJEZMTk/Xcc8/prrvuqnvO\njh07tG7duqCcGDt37tSdd94ZNbfeObELv4c+dpLU3NwcnBPzXD3vvPC/Inn44Yf14IMPJh6fy+X0\nhS98ITjnG9/4hl588cXE491d1Wo1OOeOO+7QCy+8kHj81NSU3njjjeCc3bt3a/Xq1YnHxz6H+vv7\n1dPTEzWXnPrnpJkVm1Mul1UsFhOPN7PERS7Rr47u/qa7b5J0qaRdmil075vZXjMrSVrh7ofdfauk\nNyRdXtuQrZI+lLRB0kFJl7n77e7+WpISBwAAgPkFnZt396pmCtlBM2uRdIOkNkkvm1lBkkv6QNLe\n2pS3JW1z9/LZ22QAAABIgUVurtrHqvtqV6+aZ8z+2PsHAADA6fGFwAAAABlFkQMAAMgoihwAAEBG\nUeQAAAAyiiIHAACQURQ5AACAjKLIAQAAZBRFDgAAIKMasshVq9XoNfVCNVoOkKY0j+tGeq7GroG6\nWJlZ9Lq7QBbU8/kavbLDYpbL5TQ2NhY8z9118uTJOmzRv1atVjU6OppKzrFjx1LJOX78eN1z3F0n\nTpxIJSfm+InJmZiYSCVncnKyYXJms0Ifu5jHOua5Ojo6GlxKpqam9Otf/zpozq9//evgnFtuuUV/\n+7d/GzQnpmDddtttOnz4cFDGqlWrgnOampp0ySWXJB6fy+X0R3/0R8H7tHTpUt14442Jx+fzeV1x\nxRXK5cLOlZRKJT3yyCOJx4+NjQWNz4LZn03oz8jMEj/e7v67X7hiju+YOaHHQoiGLHIAsJBizsqF\nzonNmJ6eDpoT86YVmhPyJvz7Qublcjk1NzdH5SxZsiTx2Hw+r2XLlgVnmJlaW1uD5zWSMyk8Icdq\n7BngxXj2uCE/WgUAADgXUOQAAAAyiiIHAACQURQ5AACAjKLIAQAAZBRFDgAAIKMocgAAABm1IEXO\nZhw2s88tRD4AAEAjWJAi5zPfZPmEpB8sRD4AAEAjWMiPVl+V9B/MrLSA2wAAAJBZtpCLsZvZf5H0\npLv/H3P+rVdS7+z1QqHQOTAwEHS/7q6hoaHg7eno6NDw8HDwPHLIISe9nDSzyEknx8yUz+eDc4rF\noiqVStCckKW2Zi1dujRoHW4zi1oKLJ/PBy1t5u4ql
{
 "cell_type": "code",
 "execution_count": 25,
 "metadata": {},
 "outputs": [
  {
   "data": {
    "text/plain": [
     ""
    ]
   },
   "metadata": {},
   "output_type": "display_data"
  },
  {
   "data": {
    "image/png": "[base64 PNG of the attention alignment heat map omitted]"
ZBRFDgAAIKMocgAAABlFkQMAAMgoihwAAEBGUeQAAAAyiiIHAACQUQ1Z\n5GLWOoyVxkpjsWtfxmYBAICzp569JHplh8Usl0uvn05OTsrMouYlZWYaGxsLznF3jY2NBc3J5XJR\nOVNTU0Hj0xJ7LITMc3fl8/monJB5sQu/S+H7M/uiE3IsmFkqOXPz6jk+dl7s8c0vUUDjqmcvacgi\nl6bYN4dzPSctab15xxaRxVpGYl900sqJyUrrsQOANDXkR6sAAADnAoocAABARlHkAAAAMooiBwAA\nkFEUOQAAgIyiyAEAAGQURQ4AACCjKHIAAAAZdUZFzszazWzer6Y3s/PP5P4BAAAwv+AiZ2ZLzewm\nM9sv6aikVjPbYmbvmtnR2mVVbfiAmR0yszVmdt5Z3XIAAIBznCVZ36921u16SbdKulbS65L+RtIh\nSSVJv5S00t1PmtkySVV3H6vNvULSzZK+Luk9SS9JesXdT8yT1Supd/Z6oVDoHBgYCNopd9fQ0FDQ\nHEnq6OjQ8PBw0JyYJYba29s1MjISNCdmLc+2tjYdO3YsaE7MckTLly/X6Oho8Lw0cmIWKi4Wi6pU\nKkFzYtbJLJVKKpfLdc2Q4o63tHLYpxkxrz0xyCGnEXPSzIrN6ezsDHp/7e7uHnL3riRjkxa5dySt\nkLRR0j53n5xzW17SEUkfSTog6VV3HzzFfZikayRtl7RKM8XvU9+Vu7q6fHDwE3d3WpVKRaVSKWiO\nJPX396unpydoTqFQCM7p6+vT+vXrE483M33uc58Lztm8ebO2bdsWNCdmf771rW9p+/btice7uyYn\nJz994O/ZtGmTnnrqqaCc0EImSQ888IC2bt0alBNamCXp8ccf17333huUMzExEZzz1FNPadOmTUE5\nMWXkmWee0YYNG+qeI4U/h9xd09PTwTk7duzQunXrgnJiju2Y154Y5JDTiDlpZsXmlMtlFYvFxOPN\nLHGRS3o66ZuaOQP3qKS/NrM/M7MWSXL3aUlXSXpK0oWSjpjZNXM2JmdmX5G0U9KLkt6S9BeSjifM\nBgAAwCkkKnLu/qa7b5J0qaRdkm6U9L6Z7TWzkqQV7n7Y3bdKekPS5ZJkZlslfShpg6SDki5z99vd\n/TWP/VUcAAAAkqSmkMHuXtVMITtYOyN3g6Q2SS+bWUGSS/pA0t7alLclbXP35H8IBAAAgESCitxc\n7j4uaV/t6lXzjNkfe/8AAAA4Pb4QGAAAIKMocgAAABlFkQMAAMgoihwAAEBGUeQAAAAyiiIHAACQ\nURQ5AACAjGrIIhezUHqstBaoaLQcAADOFfXsJdFfCLyY5XLp9dOpqSmZWdAcd9fU1FTi8WamsbGx\nqJyxsbGgOc3NzcE5koL2J62yaGZqago/xEPnuXtUjqTgnJiF3yUpn89H5YQeCyHPvTPZn1BmFv1C\nGjKPX4QAnEo9e0lDFrk0xZQecuJzQua5eyo5MeMXe07si06a+xN6LABAI2rIj1YBAADOBRQ5AACA\njKLIAQAAZBRFDgAAIKMocgAAABlFkQMAAMgoihwAAEBGUeQAAAAy6oyKnJm1m9m8XxlvZuefyf0D\nAABgfsFFzsyWmtlNZrZf0lFJrWa2xczeNbOjtcuq2vABMztkZmvM7LyzuuUAAADnOEuydE3trNv1\nkm6VdK2k1yX9jaRDkkqSfilppbufNLNlkqruPlabe4WkmyV9XdJ7kl6S9Iq7n5gnq1dS7+z1QqHQ\nOTAwELRT7q6hoaGgOZLU0dGh4eHhoDkxSwy1t7drZGQkaE7IWpmzisWiKpVK0JyYpZmWL1+u0dHR\n4Hlp5MQszdTW1qZjx44FzYlZx7NUKqlcLgfNidmfmOMtRlo5MVmxS3TF7FNMVsxrTwxyyGnEnDSz\nYnM6OzuD+kJ3d/eQu3clGZu0yL0jaYWkjZL2ufvknNvyko5I+kjSAUmvuvvgKe7DJF0jabukVZop\nfp/6rtzV1eWDg5+4u9OqVCoqlUpBcySpv79fPT09QXNaWlqCc7Zv365vfetbicebmT772c8G53zv\ne9/TD3/4w6A5MY/bhg0b9MwzzyQeH/umunHjRj399NNBOSdOnPL3hdP67ne/qx/96EdBOaHFT5J+\n+MMf6nvf+15Qzvj4eHDOtm3btHnz5qCcmGL64x//WN/+9reDcqanp4NzJKmvr0/r168PypqYmAjO\nef7557V27dqgnJh9inntiUEOOY2Yk2ZWbE65XFaxWEw83swSF7mkp1++qZkzcI9K+msz+zMza5Ek\nd5+WdJWkpyRdKOmImV0zZ2NyZvYVSTslvSjpLUl/Iel4wmwAAACcQqIi5+5vuvsmSZdK2iXpRknv\nm9leMytJWuHuh919q6Q3JF0uSWa2VdKHkjZIOijpMne/3d1f89jTMgAAAJAkNYUMdveqZgrZwdoZ\nuRsktUl62cwKklzSB5L21qa8LWmbu4f9IRAAAAA+VVCRm8vdxyXtq129ap4x+2PvHwAAAKfHFwID\nAABkFEUOAAAgoyhyAAAAGUWRAwAAyCiKHAAAQEZR5AAAADKKIgcAAJBRDVnkxsbGFnoTAAAAJNW3\nlzRkkUtTGiuNuXsqObNZAAAgG6JXdljMWltbU8uanp6WmUXNS8rMdOLEieAcd9eJEyeC5jQ1NaWW\nE8rdNTk5GTQ+5mcjKXjekiVLojJC5sUWejNTc3NzUM7s8RnyOMTkzP48Y35OoY9dyLEzV+wxBACz\n6tlLGrLIpSmtF/lGy0lLWkWu0XJyubiT9WnlxGSl9dgBQJr4aBUAACCjKHIAAAAZRZEDAADIKIoc\nAABARlHkAAAAMooiBwAAkFEUOQAAgIw6oyJnZu1mlj/N7eefyf0DAABgfsFFzsyWmtlNZrZf0lFJ\nrWa2xczeNbOjtcuq2vABMztkZmvM7LyzuuUAAADnOEuy5E/trNv1km6VdK2k1yX9jaRDkkqSfilp\npbufNLNlkqruPlabe4WkmyV9XdJ7kl6S9Iq7n3JNJzPrldQ7e71QKHQODAwE7ZS7a2hoKGiOJHV0\ndGh4eDhoTsy3vre3t2tkZCRoTsw34JdKJZXL5brntLW16dixY8HzGiknZumsYrGoSqVS95yY4yBG\nTE7s2r4xz6GYrLRyYl57YpBDTiPmpJkVm9PZ2RnUF7q7u4fcvSvJ2KRF7h1JKyRtlLTP3Sfn3JaX\ndETSR5IOSHrV3QdPcR8m6RpJ2yWt0kzxG/207K6uLh8c/MTdnValUlGpVAqaI0n9/f3q6ekJmhOz\nZujzzz+vtWvXJh5vZrrwwguDcx555BFt2bIlaE7M43bffffpscceC5oT87jdc889euKJJxKPn7tm\naIjQ/XF3nTx5MjjnoYce0kMPPRSUMzExEZzz6KOP6v777w/KqVarwTmPPfaY7rvvvqCcqamp4BxJ\neuKJJ3TPPfcEZYWuByxJO3bs0Lp164JyYtZ0jXntiUEOOY2Yk2ZWbE65XFaxWEw83
swSF7mkp1++\nqZkzcI9K+msz+zMza5Ekd5+WdJWkpyRdKOmImV0zZ2NyZvYVSTslvSjpLUl/Iel4wmwAAACcQqIi\n5+5vuvsmSZdK2iXpRknvm9leMytJWuHuh919q6Q3JF0uSWa2VdKHkjZIOijpMne/3d1f89jPVAAA\nACBJCvp8y92rmilkB2tn5G6Q1CbpZTMrSHJJH0jaW5vytqRt7l7/P9ABAAA4x4T/oVKNu49L2le7\netU8Y/bH3j8AAABOjy8EBgAAyCiKHAAAQEZR5AAAADKKIgcAAJBRFDkAAICMosgBAABkFEUOAAAg\noxqyyI2NjS30JgAAAEiqby9pyCKXpjRWGnP3VHJmsxopBwCARha9ssNi1tramlpWtVqVmUXNC3Hi\nxIngHHfXiRMngubkcrmonNHR0aA5MT8jdw/+raapKZ1DvLm5ue7z3D3q52NmWrp0aVDO1NTU7+aG\n5IT8XGN+nnOzQn62/OIAYCHVs5c0ZJFLU0yJI6fxxDxuZhY8L5eLO4m+WHPO5HjjWAUAPloFAADI\nLIocAABARlHkAAAAMooiBwAAkFEUOQAAgIyiyAEAAGQURQ4AACCjzqjImVm7meVPc/v5Z3L/AAAA\nmF9wkTOzpWZ2k5ntl3RUUquZbTGzd83saO2yqjZ8wMwOmdkaMzvvrG45AADAOS7Ryg61s27XS7pV\n0rWSXpfUJ+mQpJKkzZJWuvtJM1smqSpJ7n6dmV0h6WZJR8zsPUkvSXrF3cPWjgIAAMC/YknWIDSz\ndyStkLRR0j53n5xzW17SEUkfSTog6VV3HzzFfZikayRtl7RKM8XvEwt0mlmvpN7Z64VCoXNgYCBo\np9xdQ0NDQXMkqaOjQ8PDw0FzYpYJam9v18jIyKLMiVmaqVgsqlKpBM9LIyfmcWtra9OxY8eC59U7\nJ3a90MX884ndp1KppHK5HDQndH1jKe45FLNPMa89McghpxFz0syKzens7Ax6P+ru7h5y964kY5MW\nuSsl3SbpRklvSfqJpJ+6+3jt9rykqyVdJ2mtpFvc/XDttpykL2vmrNxXJR3WzFm5A54gvKurywcH\nP9ELT6tSqahUKgXNkaT+/n719PQEzYkpPrt379bq1auD5sTszxNPPKF77rknaE5bW1twzsMPP6wH\nH3wwaE7MAsL333+/Hn300aA5IQurz7rvvvv02GOPJR7v7lElIXR/3F3T09PBOQ888IC2bt1a95wf\n/OAH+v73vx+UMzY2FpwjSY899pjuu+++oKzQ4idJO3bs0Lp164JyJicnP33g74l57YlBDjmNmJNm\nVmxOuVxWsVhMPN7MEhe5RC3E3d90902SLpW0SzOF7n0z22tmJUkr3P2wu2+V9Iaky2sbslXSh5I2\nSDoo6TJ3v93dX0tS4gAAADC/oNMV7l7VTCE7aGYtkm6Q1CbpZTMrSHJJH0jaW5vytqRt7h7+qzAA\nAABOK/xzp5rax6r7alevmmfM/tj7BwAAwOnxhcAAAAAZRZEDAADIKIocAABARlHkAAAAMooiBwAA\nkFEUOQAAgIyiyAEAAGQURQ4AACCjGrLIxax7GSutlcbIWdw5AADMp569JHplh8UsZiH7WDFFwd2D\n542Pj8vMgnPGx8eD5uTz+aickydPBs2JFbrIemtra/D+SApeNL6pqSkqZ8mSJYnHuntUjpmptbU1\nKGdqaup3c+uZE/o4zxXy2FWr1ehSHzKPXxwAnEo9e0lDFrk0xbx5k5OetPYn9kkaU8rSyFns+xM6\nt9GOawCY1ZAfrQIAAJwLKHIAAAAZRZEDAADIKIocAABARlHkAAAAMooiBwAAkFEUOQAAgIw6oyJn\nZu1mlj/N7eefyf0DAABgfsFFzsyWmtlNZrZf0lFJrWa2xczeNbOjtcuq2vABMztkZmvM7LyzuuUA\nAADnuEQrO9TOul0v6VZJ10p6XVKfpEOSSpI2S1rp7ifNbJmkqiS5+3VmdoWkmyUdMbP3JL0k6RV3\nP3GW9wUAAOCcYknWBjSzdyStkLRR0j53n5xzW17SEUkfSTog6VV3HzzFfZikayRtl7RKM8Vv9BTj\neiX1zl4vFAqdAwMDQTvl7hoaGgqaI0kdHR0aHh4OnpdGTswSQ+3t7RoZGQmaE7M0U6lUUrlcDpoT\nsz/FYlGVSqXuOW1tbTp27FjwPHLCc2IXko455mKyYp5DMeutLubXHnLIWew5aWbF5nR2dga9H3V3\ndw+5e1eSsUmL3JWSbpN0o6S3JP1E0k/dfbx2e17S1ZKuk7RW0i3ufrh2W07SlzVzVu6rkg5r5qzc\nAU8Q3tXV5YODn+iFp1WpVFQqlYLmSFJ/f796enqC5sQUhd27d2v16tVBc5YuXRqc09fXp/Xr1wfN\nKRQKwTmPP/647r333qA5Mfvz8MMP68EHH6x7zv33369HH300aE5zc3NwzubNm7Vt27bE42MXZP/O\nd76jv/zLvwzKiVnM/rvf/a5+9KMfBeUcP348OEeStm7dqgceeCDx+Gq1qt/+9rfBOTt37tSdd96Z\neLy7a2pqKjgn5rUnBjnkNGJOmlmxOeVyWcViMfF4M0tc5BKdfnH3N919k6RLJe3STKF738z2mllJ\n0gp3P+zuWyW9Ieny2oZslfShpA2SDkq6zN1vd/fXkpQ4AAAAzC/R38jNcveqZgrZQTNrkXSDpDZJ\nL5tZQZJL+kDS3tqUtyVtc/ewz0AAAADwqYKK3Fy1j1X31a5eNc+Y/bH3DwAAgNPjC4EBAAAyiiIH\nAACQURQ5AACAjKLIAQAAZBRFDgAAIKMocgAAABlFkQMAAMgoihwAAEBGNWSRi12IO0ZaK42RQw7i\nxayJDABnSz17SfTKDotZLpduP415Ew+dMzU1FfVmFLqA99jYWHCOu2tsbCxoTgx318mTJ4Pm5HK5\nqP2ZmJgImtPc3Bw0PoaZacmSJcH7Y2ZqaWkJmjN7fIZkmZkKhUJQxuTkZNB2zc0KeczT/OUOAH5f\nPXtJQxa5RpTWGYVGO3Ox2B+30HmxLwYx5W8x54TObbTjGgBmNeRHqwAAAOcCihwAAEBGUeQAAAAy\niiIHAACQURQ5AACAjKLIAQAAZBRFDgAAIKPOqMiZWbuZ5U9z+/lncv8AAACYX3CRM7OlZnaTme2X\ndFRSq5ltMbN3zexo7bKqNnzAzA6Z2RozO++sbjkAAMA5LtHKDrWzbtdLulXStZJel9Qn6ZCkkqTN\nkla6+0kzWyapKknufp2ZXSHpZklHzOw9SS9JesXdT5zlfQEAADinWJI1P83sHUkrJG2UtM/dJ+fc\nlpd0RNJHkg5IetXdB09xHybpGknbJa3STPEbPcW4Xkm9s9cLhULnwMBA0E65u4aGhoLmSFJHR4eG\nh4eD56WRE7PEUHt7u0ZGRhomp1QqqVwu1z2nWCyqUqnUPaetrU3Hjh2re87y5cs1OvqJp9pZF5MT\nuwZqzM9oeno6OCfm2I5Ze3kxv/aQQ85i
z0kzKzans7Mz6PW7u7t7yN27koxNWuSulHSbpBslvSXp\nJ5J+6u7jtdvzkq6WdJ2ktZJucffDtdtykr6smbNyX5V0WDNn5Q54gvCuri4fHPxELzytSqWiUqkU\nNEeS+vv71dPTEzwvjZyYRdl37NihdevWBc0JXVxdkp5++mlt3LgxaE5ra2twzuOPP6577703aM7y\n5cuDc77//e/rBz/4QdCctra24Jx77rlHTzzxRNCcmONgw4YNeuaZZ4Ln1TvH3YNL0qwHHnhAW7du\nTTy+Wq0q9HVEknbu3Kk777wz8Xh319TUVHDOYn7tIYecxZ6TZlZsTrlcVrFYTDzezBIXuUR/I+fu\nb7r7JkmXStqlmUL3vpntNbOSpBXuftjdt0p6Q9LltQ3ZKulDSRskHZR0mbvf7u6vJSlxAAAAmF+i\nv5Gb5e5VzRSyg2bWIukGSW2SXjazgiSX9IGkvbUpb0va5u5hn4cBAADgUwUVublqH6vuq129ap4x\n+2PvHwAAAKfHFwIDAABkFEUOAAAgoyhyAAAAGUWRAwAAyCiKHAAAQEZR5AAAADKKIgcAAJBRFDkA\nAICMasgiF7sQ92KW1opm5JDTiEIWqwaAs62evSR6ZYfFLJdrvH5arVaj3oxCD57JycngHHfX5ORk\n0Bwp/M3V3TU+Ph40J5/PR+WMjY0FzWltbY36+YQusB6bk8/ng8a3tLRICvsZmZkKhULi8dVqNfjn\nOTdr6dKlicdPT09Hl+CQeRRtAKdSz17SkEWuEaV1RoGcxZ0T+2IQun1p5JjZGT1uoVkA0Iga79QV\nAADAOYIiBwAAkFEUOQAAgIyiyAEAAGQURQ4AACCjKHIAAAAZRZEDAADIqDMqcmbWbmbzftOomZ1/\nJvcPAACA+QUXOTNbamY3mdl+SUcltZrZFjN718yO1i6rasMHzOyQma0xs/PO6pYDAACc4xKt7FA7\n63a9pFslXSvpdUl9kg5JKknarP+/vfuPsau88zv+/s7gn/Mbylp0pvX2D9QKtVVAtEKRIOyKKFYp\nu5uqtCSKhCIcG8XBGOMokeMQBRMr2ZjgQIyC4zTrSiWrirElJFBxrC2VDbJaYM22KQRESV1PGynb\neGZsg42Z+faPe71yjcfc53ju9Zzx+yVdict9nvM5Z+6vz5w7vg9ck5nvR8RSYBogM2+NiBuAu4CD\nEfEm8DTwbGa+N8vHIkmSdFmJVtYGjIjXgWXA/cDuzDx91m3dwEHgMLAXeC4zj5xnGwHcDDwJLKdR\n/I6fZ9x6YP2Z6z09PcOjo6NFB5WZjI2NFc0BGBoa4ujRo8XzOpFTZYmhwcFBxsfHi+d1IqdTx1Ml\nZ2BggImJiaI5VZa06uvr49ixY0VzqhxPb28vx49/5Kk26zk9PT2cOHGiaE7VhaSr/OxK17WFao+5\nKuutzuXXHnPMmes5ncyqmjM8PFz0urpixYqxzBxpZWyrRe5G4AvAHcCrwM+B5zPzVPP2buCTwK3A\nauBzmbm/eVsX8CkaZ+U+DeyncVZub7YQPjIykkeOfKQXXtDk5CQDAwNFcwB27tzJypUri+d1IueK\nK8qXxX3qqadYvXp123O2b9/OmjVriuYsXLiwOGfbtm2sW7euaE7JIu5nbNmyhY0bNxbNGRoaKs75\n2te+xve+972iOf39/cU5X/7yl3nyySeL5ixevLg455577uGnP/1py+Onp6crv/Bu2LCBrVu3tjx+\namqKt956qzin9DmUmUxNTRXnzOXXHnPMmes5ncyqmjMxMVH0+h0RLRe5lk4jZOYrmbkOuBbYQaPQ\nvXiqZswAABROSURBVBURuyJiAFiWmfszczNwALi+uSObgXeAtcA+4LrMvDszX2ilxEmSJGlmRadf\nMnOaRiHbFxGLgNuBPuCZiOgBEngb2NWc8hqwNTPLPqeSJEnSxyr/HK2p+bHq7ubVm2YYs6fq9iVJ\nknRhfiGwJElSTVnkJEmSasoiJ0mSVFMWOUmSpJqyyEmSJNWURU6SJKmmLHKSJEk1ZZGTJEmqqXlZ\n5KouxD2XdWpFM3PM6VROyQLSs5HVyTxJOls7e0nllR3msq6uedlP55Xp6elKb6ylT4ZO5UxNTVXK\nKV1gvWrBKp135jlUckwRUfTcy0wWLFhQtF9nZ5XMrbKQfRWWRUnn085eMi+L3HzUqTcIc8yB6i86\npft3McdTWjKr5pXM6dQZU0k6w1NXkiRJNWWRkyRJqimLnCRJUk1Z5CRJkmrKIidJklRTFjlJkqSa\nsshJkiTV1EUVuYgYjIjuC9x+1cVsX5IkSTMrLnIRsSQi7oyIPcAhYHFEbIqINyLiUPOyvDl8NCJe\njIhVEXHlrO65JEnSZa6llR2aZ91uAz4P3AL8AngCeBEYADYA12Tm+xGxFJgGyMxbI+IG4C7gYES8\nCTwNPJuZ783ysUiSJF1WopUlZSLidWAZcD+wOzNPn3VbN3AQOAzsBZ7LzCPn2UYANwNPAstpFL/j\n5xm3Hlh/5npPT8/w6Oho0UFlJmNjY0VzAIaGhjh69GjxvE7kVFlaaHBwkPHx8eJ5ncjp1PFUyRkY\nGGBiYqJoTpUlrfr6+jh27FjRnCrH09vby/HjH3mqXVCV41m6dCnvvVf2+1nVhaSrHNPp06c/ftA5\nqjzmqizTNZdfe8wxZ67ndDKras7w8HDR6/eKFSvGMnOklbGtFrkbgS8AdwCvAj8Hns/MU83bu4FP\nArcCq4HPZeb+5m1dwKdonJX7NLCfxlm5vdlC+MjISB458pFeeEGTk5MMDAwUzQHYuXMnK1euLJ7X\niZwrrihfFvepp55i9erVbc/Zvn07a9asaXvO448/ztq1a4vm9PT0FOd897vf5etf/3rRnMHBweKc\njRs3smXLlrbnrFmzhu3btxfN6e3tLc754he/yM9+9rOWx2dmcWE+4/777+eHP/xhy+Onpqb45S9/\nWZzz4x//mHvvvbfl8ZnJhx9+WJwzl197zDFnrud0MqtqzsTEBP39/S2Pj4iWi1xLv3Zn5iuZuQ64\nFthBo9C9FRG7ImIAWJaZ+zNzM3AAuL65I5uBd4C1wD7gusy8OzNfaKXESZIkaWZFp0Uyc5pGIdsX\nEYuA24E+4JmI6AESeBvY1ZzyGrA1M6v92i1JkqQZlX++1dT8WHV38+pNM4zZU3X7kiRJujC/EFiS\nJKmmLHKSJEk1ZZGTJEmqKYucJElSTVnkJEmSasoiJ0mSVFMWOUmSpJqyyEmSJNXUvCxyVRfinss6\ntaKZOeZ0MqdTIqJowWpJmk3t7CWVV3aYy7q6OtdPq7xBRETxPi5YsKDSG9GCBQuKxnd3d1c6nk7k\nQGeO58y8EtPT08U5mVn85D5x4kSlnBMnThTNqfKiMz09zeTkZMvjqxz/uXmtmpqaoru7u1JBLXks\nZCYffvhhcYak+a2dvWReFrlOqnrnlL4ZmzO3czp1tsezShenys/Pn7mkuWxefrQqSZJ0ObDISZIk\
n1ZRFTpIkqaYscpIkSTVlkZMkSaopi5wkSVJNWeQkSZJq6qKKXEQMRsSM35YZEVddzPYlSZI0s+Ii\nFxFLIuLOiNgDHAIWR8SmiHgjIg41L8ubw0cj4sWIWBURV87qnkuSJF3mWlrZoXnW7Tbg88AtwC+A\nJ4AXgQFgA3BNZr4fEUuBaYDMvDUibgDuAg5GxJvA08CzmfneLB+LJEnSZSVaWXswIl4HlgH3A7sz\n8/RZt3UDB4HDwF7gucw8cp5tBHAz8CSwnEbxO36eceuB9Weu9/T0DI+OjhYdVGYyNjZWNAdgaGiI\no0ePFs2psnzP4OAg4+PjxfM6kePxwMDAABMTE23P6e/vL1qbtGpOX18fx44da3tOb28vx49/5Cnd\nFqVZVddArfKYq7Kea5XXnirMMWc+5nQyq2rO8PBw0evqihUrxjJzpJWxrRa5G4EvAHcArwI/B57P\nzFPN27uBTwK3AquBz2Xm/uZtXcCnaJyV+zSwn8ZZub3ZQvjIyEgeOfKRXnhBk5OTDAwMFM0B2Llz\nJytXriyaU7q4OsCOHTtYtWpV0ZxFixYV5zzxxBPcd999RXOuuKJ8+d1t27axbt26tuds3bqVDRs2\nFM1ZsmRJcc4jjzzCpk2biuYsXbq0OOcb3/gG3/nOd4rmVHkcfPWrX+X73/9+0ZwqP7f77ruPJ554\nouXxmcnU1FRxDsADDzzAY4891vL4qakp3n777eKcH/3oR3zlK19peXxmcvLkyeKcKq89VZhjznzM\n6WRW1ZyJiQn6+/tbHh8RLRe5lv5GLjNfycx1wLXADhqF7q2I2BURA8CyzNyfmZuBA8D1zR3ZDLwD\nrAX2Addl5t2Z+UIrJU6SJEkzKzotkpnTNArZvohYBNwO9AHPREQPkMDbwK7mlNeArZlZ9jmVJEmS\nPlb551tNzY9Vdzev3jTDmD1Vty9JkqQL8wuBJUmSasoiJ0mSVFMWOUmSpJqyyEmSJNWURU6SJKmm\nLHKSJEk1ZZGTJEmqKYucJElSTc3LIjc9Pd2xrE6tNGaOOfMxp1NKFquWpNnWzl5SeWWHuayrq3P9\ndOnSpcVvEhFRvMh6X19fcU5XVxdDQ0NFc6osZl8lJyIqHc/AwEDRnMysdP8sXLiwKOPUqVOVysKp\nU6eKct57773ijOnpaY4ePVo0p8r9MzU1xW9+85uWx19MWZyamuK3v/1ty+PPvIhWySyZM98KsKTZ\n0c5eMi+LXCdVvXOqlJi5nFM6r+oZkiqlbC7nzGVzvfSUZlmyJM1H8/KjVUmSpMuBRU6SJKmmLHKS\nJEk1ZZGTJEmqKYucJElSTVnkJEmSasoiJ0mSVFMXVeQiYjAiui9w+1UXs31JkiTNrLjIRcSSiLgz\nIvYAh4DFEbEpIt6IiEPNy/Lm8NGIeDEiVkXElbO655IkSZe5llZ2aJ51uw34PHAL8AvgCeBFYADY\nAFyTme9HxFJgGiAzb42IG4C7gIMR8SbwNPBsZpavNSRJkqS/Ea0sWxMRrwPLgPuB3Zl5+qzbuoGD\nwGFgL/BcZh45zzYCuBl4ElhOo/gdP8+49cD6M9d7enqGR0dHiw4qMxkbGyuaAzA0NFS8JmWVJa0G\nBwcZHx8vmlNlCaiBgQEmJibantPf38/k5GTxPHPKcqouMVXlcTCXc6pmVVm0uspztcr9VOW1pwpz\nzJmPOZ3MqpozPDxc9P66YsWKscwcaWVsq0XuRuALwB3Aq8DPgecz81Tz9m7gk8CtwGrgc5m5v3lb\nF/ApGmflPg3sp3FWbm+2ED4yMpJHjnykF17Q5ORk8eLqADt37mTlypVFc/r7+4tztm3bxrp169qe\ns3nzZr75zW8WzVmwYEFxzre+9S2+/e1vF82pUhgfeughHn744eJ57c7JTKampopzHn74YR566KGi\nnCpl5JFHHmHTpk1Fc6rcP6WPt4tZ+7T0mKanpyu9+G7fvp01a9a0PD4zOXXqVHFOldeeKswxZz7m\ndDKras7ExETR+3hEtFzkWjqdlJmvZOY64FpgB41C91ZE7IqIAWBZZu7PzM3AAeD65o5sBt4B1gL7\ngOsy8+7MfKGVEidJkqSZtfQ3cmdk5jSNQrYvIhYBtwN9wDMR0QMk8DawqznlNWBrZnbm8xZJkqTL\nSFGRO1vzY9Xdzas3zTBmT9XtS5Ik6cL8QmBJkqSasshJkiTVlEVOkiSppixykiRJNWWRkyRJqimL\nnCRJUk1Z5CRJkmrKIidJklRT87LIVVmPsqpOrTRmjjppvt0/VdaOlaTZ0s5eUnllh7msq6tz/bSr\nq6vSm0TJPkYES5cuLc45M6/E4sWLK+WULAZ8ZvH3Kjk9PT1FOSdPnqx0/3R3dxfnlMpMTp8+XTT+\ngw8+qJTz/vvvF42v8qKTmRw7dqztOWfmTky0vvJf6c/6bKX3kSSdq529ZF4WOaj+G3jpvKp3TklO\nRFTKiYiOHE+ncs5kzcUcSZIuhXn50aokSdLlwCInSZJUUxY5SZKkmrLISZIk1ZRFTpIkqaYscpIk\nSTVlkZMkSaopi5wkSVJNXVSRi4jBiJjxK/Aj4qqL2b4kSZJmVlzkImJJRNwZEXuAQ8DiiNgUEW9E\nxKHmZXlz+GhEvBgRqyLiylndc0mSpMtctLI2YPOs223A54FbgF8Afw68CAwA7wLXZOb7EbEUmM7M\nk825NwB3AX8CvAk8DTybme/NkLUeWH/mek9Pz/Do6GjRQWUmY2NjRXMAhoaGOHr0aNGcKktADQ4O\nMj4+XjSnZO3PM/r6+orWvoRqS1P19vZy/PjxlsdXXY+yyvFUyerv72dycrLtOQMDA0XrhVbNqfJ4\nq6JTOVWzOvWzq5JT5bWnCnPMmY85ncyqmjM8PFz0/rpixYqxzBxpZWyrRe51YBlwP7A7M0+fdVs3\ncBA4DOwFnsvMI+fZRgA3A08Cy2kUv4999x8ZGckjRz6yuQuanJxkcHCwaA7AT37yE770pS8VzamS\n8+ijj/Lggw+2PD4iuPrqq4tzNm7cyJYtW4rmLFmypDhn/fr1/OAHP2h5fGZWerN78MEHefTRR4ty\nTp06VZxT+nPLzKIie8aWLVvYuHFjUc4HH3xQnLN161Y2bNhQlFNlMfvHHnuMBx54oO05AI8//jhr\n164tyjp58mRxzo4dO1i1alVRTpVj2rlzJytXriyeZ4455nQ2q2rOxMQE/f39LY+PiJaLXKunk+6h\ncQZuC/DvIuKzEbEIIDOngJuAbcDvAQcj4uazdqYrIv4A+DHwZ8CrwL8ETrSYLUmSpPNoqchl5iuZ\nuQ64FtgB3AG8FRG7ImIAWJaZ+zNzM3AAuB4gIjYD7wBrgX3AdZl5d2a+kFU/X5MkSRIAV5QMzsxp\nGoVsX/OM3O1AH/BMRPQACbwN7GpOeQ3YmpllfwgkSZKkj1VU5M6WmaeA3c2rN80wZk/V7UuSJOnC\n/EJgSZKkmrLISZIk1ZRFTpIkqaYscpIkSTVlkZMkSaop
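2139 |   {
2140 |    "cell_type": "markdown",
2141 |    "metadata": {},
2142 |    "source": [
2143 |     "Optional: a minimal `imshow`-based sketch of the same alignment plot. It assumes `alphas[i]` has shape `(T_y, T_x)` -- one row of source weights per generated target token, which is what the `flatten()` in the cell above implies -- and it reuses the vocab and test variables from that cell."
2144 |    ]
2145 |   },
2146 |   {
2147 |    "cell_type": "code",
2148 |    "execution_count": null,
2149 |    "metadata": {
2150 |     "collapsed": true
2151 |    },
2152 |    "outputs": [],
2153 |    "source": [
2154 |     "# Same data as the scatter plot: row t of alphas[i] holds the source\n",
2155 |     "# attention weights used while emitting target token t.\n",
2156 |     "fig, ax = plt.subplots(figsize=(10, 10), dpi=80)\n",
2157 |     "ax.imshow(alphas[i], cmap='gray', interpolation='nearest')\n",
2158 |     "ax.set_xticks(np.arange(T_x))\n",
2159 |     "ax.set_yticks(np.arange(T_y))\n",
2160 |     "ax.set_xticklabels(np.array(src_vocab)[test_srcs[i]])\n",
2161 |     "ax.set_yticklabels(np.array(trg_vocab)[np.concatenate([test_trgs[i][1:], np.array([0])])])\n",
2162 |     "plt.show()\n"
2163 |    ]
2164 |   }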
i5wkSVJNWeQkSZJqal4WuYULF7Jw4cKO\nZHVigYqqa5NWzZIkSbOj3Z2k8hcCz2WLFy9mfHy8eIHxl19+mfHx8aI509PTdHWV9eGXX36ZX//6\n10VzMpPu7u6iOQcOHOCVV14pmlPleA4cOMD+/fs7kvPSSy91JOfVV18tzomIojkvvfQSv/rVr4rm\nTE1NVXq8vfvuu0VzTp48SUSwaNGiopzDhw+3PedM1tjYWNGcqs/V3/3udx3JmZgoWwDHHHPmY87J\nkyeBxvt4O7M6lQONIleaU2JeFjlo3DmlP7iIoL+/v017dGlyent7zZnDOX19fR3JKX28VXl8diqn\napY55pgz93M69Zow1197SszLj1YlSZIuBxY5SZKkmrLISZIk1ZRFTpIkqaYscpIkSTVlkZMkSaop\ni5wkSVJNWeQkSZJqyiInSZJUUxY5SZKkmrLISZIk1ZRFTpIkqaYiMy/1PlxQRJwCftuhuF7guDnm\nmDOnczqZZY455sz9nE5mdSrn6sxc1MrAOV/kOikijmTmiDnmmDN3czqZZY455sz9nE5mdfKYWuVH\nq5IkSTVlkZMkSaopi9z/7wfmmGPOnM/pZJY55pgz93M6mdXJY2qJfyMnSZJUU56RkyRJqimLnCRJ\nUk1Z5CRJukxExN6I+KuIOBQR+yPi+ku9T7MpIv46In7/Uu9HJ122RS4iBiOi+wK3X2WOOeZc2pxO\nZpljTidzZtj2MxFxU7u23/SvMvMfZ+YnaPzh/p+1OW9eiIb9EfH3LvW+nOuyKnIRsSQi7oyIPcAh\nYHFEbIqIN5q/nRyKiOXN4aMR8WJErIqIK80xx5zO5MzHYzLHnBb24Z8CQ5l5cLa2eT6ZOX7W1QGg\nLf/iMSL+SUT8RUS8EhF/GRF3tinnj5r308GI+F47MgCy8S9DHwW+3a6MyjJzXl+AbuAzwC7gXWAH\n8Ic0SuwQMA4saY5dCiw+a+4NwJ8CbwHPAncBS80xx5zZzZmPx2SOOSUX4KfAPRezjYKsfwv8r+bl\nH7Vh+4PAXwLXNK//LeAwMDzLOb8H/F/guub1VTSK6e+36ee2gMaSoQOduJ9a3q9LvQNtP0B4HfgN\n8K+BBefc1g38F2AUWA2MzLCNAG4B/htwDOg1xxxzZi9nPh6TOeaUXIB3gH9YdX7FzLuB59uw3X8G\nTNA4u3nmchj4w1nO+SPgP55zv52iTUWumfEXwD/v5P30sft0qXeg7QcINwLbmk+Sfw98Flh0zh1/\nM/BN4Ahw81m3dQF/ADwF/A8av7l9hub375ljjjmzkzMfj8kcc0ouNArI1VXnX0Tu+8BVs7zN24GX\nO7Dv5xa5LuAD2lvkngbu7fT9dMF9utQ70LEDbdzBtwH/BvifzSfeAPC3zxrz58Da5n9vpnG6fQ9w\nJ2edVjfHHHPakzMfj8kcc1rcj6PA35mNbV0gY/Cc4/oTGiW1cgGdIWcI+D/AbWf9v08AC2c552oa\nH63+g+b1lbTxo9Vmxm7g7nbeT8X7dKl34JIcNCwC/gUwAhwE/ivwVzROow80x3yWi/wc3BxzzPG5\nao45LWb/J84qPu24AMuB/9w8rteBfcAn2pR1A42PIV8H/jvwH5il0ntOzh8DbzTvr4eAv6a9Re6N\ndv3Mql5cokuSpEssIu4D/m5mfvVS74vOLxrfT7cX+Ps5h8rTZfX1I5IkzVE/Az4TET2Xekc0o3uB\nP51LJQ4scpIkXXKZeRx4AJhzXzirv/G/afwt5ZziR6uSJEk15Rk5SZKkmrLISZIk1ZRFTpIkqaYs\ncpIkSTVlkZMkSaopi5wkSVJN/T+GV56MzUXy7AAAAABJRU5ErkJggg==\n", 2104 | "text/plain": [ 2105 | "" 2106 | ] 2107 | }, 2108 | "metadata": {}, 2109 | "output_type": "display_data" 2110 | } 2111 | ], 2112 | "source": [ 2113 | "i = 203\n", 2114 | "plt.gray()\n", 2115 | "fig = plt.figure(figsize=(10, 10), dpi= 80)\n", 2116 | "ax = plt.subplot(111, aspect='equal')\n", 2117 | "ax.scatter(\n", 2118 | " np.tile(np.arange(T_x), T_y),\n", 2119 | " np.tile(np.arange(T_y), (T_x,1)).T.reshape(T_y*T_x),\n", 2120 | " s=560,\n", 2121 | " marker='s',\n", 2122 | " c=(alphas[i].flatten())\n", 2123 | ")\n", 2124 | "ax.set_xticks(np.arange(T_x))\n", 2125 | "ax.set_yticks(np.arange(T_y))\n", 2126 | "ax.set_xticklabels(np.array(src_vocab)[test_srcs[i]])\n", 2127 | "ax.set_yticklabels(np.array(trg_vocab)[np.concatenate([test_trgs[i][1:], np.array([0])])])\n", 2128 | "#ax.gray()\n", 2129 | "\n", 2130 | "plt.grid()\n", 2131 | "plt.show()\n" 2132 | ] 2133 | } 2134 | ], 2135 | "metadata": { 2136 | "kernelspec": { 2137 | "display_name": "Python 3", 2138 | "language": "python", 2139 | "name": "python3" 2140 | }, 2141 | "language_info": { 2142 | "codemirror_mode": { 2143 | "name": "ipython", 2144 | "version": 3 2145 | }, 2146 | "file_extension": ".py", 2147 | "mimetype": "text/x-python", 2148 | "name": "python", 2149 | "nbconvert_exporter": "python", 2150 | "pygments_lexer": "ipython3", 2151 | "version": "3.5.3" 2152 | } 2153 | }, 2154 | "nbformat": 4, 2155 | "nbformat_minor": 2 2156 | } 2157 | --------------------------------------------------------------------------------