├── Create DataSet.ipynb ├── Data Processing and Learning.ipynb ├── README.md ├── annotation.txt ├── faster_rcnn ├── FixedBatchNormalization.py ├── RoiPoolingConv.py ├── __init__.py ├── config.py ├── data_augment.py ├── data_generators.py ├── losses.py ├── parser.py ├── resnet.py └── roi_helpers.py ├── label_data.py ├── label_pointer.py ├── pics └── mapple_lisa.png ├── test_frcnn.py ├── train.py └── train_frcnn.py /Create DataSet.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "ExecuteTime": { 8 | "end_time": "2017-06-27T21:12:14.927631Z", 9 | "start_time": "2017-06-27T21:12:11.912537Z" 10 | } 11 | }, 12 | "outputs": [ 13 | { 14 | "name": "stderr", 15 | "output_type": "stream", 16 | "text": [ 17 | "Using TensorFlow backend.\n" 18 | ] 19 | } 20 | ], 21 | "source": [ 22 | "import os \n", 23 | "import sys\n", 24 | "import numpy as np\n", 25 | "from selenium import webdriver\n", 26 | "import glob\n", 27 | "import itertools\n", 28 | "import urllib\n", 29 | "import time\n", 30 | "import cv2\n", 31 | "from random import shuffle\n", 32 | "import ffmpy\n", 33 | "import matplotlib.pyplot as plt\n", 34 | "import keras\n", 35 | "import imp\n", 36 | "import label_data\n", 37 | "import imp\n", 38 | "import pandas as pd\n", 39 | "imp.reload(label_data)\n", 40 | "%matplotlib inline" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": { 46 | "heading_collapsed": true 47 | }, 48 | "source": [ 49 | "#### Convert movies to avi" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 54, 55 | "metadata": { 56 | "ExecuteTime": { 57 | "end_time": "2017-06-20T23:37:00.154737Z", 58 | "start_time": "2017-06-20T23:28:20.979158Z" 59 | }, 60 | "hidden": true 61 | }, 62 | "outputs": [ 63 | { 64 | "name": "stdout", 65 | "output_type": "stream", 66 | "text": [ 67 | "9/10" 68 | ] 69 | } 70 | ], 71 | "source": [ 72 | "movies = glob.glob('/Users/alexandreattia/Music/iTunes/iTunes Music/TV Shows/Les Simpson/*.mp4') + \\\n", 73 | "glob.glob('/Users/alexandreattia/Music/iTunes/iTunes Music/TV Shows/Les Simpson/*.m4v')\n", 74 | "top_k = 10\n", 75 | "for p in range(top_k):\n", 76 | " print('\\r%i/%i' % (p,top_k), end='')\n", 77 | " ff = ffmpy.FFmpeg(\n", 78 | " inputs={np.random.choice(movies): None},\n", 79 | " outputs={'./video%d.avi'%p: None})\n", 80 | " ff.run()" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 174, 86 | "metadata": { 87 | "ExecuteTime": { 88 | "end_time": "2017-06-16T19:21:14.847482Z", 89 | "start_time": "2017-06-16T19:20:26.890895Z" 90 | }, 91 | "collapsed": true, 92 | "hidden": true 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "movies = glob.glob('/Users/alexandreattia/Music/iTunes/iTunes Music/TV Shows/Les Simpson/*.mp4')\n", 97 | "top_k = 1\n", 98 | "movies = [k for k in movies if 'un ennemi tr' in k.lower()]\n", 99 | "for i, p in enumerate(movies[:top_k]):\n", 100 | " ff = ffmpy.FFmpeg(\n", 101 | " inputs={p: None},\n", 102 | " outputs={'./video_target_%s.avi' % i: None})\n", 103 | " ff.run()" 104 | ] 105 | }, 106 | { 107 | "cell_type": "markdown", 108 | "metadata": { 109 | "heading_collapsed": true 110 | }, 111 | "source": [ 112 | "#### Download pictures" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 77, 118 | "metadata": { 119 | "ExecuteTime": { 120 | "end_time": "2017-05-24T00:28:48.758267Z", 121 | "start_time": "2017-05-24T00:28:48.752673Z" 122 | }, 123 | "collapsed": 
true, 124 | "hidden": true 125 | }, 126 | "outputs": [], 127 | "source": [ 128 | "for character in characters:\n", 129 | " char = character.split('/')[2]\n", 130 | " browser = webdriver.Chrome()\n", 131 | " browser.set_page_load_timeout(10)\n", 132 | " time.sleep(np.random.randint(5, 13)*0.1)\n", 133 | " for p in range(5):\n", 134 | " link = \"https://search.aol.com/aol/image?q=%s+%s&s_it=searchtabs&v_t=na&page=%d\" % ('simpson', char.replace('_','+'), p+1)\n", 135 | " try:\n", 136 | " browser.get(link)\n", 137 | " except:\n", 138 | " time.sleep(np.random.randint(7)*0.1)\n", 139 | " images = browser.find_elements_by_tag_name('img')\n", 140 | " for i, image in enumerate(images):\n", 141 | " src = image.get_attribute(\"src\")\n", 142 | " if src:\n", 143 | " path, _ = urllib.request.urlretrieve(src)\n", 144 | " os.rename(path, './characters/%s/aol_%d_%s.png' % (char,p,i))\n", 145 | " time.sleep(np.random.randint(10)*0.1)\n", 146 | " browser.quit()" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "metadata": { 153 | "ExecuteTime": { 154 | "end_time": "2017-06-12T17:12:17.396815Z", 155 | "start_time": "2017-06-12T17:10:41.004Z" 156 | }, 157 | "collapsed": true, 158 | "hidden": true 159 | }, 160 | "outputs": [], 161 | "source": [ 162 | "char = 'kent brockman'\n", 163 | "link = \"https://images.search.yahoo.com/search/images;_ylt=AwrTcYSZtSVZ\\\n", 164 | "boAA7FuJzbkF;_ylu=X3oDMTBsZ29xY3ZzBHNlYwNzZWFyY2gEc2xrA2J1dHRvbg--;_ylc=X1MDOTYwNjI4NTcEX\\\n", 165 | "3IDMgRhY3RuA2NsawRiY2sDZWltbXRkdGFpcHQ0aCUyNmIlM0Q0JTI2ZCUzREJVYVpCTE5wWUVLSUIxbWNQSzRYNEdYZTF\\\n", 166 | "5NC0lMjZzJTNEOTglMjZpJTNEbG93NXlrSzFTUXdtTEhLby5Md2EEY3NyY3B2aWQDUno0T0tUSXdOaTdwV3QxdlZTejBrUWU3Tnp\\\n", 167 | "NdU53QUFBQUNtT00yVgRmcgNzZnAEZnIyA3NhLWdwBGdwcmlkA3lnQ05QTnlSU2dxRTVvRl8zOWt4NkEEbXRlc3RpZANudWxsBG5fc3VnZwMxBG9yaWdpb\\\n", 168 | "gNpbWFnZXMuc2VhcmNoLnlhaG9vLmNvbQRwb3MDMARwcXN0cgMEcHFzdHJsAwRxc3RybAMyMwRxdWVyeQNhYnJhaGFtIGdyYW5kcGEgc2ltcHNvbgR0X3N0\\\n", 169 | "bXADMTQ5NTY0MzgwNQR2dGVzdGlkA251bGw-?gprid=ygCNPNyRSgqE5oF_39kx6A&pvid=Rz4OKTIwNi7pWt1vVSz0kQe7NzMuNwAAAACmO\\\n", 170 | "M2V&p={}&fr=sfp&fr2=sb-top-images.search.yahoo.com&ei=UTF-8&n=60&x=wrt\".format(char.replace('_','+') + 'simpson')\n", 171 | "\n", 172 | "#link='http://www.simpsoncrazy.com/pictures/bart'\n", 173 | "\n", 174 | "browser = webdriver.Chrome()\n", 175 | "browser.set_page_load_timeout(10)\n", 176 | "try:\n", 177 | " browser.get(link)\n", 178 | "except:\n", 179 | " time.sleep(np.random.randint(7)*0.1)\n", 180 | "time.sleep(7)\n", 181 | "images = browser.find_elements_by_tag_name('img')\n", 182 | "for i, image in enumerate(images):\n", 183 | " src = image.get_attribute(\"src\")\n", 184 | " if src:\n", 185 | " path, _ = urllib.request.urlretrieve(src)\n", 186 | " os.rename(path, './characters/%s/yahoo_%d_%s.jpg' % (char,np.random.randint(20), i))\n", 187 | "for elem in glob.glob('./characters/%s/*.png'):\n", 188 | " k = cv2.imread(elem)\n", 189 | " if not k:\n", 190 | " os.remove(elem)\n", 191 | "browser.quit()" 192 | ] 193 | }, 194 | { 195 | "cell_type": "markdown", 196 | "metadata": { 197 | "heading_collapsed": true 198 | }, 199 | "source": [ 200 | "#### Counting" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": 53, 206 | "metadata": { 207 | "ExecuteTime": { 208 | "end_time": "2017-06-20T18:16:18.193295Z", 209 | "start_time": "2017-06-20T18:16:17.740241Z" 210 | }, 211 | "hidden": true, 212 | "scrolled": false 213 | }, 214 | "outputs": [ 215 | { 216 | "name": "stderr", 217 | 
"output_type": "stream", 218 | "text": [ 219 | "/Users/alexandreattia/Desktop/Work/workenv/lib/python3.5/site-packages/ipykernel/__main__.py:19: DeprecationWarning: \n", 220 | ".ix is deprecated. Please use\n", 221 | ".loc for label based indexing or\n", 222 | ".iloc for positional indexing\n", 223 | "\n", 224 | "See the documentation here:\n", 225 | "http://pandas.pydata.org/pandas-docs/stable/indexing.html#deprecate_ix\n" 226 | ] 227 | }, 228 | { 229 | "data": { 230 | "text/html": [ 231 | "
\n", 232 | "\n", 245 | "\n", 246 | " \n", 247 | " \n", 248 | " \n", 249 | " \n", 250 | " \n", 251 | " \n", 252 | " \n", 253 | " \n", 254 | " \n", 255 | " \n", 256 | " \n", 257 | " \n", 258 | " \n", 259 | " \n", 260 | " \n", 261 | " \n", 262 | " \n", 263 | " \n", 264 | " \n", 265 | " \n", 266 | " \n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | " \n", 287 | " \n", 288 | " \n", 289 | " \n", 290 | " \n", 291 | " \n", 292 | " \n", 293 | " \n", 294 | " \n", 295 | " \n", 296 | " \n", 297 | " \n", 298 | " \n", 299 | " \n", 300 | " \n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | " \n", 351 | " \n", 352 | " \n", 353 | " \n", 354 | " \n", 355 | " \n", 356 | " \n", 357 | " \n", 358 | " \n", 359 | " \n", 360 | " \n", 361 | " \n", 362 | " \n", 363 | " \n", 364 | " \n", 365 | " \n", 366 | " \n", 367 | " \n", 368 | " \n", 369 | " \n", 370 | " \n", 371 | " \n", 372 | " \n", 373 | " \n", 374 | " \n", 375 | " \n", 376 | " \n", 377 | " \n", 378 | " \n", 379 | " \n", 380 | " \n", 381 | " \n", 382 | " \n", 383 | " \n", 384 | " \n", 385 | " \n", 386 | " \n", 387 | " \n", 388 | " \n", 389 | " \n", 390 | " \n", 391 | " \n", 392 | " \n", 393 | " \n", 394 | " \n", 395 | " \n", 396 | " \n", 397 | " \n", 398 | " \n", 399 | " \n", 400 | " \n", 401 | " \n", 402 | " \n", 403 | " \n", 404 | " \n", 405 | " \n", 406 | " \n", 407 | " \n", 408 | " \n", 409 | " \n", 410 | " \n", 411 | " \n", 412 | " \n", 413 | " \n", 414 | " \n", 415 | " \n", 416 | " \n", 417 | " \n", 418 | " \n", 419 | " \n", 420 | " \n", 421 | " \n", 422 | " \n", 423 | " \n", 424 | " \n", 425 | " \n", 426 | " \n", 427 | " \n", 428 | " \n", 429 | " \n", 430 | " \n", 431 | " \n", 432 | " \n", 433 | " \n", 434 | " \n", 435 | " \n", 436 | " \n", 437 | " \n", 438 | " \n", 439 | " \n", 440 | " \n", 441 | " \n", 442 | " \n", 443 | " \n", 444 | " \n", 445 | " \n", 446 | " \n", 447 | " \n", 448 | " \n", 449 | " \n", 450 | " \n", 451 | " \n", 452 | " \n", 453 | " \n", 454 | " \n", 455 | " \n", 456 | " \n", 457 | " \n", 458 | " \n", 459 | " \n", 460 | " \n", 461 | " \n", 462 | " \n", 463 | " \n", 464 | " \n", 465 | " \n", 466 | " \n", 467 | " \n", 468 | " \n", 469 | " \n", 470 | " \n", 471 | " \n", 472 | " \n", 473 | " \n", 474 | " \n", 475 | " \n", 476 | " \n", 477 | " \n", 478 | " \n", 479 | " \n", 480 | " \n", 481 | " \n", 482 | " \n", 483 | " \n", 484 | " \n", 485 | " \n", 486 | " \n", 487 | " \n", 488 | " \n", 489 | " \n", 490 | " \n", 491 | " \n", 492 | "
namenon-filterededitedlabeledtotaltraintesttrue_total
0Homer Simpson102233223318983351176
1Ned Flanders191001348144812312171176
2Moe Szyslak732321215144712302171176
3Lisa Simpson02511142139311842091176
4Bart Simpson12401327132711281991176
5Marge Simpson12201291129110971941176
6Krusty The Clown0281935121610341821176
7Principal Skinner32831121120410231811176
8Charles Montgomery Burns0298905120310231801176
9Milhouse Van Houten18610694510518931581051
10Chief Wiggum1790897987839148987
11Abraham Grampa Simpson00934934794140934
12Sideshow Bob26220685905769136905
13Apu Nahasapeemapetilon406456062453094624
14Kent Brockman011338349642274496
15Edna Krabappel299237246439470464
16Comic Book Guy3638038246239369462
17Nelson Muntz02933136030654360
18Lenny Leonard255325330626046306
19Mayor Quimby415019624620937246
20TOTAL10982142174551959716657294017419
\n", 493 | "
" 494 | ], 495 | "text/plain": [ 496 | " name non-filtered edited labeled total train \\\n", 497 | "0 Homer Simpson 1 0 2233 2233 1898 \n", 498 | "1 Ned Flanders 19 100 1348 1448 1231 \n", 499 | "2 Moe Szyslak 73 232 1215 1447 1230 \n", 500 | "3 Lisa Simpson 0 251 1142 1393 1184 \n", 501 | "4 Bart Simpson 124 0 1327 1327 1128 \n", 502 | "5 Marge Simpson 122 0 1291 1291 1097 \n", 503 | "6 Krusty The Clown 0 281 935 1216 1034 \n", 504 | "7 Principal Skinner 32 83 1121 1204 1023 \n", 505 | "8 Charles Montgomery Burns 0 298 905 1203 1023 \n", 506 | "9 Milhouse Van Houten 186 106 945 1051 893 \n", 507 | "10 Chief Wiggum 17 90 897 987 839 \n", 508 | "11 Abraham Grampa Simpson 0 0 934 934 794 \n", 509 | "12 Sideshow Bob 26 220 685 905 769 \n", 510 | "13 Apu Nahasapeemapetilon 40 64 560 624 530 \n", 511 | "14 Kent Brockman 0 113 383 496 422 \n", 512 | "15 Edna Krabappel 29 92 372 464 394 \n", 513 | "16 Comic Book Guy 363 80 382 462 393 \n", 514 | "17 Nelson Muntz 0 29 331 360 306 \n", 515 | "18 Lenny Leonard 25 53 253 306 260 \n", 516 | "19 Mayor Quimby 41 50 196 246 209 \n", 517 | "20 TOTAL 1098 2142 17455 19597 16657 \n", 518 | "\n", 519 | " test true_total \n", 520 | "0 335 1176 \n", 521 | "1 217 1176 \n", 522 | "2 217 1176 \n", 523 | "3 209 1176 \n", 524 | "4 199 1176 \n", 525 | "5 194 1176 \n", 526 | "6 182 1176 \n", 527 | "7 181 1176 \n", 528 | "8 180 1176 \n", 529 | "9 158 1051 \n", 530 | "10 148 987 \n", 531 | "11 140 934 \n", 532 | "12 136 905 \n", 533 | "13 94 624 \n", 534 | "14 74 496 \n", 535 | "15 70 464 \n", 536 | "16 69 462 \n", 537 | "17 54 360 \n", 538 | "18 46 306 \n", 539 | "19 37 246 \n", 540 | "20 2940 17419 " 541 | ] 542 | }, 543 | "execution_count": 53, 544 | "metadata": {}, 545 | "output_type": "execute_result" 546 | } 547 | ], 548 | "source": [ 549 | "characters = glob.glob('./characters/*')\n", 550 | "d = []\n", 551 | "dd = []\n", 552 | "for character in characters:\n", 553 | " a = len([k for k in glob.glob(character + '/*') if 'edited' in k])\n", 554 | " b = len([k for k in glob.glob(character + '/*') if 'pic_video' in k])\n", 555 | " c = len([k for k in glob.glob(character + '/*') if 'pic_video' not in k and 'edited' not in k])\n", 556 | " dd.append({'name' : character.split('/')[2].replace('_',' ').title(),\n", 557 | " 'edited' :a,\n", 558 | " 'labeled':b,\n", 559 | " 'total':a+b,\n", 560 | " 'non-filtered':c,\n", 561 | " 'train': round((a+b)*0.85),\n", 562 | " 'test': round((a+b)*0.15)})\n", 563 | "df = pd.DataFrame(dd)[['name', 'non-filtered',\n", 564 | " 'edited', 'labeled','total',\n", 565 | " 'train', 'test']].sort_values('total', ascending=False).reset_index(drop=True)\n", 566 | "df['true_total'] = df.total\n", 567 | "df.ix[df.total > (1000 / 0.85), 'true_total'] = int((1000 / 0.85))\n", 568 | "top_k = 20\n", 569 | "df2 = pd.concat([df,pd.DataFrame({'name' : \"TOTAL\",\n", 570 | " 'edited' :df[:top_k].edited.sum(),\n", 571 | " 'labeled':df[:top_k].labeled.sum(),\n", 572 | " 'total':df[:top_k].total.sum(),\n", 573 | " 'non-filtered':df[:top_k]['non-filtered'].sum(),\n", 574 | " 'train': df[:top_k].train.sum(),\n", 575 | " 'test': df[:top_k].test.sum(),\n", 576 | " 'true_total': df[:top_k].true_total.sum()}, index=[top_k])])\n", 577 | "df2 = df2.sort_values('total', ascending=False).sort_index().reset_index(drop=True)\n", 578 | "df2[['name', 'non-filtered',\n", 579 | " 'edited', 'labeled','total',\n", 580 | " 'train', 'test', \"true_total\"]][:1+df2[df2.name == 'TOTAL'].index[0]]" 581 | ] 582 | }, 583 | { 584 | "cell_type": "markdown", 585 | "metadata": { 586 | 
"heading_collapsed": true 587 | }, 588 | "source": [ 589 | "#### Filtering pictures " 590 | ] 591 | }, 592 | { 593 | "cell_type": "code", 594 | "execution_count": 8, 595 | "metadata": { 596 | "ExecuteTime": { 597 | "end_time": "2017-06-27T21:41:18.041945Z", 598 | "start_time": "2017-06-27T21:41:18.026754Z" 599 | }, 600 | "hidden": true 601 | }, 602 | "outputs": [], 603 | "source": [ 604 | "characters = glob.glob('./characters/*')\n", 605 | "d = []\n", 606 | "for character in characters:\n", 607 | " a = len([k for k in glob.glob(character + '/*') if 'edited' in k])\n", 608 | " b = len([k for k in glob.glob(character + '/*') if 'pic_video' in k])\n", 609 | " d.append((character,a+b))\n", 610 | "characters = [e[0] for e in sorted(d, key=lambda x:x[1], reverse=True)]\n", 611 | "\n", 612 | "top_k = 10\n", 613 | "characters = [e for e in characters if 'nelson' in e]\n", 614 | "for character in characters:\n", 615 | " print(character.split('/')[2].replace('_',' ').upper())\n", 616 | " pictures = [k for k in glob.glob(character + '/*') if 'edited' not in k and 'pic_vid' not in k]\n", 617 | " for pic in pictures:\n", 618 | " img = cv2.imread(pic)\n", 619 | " if img is not None:\n", 620 | " plt.imshow(img)\n", 621 | " plt.show()\n", 622 | " f = input('Keep the pictures, crop it or delete it ? [Y,C,N]')\n", 623 | " if f.lower() == 'skip':\n", 624 | " break\n", 625 | " if f.lower() in ['y', 'yes', '', 'keep', 'k']:\n", 626 | " title = pic.replace('.jpg', '%d_edited.jpg' % np.random.randint(500, 5000)).replace(\n", 627 | " '.png', '%d_edited.jpg' % np.random.randint(500, 5000))\n", 628 | " cv2.imwrite(title, img)\n", 629 | " os.remove(pic)\n", 630 | " elif f.lower() in ['crop', 'c']:\n", 631 | " g = input('Right or Left half ? [R,L]')\n", 632 | " if g.lower() in ['right', 'r']:\n", 633 | " img2 = img[:, int(img.shape[1]/2):]\n", 634 | " plt.imshow(img2)\n", 635 | " plt.show()\n", 636 | " h = input('Good')\n", 637 | " if h.lower() not in ['n', 'no']:\n", 638 | " title = pic.replace('.jpg', '%d_edited.jpg' % np.random.randint(500, 5000)).replace(\n", 639 | " '.png', '%d_edited.jpg' % np.random.randint(500, 5000))\n", 640 | " cv2.imwrite(title, img2)\n", 641 | " os.remove(pic)\n", 642 | " else:\n", 643 | " os.remove(pic)\n", 644 | " elif g.lower() in ['left', 'l']:\n", 645 | " img2 = img[:, :int(img.shape[1]/2)]\n", 646 | " plt.imshow(img2)\n", 647 | " plt.show()\n", 648 | " h = input('Good')\n", 649 | " if h.lower() not in ['n', 'no']:\n", 650 | " title = pic.replace('.jpg', '%d_edited.jpg' % np.random.randint(500, 5000)).replace(\n", 651 | " '.png', '%d_edited.jpg' % np.random.randint(500, 5000))\n", 652 | " cv2.imwrite(title, img2)\n", 653 | " os.remove(pic)\n", 654 | " else:\n", 655 | " os.remove(pic)\n", 656 | " elif f.lower() in ['no','delete','d','n']:\n", 657 | " os.remove(pic)\n", 658 | " else:\n", 659 | " os.remove(pic)\n", 660 | " print('%d pictures edited' % len([k for k in glob.glob(character + '/*') if 'edited' in k]))" 661 | ] 662 | }, 663 | { 664 | "cell_type": "markdown", 665 | "metadata": { 666 | "heading_collapsed": true 667 | }, 668 | "source": [ 669 | "#### Data from videos" 670 | ] 671 | }, 672 | { 673 | "cell_type": "code", 674 | "execution_count": 9, 675 | "metadata": { 676 | "ExecuteTime": { 677 | "end_time": "2017-06-27T21:41:32.597131Z", 678 | "start_time": "2017-06-27T21:41:32.594514Z" 679 | }, 680 | "hidden": true 681 | }, 682 | "outputs": [], 683 | "source": [ 684 | "imp.reload(label_data)\n", 685 | "label_data.labelized_data(to_shuffle=True)" 686 | ] 687 | }, 688 | { 689 | 
"cell_type": "markdown", 690 | "metadata": { 691 | "heading_collapsed": true 692 | }, 693 | "source": [ 694 | "#### Auto-creating set" 695 | ] 696 | }, 697 | { 698 | "cell_type": "code", 699 | "execution_count": 43, 700 | "metadata": { 701 | "ExecuteTime": { 702 | "end_time": "2017-06-20T02:14:43.664640Z", 703 | "start_time": "2017-06-20T02:14:43.591143Z" 704 | }, 705 | "collapsed": true, 706 | "hidden": true 707 | }, 708 | "outputs": [], 709 | "source": [ 710 | "for k in map_characters.values():\n", 711 | " if k not in [k.split('/')[2] for k in glob.glob('./autogenerate/*')]:\n", 712 | " os.mkdir('./autogenerate/%s/'% k)" 713 | ] 714 | }, 715 | { 716 | "cell_type": "code", 717 | "execution_count": 48, 718 | "metadata": { 719 | "ExecuteTime": { 720 | "end_time": "2017-06-20T02:16:52.165214Z", 721 | "start_time": "2017-06-20T02:16:29.254972Z" 722 | }, 723 | "hidden": true, 724 | "scrolled": false 725 | }, 726 | "outputs": [ 727 | { 728 | "name": "stdout", 729 | "output_type": "stream", 730 | "text": [ 731 | "1644/1644" 732 | ] 733 | } 734 | ], 735 | "source": [ 736 | "imp.reload(label_data)\n", 737 | "import train\n", 738 | "imp.reload(train)\n", 739 | "# map_characters = label_data.map_characters\n", 740 | "# label_data.generate_pic_from_videos()\n", 741 | "label_data.classify_pics()" 742 | ] 743 | }, 744 | { 745 | "cell_type": "code", 746 | "execution_count": 7, 747 | "metadata": { 748 | "ExecuteTime": { 749 | "end_time": "2017-06-27T21:41:05.359658Z", 750 | "start_time": "2017-06-27T21:41:05.355310Z" 751 | }, 752 | "hidden": true 753 | }, 754 | "outputs": [], 755 | "source": [ 756 | "for char in glob.glob('./autogenerate/*'):\n", 757 | " character = char.split('/')[2]\n", 758 | " pics = glob.glob(char+'/*')\n", 759 | " shuffle(pics)\n", 760 | " for pic in pics:\n", 761 | " img = cv2.imread(pic)\n", 762 | " plt.imshow(img)\n", 763 | " plt.show()\n", 764 | " corr = input('%s : Correct ? 
[Y/n] ' % character)\n", 765 | " if corr.lower() in ['y', 'yes']:\n", 766 | " os.rename(pic, './characters/%s/%s' % (character, pic.split('/')[3]))\n", 767 | " elif corr.lower() == 'skip':\n", 768 | " break\n", 769 | " else:\n", 770 | " os.remove(pic)" 771 | ] 772 | }, 773 | { 774 | "cell_type": "markdown", 775 | "metadata": { 776 | "collapsed": true, 777 | "heading_collapsed": true 778 | }, 779 | "source": [ 780 | "#### Clean Dataset" 781 | ] 782 | }, 783 | { 784 | "cell_type": "code", 785 | "execution_count": 16, 786 | "metadata": { 787 | "ExecuteTime": { 788 | "end_time": "2017-06-21T23:49:14.949054Z", 789 | "start_time": "2017-06-21T23:49:14.939999Z" 790 | }, 791 | "collapsed": true, 792 | "hidden": true 793 | }, 794 | "outputs": [], 795 | "source": [ 796 | "def mse(imageA, imageB):\n", 797 | " if imageA.shape[0] == imageB.shape[0] and imageA.shape[1] == imageB.shape[1]:\n", 798 | " err = np.sum((imageA.astype(\"float\") - imageB.astype(\"float\")) ** 2)\n", 799 | " err /= float(imageA.shape[0] * imageA.shape[1])\n", 800 | " return err\n", 801 | " else:\n", 802 | " return imageA.shape[0]*imageA.shape[1]" 803 | ] 804 | }, 805 | { 806 | "cell_type": "code", 807 | "execution_count": 31, 808 | "metadata": { 809 | "ExecuteTime": { 810 | "end_time": "2017-06-22T00:04:56.114157Z", 811 | "start_time": "2017-06-22T00:04:52.914777Z" 812 | }, 813 | "collapsed": true, 814 | "hidden": true 815 | }, 816 | "outputs": [], 817 | "source": [ 818 | "pics = glob.glob('./characters/abraham_grampa_simpson/*.*')\n", 819 | "pics = {pic.split('/')[3].replace('.jpg', '') : cv2.imread(pic) for pic in pics}" 820 | ] 821 | }, 822 | { 823 | "cell_type": "code", 824 | "execution_count": 18, 825 | "metadata": { 826 | "ExecuteTime": { 827 | "end_time": "2017-06-21T23:49:27.785674Z", 828 | "start_time": "2017-06-21T23:49:27.526969Z" 829 | }, 830 | "hidden": true 831 | }, 832 | "outputs": [ 833 | { 834 | "name": "stdout", 835 | "output_type": "stream", 836 | "text": [ 837 | "(416, 576, 3) (368, 496, 3)\n" 838 | ] 839 | }, 840 | { 841 | "data": { 842 | "text/plain": [ 843 | "" 844 | ] 845 | }, 846 | "execution_count": 18, 847 | "metadata": {}, 848 | "output_type": "execute_result" 849 | }, 850 | { 851 | "data": { 852 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXoAAACUCAYAAACdmeLWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvWmMZNl15/e7922xb5mRe1VlVWXW0l1d1dUrm2xuoqQh\nqYVDrSNpYEm2QRuY8dgGbFhjGDBm/GVgGLAHmMEAsi1r5LEki5alkaVuUeIuks1e2M1eauvas3Lf\nIjK2F/GWe/3hRURGZkZVZVVndReb+QcCmfHiLffdd9+5557lf4TWmn3sYx/72MeHF/KDbsA+9rGP\nfezjwWJf0O9jH/vYx4cc+4J+H/vYxz4+5NgX9PvYxz728SHHvqDfxz72sY8POfYF/T72sY99fMix\nL+j3sY97gBDis0KIS0KIK0KI3/mg27OPfewGYj+Ofh/72B2EEAbwLvBTwCzwKvBrWuvzH2jD9rGP\nu+CBaPT7Ws8+PqR4Briitb6mtfaAPwa+8AG3aR/7uCv2XNC3tZ5/DXwOeAT4NSHEI3t9nX3s4wPA\nOHCr5/tse9s+9vFQw3wA5+xqPQBCiI7Ws7+83cePBYQQXwK+BGBK48l8IrP19z7HKAUK0EKAIRGG\nJNQGABoIw5DA9xEIpNCk4g6GUAih0YjoWCnwVEigFCGRSTbwg+41AqW3NEAIEDr6LqREaLB6Wtcx\n6zZVuKO9hrkpOoTo7K927NdrGNZKtbdtXkPISNeUfY7t11FmezcpN3VUYZqYCGwhMDQYQBCqHfsZ\nxk5x12lzrwlbyOjCnudjSImQEsMwthynws0+kdJo/91ssGr3tVIhhmniNhV+AAqJ7DsC+kDsNKub\nnXYIUKGi3izT8up3PeGDEPT9tJ5n73RAOh7Tg+kkojNi2jfY/X4P6DywVtMDNLFYfMs5fxShtSAM\nQ8IgxLZtDMt6MNe5y++7eRr32stag0BSb9RJJJLRdYx7f+53wvL6OpXa3V+GXWAOONDzfaK9bQu0\n1r8L/C7AWH5I/yef/pUtvxt9OkmpGK6EUuBBKoGVTVMJ0+3fFBsbGywtLRE3bOKixePHxskYdWxT\noUyLliH43sVzuLYksC0aViTgfN/vXiNwov4120LaMQCtEUIQCjCVJi82RUIQRJNEMxnb0V63R/h3\nhGAmsVOcBD3/q7ag99kUmh1BbAWtHcf2kcuMmNE9rK+vd7dV63VOjR9kSEnyIaRDWKu4AOTz+R3X\n6kVHxvTKmlBGD2h1dZVUKkU8Hu+2vQNTb56r00+9+3Sulc7G0UaCb790A1cXaAVJHGvnjfUbE8id\nE6wfNNsN1xiGwV9+83/pc+BOPAhBvyv0aj0DqQT/7Fc+i23b0Y8y6jjTvPfmdQb2333rJZ5++mnS\n6ehlCY1gx759O/chRBjGKJVKWJbFwYMHCYwHEyzl30UUWuFWrabvQNTGjm13Qr2miMViXLl2nZMn\nTwKgYnduSLiLSVv0aGj/1f+4u5dhF3gVmBZCHCYS8P8A+PU7HaCVptlsbtnWGXe9WqLnaVwJwjYj\n7VqIrgCBSIgopfBCj1zGIRaLETNCLCNkrVrj1toKIyMj1E1oGZKEbey4Rm3bszHZfH66LfCxzZ7f\nI9jeZjustpLh9NO2jZ3joR8EO8dIzNx5Qs93t1wTYF23r5HPdrcNxuMsLS0xUBgiDCEMN4Xu/ciQ\nfgrmlhWEEFg9E6J1B8XL8zwuXL6CEFkEAiFE3/P3G/F+z/MPgoBiscjcfBmtNVIKDMNA7TKY5kEI\n+nvWeg4PDeyZyJVScu7cOT7xiU/s1Sm76H3xWq1IA+lMTo7jAOyY+ffqunNzczz//PPR+R+QoO8M\n2M5kuX0AW3K7oO/Tjm3CpFej7ActNaHWmLaJYUXHKva+D/cCWutACPGPga8QWQh+T2t97gNuFhAJ\n6nQ6jTA0qBBX71Rs+kEJkBqkEHhowj7P9N6m7l1ck/t3Djb6HOi8l8bcI4QQCMXed8pdEIvFuHbt\nGpoQrRVCCIrFYt9VSj88CEF/z1rPnkBHc2Jlo4pj71xq7jWWl5eBnULx8OHDe36tIAgYHh7e8/M+\nDPB1SGljhUPTk4TGwynge6G1fgF44V6O2a6tyfZY7fzdC0gpmZ+fp2ZoSMQhnbh7u2RkX9dEwl4J\n8PqslpJ9jg37iGqL3Wn070VNaX1AmT9CgRJ6T5/ZvWBxcZF4PM7ISCHyy2iN4ziYu1T69lzQ35fW\nI3Rkrtk2O3U06N0sv7wgGmSXr15jenoa0Z7mQ3an2ewGq8vl7v+ZVCE6f/u5e57H8vIy4tosxWKR\nZDJ6PbabjHZjLlJaopForVleXmZpvcrx48dRfWx7t8PtzDB3WmbebR9/N0tzsXUfy9n6TP3WVmGu\npWB1dZWpqSl830f2OL46k2hH0+983427pdm+f8uyIgfnBwipt3/fKegLRoz5Rg0sG1cLhJBYZqSw\n2LbN3OwiiXiaoN4ELWnUWySSgplrN7FTglTCQsQsHAmBgMxqiZZU2PksVVMQak112/gRptVVTH3f\nxwSMcKv9vFQqcXFhmUKhQKvVolgsApCyN/drNBqkUik2lLvj3kW4+fzvNPbcfo/ISgFsmT6cPo7h\nhhlDmR5rwkADntidP+l2kIHR/dv5IEIMLeh6oe6i0XfG6qtXlglVCiUyGCKFYxlYKFR7EHuehxAC\n05RYZtRXrutSKpVwDDDNyIx37NBQ1CbZXiELKC3P0Wru7PN+eCA2+vvRet4rhDSZmZkhlx8gk83D\nHgr4DwpKRUu06enp7sTxYYPlOPhhSLM94IUQ7+kl/XFEGIYsrCzjJhwabUGfTWb2zIx49uxZAGZm\nZrrbShvV7v/1ep1AQSq749Afe9S0BdrA1hYWJnYoCPqsXJvNJjU/MgdXKhUmJibIpxNoHSKEwPM8\nlFKUyxvdYwzDwOr4Ne+CD8wZeyfcjwNFSsnc3BzPPfccQoh7jv64EzoriztpJKZpYpom8XichYUF\npqam3tM1tdZks1muXr3K1MnT932e7Xb3hw21Wo1sNovneSQSW80NnbZ3bPbb+7/fPXX2CT+ks4Vh\nGJimSTKZxDDqHDlyhDcvvIbjOKSKRZYbNapNl+vXr+Ob4C8vYg0NkMnlYJtG39t/qVSKS5cu0fRN\nTpw4QavVolqtYlkWYRgyNDTElStXWFtbo1qt8pGnno6c6FeuUCwWsW0bTXV7c3e1gvwg0fG1dXxs\newXDMFhZWSFo+dimiWUZmFqiDYElLXwVyRTP8yiXy6ACRoaLZDIZxsej1AzTFDQaTSqVCmE7nHOg\nmN9yHbnL1epDKejvB7Ozs5w8eRIpI5PHj7JaqImcPj944w2OHjnyQTfngWJ9fZ3BwUFM09y1Y2kf\nW5HL5VhzG1u2xeNxDEPjq4Dd0JzU63VisRgr6yW+9pWvMDk1hdaaQqHA4OAghmGQSqUol8ucOnWK\nZDJJOp2mVquRSCQIggBjd8rlQ4VOVJ7neQ/k/CrwkVYM
IQR+GKBCQcOt0WhFJhetNWNjY2RSCRzb\nxDAMSqUSQgiWSqtAtKrP5/O7eo63w0Mj6FX7Aztt83fSzprNJsvLy0gpSWfj3dDM3WD7eU1r6wbh\naVS7HUEQEHpB17ZmWRYShQ4lWpp4XgXLsqjWSyRSiW471LYQRLNfcPA21BoeXhCi4hbxwRzI2w/C\nrsa+7V429Sivvd+2t/AO5+xC7f2bK+yoZbZts7a2Rr1e59ixY1sGcSDvvPoQOupDs71s1aInDLFt\n0e1tufwRzqHYDYaGhijP3tqyLZ1O0xQhtVoFrfVdhUSlUiEej/OZzzyB67pcvXqV69evk8vlGBsb\nY21tjWw2SzqdZmRkBILIpNAxD/2ocmbNzs4CUR8+CChAI/E8j2Y1EvSGI8nlcgBd3wdoyuUy1Wp1\nM8/BcUink6RSqa4tP1D3NyE9FIJeEwXNaLE7R9v7hZAQgYFqZx5i6ihioJ0soZBoKdCEtFqtthPx\nvdtFpZRUq2XGx8fvy4y1j4cb2ogmu16VpGVV0FYDrwUyWUQHeegXxSIVCIUmQBAgjJDrboNm3CYB\nhKUNhtNphnSdugSZtqi0agw4BcptJ2NHOTBlAtd1qdVqZDIjXLp0icREilhumCM/eZoBHeJJxdue\ngnQe53RkNlgBkAFLLY912yYjDOIKhHgfAh375GkINKYGG4GlNTbQau+menZvhNHqJh6PMzMzw9LS\nEjEjwfj4OK0e72q/u9jNajPs9dBKg5rrkfQDWoGJlDVi6RTxpEPcTCF1JPxr5UWklJTdyPZu2FAY\niJwdcRm1JAiCzeur+1v1PhRSRArxUNrypCNRvsYMJFYo2ZAWkig8rLMC8bTAkkF7697MUloKao0G\nj505gwoCUA/R7LePDyWGh4cpl8soIVCGRGqNbZpRXH7wox/Y0IGUklu3bnVXk7HM3cNQ3wts28Zy\nUhiGZmhojFqjQmOjjFDtbHflE2i9JXw6eAD9/VAIej8IurZa7nEJuLi4SCaToVAo7Hm7tAqRGEgt\nSSTTiNYqIpTYgU3T1YBJqVkjaK4yWBhidX2Z4WKBYrG4OXFtj3PdhdBeXV2lWCx2nWXOQzgJvlf4\nvk+5XOaZZ575oJvyvmALV8o9LvparRZaa2KxGNVGHcvafQSWUgrbcXBdFxKRPbozrl57/fskk0ky\nmQylUomZmRmGsqMMiVHS2TRaCEI0jrFTi5amQKkoDb9cLpMuDGL0xA6H4e5i6rdjSz+1tff7OVfH\nyWoYBrVajUajgbYtZmdnicfjDA8PMzk5yfLcKisrUVaxbducP3+eg0PvPWdFSsnq6irZ7GFCK41p\nwpUrV5AmZJ0MWoJlOWTiOZB6ixm5Kzvurwv74qEQ9CCYWy5RaymmjxwldCt3PaKTst2x8T4IZ0oc\ng1Ba1FVIqVaiESxjaQfZyqBqKQLlEJoNtBRUKhWKQ4MU8jkKhUL0Yt0nNjY2mJo+jrTMHTHY+9jH\nXiGbzZJKpchms0xPTzM2NsbNpsRJJNoZ2MZtYxqCICBm2wwNDbExO8/y8jKF4maior3LsL/3E5VK\npXvPpVIpSnr0I0XswoULDAwMdPME9gqNRoOKt4IQIel0Gi1CHCsGqm0OEm2b9QPOBn8oBL3lxHni\nuZ9gbm6Or3/7+wxkc4xPjBGLmRgCLMNvd0qPxuALrl27xvjoAUK/D8ubdfewm9DTIBRKina23tZj\nmkKCCLGTYOOQyR3r/hZpDC6xqgmMdlcUXhiwVi/d1hSl5c52SbWZEHTlyhWOTh3DsiwMosyPPQmM\n3OZ87Wg66+vrrK2tRRvbztfh4WGKxSLK2jxGaomzLXGp3z26ra33F2/v09GwENE5VldXqVQq9Jp1\ng13eaa/z9XbozbLdnyv749ChQzQaDVZXV3nppZd46qmnyGYL+GjcMMSwTRx6koR64HkehYEMhw8f\nZvKpZ2iulXj5tW90fx8cHHwf72R3qNddUqkUSsGZM2cZGRnBb0RjrtFo8O677xKPx6G5k2DtfpFI\nJHCyRUwTlJIgFdoNkEIiuryjDx4PhaBvui5owcTEBNOTh7n0zlvMzc1x5MiRLo3p7SCEEZEE3cfb\n3Ag3l8BKKLJG8w5790dnQO8lx83o6OgDsdP1olwuU6/XKZfLmwJbRX/n5ubwfZ+RQw8mEuHHDsrs\nTnCwaYroNVPoVgLpCbAl2nIRTgWa95YkFwQBvu/j+z5NEeAqSdXzMeM2Znynhm3ik42bZGMDZLNZ\nVudm2DiySakcUwCSptw5tmN2inK1CdkCV4IQshl++vP/kAsXLjA3N8dG3YjyItTO1bnVL8O6Y6bp\nGfa6u9tmP4ldEqd1+rbVamGaJktLS4S2zUajzs35ObRpoE2jG63WcBtUjQAV1Cla0cokFJufjm1l\nU7m5ezsSiQTSOhi1PmhTPygDt4c4TghBtVqlHNSo1+tdMx2AKQ0MEZGXJZNJBjNZnB7T9nbq5Dvh\noRD0pmVx4dw7lGtVjh0+xJNPPkm9UaNWq3HlymUq6/OMjo4yMFggHo+jtebmzRuMj4+TTCYjoXib\n+aATtdLVKNnsoGVvgKXZOdZWKuSnTvB08Sqwu+SJvSQx62jI5XKZycnJByrkE4kE6+vrzM1FPHNb\nBksPZevKygq+bDE1NdXtu/eSdNXpL9eL+ksp1Y1h3scHA611pB+JKLzwzXfeJn7k1H2fr9RyGTw4\nQW5shLfffpuFjRIHM7sTRjroI4r2OARvrVRicHCQp555hkQiQag1iXbGeaPRuMvRDwYrKytUKhUW\nqiskk0lisVj3fRNag9Z4YYjv+9TWSxw/cOi+rvNQCHrHtjj5yHG0IamsrvHNb32LkdFhJiYm+OhH\nP4r2q9y8eZOFhQXi8ThSSuLJJJbjoNtZsLfT+y9fvgxszbY9cCAi11xY8lle8Tn9sV/gy2/f4Om9\nNc/tHlqgteb8uQv84i/+Ihth+EBYMO8Vy8vL5HK5fYG8x7gXTWw7TNPccbxlWViWhWmam/9jYgpJ\nUA/IJpPUajVI57r7A9imQej7VCs1XNcl9Py+5rheZ2g3W7lnfHa2lVo+SJC2iTNY4PrFZU6OjuG6\nLs1ms2u3721/99x+FP2SSqWYn5/n+vXrJHKaZDLJ2NjYHfvE931kEETZ6ToKteygk0mstSYMw66G\n32g0qFQqHM5FoYxDQ0NRmGO5vCeKVq1WY2BggFIfV12z2SSXy3Hx4sXutXpNXbFYtKKQKgrnFm2K\naiFgYWGBI0eOsLKyQja7e86Jh0LQa62xTIlpmiRGhykODRKGIbNLi9RqNSYPHiU7PEW6eIRKpcLS\n0hIb5VUyxRECJQATU0okmzblql7DDOK8aRa5+rO/RMH8Za4vXcJ652/44mtvcjTpYA4olk88ztcf\nL1Ic2eBrbwgeP5hikDoJyyXwM9vaaQAaFAghI0q7bbA7CVG3ia7x+rgOAhWRl02ffAQzFke6DaSU\n3ZfpfjJGd9A
FhJpAB9RaTcr1Gla/5rW3NVrtJCvbwS23yCcGou/O3TUscxuXfNB5j9urgVbbhOFJ\nwfChg/0FyzbiM4w7O9pVn9hqo6f/f4STpB8oFFG0hxKwUasyPDbK3UIafvCDHwCb/De9qBibCtdM\ntUwQs/nud7/L8PDwXaPirp6PVpidKLriwAHqzNzxmHuFbds0GtG7Zds2juN0C5hIKalUKlQqFXKJ\n/F3O9N5gmiY3btzAsqw7Tiqq568iqsbleR43btzoJlzt+pr33doHiM4sPDk5ydLiMtevX8eyLGKx\nGJOTk9GMtrrElauXcRwn4oVJJojFZDfG3RNgac2w4fDSK2+w9pOfojGZoHDgk/ztX1UonX+Ham2N\n1tGjnLMSWIkhatlD2KUFHitK0rK1gxZNKAfa5dvaW/bE0adkVOFnbGKCMNh3He7j/YGvNYHWGLbN\n+sYGg4ODdxX0Tz75JNDfZBn0zKiPnT1LEAQ8n/gM586d4/r163c0iaYSkeA6Pj3QJberPyALZidi\nT2vNwsICEGWoNhqNqEjMgw2txzAMlpeX7x7h02E31QIpDCSCIGhFEU73GE7+UAp6iB6G7/sUCgUG\nClGHVKtV3nrrLTKZDOlMklOnTqGUYnFxkaXlJWxTMTExgdSahoQMgkcTWZY9iy+vfJdWcYjZzAgj\nn/1VpjMJ/uYb/w+TKQeZSKCmpjHzSV79yz+imMxwMAh3PPB0PcA3oeJotLawtbFr59CdUEilWV9e\nIR6PU2vV+xf0uAvuRl5mGJHTWkp524ig7dqF1nrPWDM71zSsGOvr67iuSz6fx+9XK3QPruO3lxKm\naW7xz7zfCDWUQ01vAaWCv/P5NnGoeh6x3AA6iGF7DuUde90G2gBt0Ki3kMIiEU+DW7/rYdUwjPwy\nMZsbc3MMTB7sO35i5qaAFkogpCbUKhKYUpNJxKjX6yS2LJ0Util5JZC0RidZW6lRr/qMZHLEW5sO\nWltaLC0tgWsSd9LgxhFhHJRFbiCBEUgSalNMuX3eN8uyEG2akiDQBBoaNY90Oo1b9wEfy0oSq0f3\ntnpjnqNHj+J5HknhEEjB0uwSG6US+YEBlpeXyefztFot0uk0sVgMv9nuT3Fv7+Z2psowDDl3bQaZ\nTLDWqGO2ieZEP+eu0S7S0s7F8QEjZhOL2dxYXmRoaAi1y+XqQyroe7RaEZVjg2jWPXDgAOvr6yws\nzrFeWmNgYIADBw4wfXiSSmWV+dlZtBC0RiJffcGHEwE8r1e5oWPMBAcp+Unkpz5FJltnNpYgobM0\nfIPlTBJx8pOUFq4T+goSWw1sb6kMTthiQIFE0RKwFyVOvvfDNzl96hE0CkNH1Ar72Btcu3btA/V3\nKCGoGhLL3DQvBX2SAqWGqgGOjLLc7c7S9CGDUgqB7po5NCFzc3NkMhkSuZ02Y9d1abVt9M2NKjEF\nmczm+F5ZWSEWizE8ZLGxXsWxrE5Zc5LS4tDBQ1tyZNx7zB0UIgpg7B0D5XKZcrmM53nkioO0tMIy\nTYTWZDIZatW7T5L3gyAIuHXr1t13fAB4OAX9NlKrdDyD67pce/cKuVwOrTUHJg5Rr9epVCrcuHGO\n0dFBBnJ5nnvmk2jf5/tvvcq5W28zkhpnIJPn669O45wucub0CJe8Zf4iGGXk+X/EzPkbjKg0yg5Y\nNZIcP/OzvDn3VeZv3uS3Exe2tOMYDVCKVBlqusSGyHBLOWRtSdxQjB4c5+atK0zmM6woH+21iLUg\nbga42sQxQt60BE1Dk5A25bqHXWoyODREOpFGCkE87rDuR7N7Z6kb+Fs1UtPauQTu9pjtbP3eRide\nt9Vqkc1muzS06+vrm74AYaECSSIVp7JR5ZlnoggMEeu8eneHCLcNqW32dYVFuVZlZGKchtfqu7ow\nthUrqXtbQwPtbcVNvB7iOK/t3DM7kUxra5jm/Ts/3yuU0Li2RvUUWa20BVevUzJr2TiJAWJDg9Ql\nVITqW9mpVquROrLVOdkJqQzDMCLf63Ge1ut18kph2jv7ubfvU6kUiUSCihA4jsPly5fJZrMUCgVU\nz/mUUty4eY1arRqFNQvVZso0+gp6y7JIFgpMfPzjNJdWePeHb9EwBYVCgfn5eQBOnjzJ6WSWyxdv\nUFvXzN5YwzaSPH32aXzfZ2FhgTCMSNSI7+yTRqOBdF3oY7Y22klfhmF0nZyxWKwbdbZUWsdDYxoG\nfqvF0soKp+4zsqUXqVSKxcXFXnZBXNdFKYVp7k0yWaVS2TWZ3MMp6Leh0WiwtrbG6upqN7knX3WZ\nnJzEcRxGxsbYqNY4d22G7799gWKxSEEM8oknf5H1Upnl6jxG4xLuLcHi8DjakWBrZutNCofHaGkP\nGYBpQMswSZ+Y5sLKAv/DG+e3tOOg0CQDl1jK4eTRo7zbEvybNxsMOR4HBuOk1hcoxFKcaSwxYDSI\nWXFqdZvxvI0KTWQYkpIm2RA822DAB2E5jA6NIkyBI/QOtssHhWKxSDwexzTNKCIDqNeaCCGYuXmL\n55577oFcNypsLLv/f9hh2Db5Awe3hO8NjEwAW6lxY2ECLcHt+byfpWZqtSicuRbEo1C+dp2ADjo2\n7bfffpvRsWGOHDmMUgpp0A1xbvap/tSLsbExksKgMnOejY0N8vl81xka1td49vHj/Lvf+zPMMMHp\nR05STEdtKS2EqG7gw+5d61KDUBrDkBhCdCPvNjY2i3c46RQemtmlJSbGxpienobmg6Es/iBxV0Ev\nhPg94GeBZa31qfa2AvB/A5PADeBXtNYlEZU3/5fA54EG8Fta69ffayPX1tZYW1sjk9mMgrlx4wYb\nGxtMTk4yUCwymstB3OY7Ny7z2tJ1ROwIzWoVv6W4Mb+B71yCpM3C7FHSh48QyoiZUhOCAKUFho5e\nsPTUJMeOHkCpR7e0Y1D4iFYFsVTmn//5CxQe/yiP/epJbtx6h8yzZ0gODPLCX/0NX/3eFX712CRJ\nAxbKBuPLs5gDSR4fP8BSaYFxO8a0naOcTFNy1zHNJI6pMUSLlg55kJWHe7W4QqFAoVDoOqQWwmVC\nX/DzP//zkTC+S7TL/SAIAkqlEmNjY5GG1oM7ZdzeK0zTZHl5mUwmg/oASeGUktRcG9tJdbdtBFGB\njkSqxwlUj0JWPBS+eP8Lp7iui+u6NFSDMAy3TEwdmmOtNY8++iimJfG8VrTiFArDMNjY2MCI39mQ\nWalEVN62bbO0tMShQ4e6gj6TNAmaVX7h5z9HrRwyMXqYprNCs9kkm06xsbFxT9FnppDdspRaRoW0\nvfbKtV7fNM3UvBaWaTI9PU0mlYrG3o+joAd+H/hXwB/0bPsd4Gta638hhPid9vf/BvgcMN3+PAv8\nm/bf9wytNWi6wqE4NEDo+7z51hsIIfjoidO8+NarLB0d4/R/+SW+XjrMzWoKB4NCxuGwbrGCQc1K\nRQ46bJTXRGnRNZGYWtMQggsxgaUNmv6pbW1oYiUVmXgT65kM0x97lnlh
MnbqBLNK8e43v0/h8LM8\n+/n/gNlqlcAMmRgq8u5rf8oLX/lLUm+/zMCxAxwNPX7ONzmw4VFLDpAJs6TDOnXL/0C03IGBKHxy\ncGAIHRpI4/4yjXcDKeUOAf9hRqgFtcDeYvcKwqi/S+VNu/H2F9EmogNWQKvlsVKXZM04yZhFrbKO\nmRvAkgqDMKrTu/3TOW877l70o7vuWT2OjBWp1EqsL7fIDRRICQOj0cTOQ7UjX4XAsGwUkMs7SCnx\nfZ9Wq0UynabR9HY8W4d2yGUyzroKaLg1xk6cRZpXeevqLI35EtPDLUoTeQZSaQ7mTZwstOQ6OgGe\n8skNxqk0VjBME6PPffRzZUghkEJgts1j2eTmRFtM94RNG5sJfEIICDyEE01YJR3VeDY0hKJduaz9\nHPuVlZA6ihZSWlIpV0nGMzR6nnvHXBQmd6fIKX/nCinWUyXMDQPUXplutNbfFkJMbtv8BeBT7f//\nLfBNIkH/BeAPdCStvi+EyAkhRrXWC3e6hlIaJSwCLZEoDB0J3kQiTqlUYn11ndDfjKIQQuCYPpiQ\niEcP7Y2LP2Q0nURXG7zxv/+fBI//BuODk6yLJC0tuGi0H7QKaXlbbeAduEKAGSVVRG7Y7QIpjq+h\nMZKEkZ8COBwNAAAgAElEQVTg+4DhOEgNyzdvwtQJ6hOjfNMPIJOCtTLuX/8No0dO8+x/8TOYCM6v\nXWdBCP7pa2/AzBVy46PMr60zslxBzs0ymEry333sGYLmPD4eOd+EPjb57fD6xPT34k4h8DsJqDox\n77s/x+3Q0lvP7bXfFMdxokIKPdfoFCW52yTjbYub70f85huK/OgIzYWFu9JoPEi0mk3efffdLduC\nMOqT3vHX70XsxE9LKanVakgpeVAR3qlUilQqxXh+lGwhT8K5s3b+6quvdiPjlFL4YcgzH/lo1G6l\ntmjfWgiCICCVTJJLp2lVa2TGRkin01xruFxdWuDEwTwJCVXac2IfTqgHAdle7YmeonQPoQ/8PeN+\nbfTDPcJ7Eejweo4DvW7l2fa2Owr6lufx8ssvc/z4cUwJmXQS0zRpNpusLq3sKvU+k8uRCTWjgWB+\nboEvX/6XeCce57nP/ypXhtKsrz0Yi2dIFO6aHI54YeJmCqSm6bvUah7i8EnC4THe9TVaCEoNh7Xr\nM3D6Yxz44m9iNGsMCc1gMsRZn6Vy9TJ/8PpLfPqxNAMYOFphfYiicB4mjf79MEtKKUkkElsyszuC\nvhd3EvRKKfL5dtGPhRUeOdo/U1QpheM4kbnjHrs4DEPm5+fJTx3EcRxs26a0tsag1hg96qthGFSr\n1YiJUWvy+Ty5XA4FhG3tcnFxsbtvOp2O6sm2f2sCgWXgFHIkczkeMUwuv/UOb12/yrNnHkd6LWKh\nRmhFJpOi1WoxMDDAjRs3btt2y7KQ7czgTmZsGIa7ojfuCPXeItvKuz9R37me0ps+qF7Z9UEWEXrP\nEqStvd+zrieE+JIQ4jUhxGteEHLmzBmCIODSlcssLixhGEaX73p2dpalpSUGBgYwDKM7kLa0w5Zo\ny8SOOxyZnOQ/Lk7zuXXBzT//Oubr17fs6zhO3+SNVqt1zzHXnQ6MxePE2jw8WoBp2Di5PPnhIvGE\njSkVcUugsUh88pNkB0fx6x6LNVDVgBsLHjPxYa5PneLt0YOEa6V2Fp+6r3Y9bOiSbTWb7wuzYYcK\nYHR09E42/98HPrttW8csOQ18rf0dtpolv0RklvyxxfT0NMeOHduSuCPaBFzHjh1jbGwMwzB48803\nWVxainI4iJKqXAn19mfw0AEe/8TH8GM2f/aVv+bVi+d59eJ5fnjhPJcuXWJlZeWDu8n3gIct0OB+\np5iljklGCDEKLLe3zwEHevabaG/bAa317wK/CzA+kNfxeJx8Pk9xIM/i/DzvvPMOdpvv+rOf/SzL\ny8ucP3+eRCLBsWPHqNRWt5zPURIlBIHSaKGJC8Fo3edbKzPMrtwi+Z999D5v9c7YQeIqVDvJAZKF\nDAm3hV9rgWVTrtSIHRwmHwb40iWJi5EwsGs+VlMRNKscKOYRyRzly3WscQeN1d8g+COMh0Wjfz/M\nkmboM1Sa35J41olOqVY2E4f6rVrryZ3KyGg+hWjWiUkLhxBTB6Q9AzcwWA09lhpNcukMjhdSE1CR\nDpYvoxXB9kv01NkLffBbilB7CAIMCxpeHVMqDEQ36oZQkU4kWQ+9tj1aEaqQWCzGtEzQQuOWKqSF\nYHhgGP3WJYYvz3PMyTOQzxMoxXgQUWNvbGxEvC/A2OBBbmQ9hgdGsRJx4ukUMVknDYSmSTnwGMxl\niPstNkyBaGvugYDxbI7FpVW05/fwBez0V/QrQ9gxMW3V/kX3mSTbPEG74dfvJCUqHa1am80mGatC\nxcsipcI0LLQIMYKd7XD75ST3eU1EsKnwmaa56xik+5UgfwH8JvAv2n//fc/2fyyE+GMiJ+zG3V4E\niNgrv/vd76K15pETxxibmKDlRZTBSwsLLM7cYnx8nF//9V/n29/+NpcvX2Z4dKu10tWKUIOQJiBw\nV8qs1Uy+8LGf4tKTh3jpPm/0bugdHhEBkUS2a99qAS0zwBKSjCVYvXKR8Op55kSIPWCzvnKToSee\nYJRRjsaK6PoCn/Bb/LO5df70++/w21/8NHUsYh8iRvVarUY6lUUp9Z7IvR4g9tQsmUgmeeIjz24R\nFJ7eyWHUr3DOoh+9A73RZmatTiFhAHdO6nFdF/ce1utKKVKpVJe2ujcEUbJToXHU5kI+k8vTaDSY\nXVnEF5Hz0teKuoaT08dwl1Zo1GqU1tZYXl3l0ZTqJjp2xsDqfBnf97l27Rp2MkE8ncKJRX1SqVRY\nr9dQlkmQcBBadv0yD4vS8LBjN+GVf0Sk4QwKIWaB/55IwP+JEOI/Am4Cv9Le/QUiG+YVIjvmb++m\nEYaUPP34WbTW1Go1XvrOSxiWxZPPPM2xk4/RbDYplUr8xd9+nVQqxUc+/ZMgFDMzM6wsLkWp+r7L\nhrR4df4ch376c/zZr36cMmPoms2pRJ4wHiKDkJqKYyofcDDaPNsKCIlI1VY8F159HVohYmgQ0zQZ\nPHyYStPFAOJtzUwTaRO6XaSgS1vc1ugFGkMKTMtBYFLWASNPPMXff3qK+Ve/xW98bJrp8FMMZDQq\n1mx3VxSN8dljP434Dz/L6+fe4k/+5E944vRJUrkccR9ylsC8D8F/P47V+3G+7jhH29zZoTSfm13g\nqafG7yjkg7u8u047oaqjBSux0xbb9Da33e9taK21EPfOlSuE+BKReQdDOvz+n355S2Zmy48c0r7v\nd23E/cpFBlZ0n4nEZhhmKgj5xDNnePTonWsF1Go1ah0JvQuEYcjg4CC3lla7LI69k9N2U8RguPn8\nKjcXmLlxg0IQcUyZGsx4DImgrhW3bl7n1q2bHDlyhI999FmmnYBWq4XneaRSUZDE6MABMgMF/vyF\nv+oKensguka9XqdwcIJUKkW
tutGuP9FuT6j2wAD94cduom5+7TY/fabPvhr4R/faCNEuDh6Lxcjl\nchw6dIhypcIPf/hDqtUqk5OTHD9+nGPHjrGxsUG5XGZ+bgHbtpk+9ggSeOH8GyxVV+H4ES6667Re\n/v84OPU8N5sZ3p06hXXzKk4uixKjmMpGG4oNWSclHaxQgpbY0sJZnaPluqTPnOVQKsvc8iILr/8w\nCm0fGSGeSqHCsCvojW0aRehHWkiAJiFNkJJARffoS8GNb3+bj02PkdqoM5jU2MLA3fYYGskSBA4H\nDx7gt3/rN3nxL1/gkJMmETcjCtZ9JeZBYk/Nkpad0b7jbOEREokovTPRxznXG7/eJQPoOdZuZz4H\nQUAgA3rjNpPJJFkhKJVKnMhmqdcqPProozRkux6DjCaMzgTp98SXVKtVpqamGG4TPOXzeZrNZuQr\nY5OyWghBPB7HvzyD53m4rksikWAqkeWRRJbpR07SqtXxtCJtO5R9l5uFQW5evsIzj52mkC9gtFZJ\nJBIkEonuvb344ouMT0YZqZZlkU6nWWwXLbEyKco6pFzdIJQaoTWhCvHbk2cod+d4vVc4joNSkY/s\nfksjGoYROYvbms79FknqhygCcZf77s0l3xsEdGl5lVKEYYjjODz55JMsL0fslbOzsxw8eJATJ06Q\nSCSYPjbF2vo6G5UyBoInDh0hOzHGi++eY+7iu/xSM+DG19/FcIrMf6uAc3SUsdNncDNpSmEcKxbD\nEjoqS6cMZAh1LQjmFzAfOUEhn8eoNym9fR5jfIT08aPEnDhBGBL4fvSs4k6XjnizVJ6FrUFLSagE\nyndRysRyBIuLC5w5eJwnJ4tMygrZpKZuKLY/+ZhIYjgWmcE0+UyKT//UZ/jy//unPHPqEbJOsu9D\n28tkowcBKSWu63LgwIEH8lLuIfbULBloxYrf2PJcfC8S5kL3oSVIbAqUIScSur1mlDthbGwMu14n\n0XQZMTSqdu80jFNTU93/FxYWqFarVEJNfWMDqRTlchnXdflPpx7Ftm3i8XjX/zBQqrMxs8hL3/gG\ntUaDjz/zLONThyGepdbUGEtlRnLD1Ppcd2JiguGRkS3b6u3FlNWzqJLaxCLiVlZqnxVqt3goBD0Q\ncbu3Y8F1OzzJMAwmJiY4ePAgnuexvLzMK6+8AkR2y9HRUbKZDISKiUCw8O5NPlKDn3n6kxwONYGS\nfO/lt1ibucl6ZZ7axYucP7SA8/d+DkyXsZZFGQWYaAy07xIODTJ06Ch6rcSFt97GHChw4NhxGpaN\n9jwsKfHcJi3fJxOPEWwT0oaApTffxhgdJj1UwBCaQIClFGjNqewIR6XBuG2gTBcjsLYkrgDEE0lU\nGBB4TQzHZDSbIG0IhNfE0pld1Uzdx93xfpglTdOhUDi6ZVvYjz+/j/nJlQopNLGBVFfY15s1MoU8\nPppAaARwPDuA2qhiV11Wl1aoriwznx2lKm02qlVcCYEUvNuIFiAdm3/W2nQQz19f5NGpgHBgczzn\nipHgPZPO8MOFBQ62FMITPHZkiseGDSIPVQ1TRL6EIBOQz8AXfuPT3YxXV5RpNZfIjycIzCpVf5k1\nzyfZZoGMp9JUy2XKoyMkMxnCZJpyuUHJm+cHs9dIZjLdKC0NXbbG3lDFZFgmpyV5aZFTmhSwZhi7\n8gH1pVvuZEWJEGlo4gkbpbd6sg12TtIao2tR0lpjWRYN39+xAhfmzmdt7rJ4m+hJmGqFAbsN7nk4\nBL2gnbEQtdowOyQAAYTRElXaMDKRZWA40lJK6w0unL+IFgGPPvIYq9k4VnaKI2HAxWtXma9WGRgY\n49jzp4iZMRLKwhMVnvY9vvHlP2LquU/x5xMjNJsBnu8Rt2NYGsbGD+Kvr5M0bUQqwdlHH2OxsoFp\nCCwhWb41R6O0xuiTTyCkiSUVSkQxxCGKR22LV1tVMhuCg0mD5bm3mDgyTGp4kIs3rpA98FlKbonx\nuKCBQBk7B1qHRMq0HYKWh07F+JlPPM8rr/+AYjKJEpob164gBcwvLjCQL/DkU0+3+0/R7+nrHbrP\nHqSF9IkH346gvbIxbJuFuUUmp7YKPbPPQL2bjX6v8H6YJd8PCCHI5/Ocv3mTJpFNv4UmAFL5HJYh\naKGZGovovjuO305hbIDJ6Sn0bcjflucXWJydY3BMcnxklEz87iuFjpZfMBI0Gh7ri6ucPHGKghnD\nclvE2lE8qlwjriTLiRRXzl/g1sI86UQSKxbnsccfJ5VKdc1ZtxP0srTWzTPYR388HIL+PhCPx3ns\nsccQhuLWzBz1ep3RoWEsy+LkocNYtsHqconl5WWarsdgvMjQSIykEZDQPraU5AKfaw0Xv6IZLQ7h\nK40yTQJDsiEUOpVgNfQIbROCkJVLV2lUNsh9/GkCNGHoIZTAQOEJSUZ6xG68yROld/j8yedZLb3K\nY2dO8o23XsQtSVLnKxx/4jME5TIVGdEc67ZW1gvfC9piWRNqGB4oUhtYY31pnXD8ANfmZhguDoIh\nGJsYZ31xiZdf+R5nzpwhk07jt1ro/WiEHxsYhkGxWORYJs5StcpqtcKGq6lL8LxmpNELkLHodW82\nIw08N7qZeHXp0iVGONK35kY2k2KoWKCQzTI6NEzKVESpT7dHhxBtbcOnYVuoTJp1FRL6Hs14DFcp\nDCGouy6GFHzv29+htLzEgeERlB8wMzNLKx3rsmrCVkHfawqTUkZFPFoPtUnwA8WPnKDvPGDTsCNK\nXBly+PDhrtMkDENmZmaIxS0KuSEOHZ6gUW9SX/dZL8+zEYZoz6a6XuFYPsaSCiivV4ins9QBP+Gg\n4g4tDQOHJymjcew4DoJ6q84jn3qeFd8DrfAlWEKyurpCKlMgsTpH8sZ1fueLP8OIo7HTZwmly9M/\n/RNUQp/XzGu88J3f57nHH2PJDTnkRGGGZlusdxxTVkziNhV+00XVA5o1eO2VS7i+zVtX5xgq5khm\nBhgdHeXFv/5bxorDSFyuX53j6NEjWLbZv5BBn36E91b0e7cIw5D19XUmOXr3ne+C96O9Dxs6Mdqd\ncGvTNDENExO69VANw8B1XbLZLI2chW0InHQS0TYf2u3Vk+u65HI5hh892T3/0PFpbt26RXD1apcV\ntuNsvbqyQqAUTd/HSTjIbWaMzrh1XZdqtUqz2SQMQ8rlMt+4cJVKpcLRo0f51h//OzzPwxgpEuiI\nWyaXzlCp10i0bKTWOPE4WJqDhRRlAlzX7XIx9Qr6Xsi1VntMbK5aG43G+5KYdzv4vk8QBEhT4ns+\nUqqon6SCPpTRu0WvY/5eMm1/5AT97aCJ4vFN02TywEFm5m9GCVYzDoMDQxSSBZLpDKtejtLKEnG/\nRsIeIaWzNKSPqQ20it4iqaMho4UglIJQQ7PpQsujEnhYCOoisuwrASOjoywvrZFcL/OzR44xaUAh\n7hMEHiVLk1IGSQSf++hZ/vAP/5B3LvsMnTiOGebQWtJ2SXSzAN8ub/Ctl95mfaNMQiWI
50YJmw7v\nkmPt3E3+/pmTpAcSvP7aBXw3IGgFrK9UaTVgYLBBtpjDuYug38eHG042gydhg5CWiGz0dtiOe89k\nOHHiBAs91jwZsxk+fIgbF2a4dOkShw4dIh6PooNyuRwt1+VrX/0qU/lBnGImYivbhsuXL5NOp4nH\n4yQSCQzDwBks8fiZx1hcXCQ5MsSZqSlSj0wTyCgJq1oqc/n6NZZfuYzwmownUxhCoR4sieuPHR4K\nQa+Uwvf9rXa3nmSSfqRVgfRAbiaZGGwdF4cPH+bw4SjZolQqsbI8T9BOCvnlHDxemOe/rk9jY5Cw\nJEvry6QmhhGhYj1mEwiN0gKLdrKdbZF+5DGUZdMSikRosqGXmBQS79Y8jwSKf/7JMU5nXKBJJ07C\nhrYzRmK4LX7rF3+ZP/k//oBTuQPoVoaJqVEu1S/SXHf5v974Pm8uB3if+CILz58hH8/hCFisazL5\nFIFS5GobfPtf/U+8+uY3eKSYJpOCRrPJ+NEp5ubmqDUD0r7EN+1u1TOtNWqbgUj4OxN0tsfa3w13\np1qDUEUv9OrqKhMHD3SJy+7FnRzb9vybfRyXDy2kD/F1hJ9BezEIbapWQNJwidEgF/OJGyGWHzkv\nezNoq5bNlUs3OTBynHhTU6u0MC1zh5Nx1a+RGBhidb1KM25RrlVIra1jmoKBbBrXEPho3CYsNWoc\nf+oJVnWAu1Yhm81SLBbZ2NjAiic4+PFnefHFFymvWsRHRxkq5si4IUJr9DNP8udv/4A4kifHhzh1\ndJpcm2RwYWmJ71YMTkweJWbb3Lpxk1cuXME5PEE1liZ+cpiFmzeRmUG8Zg9rip1GHD/D9LHTXH77\nHV5+8wKJUPPkI6fwasu4fpWw2erhSIqefW9c/7rXpOC3iFkxHEMgUaANWs2AdOq9zRZKKVzX3VFS\ns9+qssNx04s1Px0FmWjwO3/Vzv2CXY5pe5fO1+14KAS9IBLmvUUp9gpHjhxhfX2derVKOpulVqvx\nd3/3d9xamMf57S+QtWMwprj2+qsU/CaJEycQWiP6BKunR4uEhgIlMLTFhJlgaHWedGOZ//bZCY6m\n12nqPiVwemDJgKeefIb/+X/7X/nDv/o+P//Ff8ipzx/nxtI82Y88ydyIzZwzgZuIs9gKSWsT6QSs\nt0LSrmDeyPLpf/JPkRdeY+mbf830aJyFW7NszM+xuLjI6OgorpvGsCJWSsOUEXvjB1hO78cbAtGZ\ncYXeEmHleR7jRyd49uyjFJPR8+kVKCUh+coLX2dl/s5ZsAIjYnldX2e9VqPsNkm1aXlXVla2CPrl\nepXj7eNs22Z2dpbXXnuNyclJhoeHWVtb4+zZs1y7epWvv/giVjaLWqugggAnHicej2Mj+JvKOudX\nVjl7/Di+7xNPJYnFYty8eZPS2hqh55NJpQh6qH47n37Cyvd9jh8/jlGq47V5noBuuHUHYVvQ925z\nlO6e12p/9rEVD4Wgr9XrfOs7f8djZ86QSCSI2XaX8U5KuRuW3ttidTXixOlUwYnFYnzyk59kdnGB\nv7h0EXn0GK4wOPyR57n+g1dxGgcgm0YrvYPe1vO8iDvDtil7DVbfeJ0nXJ9f//hJDsVvkbASd61Z\noIXF4x85zak3z8Jfvc5f/Nm/5bXib/DpX/gC308Os2AN0RIOaI00QKkQlInQCs+BGJrzTZczx47S\nfD3J0MQQTtzg5o1bPProo0gpKZVKXJ+5zODgIKYZ2XZzuXy3HwAwNnXq7uT6HmyHd7zndqiZ/jH0\nD9u2TTaboboW5VagTTqkM+l0usvS6ttR55w7d657rM4X2uXv7izowzBkeXUV1S4C0ut/KRaLWwS9\nSm4qImNjY+RyOYrFIq+99hrJZJLR0VEqlQqFQoH4E0+QzeWQGOTzeSzHptFsYiqNKQxUq8nLN2e4\nPnOddCpLpuFx4rFTPPfcczQaDd65eBEvhHQI8RAy7U8fmnWaQuA2Gpw+fZrrb71DrVpDOD3ZzVpH\nxGjtyLx4z30YnosbBrhGSFMJzH2lZgceCkFvmSbZZIKXv/sdkskkJx89xdjYGIlEohshsJfoVMsJ\nKyu0jKMRT07GJnX2FAtf+SqD/+AX2BkLAz4aU2sMBL7XJNGq85HCQc6agmK8Hdp4FwQbmortsJY4\nTuLn/gneSJKBn/oorwQ2ZX8UP3BIo0EZmAJMLVFAXOso1cCUxEZz3Lp5kdn5G5yQq6R0lA3pOA5n\nz55FoBgcyjM4OMildy+ilOLq1avAJvf8U6ePd9v0sDHtfZgQCfos9VIVLQwQBh1B36mDmkgksKyA\nWq22hdemKiWxWBR5srqwSDKZxVD1tiDfnKj9IKITkHGTeCKGkYiTxKDq1og7DsoQSDRum65jdnaW\nI6dPUS6XgaiG6tmzZ1laWupGuAwMDm7y76TTNDSowEOZEkNpTKUICMgdHOWpqUMIwJpZRgJXrlyh\nVCrRdF2WVlYox+M4jsPi4iJTw6PIpoeUcgtldQNFLBajvlpBSkmr1cIyBZlMhsXFRRKJBJlMhpYf\nVYpwXbd7/4dicWQQYpgaQ4FsKxb3mzxo2zblchnTNLHafr/doPd63dq9DzCj60cuM1YKGMqmGB+M\ntM75S5eZv3SZ8fFxRkdHyR84TL1ep1wudz3pa82oHFu3c7eVvbO2G3ZDu2vDtyyLE1PTjP7wr3GP\np/APPc6C65PIJxj47EdZ/eY3GD/9FBnHpCYkvhOL2PukQiNo1ltYS5f5z88c49hQngG7xTXhkFIW\nyq5ghmlQJgiN8BWhGRJ4YHrgVcq0zDInDo/w93yfv7zVYsPKUzMdWkLjmD4tDbZQSCVRQiG1xEcQ\nD23qVp3UhXf47r//M46liyy2EjRuzrI8epJXFht87avnyJpQ3agwlEhgGQLPMogdfZyGbVEoFLBs\nk5dff4GVH77Dl774S9xcXWK5vsFPH3kEkU4Stryu8O9o+6ZpdqvwdAb+DuJkobq5EN1ub3+9tTDP\ns88+ixvce8RM0/xRLu0mNjNgZVs4PwiFU2jS6TRSaAJpwEaUfxopNaDRFItFVCMKwaxUKiRzm1px\noVDYwfmeTqcBWJSb+Q4KMA1BaEgMKw0otGkT+gFupcLCwgLNZhMFZBNJzkwdw3XdiJ768FHOvfIa\ny6bm/2fvzYPkuq4zz9+9b8s9syqz9ipUASgUdoAAuC8iKYlaKGuzLVu2LNuSl+4Jzx8TMxHTHdOe\niBlHdI+jJ8YOhyPGPdb0OEbtvS2NLFmiVpOiuICACIIEQOxAofaqrC33zLfcO3+8zEQVUCKLIClA\nkk9EohIvX2a+fO/dc8895zvft337dgYGBnBbue5UuGwfGhpi+twF6vU6nb3dHDp0FydOnGB5eRnH\ncUimw4lobQDoei6eCCGkvgDvZ3Dl+GZ2Rzj6G60Fp6o31XlOnDpLPB5fz32dfBv5nKZ9ZKCTfzz2\nIh2RUazOFMueSS2aIrllmML4ZUR/P7qJB9Z
aowOJITWZapnswgwffHCEhK6w4FQR0sMXFropJyZF\n2KKdyXYwMXmNWi3g6NGjxJIpTrx+kmu6gyuig0OPvo9xW1K3oSY0tjJAaJQ2UcIA7ZGs+9RMzVAq\nwdH/72ucvvASu3p7KeRnWB4bZfC9j5EyLSK2Tb1SJJOM4noNHK2IRmw8qVlVIX+radmYhmZvfRfL\nvqYn20m8I82QAcuXJ1ldmKNaLKGUItrMyUaj0XVQtVYU9i8rgTc3FUCtrEJtYjMAGnQYYal+Op8n\nVY7T4wn6HIuEGScQ15WdtDNCPB6loc7QMBQYVXqiIW1uZo0+qysUImKwUi2wWKtQrFZIL8xRMcFX\ny8RzOTo7Mkx7ZZIOlJdLVM+eJn13N9FotN2UtGt4OwvLq/T09GCaZrsXI7dB8bAsPSpBQFxIOgNF\neXGJQn4JAUQJHTbA3LWJ9ntG9+xhS3fPdUe8WuGVV14Jf0Nz9VBIpehLp9l5cICphRnOn77E1QsT\nRCIR5ibzTHQ6+IvL3L19jEwAUQUzMsDXkJQGKHCFpBZUMWMGak0XqnyLNXzf99swyRv1KzZbS7TW\nZAaiZjjhb5Sj2HRt8hZxCHeko197Uh3HwY5fZ7GbmAhvnN7RLQAMDzeJkJopiXbX3ya4X4aSKfaU\nNM+dOkNy5zYOHjzAqaUljFyOoquYnphkcF8aT+hm4TwAJPFqnYfsOFm3hpRGKIHoOyg0SoYFJENp\noo7DseMv8YOnf8ChI/fw3g88wcTqNPd/9Ak+9x++wKnpFR78QCd1ExIGUKuSchKIwMUtFBjtzlEt\nFJg8+Qq20qyaDg+MdHHXQ79CYWUVy5REoxGCIKBDhGh8K5FBSTAjNgpFRSoC7WObkmgAdkUTUwbJ\ndJpzS4u8eOxFUv0DFHyfR4eGGUrE8GrhrWiaJrZtUy6XuXTpEvl8Hq013d3dSCkZGRlpXyPDMHC9\n+k10u24ziI1Go1QqFaRza+RQP7mmwfDCxw0WjUZpNBpMT0/zXGmGjo4Odu3a1X7dDMxbTj+Mjo5S\nNjTTleJNr/X395PP5ykWi3heqFNcqVQoFotM5GdIpRNYjoNlWQixMWolqsBGYvmKkz98maiEI/v3\nXz/25qqvK51pb9uI4+jhhx8GoFavIITAtu1Qh9bz2JbYxtT0NE888QTJZJJUKsWJch7bDxg/dQZP\nNe1B9LkAACAASURBVAuvb53S52fO7khHf6PJNWvd7lwY1U9NTQHXo8tMNrzareXmZiwhDXZ1aBZq\nJ1i6dJWJUgH6BnDTndilGNbKCpYGTwUhu2XUwdESw7JJWBECIfC0T0xqPKmwRBASmTXhonPLeX7w\n0lF+/Xc+j0bhNwS1KxlePF3GiT/M+z+9j2fPnCW+Z4yYNOmo+ywd+z5BrcLKzGUKboXH7r+P9w/n\nEJZNjQAhNW6jTiIexdRgBSqchITC02Boo52+VYZGCwUKHCExJcStUAyjOlmkI9bBXXffSzlokPZ9\nlpaWKObnUW44+B3HwTRNHMfhwIED4WcqFeaDpeTcuXNtgYV6vY5lN4XFgYGBAZRS5Lo3lr37mTGh\nEVYdYa2J45o+P5vNEovF8DwP2ZGmY2QLV5evC+qMj89x7dq1W/raUqlEJZRmuMmy2Sxzc3M0mnnu\nVqNVrVajVKlQqlSwXJfOzk60DhAbANoNDTJQFCan6U+m6U93rJvkWxBQb4Nta63VO9Ka0KQwqVUb\nuL5Prr8b07K4ePkyiViMRCJB37YBXnz+Bdx6nRbPZ0/sXzz9m9lPhKNfa61IYXBwELh+o5hOmO55\nS44+ItgVdegyBWfnVnh68nkimXuYT1lQb4AfgPZRSiIQBFogm+mKSrWI26gStSO4ykB7Jp6hkU08\nrLRMZmZn+eVf+wynLlxgbOc2HCdBsfoK3/7nKfSee9HRCK4vccplqsLEnZ9j8YfH6Eqn+eR9d9Hf\nlcCr1nCEi9/wMB0DT0MUiWq2CHoCMMAKQrUfpTVWIEALlNbYjk1xpUyuK0u+uAJSc+7cCQbrNe47\nfA/VapV4Kkoi4mA0oBF4ZLt7MAyDfD7fRi3NzMyEyJlmRB+Lxdi7dy+GYdBoNEK8cb1CvcniOT8/\nj+d55JfCNEWIHvnZs0C5RJKKbbleHMchGo1i6Q66urpoNBrtoqRbnuC5V0+0AxgAoxpy03ieh+u6\nIa3AG+T3W52p2e4unEoJ3wS/WiIIgmZHZbiaWlxcpKenh3PXrpGfm+Oh97wHaZrEYjEefvjhEOHy\nIzIJre7zWnGZyQuX6FCCfVtHcNwGyri++mg5fWuNc2+N3bXoq7XF59ZvNSyLaFP8RAPbRkaoVquU\nSiVe/cd/Ysf+faR6umnIMJOxcv40s1PTDG7ZShCEFPXvhL3VYuydbHfkLzCa7IytEyzVmnSADp+L\nYjifd9nhHVmdD4uzZyeXgPAG6B8aZHBwkFKpBLjtlJBo5s+DmCCifIY8A6c3gTt+CffaMhc6O3kt\nZeIugE0ITTOUhy1tKlqQVAFThTB3F0mWyCgfW0iWGgaBNKiU6+Tnpog4cUo6YMv2QaSGixde569+\ncJKLme1k791PPmFDxMZdKJDJdjI/PU/3wDDvOzJG0lZoJbCsOCUIr1STF8eH9UVPLQgEGCJU3hEi\nFCLXCkylMapFSlfzjPTkGL9ylTPPPsevfupJEAFCWE3noSETI3QtGo1PtruDju4OgiDA8zxsYbcF\noWdnZ5menm6LMNfrdSzHJpVKkensZNtouo3gaDEEFotF3IbHysoKnhtQXCOl19UVfvOePXvQBLhe\nOHmk02kilk2lUkFrje/7bQ2AtuNYUyuQUq77vxDiJs2AH6cZpmTbjiESiVDoular0ZPqI5/PMzEx\n0YY35ssl5ktFRHxNdFr90Z/7Tpht21CrhRTgzTRc61qJZvOP1HIdP2ulUmF+fp5gYYZO2+HI9jEa\npTJCi7fVyRoIPxyXzUR68ygQgNJhobm3p4eRrhwX5mcYGBsFGerP3nXXXVz5Ceqhux12Rzr6t2Ot\ntu2AMKo8ceIE+/fvp1gsksvlSKVSxGIxyuUyotlcYdgWnhAMDQ3xyswcRiAJYn1w7STB3oOklEBr\nQUE3SHsmK7ksX9+S438uB4wjUHMz9HVmsbMdVAqrzC4vcfyFF/nIh56kp+QQSAdlmhw9cZ7LdoyO\nRx9k0QoQbo1t/V1MX7yGuDRF5PIUDxzYT8J2QPgESiHX+aibFGrb1mJ8lIAU4QCRSpOUmmPHn2db\n/wDfeuqr7E+n+Mz+PVzfOxwhtuXgehsLkDuOQzqd5oXvvwDA9PQ0yWSSeDxskknEYhw4cABF6Iiv\nTU7ieR4LCwsYhkE8HkcIwfbt29uC3alkppkDDgWly+UyWmsWFhbQBLzw4vOhWEuz1tLX14fWmsHB\nQYQQJBKJdjRoNClplVJUq9V2iqCVDridNWPLknSkHWQjzfFnXuPyuRnsyEkgjGbnmOe8uEDfoCLi\nBT
QaLtVqlZWVFTo7s5jEQ5bQwAE3AebN3r9kCDwBsUAzYDg45QYWYCmBrrmhlKUTYb6ZPWrlyiP1\nGr2jIywuLnJw3x6CZIzqmhpKICAQCsd3cQyTxavXkLNzHIgnyGxr8tY3asRjYQBlqpvdiQ6u1yY8\n1aybSau9MFFNHQ5Dm6DBlEYbkdhKzsjwg6hUGlieTzqZpF6vU5VQlzAZi1C9aw9f/s7TfKhnmJFy\ng1Sik2rZJWIn1hzMBhdog45UZdz82IwJgrZmbEsq01tz8zVVLLDkzUsOz795W2wNm2iL48ZY8xuM\nt3Bf39GOvk3g8wYF6VZUp/T6nyIIo7nBwUFmZ2cRQnDx4kUgVM8ZHBzE0gIhTJSWJJSkqDWJAOqr\nKzixPpIHDjI3PkV6dCsS2OIZVDWUpcItuHznWp7f//3/Hm9hgn/1a5/l3sM7aTQ0Q9uHSHX1EMkM\nMXNllsjIAH//zA94wdUs79jPeK2OU1hk38gIF0++iH9lHrWwzMcfew85x6QqGnjCwNJigxtxY2e/\nuLrK9PQ0W4eGkBos5ZMyNX/zN3+FW1pBLMzz0PadxMuL5LIpQoKdUPQwRPlc/8y2g2zuYRhh52WL\nWKslTtEqntUbDV586ShBEFAoFPCasnQ7d+7EMAyy2WyIRJKS1ZUCqytFLpy/tA5HHY1G29dLE/D4\n44+Hl74ZobcizRasrlKpMDs7S7VapVAotNMT9Xr9eiGwq6tdQ7hdFgQhPp5a6EA7OjrwlhoIIZif\nDXHsWmsSac22bdvaOqqGYXDq1GmmxvMEG8+/b9ssyyIWi6GaqJofhaKybZv83DyL+TyDySTJeGLD\n/TZrreG8VnJHNPM5Rvif8Lk28YIgFCVq7rfqVdCGgWh12Krwr9AhRHSlUOCuTC+NuXd5OfQTZpvR\njB0CvkgokqyBP9da/4kQohP4O2AEGAd+SWu9IsKR+yeEIg1V4De11ifencPfnKXTaWJOBMdxmJqa\nol6pcvzoS2TSSTIdWcbGxqDqMmTFGOrexlfOXqbr0D5Ub475uRliTXxuQQiSgUTiM3zkHv7g//gf\nGXC24KTijCR2c2jsEWJb0kTxuXxlltOFOpHsCN986STfuzjLyMMPEswpsgOjGFHF+JUr6IUFhF/j\ngzvvIRuxWKaCgbEuFdvi+tmIua9lZvMhlMYQgphjMXvuFe7fvpWh3odZKq3SmM8Tk4JauYjKdYXE\nbc33bwoqqS20htmZMG9fqVTCPLMQ+EGAloLu7gyZaBTXdXnttdfQWrebW5RSJKMpent7GeofpLOz\nk4bvtovXABcuXEATUG9cb4jRBPT0hBrd8XgcKcLc6cDAAEIIguZKREqJZVnU63WEabCwsBCqlunb\n1ylpWTYrKyvMXJ7l8uU5grpNph4ixqJr6DJc9yozMzNcu3aNZDJJLpdjYGCAuakVGu8K8D4MkmQk\nQhC8cWprfn6epYUFsh0dREV4jpW+dfEbJa5rNSsR/l27clVNRx+xLZZn8iEYomklwyDmxJBATEFU\nwIoOUKbBlpER5OlLOELe3OPxM26bieh94H/QWp8QQiSBl4UQ3wF+E/ie1voPhRD/Fvi3wL8BPgzs\naD7uA/6s+fetH1wzMtvoPrwxyLc3UF2Ktn5d0KBSdKkAsWaePt7XR61Wo1wocOzFFynkl9nZtwO3\nN0t8uJeBWINoTTLcPcyy4RCR4HoKx5IIO8Kp7z6DHnoIc28nydkl5NADTDu7cS8co6Mvx6oreeoL\nf8y3uh/m/o9+iMSOu7lSqiIzFTzPoyuZ5tT3niaWX+DBe+5noDvJvFdFYBCI8MIESAIt0cLFRtBo\nNKjX69i2RTabo1qvE/ECLj5/nJJb44G9+zk/vcDVV4+zxapxMJOlvytDMplkprzMww8fIlUsoCM2\nCyurxG2LuGWDYeDq63SBgeejlEW1EubmU6kUp06O852nvgdsXFhtFcFnJ6+0G6uE1mRSaVCKnXv2\nkssm8esllOuxMD1FdzpF0JTWsZu1lt27R9qf2XL+rdrKxMQElWKB8tIyhpCUSqWmwLZGWBbCNEjG\n4uGEk83Rm0ph6OvX/HZYq75x8eJFtE6gtcYLwjqTE7VxXZdGo9GGqp4/f57V1VUmJyfZsmW4eQ7e\neRrHcrmMk+omnsvRaMxTrVZRUnBlYZZUKkU2m22neBqNBlprDMPANq2QWtu69WRA3atj2FYIyW00\nEKaBFgLXdbEsA2maTM/PUi7WuXzxEg898AAQjvnlCKS7erh66TL9HVm29Pez2ChhI+jp7WX10iQr\nbh3d6kz9MZrnee3UTRAEYUF6DTq2naEwNgeZXUtJ3ArE1gZkvv8OKkw1dTFnm89LQoizwADwcUIZ\nNoD/F3iG0NF/HPhiU5HnqBAi0xJb3twh/fist6ubIAiYmp1h+9at+OUGk9NXuVpQTPgZtBNhbGiM\n+YKHX4Xx+StYnke0O0d6x3ZGP/ERZl49z8gT3fzV1ByrLx9l7644lakCg+/9JKcjEYzEFhb8Bqrq\nkjIdeuMpUkS5+PXvkdEGOw8cYl93N+VqgcA2woarGzBxEnH9QgvB+MwMXt0lnkxj+ho5Nc3Wzk5O\nP/VNvCR88qHt7O206LAjFJeXGJ+9QGzB51r9dToimlS6g4gdhcBDOmH6RGvRjqSECJu+5ufnOX36\nNK+//jqdmQxbtoSR6EbRfys9ks1m0VpTKpWQzc9CCC5euMDxpTn27N6O5Vhk0ptXBGp9dm9TU9RP\nppifnyeuFFprTNuio6sLYRokojECNBHLZm5qCpPbm7rROkzJUOtkacZjenwZT4RIsarn0Tvcy/Dw\nDgYGwkF9/vz5d/2YWjWAu/fdjdXVQzyeJj83QyCgq7er3QW9GSm+W7FGo8H5kyeZnpxkz85djIyM\nkEgkGJ+aZWJigmKthKsDrKU8phRs6f0AFRkizHqzPbx27IdcfuEldvYOYGwr0L1zAE+AE/i4BCwa\nAdl35chvn62srADgqOvOv7u7e9Pvf0vTshBiBDgEvAT0rHHec4SpHQgngck1b5tqblvn6IUQvwv8\nLkBH/I0ZH98tK9fCPF5nNotWAiPu0GE4JKYXKZReZypQvK6OgkrDXAXrUA9GTwcjwwdZUJL5uQX8\noUGuIajt2EJMbeeUVcTUFldUwOLwEZyqz5WnX2Akk0V09/D61SsMRhNsyaUolEzu37qdqGlQC3xC\nl35zDl5qiZSaXHc3r58+TaRaJdkDly6eYocp+Xef+zB7D+/mt3/9s+zbMUZn+TKlkmQVSSAE9QAe\n3nsApStIw6VYqqH9Gq7boFYuU/c8hPARhkEul+PsuTN8/evfJdvZj9aanp4eJOsjjButlddv7ROL\nxa6zgBoGrueRSKeYmJwM0T6GRa6rg9yW6zdrK1//Zk1Cdd+nWq7Q3x9i9F0VsLC0SKAUi/k8gVKs\nLi4Rs20MDfXa7XP0QsKqu0D//ggdo4rewyZpGUKDT58+TUf
HMp29Nql0J67r0t1vMT8/T09PD5VS\nHRUIPC+Ey7r1UrOzbmNrdXEKIVpISnzfbz/Q4UrMkBa9Pf0s+JJ+YeEFZeqBZmhoCC8euz6RNzNG\nrZrBLcEMjbDwaqCQvgdCc+J73+HimdeRUjPy2EP41SLzx76Bt5DnnmiCmBbYSrOSTTFbq+C4qyxb\nUDSAiUkyU6/ykF2hb/4CmZkLCPP9dPT3Ml+vshCUaSRjpAur9COwO3KkgqajszeJvlImtUq4ktJB\n+LjRU26kNdsQzftWQMX3QpJAHY7mQpNXKBDgBzcHShtJajoirOWUSiV0o4QtwC+H1Ba2bZMgg/lO\nc90IIRLAl4D/TmtdbA1KCPU0hRCbXES03/PnwJ8DbMllbgsuwlTNQiRgoMO0CRoqLg8fOkj1wG4W\nBxLMBgZVIFYvMHnqNK89/yxjD72fQEoK0kcosIXACCwm45AMNDU0QbaLvqEu/P7txCM2lUKBYVVj\nZyTJ1OuvsG3nCE4yQr1RAUuCDmiSNq87TtWsyebn51mYmeZed47CD07y+Q/9HPtHBhl//Tmir1js\nGB6jPzcE0ieXC52c1ppuZTK7cIX5+Sl8LyAajzO6YzcxJ6R3qAc+QaPOuQsX+Iu/+As6sxl27drF\n0mJ5wxt6sxYuYzVyzQpFYBJoQSqa5r/85d/xa7/z6fZrm41QWnea00TtOCiUH8cNfHpGR1EC3G0B\nuUwGQ8O3zk688Qe+m6ZlEzVjE4K8bQrFcOJ5+KH34XkenhdCTltd3++ehdchpE0WmI5NuVZlcmYa\n3/ep1Gs4scRGPVbvqOV6epmemCRoNDCVpF4pUy6Xw96YpdX2fiaC6mqBxsIivglIKJ45w/50JxVv\niZwPCQXPHz3G7sN3Ee/OEQ00sQB6enowNqLJ/Bm1TTl6IYRF6OT/Smv95ebm+VZKRgjRByw0t08D\nQ2vePtjc9qNtDVc1vDWJrM1aVN4cCalm9NkCGdZr81wuFwh+7lf5574YfWKWiruNWXeZOoJeDLJb\nx4ilMtQMTdAQxJot4kpDzfLodMNjtzDAzlLyFVYkyhJAupNkvIPZ6cs4s8tsHRjBdxSVmg8qZPFE\ngK/BVBLfEFgKBAptgG4oHFvxe5/8Bb74xS+yrztD2jY5eNdjWMVRFi7/Lf0dSXr6oghVRlpeyO9h\nBVgZh53du3AUNHyPSmmJ8SurIaJBKY6/9Ar1MvRmdmJJi3JeEdNNTHcTIac2UK9/I1MAIhQ1NOyw\nXNwSfi9VPLq6h/nrv/ga27aPcPjwYfyEgxM18BqNkCCt5XJujCF8n45kknq12kTzCGKmRdTStOCi\nUQe8xjIeYV/B7TKtBH7VIdu1hfnqPOXlEivTIUGXX5ujUCiEMFTCdE4ikWgTx609260egmg08Zaa\nz0zTbD9Yw8JQr9dxXI9rc1dxa3X27t0LQMX3sW2bYrGIUopUKkU8HmduapqaNLHTGRwEDbe27jtu\ntLWrvxBdo1qLDO6++27uPXIYAGEYVNwG6XQa3/fXOaS5ySkGOjtYOH6SUuBTC3xGGi7mxBL7Mp1U\nFhbZuXUb52eXmHrxhwTxKJ3JOI+P7aEYTFGez2/6PG1kQggKhUITsn17Jo0WVbtSiuXCKpkm1UuL\nnXOzfFNvyqTTRNH8Z+Cs1vqP1rz0VeA3ms9/A/jHNdt/XYR2P1C4E/PzG9mstDnavZvXdu+jlsph\naJ+cY5BJJLDsKIl6QG+2B9OM4fvQ2ITCgdtE7LTiYhFzOPVP32Rn7wADqRR6AyhNXAdkVUCPG9Dp\nB6TLdZzZRc499S0+deSBDb/n6Lmv0DksKHpzTMxdxhOglI8SGqkhF41jEebhDcMgnU4zumMbvb29\nHDt2bN3A/XHa4FA/8wtzPPfcc0xNT4ZdmcgQvKzlxiBmKcOGMCE2TdN6u0wpgVsxeeY7x/jhi69z\n4cwUC9Ml8jNlOpMDGCpBccmjUChQKBSYmJjAdd12PeTdsJbg9sTsDMulIto0QtZHQhirlJJarUax\nWERrjW3bbdRTvV5fRxF8qyaEQOqwg3ZuZoaFhQUWFhbW7ZOKx7H8gPr4FImFFQYqHlEVEoVdnJzA\njUeoJqPkAuj0QeZXMFeKGEuFNqXDT4NFIhEymcy6R6tfaLO2mdD5IeCzwCkhxMnmtv8J+EPg74UQ\nvwVcA36p+do3CKGVlwjhlZ97S0f0LtlmRHUFARWhqRoC082yq/cAk1cXmXv2eWjUWfzgXUyn6ggZ\npQunuRJ4Y2fv6DD/1vA8hNYUX36OoZRFbmcPdavB1PQsuY4bSkdCYxDmoRGa137wZQ7t2c0f/6+/\nQ9SxSdVrHD58mEqlQiqVwjRNTqwUGHnifRQKBU6cPEav2UMUiSEttARHCUzTwkfjN9EBpXKZL3/p\nKyil0Mq4ZQKtW7HWd3luQCKeRCnFsZeOE09EyHaF0V0kaqMVtLx5C0VhCIHvulydmESjEIaB1pp4\n0kYIQTqdRghBrjvkRRI/Yjb4cUCHq5UqJ374WlicbtIddCW7QrI+I87o1t3s2LaHSDJM2ziOQ7lc\nZseOHVw88/Qtnt03Ns/zQsRZvhY2w3VHSfqhsEe9Oa+2eheEEDiOw9DQEDPnL1KpVPADRTqzeaqR\nG01aBtr18N0a5cUlKtNzDKTTOBqoX192dCZTlItF5s5foiebJdvXTy1pUROaSF83Zi7Hc1cu0K1i\nSAHT+SVEo0F9YZFyOUwH/TRYLBYjGosRs8x2RH8jeeCb2WZQN8+xITUSAO/bYH8N/N5bOoo7xBq+\nQSKRYK7hovptLheqzJ8/x75PPInuTDH30tepeXliPYO40X48DMw3SWH7WrebOxwtmPruU3ziIx9i\ncTUPHZ3sHB1jaWlp3XtSwqC6usTrZ8+CH/Cvf+UT7B/Zhhm3uPH6Xrt2jXPnzvH9c1fJLWS4+/Bh\nrpZXqClIAtIQKMJVhdYCKUAIgyuXr/LNb30XwwhJy25XaNziPRHSpFav81d//dcMDfUxOjraTmPE\nm1zlLUoFz/UYHNnK7v0HEFphtkDY0kMpxenTpwGYnJxESknlRw/4dx06rAOoF8CR6bApKIDCqgJq\nPPP0y+39OtKh8lcyKQGHC69NsLTkgpZ42sWICdwNOioBar5HzffafOxCAK5GS412NaIR/u1o1qQX\nF5fpjsXYc89docMvl8lojee6dGSSVEqrxIRCEBBVHkEtQClNbnCA5eVlTo2P8/OxzHXNgqZvrpsu\nVakItMZqXpMAjYkAw6QuNKJa59TJl1GFIrMXrnBoZBujUYOkjFIsFplaXqa3t5dSqcTytTmy2Sxj\n9z2IBGpaUyoGaOXguQ3c+jymVNgpg1qhRjrpMF1e5NVTL0PXKDKe5OzEFDtyPXSm0ghujvA3Km0b\nBNQrxZD/PpalXC7epBn7VmzdyNICjJvHmtwA4VTywxRdqYUlsNPIZncxpgnaQm1y3N4ZnbEirEa3\nxmsQrOe6ubWPXD
8owhSGgWlYgEZ5YWZAoEEIZoWiaikcr8JAoUy0S2GkG8QeGyadClDVS/Tcs5ur\nU5NcvXoGPaTQOk4qkcVCEKBRMrhJr9LGRKBx6jUGolFmojn++bvP8OSj9yEMi/pCnpSlcVmhp6eH\nL33573m04XLo0CE++nMH+cIXvsDVxQfYtTVgZm6BkS29nDf7+IYY5Pm/fo777nuAenwPXY/38Mjd\nBxk/9RqrQQHTNsCLU3IFoq1GJFCB4uz5izz9vWexrQiWlWBlZQlLWlhSkslkqFTLGAhM+52Xx7m5\nJ0ICsimOYZJJ96GUQVdXD/19A2GxXPmUKyU818V1XQIFqzNzKDVDoVDAD9yQt0XokEe9eQ2EsBka\n3EI0kd7wWH480GGBDEyMDVzK2hVUizxufn6+vc2Q4cQWBAotfXzxzgiwRCIRxsbGiCQ7qZk1ysJm\n9sJVuru76UgKrAAMX1OaX8QzHLpzuVDNSGu0rylryYkTJ+js7KS/v7/d52AFinhTj0G3OHMMQIEp\noVwpMzs5RXm5gFcokM10UiwWMSI2yXh4r/X29mKaJktLS8wWyriWTV8s3ob8BoEJvgGBjVACSyrK\ny0XKxQpe4CG1wNSCrr4+MpbD3PmL78g5+0m3O8PR32DXi2ctZ/3mDse9gQLBvKF4YigdzqamQglF\nYLSEyA20iuAu1WnMrIAaJ1P+IdNfepXZ7hQyFuW8Ugx1pYkYDkL7bO/sJBrPUjDjxKNJ3HqDulcP\nP/8GR6aFjyUEOaWwrlyjV2sefc8jbNvWT7FaC+FXNRevtMCffuEL/Nbv/ja/uXOsLUXW3d2NysRI\ndg0ijAKLJY9/+u53OXN+lo997GNoHfKc5PF55cUXUdUaXUmb2fFp0oP9+FoTlQZ1vxbio7XPiVdP\nYEpJdy5HuVijryvH1pFRZqcWeenYy+wc237bInwIWRiPHj3K3j37EDLseM3mMlimQzKRbg/6lgyf\n5zdCgjShqTYLtBBC4GZnZ/G9m7ngb7R3CzosjGjYqbvmfLZyx2tXckaT6mJtcOPYYR7W9dw2+qml\n1kRycwXZdDpNKpUimUwytbjK6uoqe/bsobu7G9/V2NIkFo3Tnwwnw2889e027US2o4P6xDQnT54i\ncMN037aREbZEk+x4YCcLCwu8fu4cW7duJR6Pk4mGfEm1Wu26EtzSEqVimdXVVWr1Kvfcdx9OzODs\nseNIy8SvN1gpreKtBnR1dbGyskIymeTgwYOok+eJRWPr0q4isBBKNLugDXwaTC3MEGBSMwRWLMKR\n++/FTffiF0qbOkc3WqvQ+XbYK8vlMr29vazewiFc/73v3Bi8Ixy97/usrKy0VWlaJ7ZSCUWRa7Xr\n4sitDsyW9mmhENLgGpH1OcOIWI+drgkbZIhS932TinI4dXmSlVKJLQM7ufveD3Pg/Tk+mk5QExLT\nyLCAyUrDJTBNbK1xA5crVy/TWCzz/S/9I/UtW4juP4AZj+L7DVwMIvb6U+oIE4IAXa5y6rkX+Vef\n/Hk8WaNQrWGbDsX5GS6dPs19W1P82uNP8OTuuzBlHdMMkQ8jI1uIaDh3vsCxoy/zw6vnsXLdHDp8\nhGq1isYEw2dkYID81AxOcgsdex4nFknyjae/RWlhmpHt29i7+wCmgOJSgamrE4zuOsjufYfo7epF\nGvDNp75DrVpn195dbQHmd0fzbnNWqVQwTZMDB/c3KWoL+IHHyvIqgfJD8jOtaTQa+IEbFqearjmm\nyAAAIABJREFUcnotIWkhBGNjYzjOM2/4Xe8mdFhaKX2jo9+suX44IQSBCvPlt9Ahm8/nyVfKyGsO\nNR0W8lpUC48ffpS4bRPNJNtMop96/P03fYZliHanc2tsekhSiQTduVw7VZYyBG65RCadZubSRdCS\nrmyWbCpFXGuqpomuVxkZ3oqs1Lj62hlMy0ZqH1T4W5PJJIZhUKlU6B8K+w1afgAg5sQQgYnthFxN\n9Xods8nVY5mSocEtdEYTzL3lM/XTbXeEo7csm76+vrZYcbkUOu+2SpR9PXppzXatG671t83u1sx1\n1fz1g8LGxPMDajWfEwsVLuoYH//UfxNGNqZFw9d4QlBvNAi0T8QsYwqDjBlG/tIAWxl07d2KgWSs\nkOVSucR3nvqvpPft5vD7P8iJi7PceErrGhIaJo4d5/EdO3A8F2W4GMpj6upVUjGL3/ulj3BkoIvv\nPf00PWYQNmgAQlgIYTGYKXP67BQXp67w3ic/zHypxlzNwzEEjUBgKM3CmUnGT8zw3o8cJpIbJaU1\nj/zyThKGoKEVZ3/4DJfPnwWvzmMf+yXu3r0Xx7Qprq7yzNPPkM/nSdiZEPOudbiGMm6Pow8dddgl\n2tPbTTTq4ERsItLC6nDQBHT35NpEadBMfQhNNBptO3qtNcvLy2/YCv+uQ4fX/Ka3ajqQTZhp8/+3\nAPFrNBrs3LObrbvGsFPDbUWppaUlXjv+UptTp51+aR6nEqD98PuEF55PCahayLxTUeG2jmSK9zz4\nEJVKBVUtsDo9yz1HDmHFwuuQSsSpNuoYSocUFvh0xbM4wztIapuYFsxOXEEtXAVC0reW2pXhpEOg\nQKsWoCEZT0JgEahVyuUKpXqFkmHhBoJAmmwb2ELdvvV8+k+r3RGOXhqCWMYilQs5yWPmelWitWK/\nLee/nF+iVm5gCknd1ZSL87h+QL3qETTK7Np9F3lPgrLwhaRYrLMcSMZXKoxt3c0n9u/BDwRuQ2No\njSlBmCbxtcu0wMZqNar6EEhBi+JjKJtlKJvl8eERAM793T+wE0g8+jh/WapjWXEqQrNULmAVlrCr\nK3QmtvDq69/gg48+xj/86Rf43Ps+xP3bd2L02OAX6O+M4lbKdHbsYH5qGmUkmFySfOXPv0bP+97H\nwEMPcKFSwqsrhBYII0pMegSu5vsvv857Dt5LZ6wDVwdUhItWCYpagB8weu9HOXDfJ4hKg6M/PMp/\n+odnqNXKXDx9hqHBYZzEADUrzkI+j1Y6HFyNIMRSxxNkEzFQDWzTIBA+BhJLhcyCOvDaRV95A9um\nMDbvnNpIHC8Uj5iaWubC+Wm2jnaQSCQwTQtpQKA0hinDjk/Pw7IsOrMhpYLv+xiGxLBCB5lI2ZjW\nxqm/TUCH/5CbocP/rRDibwmLsG8KHXakZntsPcWW425AjVsP9/Fra/hN4gkMNJYFhgS0YnSLTcpp\n4HqaBc/C9WBlyULLLPkVhdQREJrLlbCo+b4PfYZ4IkNUJpA1h1DRFToyadRDI6yurnLs1VOkUin2\n7duHIUIhmbXWui4BtLOokVY6zA8wTZNCpcrV145z//33E4kaWM1WT88rYoVlGAaGcpw5c4aI00FH\nIkfHvhwzMzMcHN0NQrO0vMhLL7/M8rJHNjvIYMMLewbkdbbMRNxjdTXP3Nz1mH3KiDBy10HuO3SY\neCJB2ZPUI4JoJkUQsakKjfQadEY2uBc32NRqYmvRmcdisZuChQ01Xt9GXOS
rNYFAs+dno7t2bVPt\nm3DRrbM7wtHfimWzWSLdDqXlVTpySaqGoKojSKeb+fwCX/v+c+RrAVKZjO3Zw9DoTgzLZijXx+ry\nMssNyczyHGJqjnvuuw9tKN4O9VWLG/3kf/0SWw7dS2xrmktBBd9vMPWlL/PpRx4i3umwe9sjTE9P\nt1ciqVSKMh7okG9Ga8FiuUTf1hG+9p2nuDY7wwc+8lEuGBJXhYIiJha2KTEUaDdgdW6Bg1u3s3P7\naJut0bTNNvBTCkk9MPCVoAJs2Xk3/+ahJ3jmmW/zvg9/iI5MlkQqRTwVRlOe5+E3XNxyhYWlRaav\nXSU/OUmgDeaWFkEEdCbTSMPEUwppNG9JoXknFWGFEMzOzjIwHMO00iA0QZProxXltVIijuPg+34T\nB96sAL65vevQ4Vxnhs//8ifWbTO8m6kkkpHQAa/NBxcDD61Nqp4i0AZaSE6fP8fVfI25uav4hI1V\nDSuOAlZLDVAaITUd6QSHDx8mErWJJxKY0uHGr61Wq2QyGQ4dOsT4+Djz8/OkUikikUh7RbQZW1pa\nYnFxkWQy2X5sll9oZWWF3t5e0pkUua6QSXZlZYXLly9TvXIFHY1Q868Xoa8tLrC6uspavfK7j9zD\n9n0HiTkOQmp0oNuMr75SoVaDvH01pzvBfmIdvZYQtyPkhvcwW69QD+qcmqhw/OwrdPcPcfen/jW7\n9h0KUTCmpKE0vmGyVCyR6B3C1wGHk1ls00TqUPy7fXfcgrUaSj6zbxdnry3yzW99hZ69o2RiCVZ2\nDeEaVUSyg/HxcU688CJ/9Ad/wIXvfp/Ori5KboihFiogEY1wolTk9//0j9l75CDv/fQvMl2o4Eij\n3UHse+DrgGQqTsS0mV2+wK6xMXQQEBgGFuGE0T5XAtzAoYZAIwm0RX56gb59B4nEFFJYKARlLTHS\ncSLNZbktJT1KcUg8ivQ8DENjCZibmeHF7z/LmReeBaXo6+jECBRKBe9k/QiAy5cvc/f929dtazn3\nQqFAvV6nXC6zvLxMIpFgeHgYKSWJ1JtPOT8O6LD2Grizl9Y58MXV1Zv2myjdDAGNZTPkSx6LNYty\nEKOhbSq+whCS5VqsXYfQcYOAADvVS6Meyg5++P69RKNREskYSrdSm+uHeyQSQSlFJBJhx44dAMzN\nzVGr1RgeHqZYLLJt27Z2HWytrY1w8/k8QRAQtW1mZmbo7+9vvx6LOu3jLBaLXLx4ke12F729vczN\nzbW7T6Uh2h3QjuOwZ88eJoVmbGyMqakpzp49y/HjxxnrTYBj0Wg0yOVy9Pf3c+A9j4QpTEAbYCmD\nuqkxFBw6eJD5189j2ptvMGoVY3+a7I5y9O2Tuwl/G2jNSk1xenyRZ8+dIj44xHs/9ovsf0+oy+kh\nKeoALTSmaPbd45FKhfl+rTWBMPDQCKEwpMJ6G16qVRtYWinQ4Zh88PB+/voH36eeivGBPWNQq7OQ\nX0YvLrJjxw6klJw9e5biex5DR8O8aToV42//5r/wbT/KfU8+QS1wuVot4hlm2JwlQkUsOx5etpOn\nT1CbX+L9Dz5EPJ1GGBITgaEVSt94fEFIMKYVZkRjRBIgPXwjFD4PfZggCBRSg5QCvym+KZFoYSA8\nL1SvynTxwU/9Kp//7GeYvHSZo9/+DqeOH6Mzk3lHGHXXDjLLsjh27BhPPvlkG3ly6dIl6vU6XV1d\nxGIxOjo6GBsba3LfV+8ojU8DRQc1zDVDzUjevChPdfXctG21skpZagQRasqhGMSpBCokOLMchAxX\nAIYlMVBoKVguzPPww48RjZbfcvckhE62Xq8zMTFBV1cX09PTJBJvLDSitSaRSNBpWaFOcD7fpmmY\nmwux8Ol0mqmpKUZHRzl/4QIXL15k3759VKtVarUaHWRu+tyyISgbgtEjhxCZFEEixuSZlwCwYhG6\nRrdxz4MPUmz4aKyQPgR50y2ohXjTxsafdrtzRsRbNAPJ7HKFiWWLxz/2GTp3H0BJjVH18ZDEhIkr\nDVwC0AJfhAG71DJ8TtjEpBCgmzfH24joWyaVBK0YiiX47Yce5+uvvMYLl8axtc9oJsPixATve+hh\nJiYm6OzsJDUwwN+dX+DcuXPUajWG9jzMTgcW3TrYElsIAuEQeCIUHtEQMTxWV1e5fPUcv/6JX8Kp\nh3UGo4kgdQUY+vrtrgRooRFaIJskbqbSKC1BCZQ2QKiwj0M1dVaVwNd+iJ9GoIWBbjpgIaKUlMd5\nT2MMjPDez/0WH/6Nz5Gfn+Or/+cfbXhe3o61HPzy8jIAPbmuNjoDwLLNcKISgo50OuR817ePsXKt\nSTQx4WKuKapGUjdDI2ul4k3bIrKOiYnSOozmVbTp6CWReCfCUEgEqYRC6Rrl6jIPPXqAka1ZUo4k\nmUxSrb217tCOjlC4XGvNtWvXCILgTR2953kMDg4y3Glz7tw5rl69yu7duwEYHBxkfHyckydPUq1W\n2bZtG3ZqiEwmw/j4OMVikUQiQf9A303porrQ1FAsVcsku3Pc99h7+MWPX1ceg/DeCNwAA9HsxQC0\nCBsZNWA75N0GloDNE2P/9Nkd4ejzK0VevVpg67YMvu8zkLgBA9/kY1cIzGiGuXyB770ywyMf+Sz9\ne+OIni7yjeZ7mvL1oum1jDVRuhLr1W3cNTo0QeC0CzNtwRNuSGpukPaVNxB9Kd/AtKBRKZIQcN+B\nPfxfT32VfbvH6OvrZyz2UWrXpvnexdNUaoLf/d/+hPGeLgYHB4nlIkwDqnVYNXABaYVNV1IIFILt\n3cN88R//b3pkElELiEfiBErjax/DsjCQ64jAQs3N9j/XqRt0SH8rWs+bv0UTEqutjayFuM5BqZvd\neaaKg4RVoOE1ILedX/j3f0a0WObV107ylX/4Eo706YlaocA6gojx1tvSJ64W+csvfpWenh4syyJi\ne5giwDQspCEQwXWn3poU7CbMNRKJ0KjePs4TjcATDlpcH2reani8ax1oNBbWeNY6upPn4ZKIMmdm\nUUGCTmyKtSkcO4JjaKTQmBpiHnh+mT37R8h1RUlHirjaYrFYwzCaKSwBBusbrtZGvobR6mS9Lq6+\nfft2JicnOX78OLZts3//frTW1Ot1enp6mJ2d5ejRo9x9991YlsVsGTJDu1laWuLP/vIrrK6s0NfT\nRSKRaKeHVusmkXiEuhfQOzDEuYuXkZZDoA0IwrHnui5nz57FFBZ+IUAKA4mBBcyu3HyOzbDhOCxO\nKqgLcFoNW7bNYqNBXNBGs623TWrG3rDbm7G6tgRnNmtvhyV2M3ZHOHrX9fne0z/gs0P9P2KPACFA\nKZvpxQZnJ4t07HuA+M59uLU6wW1iltuMTZ8+w28/8WGe/+FRyqkU0biNtXWQSOCxtaeHyfFrDCRT\nJJKJH8n3bgQCrRWOHaFWqXDslWNU6lUeefwJvvLVr/LpT34Cx4myqZzXu2yNwMeOO+x/8H56Rrfx\nZ3/yv1PzPRKpFCgF3i3wjwjVJN
eyiESiZBIRLBmKiktTgn9dH7QV6bVyxJ7noW/jeanVapw6dWrd\npJmMhM/XOvpgw5zXzXwyJgGOBFsoBAoTWC0USGeckOaXBkq9M7XHIAglHIeGhlhaWuL5558nEonQ\n09PD0tISjUaDgwcP3lS0TSaT3HXXXWQ7O+npyrKwsNAWzrjRuru72/UtCCe6xcVFuru7yeffHvvk\nv9h1uyMcvQCWF0v8P//5L3nyyQ/QFwm79FqDQxIyGpZdxdxSne4te9nxwHtZbdTWwPfe+p2tVNiI\nEo1G8dcgyloOV7wFBgC/TXm8fsAePrAXU0rOAid/8H1+4ZMfZVIVkaZJsbAInQkiptPGCxsSlF7/\nWxoyzJOjAgqVCq88f4yPvPf9GF7AJ3/uY5w6eZb9d+1FmAbGO1hEavcxtGGP6yOUG2F4AHVDNDVO\nNcZgL//+D/8jpdOv8sW//TuqxRLdG7MRbGit741G4riuy6VLlwGIOy65TJJUuoNcVxZzzerlRhic\n1eSsv11m26G27VqzNuA1acVzaycEv+jjC5+1UCbDCH+PUgp0gNdwiZpRtgwOh/TGloFp6A0pF96q\ntShyfd8nnU7zyCOPtF+7cOECJ06c4PDhw6Hm8hpLJpNcuXKFZCLRdvCLi4vE43GSySTuGozg8PAw\ni4uLoZ5wMtnuBgc2rDFsVCRtiXZsFEE3Go22QLwfCc/Jm9Vw6vX6xvDJO8zeipTgHfFrAqAiYKVQ\nZ3Xu5llcEl6cSq1MpVTk4O49VDwvPHglUerW5iutNc8++yzlcvmWGlo2Y7ZUJJXPQ3cfQteqmDVF\nQ5h42qSGSTEQ5KwIncIk7gbENmClS9Z9+qXNK1/7JpeffYHP/+onEe4Sfm2K1cVxhvozXLly5Y5A\nCkQCG+2HfCS6IZj1JLHRPXz+f/kPvOfTv35rH6rDOkr4XFKuac5enGZ8Ms/M/Gp72bt2cJprHnca\nsM4U6qaHoQMMHWAJ3X5sZFJKhDTwA41GYlgO/YPd9PZ3YZjNlN2PgX//yJEj/Mqv/ApHjhx5174j\nlUq1Ycv/Ym/P7oiIPgCKhIQiHfL6hW3N0Ka0WKm4TBVNdr/351mOdeGbUVrTmYl60+TNRlJdhpBs\nG9rCwtQ8w0PXu+naDuOGtJmwNUJolLgewWv3etcgwI3685KwOJqLxfnlj/4CX/3KU2zfvRORThK4\nLlv7+qlUXGZXl+jqzJBJpGkUqwih0SjSEZt6Mc+Jb36XDx7cxQeefJLZlQb7B+6iJyk5e+4qzx49\nzZWpBQLTJJdJUXU9Io5FIpHAskIRZt8LCIKgjYYQMoyYLGngum5Y3L0Qolm6+3qxLIuBoaEwGg5C\nzg9hhFQVjuPw6quvsrq6ShCEDTMtyuSWvF82myWRSBB1bOYxEIFg25EH2X/oXr78n/6MpcsXSGWS\n1HyXpCPDyLXm4hrg3HA1wwhsTY7bk8STceYWytQai8StBMlkkng8ThAEGEIgmrWYd2sCfzt2aYOM\nhDBCArP1keTGOd6wXqNBK+LRKLneJPGkhaKBFAabDvPehhUKBSzLav+Fm1dTb9daEf3t1Pz9abE7\nwtG/mXl+jJrrURMGhSCKJeI47wAPSzQapburl1Ovvc6RQ/exsBAyB7Zwv29kLfUkLcJ2fVP/iOWR\nCAud2g8wtWQglyMXi7OyXMDUmvOzJ5lazJPt7yHjB1w5f5F4qhO0T71W4sylC+hylf/47z7PYE+K\n1UoRIyowVI2sLbjv0FZWV2tUXTh8+DDaazAxMcGJV17h0KFDKKVwHIfVlRBzPj09jZSSYrEYQvOE\noLC8QiqVoq+vj1wuh1dvsLKyQiQWwzRN8vk8lUqFU6dOheyRQcBjjz1Gd3c3lmWRSqVIJBIUi0Vm\nZ+YIlM/Xv/517EiEsZFhBrcMh4008RgNy+Hjv/kbnPj2tzlx7CimJdAKTEOiMQjbvDbnMHp7eykW\niywvL4dRvSFxTGudo7udaRv+f/bePEquu8rz/PzeFvuSkUvkviqVSiklWftqSd5taAOmTZlmgIKG\nU/SZougZGMrNOdBFeaA4VdUwNZjqKg/laSh7wHZVGYMKs9hGRrI2W2ta+5qp3Pcl9njLb/54kaFM\nZcqWjI1dc+aeE0epFy9e/OL33rvv/u793u8XyDoGF/J1cx44wjP/2jUW8M3H0mlUbwm66SUlVUY0\nCHsFkMdQ3fx/tLKUWMxBUVMu8MAGEDd0Z9sLXLHCWCA1spD4S6EDeoY6AebWRUpKSpCAZgQZHO7F\nljpef5RMTqLOGpvX66W2tpaLFy/i8XiKVNQAWaewup3ForoQDYRVUHmzZp3qvOIQcMAvJXXSoUVX\n0T0zO8w6xgKXmo6Kmc7h170oNij2jV2RtgDdEdiKjZDSLTBzTeVMyAVPzUK8pMqb1JY0Tbth7sH3\nlKP3aHqhMDP3BzqmTS5rEa9uRA9HsBWNhafm5kzTVfwBL7qhcuHC+SIh1vVyeKZtg3Bx5k4+j6Yo\n6IaG4feTSiTQdZ1sxo3or/YEuGdCSvB5fJSXl3Nw7z4+/tBHicRKmJyeJmPmyFo2Xk0l1LCIZDqD\nbebp6kqxumUxSmYaJzeJYatEhMQbkiiOF2krpIHqxdUYZ7vp7+lFVcDw+mhra6Orq4tcLkc4HKZt\n8RJ8Ph9r167FNF1FoxOdnVRWVtK2qJVsJuPC+HI5hoaGSCQSHDlyhGg0it/vJx6Ps6KjA0VRyJkm\nHl3Htu0iJM42TXRVpamuFsOj49N0+geHEMJDYjzFK7v20biogTtv24FlW2z8wL8jvmwpO5/9EdmJ\nUcqjAUzhop3fLLs8M7fJZBJFURgcuMLo2AjrYuuwzByKouAt7JPNZn8vEe51zdCR1RVzHKKywHgU\ne/4KJD4wxURyYRcz40wbGhqAsQX3+bdkHo+HycnJOY7+/7e3z94zjl4wuwI/l3vOtm3S2SylsTJy\nuk7esd6W6kImZaIqBps2buHA/leR0mHJkiVouurSTVwTyWgSHBQs4ZBVBbaEJx//IY7j8MF776Ou\nvGJ+QliqrjPEwRawavUtjI4Oc+joYRYvbiMUChHx+QkJhWAgwERiGo+mkzUtxobG2XL3do4dPYyp\nxEnbBiY5NGnioJBBxdE9BMJBYuEIUcWD7Zj0TI3R2NRIc3NzoYkog1fREJZNPpVGSolHUdm8YSOx\nWIxkMkkimWRoaIiRkREqKirYtm0bWTOHqqqoqupSC0iX6ErHXaEohfNm2w6KqhL0+Tl/upOJsTE6\nlq1kactiysrKkJZkbUcH3UN9vLjnN0RDPhYvWUSwsYb//JWvcvC5Zzl2YA+G7mEG5HczpnpD9PT1\nEbnUQzAQQFVVQrp7pFA4+K5jkWxVIetc/U35Ak3xbDGLbOHx5vVdxdhLOemmCGcF2aqqkkgksG2b\n5uZm/H4/4bBZfG/Wt775uGalWmZSRrMpnReq+VwlGpx/A86AG86fP09ZWRmhUAjLssjn85S
ULIxi\nnxlzOBwu0jbPrMJu9LwtVIS1bRtHCkzTQlGUAtvtjcMdf1czTbNINS55Z1KIgUDghmOYN3X0Qggv\nsBvwFPb/ZynlnwkhmoCngFLgMPAJKWVeCOHBlWdbgxtqPCSl7HrD7wCqwwofuHcZHnsErkF5jOdN\nzvYMcfddlSRNHcPr3DSBkLXQEscEgYZtOaxbt5bLly+TzqR49bWD7Lj9dq50XSRWHsMb8LmY7ULT\nlUQlqdgcPnmC9f/uowxoCo/+y8/42qf/iGBuEr+uIDBwHAdzhgFSVdFVGJ+cYtWqNVw8d459+/ax\nbuN6ymNlCCHJpFJoQkE1NPz+AOXxSnr7RwhFqtm9/xj3bltKZVkU07bJ53KYDpw6e4bRyTGmksOs\niQRJZsEIBbG5enEZhoGVNYtpHEVRisyPfX19nDhxAsuyWL58OU1NTUUyJzTh5rxV1SUvs+amRGzb\nwTTzxGIlnD59mqGhISoiQVavWo6qaOQS44xLCxWYGBthemQYb2qag0deo/tKPxvWb2JcaKx48ONU\n37KOZ376HOmJUeKOipccjlBQpIMjfQiZQxQw3h6vgdenk7fSpDJTXB6YJJ0VHDnRRTQaZXo6QT6f\nIxAIMjI4wsi7iL7NpVJc3vfqHKceLOSe7VmOOa25Dn4Ontq4fnTb2tpKSUnJNc79rVsi4RKn+yPv\nTvEzFotx5cqVeRxGb9Vc8RNJf38/JSUl73oK7922G4noc8DtUspkgdL1FSHEL4AvAv+HlPIpIcTf\nA5/BlVb7DDAhpVwkhPgo8JfAQ2/2JfUN5ai2F1Vx5uUOp3WVtKZhoqBJAc47gxhvX+5G896gh117\nfkM+n2dzfDOOtMjkcpQYEUCQFvDaqRPkPH66/FH6FIm2eDn/z89+zUTfZaST4X3bb2PlksVo19yH\ntm3h83kwfB6am5v58ZM/YuXKlWzZsgV/MEg+k8axLFA1hBC0trXRPXCFsf5hDhw9RVtLNeWxKJoq\nkNh0dnZy5PVz3Lbx7kLRyouqUHT0J06cQFEUKmMuM+hMgev1UycZGhpC0zTWrVtHWVkZmUwGwzDI\nWSbegJ90bm6jkTPTXVyI/oQisew8e/fuRdM0WltbqYyFcHJppGNiqCYDF87zq58/z5LFi2lpbaWy\nPEZb451MSsHOnz/LndvvpcJfS+3iFfzP/2srT//gH5m4eBxVUfH6g5iZFJMjE5QEPWSyWRw7T97R\naVzUgaYL4lWl1FfVUFpaim3bJKZTRKNRvF4v+XyeeEUZH/pP756ypU836KiqndMjEfC4Tn12RD3j\n9POztvW+wXFra2vndAf/rjYjhvJuOXpVVQmHw6TTLoWFYRhviX//WhsbG2NJvOJtGOG/bbsRzVgJ\nzHS56IWXBG4HPlbY/kPg67iO/oOFvwH+GfieEELIN1i7CAFrNq6iIlKJhllcdM4sHaWULpLiHc61\npjMpnKxDSVkJGzauI5FKMTDYVxTgDiluVOYogpMnT7L1fXfTo6kkPRBfv4pz/+PHfPA/PICjWLz6\n8issXtxITPeSyWSKkZrf78fn8VBeXk7A62fHjh2MjIzw/PPPc+fdd6NqGj6fH1VAtCTCKwcPULuo\nheqWFSQnBtl75BL51DiZxBQDw4O0rFrPnXc9yCsv/ZYH7l1KMp3CF/TTfekig4ODbNq0yZ0/+2qE\n7zgOgVCIza2theWfJJvPI1SVvGWh67rbPW7MJQaTUiKlm2I7c+YMoWiEVw8dJh6Ps6StnVwuR3lQ\nJ5236e/p4uSJV1GRfO4TD2DoOiWRUnJ5i+HJKcrCUTKLG/jlzmdYs/02Gmtq0T0K7//Q+zn68zxj\n3RdJJ5Kowsbv11BVi/e//058frdhR9NVbNtkanoSHJVs2kQAXsNLNp0lm3aRGpdT3Tjy3RNQEdLB\nkGn8nllcN7ZbX5rtpBO2m7xRNAfdUZjy+vEq4DMEDhaasLFRiXoNJobHiVcoSHUUFBtVuOdJFTf3\nO/0eFzUzOjpKb28vq1atmoNOK/49i3p6oZTNzMpbVdVi+iUWi2HbNoahYlpZ90EnZhzufAeezWap\nqXE1af1+v1vTsG6sDhcojC+ZTOL1evH7/XjHphkYGKC9KgZYpB0L/w0CbW+kM3Yh8yk5UFynWl7m\nB3uC9KiKLS1sM49umS4fzwJ0ycYCrtgU8xsoZ0tESNO64ZTQDeXohRAqbnpmEfC3wEVgUko5M5IZ\nSTWYJbcmpbSEEFO46Z3R6x0/EvLSVBpHkWahmWLuCTFUFx2SSaXxR6tIytQ7shRTpBvzs0m6AAAg\nAElEQVS15i2LcDiMPxgkFolx/uQ5AoEAIiJxbJcTWtE0dK+PYb+GrSiYqiTYUs+loI6pCUb9Ose7\nu1kaj1JbW1sUVdGkCg4EQyGGhoa4Zc0aXn75N6xftpS9v32Z+sZGoqUx6uvrqayq4okXXuKTS1e6\n7IClDXjCVXhVC9s0GTpwkMr6dSSmktQvWsWBE8dobGrh7OuXkAq0tbUVm15U1eUZ7+/vp7Ozk9vu\nuKNYdJ49l284r4qC4zgMDg6iaRq7XnyB9evXE42GsawMfp/G1OvHeXX/y2zZfCstG7aQV/N4NAuk\nxfT4AKoKJYpEpEbYWh+jufoe9p+5zE/2H+CjH/0QZbEyli1vo1tm0Dw67UsWUVkSJhIwSBTqCzOm\nKjoVZa7zELgPsHnjl4L3HpL++ubg0lPI69Sg+rv7uX37doTMo2gZUCx4G8ihN2zY4NZifodjzBTn\nZ5r/bvYenf0Zl2765r4/Ho+TSqUYGhri7G9+Q2NjYxFO/G5ZTWUVEpvxsSnG8yNIKa8lVvm92A05\neimlDdwihIgCPwGW/K5fPFtXszTsJ1qQg5MIxDXUi7p04XeOI/EoNkpOx1EsCsy97sXpzL3Ys9dc\nsdpC+DV1fsSgSHCQLvbcsjA8Gh0rOxjsHeLUuTO0Ni5mKJPHCQYY01WGPQECtoouMyTQGDfCpFVJ\n9I77eerJp1hRFiAgD/EHt9+DB5usYWE5Jrrfx/DEOKGxYW7dto0jBw6yeeNGMqbJoUOH2L1nD1s2\nbaG+qZED+/eyZOlS4vE4jq67/TCKRU54SWZMjECQRe1LefLJJ6lubqI8Hqe0LDZHgSmRSnH69Gm6\nu7tZvnw5ZWVljIyMFKNKKaVLFQtFzndFXH3PlhJFgFc3uHz5Mj09Pdy29Va8hoMuJznffYGzF87y\nH1cv5e47NuIWvkwihmeWWoIFFgW/a4OEhmCAqpVNnAurHP3nZ6iqrCbmkaxbsxKPoeDzGEg7RyI1\nS5BjpiFIgMQla5vnWIREKIWL4N2M6IW4LoprdurGUUSh10mgOQJdqGiqhmmmsYSFZVtYUqE6Xk4g\nFEDTDVA9IG4eT2EYBrlcDtu2SSaTWJa14BgXKnL6/a6GazabRVXVuUyjHi/jk1NUVtcgVM3tqPX5\nqK2t5dixY2QyGUpLSxfsqJ6Zp66uLhoaGlxfUF
ihBAIBRkZG8Hg8xfPs8/mKfDJ+r0tbPDo6ypUr\nV+jq6qLS5yMSiRRrTLZtg3Jjc6UoSnGMhmHQ09NDbfx69CwLW19fH/F4nJ7McYRqkZdAJI10DBSl\nvLhfvtAgKW4QFzM7BejCK2/sYXpTV4mUclIIsQvYBESFEFohqp8tqTYjt9YrhNCACAvgv2brajZV\nlr7h+qNC2KxuqiM7MUKgsoF0JoMuRHFyXJKut9dmLnIpXYB8TW01IwcGGO4ZZsjQqFi2kmFvoclF\nuvs5ikBXfJiagyVUfPFqKnasZvDIcQ6dPs+G+vo5wdcMyiifydKxYgXHjx5l3YYN3HfPPagBPy/v\n2Y3u8+EPhcjm8ySTSXRVRZHu+BzHQVVVNzVTQMZECuyNquIyOiYTrpDIiRMnaG9vZ+3atdi2zfT0\nNF6vt3jjqKqKvCaemx1dSSkxAn7273mFRGKatatXM5qYIB7x8ctnnmLl4jYe2rIDMsPcjDmpSQxF\nYXFNKcubbuOll3ahVZUTj0ZJpqaRpoXHdxWaeDNEUTP2bqNubsR0bBQUTEVi4WBfp7u1tr4Kn98o\npFOkC/K+yR+oaRr19fXYtk13dzfl5eVv/qGCpdNp6urqGBpye07mrLCEjSpsdFUS9BtIqRf7NUpK\nSkgmkxiGMS8lCBRhuvF4nGPHjlFdXU046t5fGzdupLm5mWg0ysDAAP39/SiKwuTkJMPDw9jhAAMD\nA1y6dIm1a9cihCCaSiGlpL6+nt7eXurr68kN9N/Qb1RVlZqaGsLhcJGZE/OtBQtJeRbIkQfyThLH\nCmHlr1I7LDQX74S96eJICFFeiOQRQviAu4DTwC7gwcJu18qt/WHh7weB37xRfv5GzGNLfMDp14+T\nTk3j0VWy2SyXLl0qIkjebpuNfnCkhS0s1q7ZQPeVPrp6e8iqggQC3WV5J5dK441GihNqKYKMtDnR\n30+wqYHn9u/hSiY95zuam5td4QddJ5lMMjAwgJlzmWL6RoZZvXkTazetZ/HSdkIlUWzpSvylshmm\nkoliqkJKiW3beDwebNsmm82STqcxTVeK7fDhwyxevJhoNFp8QMzM2cy/C3F7zEDEUqkUuqry/M92\nEvB6WL18GQGPQExP8Novf8G969fTGK/AXECgIpfLveGLQgewoUoU8tz//jsYGR3kX3+5E93QsAqw\nxN/lHP9bQFxYqQS5TALTypFz8qSv0ydSU9GA3wgjHN2Vk3RuHme8bNky/v7v/56xsTFaW1tvijkx\nm80Si8V4/vnnOXPmzBxiNmnm0IXEUEBxLGorK0in0yiKS5mcyWSKRd9rbcZFbN68mUwmQz6fJ5VK\nkUqlGBsbY+fOndxzzz0cOHCAiYkJ/uzP/oydO3eSzWbZtWsXzz33HOXl5VRVVbF//34ymQy1tbUc\nOHCA5557jgMHDtzwb1QUhbIyV/Zw7969XLx48YY/e621VgdZVOujtcZHW12AxXXR4jxcby7eCbuR\nq6QK2CWE6AReA16QUv4r8DDwRSHEBdwc/OOF/R8HSgvbvwj8l995kIaO5jHIW5a7dDRNFEeSSqVc\n2N/vpSHGYSqdQgqFoYEBvKpGOpVCSFe2TKGgbSscFCQeTQHHIef3YJWESek63ujCjF5CUTClQ8rM\nM5FKYqsqoZKYm3EQKkJ1ycoUTUUzDBzc5q1YWVkxIjdNk2AwyNTUFD6fj0AgQDqdZvfu3VRUVBQj\n/+J3Frp/36gLeMb5q0Jw+uRJAoZKe3MTtRXlXDh9ilcP7mVV+xJaSiuoj0RJXL5807OaymRQNA1F\n08jlLaamkzQ1NVFSUsKhI0fw+XxksnmX2+U9SGfwZialnM9KqligWKi6JG+lGZ8cpkRVCCgO0nGw\nRA5H+rDUEIqmU+oXGOku7lxRgqdinLw+iOrLo6oeVBEAVQfVXRNc+/L4g0ynMnT39tPbfYnpiVEO\n7tvDi7/4V3a98Bx1VWF+9uwTLGospyyq09v9OhvWLqaiVKe2KkBDbZh8ZoiO9hoqSnUqSnVKowpX\nzh9haUsFPReO0lIboaU2Qmr8ClvWLqEsJLBSQ6zpaCRk5BgfOEd93E97cznvv3MDVRVelrZVUVPp\nJ5saoKO9hnDApjruozym8cAHdiCcSUbzEySVDLVLG+ga78EJQNeV11m+spGGphgf/+QHWb22lT/6\n1Ef5nz5yP7GgRntLDXZmgpU7NuCvibFi+3pW37GZpVtWc8s9dxBZ3EzN6hWMaeBvqqNkURNXUtOo\nFaVcSU0jYxFaN6wn0tTIxvfdR7C+jpKWZlef+DrssldPq4piqag4RGIO3sgkhpjGY1oEdYuoYVDq\nT7Kjfbr4qlYPUa0eQuRPYGf6sbO9eBhFZwzN1ua9FKEj0BBoSEe5YfK1G0HddAKrFth+CVi/wPYs\n8JEb+vYbNA0LFQ1H2pw5e5ryhkVMjyfcZp3fw82vKAKEwLZB17yEA2HKyivJh3xkJQgpGRkapqKm\nGlO4J9plD1TwGn7UUISGdevonpom5p/PyJc083giYWJ1NWQ0BUu7Kg4827nlLYt0MsXQwACaprFo\n0aI5zto0TQzDYGBggGA4TPeVKyxfuZJYLIZjWfNy2LNtJi8/27LpNPl8nmPHjhEJhti2/VZU2+TF\nF3/NxOgIH1i/kZZImLAjmXIsFre3kXESNzW3moDk1CQCBc3QURUN4cCmdRs4e/YM58+eo23ZUjcn\nrIB8RxJ17775/X40IZlIZ5Dq3Ah7ZmVWV1dHhpuj7h0cHCSbzSKlJBQKEY/H8fv9fPd73yPgV/jW\nt75FPp/HcRy+8pWv8B8+8XGOHDnCI488Qi6XY+fOnUgpOXv2LF/72tcIh8M8+uijtLe309jYyJNP\nPllk51yyZAl/9Vd/RV9fH0uXLqWlpYW//du/5cEHH2RsbIz9+/cTi8V47B++z8c+9jGmp6fZu3cv\nTU1NHD16lLq6Ov7mb/6GM2fOoCgKq++6fZ7oiS8QIBwO86EPfYh0Os0PfvAD/vRzf0xXVxfJZJJc\nLsfixYv5yU9+QiwWY/v27ezevZu2tjYeffRRXnjBBRBs27aNI0eOsG7Vanbt2kV7ezvDw8Ns376d\nxx57jA0bNpDL5Xj11VcZGxvjzuXzXOBbNsd/VU++fqnrD/QhjcRYCikEyalRpCLxBucHYGbaVdJK\nJpNkzSlMc369YyF7T3TGTlk5RjSDBtMimBNYqmRSUfCpGpYiMLySiDPBjkY/+w7+nBrupr5pMecL\nSyohFVQxd6lrX4NEsPLzJ03RC0oFBXOus8KXUqI5KiUlYUJN1XQdukw2PYVS4sWyXDETK5nCV1ZO\nXhdoioaWy4FlEfZEGDQtwh3LOPXKYVZVtRePO1OIQUoCfj+b129g165dVLzv/e7mWWkVaZpMDo/Q\n1dXF+vXr543v5z//Odu3byfg85HO55hIJUlmM5TEK8imUnjUNz/Vqqq6alyFyMXweunt7SWXzrB0\n/QbsZ
ILXDh9mcHCQ2xY3sqhcR8gMk1IiFRtLAwOvmz5Kp9EUlWg0iqqqjI6NcOHSRZLZHOl0mrGx\nMfr6+tCHsmSyaVSvTkNrI8lcmrLKyiJMb3R0lLPnzzGVSBCPx1nU2oJtOSxpayWRmMDQPQvyzUtA\nFtMa783UTS7n0jWEw2HymTRZYSMUBaGqaD4dobuIkYsXL7Jx40YmJyeJVN8cisS27aIGanfXeXRd\nJ51O89WvfpWSqJepqSk6OjqKgiJVVVUMD7t1lurqavx+f1Ew3jRN7rjjDioqKhgcHMTj8dDZ2VnU\nb52enmRkZIRIJMLKlS5SbGRkhOrq6qJy1Y9//GOWL1/OqVOnaGpq4sKFC0QikeK9kMvlioR4mqaR\nSCRIJpOEw2EMw8DM5dA0jd7eXnbs2MGXv/xlvvWVr7F+/Xr6+/vp6+ujvr6eprpaSkpK+M53vsPK\nlSvxer0kCjQlkUiEeDzOE088wf33vY90Ok02m8Xj8ZDL5bj77rvZtWsXw8PDrFmzhldffZUd7ctv\nSqLSMAy3J2ABcS5hXE3ZZO1C/0JA4Nd9bsBW6jZndvWcLe43Q6HheOsBCBkqmibm1dWuZ+8JR5+1\nBMdHJL5YBZoiQU67ePoCN1NGsxAyS1hAe0uU3svHWd1QTiQzgsfKM5kz3xKMyiUme/P9FMBQVaIl\npdB9mbqaGuzCdstdRBHEhcVZitvmoc5CDgkUwpEIw9Pz5eJmTOLm9fMLLFDS09M4ls3p06e56667\nrj4gZtkMS+UMV40W8LN+40Yy+dwNN9VYlgVSkMlk0DSNwf5+NKHwgQ98gJ6eXvxTvXgSozywbQMx\nxcZSXa5v4QBCouB20kop+enOnWhCxeNxxaFrampobm4mGA5TU9dAd38fuqKCtIqQPFUrtKrnTDRN\nY2Jigo6ODoZGhhkYGGBweJjEdJLOzk6ee/ZZ3ve+u1m0aBGK0IrHEG+DHOS7YaZUcZCA2w2s2QK7\nAGgJhUIEg8FCdHtzed3ZXPgtdZVEIhFSqRSVlZVs3bKGaDTq9iVoGo7jsHfvXj796U9TWVlJe3s7\nTz/9NLt27eLzn/88jz76KMFgkGeeeYYTJ05w8uRJvvOd71BZWcnOnTs5fvggf/qnf0pFRQXT09Ms\nW7asGLm3tbWRz+f5xCc+wd6DB3jxxRf58pe/zNe//nWSySSHDh2irKyMsbExSkpKWLduHd3JKQKB\nACUlJaxdu5a777qLmCb4+tf+K8ePH6epqQkpJV/84hc5c+YM3d3dKIrCiy++yF9+8xtcvHiRb3zj\nG/zgBz/gkUce4cm//weGh4ddicV0GsMwyGazfOlLXyIUCnHq1Ck6Ozt55C++STweZ+vWrfzd3/0d\nodB8AZjfycz5vsqjJzHUHCgWpq2AI2mou7r6n5x0A5+xhNvLoygKpWXVeLw35sLfE45eeP2cSOaI\n+SU5x6TcrxPQBXnHxsxbePWrNNvxsgpXR/LKBazEIKqw0d5iwNbf309ZeYwjR47w6quv0tTUhOM4\njI66kH/DMIhGozTU1bKirYOJscuUlpZyS1WMfZkJrEQCxRvCUhUmL1yidsVyLCGwpcQsRJmBQACp\nSzLJJNZ1il5CCHRdx4Ng5er5S8RUIskru3dz9513ks9m0WcRZM3YDNRsamKCS5cucf8DD2AXOPtn\n+PyBN80zzow5l8tx/Fgn99xzD+PjE6QSSXyDl7l9ZRt19XEyyQlee+1I8TPDg4MgBU2LmhFC8LGP\nfQxpupq8Mw1vSIWsKujqHcLMw2hinLp40GWvVBScvNsfgHCpkz0eD+fPn0dV3QdGbW0tyWSSxgb3\nBj/ZeZz/828eZfWqNdTX19O+dAlTU1MoioLmefPc5e+H3kPgsTVXoL5g2gL6CZYmQUCZqpKYnKKM\nKQbqo0TDHlQRpqYqRtCv3ribLzAnplNXl/ZpYDwxxunTp5FaiFUbthXfe+34GcJlNdx254e51D3B\nvv0n+d/+l/9aOJbNb186yLlz5zh37hymaVJav4iXO89y7sI5Lp44CUKwfOkyvv/UTgAuX75MQ8PT\npBI5ui7+lDXr11NZWcnPn3sOVJuOlSv5xYuvcPLECc53duIrLWXnL19mfCRBvHYRJRX1jNpXmJ5K\n8eMnnuHsuTOcPH0cdXocW4FFGzfy/MlOekZG6T+wHyaTtNU28sP/9h2qHXj8L/7KTTPt3QuAF3jm\nH59gcHAQIQT33Xcf08MjPPrf/hohBGfOnCnOxc9+8EOOHTvGj7u6CAaDblrtRqJ5RwXFRmIjVAcp\nFk4xhsT8+1faOcACGwwcUMAbvAreKA26GYo6eTV1d/nyAczMwspd19p7wtF7/WFOjDvk82nK7CSb\nQxkaAzaa10PA48OUOTQVLEshlckgLYXuK30MDA8xNpEgEC55S1nb1w4eRNdVVq5cyZ0P/xemU8ki\ngsVxHFKpFPF4nH2v7OHcuXM01q+YdwwPkoQiQdMYsB2QDh4ERmGpMGFIHFXgM3N43wBKlUwm8fj8\nVFXE57332v79vP/ue/D7/ddtaqmqqnIRSKrK1m3byJp5VASKrqFKbhiCN0ND/Mtf/pL77rkbW9jo\nPpWBkR7W1FeRTU/y3FM/wh/y09hcC7gpr/aOZQAIx0XRpNNpkCoew+tG2+6bhCwF3VaQpoKWd3Pu\nsiBY7uAWXIV0isggXdeLBWfHttE1A6/XnYdNm7dw6623kkgkOXz4kLviufvOmyna/l7oPa41e4HA\nJF/Y7gi3qG++g9mmW265ZV7uOxqN8tBDDzE4OMilS5cIRyL80Z/8CQII+t0U3i1r17Ju0yb3Ogz6\nqKyrZfnK5Yxs3oJUBMuaWorHO3fuHNFolIAvwunTpwkEApSVlZHL5aiIl1BVXU1ZWRnLli1jcNs2\nsrkcTU1NTE5OFumKY/VV/GbXS5SXltHSuoiSmnIGnCRCQk1VFX7NQzgSIWxbyMkkW9uWUioFAQfS\nZp5gMDhHpvADD3yYYDDI2bNn+Yu/+AtKSkrw+/10dHSwbNmy4n5N1TWkUimWL19OIBBYUOnqvWAN\nDQ0Yx1I3tO97wtGn0In/4cP0+730ixzdY33EB07RdOEMdwQdfKEAeTsAAoJOlmBhJTXp1QlnTEIx\nlbFrbm5VXiPaLVwGnWwyRWdnJ1PJaWoqK9i0aRNenw/LNFFtgSNtVwhbAKEQIp1l/YqV2OPTlNYE\nOHT+HL883cn4qjupLm1GZl1+6chHH8QCPIqKgqBPs6G6goTqw6M6REq9HJsYR9fmO/ucAE13C8uq\nqiKkxHIcFEUhn0zR1tZWvNjcJhuJcCSmIlC8Hs6dO0c8HscwDJcHXAjsmRSGbbnCycWJcf/RF/CF\ngUAAMzvBkUOH2H7rrXi9PkTA4fDe3cRUh0gohmma3LrDlZS7KuM4yxQBKKgzS0o5t18ph8K0YWFa\nJueGrlCqlFERDWFJm2i0hKOv7SNeXkpdVSVmziRnS0ZG06RzFp
WVlei6B0Nki6UV2wGPoXLHbbdx\npbeXfXv2ogANLYtQVKisjl83O/f7oPdwkEyrDtqsAqtqz9/dUgSWcEW605ooinW/E5ZOp6mvrwfm\nCnp4PB5eP3GSRYsWUVVVVYT/WYbGysZGXn75Zaqrq6moqsJ28oyNj2MYBvXNTTjMFQiprq7GcRxi\nsRjr1q0rIuTWrl1LWUUUy7IwHRu/38/itjZM05xDtqdpGiOJadauXc3Bg69RVlFBTW0t4VwSR7hF\n2f6BASKxEqrq6xDhJMFgkIADIceVEfZ4XD6pGTt27Bj79u1j3759RZjoLbfcQjgcprKysrif7kh0\nXae6utoVz/H5yEzfmEP9fdpMmvRG7D3h6PMCej2SqG2jKypm40ouyghWv0ONnWTVAkgDTRVEIlF6\nr1yhtiw8n+lPmZuiUKSgt7eX3p4eOpYuxev149HdFIFSaDbKYyKQqFLBVDQGR0c5PDxKIpuhuTzO\nyPEj1C9qRe/rRglEXMm8a6hPHSExEQhFI3bnbWA6SCSO49xwMSc/k15xHA4dOsRtm7fM4cl3KCid\nOA42ksuXu7h727aiPqZl29x0/zhu3u/gwYO0L26jpbGJ8fFxDu3aTRTJpoYWIPk7yxWqjkKJo2E5\nELIN/HkNJe/Bh8r4UJbzpwYo21JLJq8TCJZy6txJTnWeYeuOHWg25HHm0PaCq7E7nUxSVlHO+s2b\n6Om+wsjIMOFICF3T3pCP/p2m95CKguXzzGl7Vwvncfb1kC48kE3TRPo8jOQyBHBJ6BzHwTAM9yFf\n+MjvQmY2U8/K5VznOgNR7u/vZ/Xq1QCkUil8Ph/JTJrDZ86hqipGLIqvvBTb50GTKtl0muTkJOFo\nFCkl3lnX3Ewj0Awr5sxvDYVCBXoDBV1VsPNur8bM/rNrbaFAEFkIWLLZLEpKI2uoKMIhlU4TiUQQ\ntoPXMMhaNoau43HAY1+lTJ7bD+M2X61adTU9GvC6aZTZdS9VUfH7/fj9/uLK3riJQuzvy25GD/k9\nMXrp2FQ6UN49QCRrMZY+jZKZpDIaoH+8fz62E3AcDVWxUbLTeKU1r71ESK3YVp1MJhkY7OPosSPc\necftlJWUks1mMQIatpQMTY0TLK2gf2yMHz7zDMFYOZ6KSjwda9FXNJFKJDh4+RL3KX6y44L6ikVQ\nVkHOzjITIs9cyBKJ14a0xyDoQFoT4JhMTUyyeNGiN5yHIjmUdDtRJ8cnXTChIlAQRVSQLUBBYDkW\n6ckMg709jI+PU1VVhW3bKIpyU5wlM01Xe/bsYXHLIurr60kmk1zpuoA/k2D9yg585luHNObzLr1F\nLpfDkpKk5VBeGmUknWSgV2eyN4HIK1zsuYQn62fwcp7BC1fwhYL0dQ2xffudeAyBNHwEpYNpTs45\nvu5Y+ISGzJroQmdZcyuOCqdOneKnz+18w7rEO03v4fME6E0m59yQZtod/2xn7QsFySExVA1NN4qI\nq3fSKioqmJx0kTIDAwPU19cTjUaL76dSKXp7e1m8fhWJRILx8XEujg2S7rlEbnQMO5XGr2s0t7Yi\npGRxTd1Nfb8i3xgo6zgOCPjQ+z/A2fPnOXflAtG6cmZwDtdmJGfoUK6Hnpux31c36nvJ3hOOXlV1\nohNJmsfG+U9btlAR87Pr6C/oGR1BVgILQEVzWYdI2E/nmWO0rb3FLafNNuEwODREQ0MDtmPS3dXF\n1q1bqagsdwujZg5NeBkYHubSdIbf7H6VYQzWfvYLlJRVkkClPy8Y0xT0QJxgaTWH9u/DOnyEumVt\naLYgrzhcy8SnS8gqBUSOoIhXv5nuTE0IhKIQ8PkKnB8CVLV4IdsFjhdXXamP+tq6ORG8wg3Q9cvZ\n43G5uxOJBNG2dlKpFJZl0d99hntXNBNSsmgab5lLYOZB4vV66U9leL7zGMPjQ9iql9CKlUyFS5CO\ngbN5DW2RMvZePo9XOFzu6eXo6VMsu3ScT9+1hXhjC5pjEQrMPb4iFbyGh1Qih+7xcuVKH460uWXZ\nWiamMqQzb94V+U7Re5SVVcnSGe3dgjne+VqoiteDVtg24/wsy8Ln8zEwMMD4+HhBxGUmpJnFhrlA\ndG8vwBCpCjeQ6Onu4sEHH0QXacrKynjmmWfw11UQCXsQcopVq1YRjUYZGxtj2ZJqnp3so761mZHO\nJPHSKoaGhtixtJ7N69bzs5/9jIc+/hG+9KUvMTY5wBe+8AUUIRgZHWVwcBBrMo1VYESd4Y8Jqhq3\n3357EW2TTCZZu3wZTU1NPPHEE8TjcTdtlPeDDXbCZmn9EpKD07RGK+no6KCjo4NTp05x8OBBVt+z\nman+QWqCYfwOeBzofv55cokkNc0tbNu2jdHRUZ588kk0TeNTn/oUhmEwOjpKTma5fPkyzc0uiODU\nqVOMjEzz4U99iiNHjhQZMZ1scs5cKtYCc67aCNVGqiaWMF16Cme+81oQGHaD7sG4psB7o17lPeHo\nhZUnmunntrUVVIUH8Dkm9y+vp/P0JP/8Ly+QWr4Cx7HnNhBJCPu8bFwVo/O3r3DHQ/+ey0ODJH0e\ndNWHLQVlpTEcM8/+Pa+worUdj+HFTCSxhYPf0JmaTPA/fvkyWn0r6m3vpyxWzVkEdk6gSJdkCluS\nUwSGESJ22xbOvryHmOUgdRsbfV5rsTnrhrYEWE6OkGMzfeEKy8sW5sVWTPfmnf2smumovHDhAsvX\nrnJlDBewpqYmKioq6OvqQVdVSkpKsIXEviasuRZ144pLSxQpSExN0dPXx5Z1G3EEJDMprHSG1qhO\nWFPwORZSAduee3Gb8mpTl4kECyanE5QEI4SNAAkrR32wlD3nu/mX1w9wNuYjdfo/9GkAACAASURB\nVO/n4DZX2cqdr7m/pxsQpQ0YEqxbHBo/9BkypPm/O8/jPP1rHqqrZsPGMEEjQSRcwfjkMJ5QDDUn\n6Rkdor21ginbpK41zBTDrN7cRvCVhQU8hBDlgFlw8jP0Hn/JVXqPp1iY3mM/N0rvIQQYOtqslJdH\ndSPKoHZ1mzPDK6Re3ZYvQGxmt8zrb0MwWl9fTzab5XN//B8ZHx/nc5/7HB0dHXR3d7Np8yZee+01\nvvnNbzI5OcnPfvYzel86To3mpc7w033iDHU1NaxZs4YLFy7w4osvcv+HHyCbTrNmzRoee+wx/uWf\n/ok77rqL7du3o1uS4eFhGhoaio7e4/HQ1dWFoig8++yzrF69GsuyOHHiBLt37wbgvvvumzWFAr/f\nz7333svE1BW+/OUvMzY2VuRgWn3iCNMDQ3zvm9/iH/7mu7zywkuUlJTw8MMPE41GeeCBB0gkEiQS\nCT7/+c/z7LPP8u3vfJtbt97Kk888weOPP05rayvl5eUcPXqU3l5XCeDSpUucPn2aW265hdpIYP5E\n/huy94SjN4BSmcenm0g1jVfTEbbkjq2bSI0OMXjhEoaho6vq1WWXYqMIAZZFf28Pr768i4qmenw+\nFdtO4lhevF4v33/sM
e67933UNTYymZrGNl3IW14IvveLXXjue5CR5iZSlsSjqW5zsSNRpEJecYrR\ncV5ajKQs6las4mTnSXJj43giUbxvkrtTpY0fOHfmNGVbd9zUvJSXl79hTlyRBWimovL68ePc0tFx\n3X2vTV9kpVvQtTMZjr/+Osva27HtHJoBmeQovadf52NrGnFkDluoWFKZFyPaUscEbGw3f+xAJFLK\n+HieJ14/wHk7hSkFbRvvIv7ZPyLjUxgtMPfZ13lwwdUViaqqDF66TPbSGTh/Cf22FTytq3x3z6+I\niSyW0FjVvoj6wSmadQ/VtdV0jY+yfGk7k2bfdY8/y6qAHxby9ArwjJTyX4UQp4CnhBDfAI4yl97j\niQK9xzjw0Tf7AiuTY/Tk+Tn5+Blt8Nk6solcdt42X0MrANPT00xPT2MYBqVvg8MZHR0t9ilUVFRw\n4sQJ1q5dy969e1mzYT2HDx92RXcKvDMtWclL332Mb3zjG3zykb/kls9+Fl1VefHFF+nv72d6cpLJ\nyUnOnj3LwMAA/kCA9vZ2+vr6qAqV0tXVRUvLVUSO3+/n2LFjboDS18fWrVsxTZNdu3bR2trKD37w\nA7Zu3Ypa0I8tLy/nypUrVFRUsHr1aoQQbN++vXg8b0UUnwMdHR0MDw9TX1/Ptm3b2LBhA3/+539O\nIBCgvr6exYsXFx9GLc0t3H777fT09HDo0CE+85nP8NWvfpVHHnmEnf/yhWK3+enTp7nzzjvB/P3x\n0rwT9p5w9PZ0kuhUnuTIOFS2YFo5NOD5n/+Sc2cuMjXQS2vrIlIpl4c+l8vh9fgwdIVjR44zldD5\nxQsv8ODHP4EadnBUhbGJCQzDYPP2WyktL2dkapKAx48iPAyNTfP4T58j9ulPMxapQMkJIoqKYbso\nCVtAXrVBCneZJdymJ0UPkclJUpoHplOuPNybOPqgruCMjRK2TMoCPkxzger9PJ1ZN9WRz+ffPOUj\nJX6Ph6amJs6cOcOSpUvd3KbyxsU6RVEQSIbHxqivrSUej5NNjuHTTF7d+xIbFzdgOQJHuEAUQ8xX\n/vJ5vOScPHnTJJ1Ok86ZvHbqHKdSfkq3bMFXHiHn8XEyWk9KSFKKIJq/sVy/I2B0aAiv10v57dsx\n79hCSlUYElC5fT0RSzIyPsFZ8uz/zT+zTJFsCnuIZiS6Y6EtTCt0zdS98/QehqLQ4JvbcONdYMFd\nHnAHPDudkyg8DKPRKKdOneL+++9ncnKUaDT65v0QyvwAYebhGgqFsG27mFJZu3YtuVyO119/nYmJ\nCRobG3n00UdpbGwkFovx+GP/F1Nj46SmplnWtoTjh4/Qun5lkWVyRtA7nU4Ti8WKhGRVVVXce/u9\n/OM//iNnz57lE5/4RJGqeOfOnUxPT/PBD34QcOGYL7/8Mp/85CcpLS11G+gK4x4dHS3KW7700ks8\n/PDDlJWVkU6n2bdvH73Tbi28t7cXx3EoKytztZ81jVWrVrFlyxY8Hg+//vWvSSQS3HPPPSxdupTm\n5mZOXzxFNBpF13X6+/vZtWsXn/zkJ0kmk8VejlQqhXcBwZU3MneuF0qgvTv2nnD0fk3DN53EsP3u\nk1S6zSNDoyM8/6tf8NmH/oC+vl7CwSD19fUudMuWOLZJeXUTAwmFl159jYGebuorSkGCIVReP3KU\nW2+9FSEVooqXxEiCk8kJ/mnfa6z4gz+kp7SSPAqGqiARZMTV3LYixdU8t1RwhINQVfzRCOFojIwU\n4Lx50lpHYlgWfkCzb7xEKoRLRVBbW/uG+ym4qfOmpiZ6urvd+VvAkSgF0ZAZU20wc3nOnjzJpvUb\n0ISCX9U4sutXdDQ0UlNSYOKcEWMR83WBJiYmsFSJriokE2l+e/QI3lgDpXfei1MeJ6tDTtVI2l5s\nlAJY5sZUgzQJkYg7BsuxMFVBhdARikYGHUsxcMIRsiLHmn//UZLH9rK3+wL/ecf9WFfeOtvg222q\nhOg1z7aFbjq14Ec8sy6RGdagkpISurq6OHnyJA3t8/ss3g7zeDzk83lisRh79+7lW9/6FoZh8Nd/\n/dckVYqvtXfs4NixY7zw0ksMDQ1x9913L1gPSqfT+P1+rly5Qk9PD4ZhMDg4SCwWIxqN0tPTw5Ur\nV/jsZz9LPp/nySefZGhoiFgshqZpTE1NURasmndcv9/P2bNn+e53v8sXvvAFWlpa6D06H/Q0k6p5\n+umnixw4tm1z6623snv3bh5//HEGBgb47f6XCYfDBINBdF3nqaee4h/+++NFGub/r9h7wtGXR4JU\nKyns8VE0uwlFsZDAhx+4nw8/cD+KI/nVL35BZ2cnyyMhzKCPoE8wMjrNYCKLPxznrls3MnppCOXs\nBRralzA62Ee0vAy/0MhLiwsXL+FpXMr3X95F659+ieOZHD5FcRlxZCGoFteh8xQOAdtBlYKclPjq\nasm9foqU30v0Glz8tfomupGmXIPK2gpMK4V/AUiAI1xem5nofUZwobevh9Ky2IKY9+LQpIqQCmWx\nSk50niaZTBII+jFkoXArJSgCtfCQsQvNR5Zl0tvdRV11DTXV1ZiZNI4tEAmTpfUxIqEoOBkc1cLj\nCCyh4wgHny1IksfvjVLqr2D3kbPsu9hF9frleP/wYQ57yhlVVDwSFCSmEBjSFbZwBFxLJ78ABRGG\nLCCLCtA32xKETYeLv96F7rjHRIASDuEAl30qpMuoVFQe+LufsDQQ4U+21tJRGUUReeS7KDwinTT5\n9KE54uDZpLuqm71tBqXrm/U09RUYM8RwD01+yHYfpjSwgqgi8JRGGXNsUpZEmgbWzOoTcFCw/VlQ\nLISjYgtQsQk4V+s0/f39OGgY3iC19c2EIqVYjkLnqUsMjaeRJXFu/ciHGWtsZCRlMZQyOXb6Mu1b\nb6OiuZ1v//nXyY3lidUsxsx7QAbxCQ/9Q/14hQctD8saWnnq2/8dkbEx4mW8Pj5OVXkpbckk58Yn\nGBodo7RtCWfOneXY0deZnkqRzZj09gwQCcdw7IyrgzALJTM5lmX50jVgG6xasYFvf/vbXDpxgmUt\niwhaKiXSoNwf5fKZ08SjEf7qf3+EPXv2uBz1pWXES0uprqjgzIkTfO0rX4FEDhI5kgNjjHX14/F4\nOPDKK9xzzz2URiKoUpJNJnH87qrsjSDSlmUhpM1VZO7vZte7N96KvSccfS6b5H23b+QXv/o15lQH\nXKNPnLdMNm7dwonTJxDYKHaeSdvEEw0wMjxGR2UbqYuXuG1xOxe7LnPh+Rc4nbFpWLUCy8wwlU6y\n+JYV/N0/PU/ZF/6YnsQ0UVXnxliar5orQqIS9QcYSmeoK4m9aUu6Jh1Gz57jE8tWkFVgITyMIgsg\nmALf/M0MS0pZ+JzD0qVL6e66QlNzY7HBSggxBy2jFh4myakEE8OjbNiwganpaTTpMHWuk9aqUsoD\nPoRtYlomjuWQRMHjmAQcB81UcTTB0cEe9vSq2PFGQg/dxx6vRcoI4xUKYQdsFC
zhFJ2PwI1u34rL\n1XDIKBDbsbnwf9f0gmKQYik4mkOJY7Hsvo8w0vk6j+17ga9uDRDxp1320XfJPF4PLS0tbqdwwWri\nbnPO7G0LWVuh6ayi7CrkcaL3Ehcvn2bJmpX4yuOE/QHSVg4LtRioONLlTHIcUArSdbMTOYqicPbs\nWR544AFyuRwPP/wwuVyOVCrFkROv0zM8hDB0fOEQPf39jKuAhCEdogMD/PbVA0yPjqE1VuMrifLM\nb35Nf/8VXj4CKoINO7ZxsbeHj3/6k6S6hmhcs4q1t20npykkJyc5c/Q4+alpfD4/+176DT/duZPJ\nyUmWLl3KK6+8gpSSzs5OugdHqKmpYfPmzcWx53I5XnrpJQD+8+f/hM7OTmobKhgYGODpp59mbGyM\nsM+P1/IyODjIyZMnSaVSlJeX89hjj/GRj3yEc+fO4fV6SSaTnD9/nuHhYX70ox8VUzX9/f18//vf\n5/Dhw0VOnH/r9p5w9IODgxw8sI94ZQUXL11ixTIXjztTiFQdQcgbYP3aTXgDIWwECIe8aREsiXL8\nzElCtsmRi6fx+v2kbQs0i5WtjWRMSWlZnH/61Qu03Hcf/VkHn+rBEvZN/3i/FGSkxBQK3voarIlJ\niEXf8DPStjl2/Dh/0LAE7Q1SPTPyiQJAVenr6+P06dN0vEGBdbb5fD78AS9DwwNzC52O46bCChC8\nGaRCJpsmGAkh7SwhDcgkyaZHaamtQMgcjglSAdXWUBSJx9KQQqcvb/G9vbuZaGkidO/95KIxzo+N\nM+X3oo1PEXCgpq6OiWya9PAYkZoqTNMkZ1sLw8puwGauA0eAlApa4XExc7iEz2FKl4zYMJCaomr5\nIqJ2H789dZwty2pQ5Lvn6B3bJpFIzKEcmGkiupaG4FrzZF0hl8rw1QJt0tGYtBXSvWfJpoaJlFcS\nLanGEiqzAbZJx4MlFBerLkCXSjEo8fl8ZLNZ9u3bx7lz5zAMA1VV2bBhAyIcRI2EENIm7zWw/V4m\nVcBQGNVAn57gcnKSbDqJqGghH/YTLItQt34NPceO8v+2d+Yxdp3nef+9Z7n7PvtCznAXRZqiJGu1\noip0Y0u2UBUoGrhwGzQO0KJNARdI0dopUKBA2rp1kcYFWgRBLNRu2sRRkqZBbKOVZUlO41C0KFEU\nKYrikMOZ4exz587M3e9Zvv5xzp255Aw5I3HIy+U8wMHcOffc8531/d71eTO7dvHa9/8Us7PL63u7\no5ehJx6le99upqamKJfK1C1FWjfp60oTthyGO7s5XioxMDDAY489xrlz5xgfH2e2sLIudXR2dpY3\n33yT+ekZ4okEju01yF5aWmJqamqVnTOlUhQKBbLZLABvvPEGpmly/PhxZmZmiMfjVKvV1cl2fHyc\nUChEo9FgeXmZqakpRkZGaDQanqBvn1G4LZA7oZnD8OAO9V+/8Q36+gd56y/f5LPPPQa0vOBOHeXC\n2NgY01eucGDvPpQGRTR+evojFmyLYTOCW/P8v1EV5tzl93nh8y8xbWb4g9dO0PXXXuT9SJTpTB8W\nCtNdb4ZZ12jSLn4BqkDccQk5OoKXEnjlgzPYH43Q/dIXV/PE8bdvRbY6xvnvvsJvvPQlbBTJyPWv\ndzMAG07GmJycZHp6msOHD9+4GtXVQGlYTg3btimVSnR2dq5qIU3entVr6bq4ts3PTp7kyOFDRMKQ\n0hq8+8ZrHOrK0t+ZxRGNlaUlzHAYQ2kkU3GWG3FeO/0R7xfK9P+tF5nNxPlQj1PG9YjLXCFnRAi5\nJjWngV0s4xbLLFsWg7uGcZVCWJ+KvxXz1PRzh+u+36d511x/sZVQM1xigLO4Qv6DM+zr1cn86Xf5\nR08f5t/93u/x0fhUW6T9jmxG/dpzP3eVwNJv8Mq13uvoBlzjhnIpi7BgVSkaGhUbXCI4yqC7q5NM\nJkMqnWHejdLZu4OVlWWUJoRFY+6awH4ipjMyMkJ/fz9KKSYnJzGOHKJSq1GoV1HZFCUN7EYD8SeM\nUCjkNdlxLeIhr3q0uLzsZUmtLFEql6nW6wztHkLXdT44fZ7OXI7Bji5CtsP7757ihXQn2WyWRCKB\nUopwOMyya6/WDdTrdarVKqGkp0S1Bqib7s1kMrmqOJz76D3ee+sEzxw6QsL13sG55Tm6urqu4qnR\n/NzUVksqYnpPU2uLSjMSZWRkhO7utXTo/vg1boYNULeVn0df4fzoSdAbhGLrky+26n7Zyrvx7T85\nx/R8edNn+47Q6JPJBH3dHXSkU/R3rc81l0QIZdlkO9JcOPM+IeXSaJhYmiJfL1LPRHANIZ3LIg2L\nTj0BpShLI+9j7TrKyMRliERJRpLMOjYJ9E9EGlXWXRKOhqHAKZaRxSUW8vPeA9G8AdcE3vaW65g9\n3Thi+772G/tlWifeVCpFOp3e1MQHMEMGoVCIeDy+LlOnVRlRSqFE2L/7ADo6CoeF5SWK9Rp9vf0o\np45Swvz8PF0dHaS7esmvLPIXI5cZT3ax/7kX+NBUVBwDTTkkNcEGYq5GqVRkdnqJ8JVJGrMLqL5O\nIjuG0G0H8aONn6S+1sVn4AQERb3J06+8eoAwDpprE3MUEjMZfPgwl0/8BKXHuDhfxVV3Su7Dx8OK\nWVu3rscKYeLQk4iS1nRqDliaiYPO3MQFLp9ZYXl5GTu3l47unewc2kG2u4Ncppu5+tVCp9mqr1wu\nr2bOXCnkCUcihLJplkRRF3ByKUIeoSKlWg1XoMOMkbfqTE8WiKQTaAriXVnmi0vML+UxVrL09/Qi\n/d1cmJom19mJOA6zE1f46Yn3KBQKrJRKHNi3z2uB2ZVlcHCQgYEBwuEw2WyWsn19iVgqrRUwDQ4O\nkp/cvB/sjd6jVuuqbntPaSrVItzv8l43Wxb0fq7x28CkUurF7aRyRdkkogpxl+juTfCX7+YpuVV2\n7s/QPZRDqiCuTiqeI26bOItlkulOHAF7pUgqEWfYbYDPV2FTJtvZxweXRnGTw6R7eqiJYlpzMUTD\nAqoahK9JUWuKx2s1fV3hdZLShZoGam6BrmyauWwKVVgk1tNPzXawNQdLF6oixCt1UjWbiR/+iBee\n/XkMFK7m4ug3EvSCaUSoVCpks1l6e3t9Fsg1wd1oNK5qNuDlXbteSiZr7fYca22fGjqWX1Hpuq7H\nyR1Po5kaKMXExDTpzg4+Gh3nwb5BXNdhz+B+fvB/X6fWN81sJkXqc0/zUfwg71sKlI7uukitgVWr\n4paKTI6OYdQsBg4fIvfQI9RdB8sQnHAIcR1i/nlf238qvpFJrHlvlZf1o0Gj4RWfVTznQyYUxtI1\nbIGQCxEXag6IctEVVCTCwc88T6R3kD/4we9j6+3zsWoootek2W047fgBeOw1zVLWlXtD0fR/7Z93\nBPybbdHbkYaONLADS9OABVYuXcK65BH0jBkGyZ5udvT1k4zFKTgxBtJZ5sZm6O3rIRNOcHzi7VVS\nsf379wOwaK1lvyT8tN2dEmO5oZgP6
7hEULqDqjfYOTDMzoFhJiYmUM4cA31hqpdmMUbqyOgUvxQx\n6BtOwLAnWNcsmGWYXCZ/+ifU8YrhS9nc6riZTIauri5qoRx6JkPJhLIGjiNI3cGuWz4ltqcAJDBI\nYGC2vOIR2UDcNb+316wn3bJIOGCU1yZa+xp+oo0I/VTNQbkNoIEOiHZz/p5PGnjdCB9Ho/8qXlPw\n5jT379lmKlfHcRgeHma5ssz/fPciPypXOfHqKTJzcwwn4hzM9mCqfs6/+hbhUt5zqdTrPJnrx41d\nfVETsRgD3R385OKHNEIJSoU6TjhNXYEuEHa3RBRwNVyFEq+Qp1gsQiYF+WUqvmnpuKDExtV0IvE4\n1JZRi8uYaqt9YG4flDjYVo2oISxOTvPUww9QGpvgx8ePs/+xR5ivV3h3xSZ1YCeNJ44ylchQqdqg\n6YRwqaGo5Bep5vPEYjGGn/05XNdltl6j3u2Vz+c0jSRrNsxWr0HYbeUrcbHEe+gX5xewBPSeHoyW\nbKeia2M3GihlUc0vUpqe5cLYKMxcIteoYtbXa8b3C1o11QcjCaavzHH2nTPUajWqTgTTTJBIZ7lw\n6m3C4TB9GRvLsmg0Gky8dQaA3O5HV/fRdHHU9CixVJTd3R240TBOLMJoYo2QrGw10Bp1jkqcsYUl\nlqwldhMivInwak0nvuxr4LFYjOWFeUY/PMdczWDHoUN0799FTQMXg7AZ9xQC5T1pG6UX3+/YkqAX\nkUHgi8C/wWsILmwjlWsrarUaZncHK5EkY0OPUjiQxBCLKXEplxtcOn4GczLPp9JVjh39NA/FUxi1\nxjo6nJptk8l10BeOM17Vsbvj2CGvGhQRLM3BvA770UbFKJqmoRwHV9OIRUyqxQrpw59i5cxZZsZG\n6Rwa9kkSNXRXUVhcJDozz86ebsKxqJfCqdRqGfhWsFoyHlp7gUKhEPVG7brbNFn4QsbVHNorfner\nwcFBTp06xYEHD6BripjrMhRPMqBFiQ3v4WStwXe+/31k3xB7fvlXGCVOMd1FwVVgRrCLKyzUKzgX\nRokf2M/evkPexOfahC1Fp9LIli3siVnykxPk/ZcPTYh2dZLY7xG73SjuUPHNEb25jeMyOz9PeWWF\njp07cFCYxQoVHapWg9KFi9SWlkBcug/uZ+9TjxN96CH22EWqx1/lrfMntnzN7zVc5aMuNUiG4hiD\nuz1XmpggOppmYDkZdBFqVg0t6jFaNpkkL06tuUWasR+RJfKzDebPVAnlkvQcfJC5FtK+UN0iFLIw\nz11iuGxzqKuXSH6FWNMPdx00A9UAEX9DqdRIGyGS3b10qiQTE5M0rDqRzg5iHd1IKBDsm2GrGv1v\nAf8caJb4dbCNVK7rENEJmSZ5S6iFo0xIlFnNIpFKEXruc3TuE7Spt/neyb/i2aG9PLpjF/q1hTia\nAY4inc1h1leojZwjd+AwM3oUVxQm6wnJNoMSr92b5ioiCuKxGKV4FGdqlsjgEA3xCc1c0BsNVkZH\n2b9rN3HDxLIsbNkoHNkeGAoiusHI2dPs6OtDuS7luoWezjGva3z62DHyid2sqCRLtkVdg6RpMLe4\niBQWyR45RCqWoOLYlAvLdKczLJWWWT77IXnbxoxG2XvoEFaHV/RUKpXIdXexWNpcuxblHd/oqffQ\nFew8eAALRf+e3bhAyHLI64rimXNolQrxnQP0HT1MJJthZX6B2aUi/TrkZ6cJK4tw9OO3mbxX0KpY\nhLUwuun1BUYUXnSlAUphiqCJIkNkLcrt36uDidb3xA+M60JHNEQvBkXXYub0u5gqRCqV8rJ6Kg4L\n4yOYdomH9BjxhRXSukkccIzrT/KtbtNsa2xFAF1Hcw2KepgrH12iX3SiHX1cGh+nXC4zNzeHRGLE\nU17lcLPyN8AWBL2IvAjMKaVOishz2zVwK5Vrb3cXxYZDpzLJiommVXj+4V2c/tk7pA48jhVLgIKq\ncqgaVZaHnuG9HU+Re0RRH53lN//3D/nyU2meTtaJOAmulIrs6VDouqLbLmAsLRFNJ4jUl1iMxalq\nBqZanyFzLVzx/H22eNq44QK4uJZLOBGlo1QmfuAgF370JjMfnOPI4U8xKQ2qmsuuSISTb52k8+9+\nnvnFeVKplNfWbzMOVTxa4la0avDXQ3ObNa6Uq62SRDKN4PLj19+kt6eTUmmR3s4kbq2I1hHlrcvj\nTM3UmOzOsfurX+dnoSjzDROorc6HxVKFZDKJSiQQ17N83LBBaXKcwvHjZB8+Qt+zj/mFWi4VQLMV\njiZE4knKpSq65mAoQXza49ageOtLaWnQtWcXS+cvMPryf2fwkaMk0ikmamXsi5dpaEJu3zCdoQiz\nEZ1FXFhaBFMDM0K+UaUwN8Fz0Qj6HZBZdiegJApXBAcvAyqkOYjW8HijXB1EUXU2mBRD65+/sIKY\na+CKwpEa3aKx8vZZQskk4ViM/kKBzkaDPREvdTiMEMHrpVzconoZ36AVV75UoLZcIK65xHWXkOYw\nOTnJQ0eOMD869rFYYu8nbOWSfwb4GyLyBbzYTwr4FttI5brvgX2qaAhxEZKaIkSNTup0axZlt7Gh\nKSBKYQnYO/s49tV/yPde/TPmPrzA3z7yIE6lTKgngWiKsGmiaRrJmMGK5rEwaopVf96N0EzfMhR+\n84oWZkrbJpfLkbcsBp55goUrk141p+sQ0V3ihobUKri239zhDtDkvTZ9fiVNqcjZC6f44Ow5unt/\ngTfPn6P34F8n89jDnNFd5tzwOk9nNBolFItjGAaL+TwrFy5SnJli8PGjJJ54jKm5eQqNGpqCiJ+G\np4mLQrD9bJ+443VScmSVwXn9cQqYLqhwhIGHHyaxdx8TJ99h9sRJrGSU7n176E6lWS6XKV9nJzUD\norqGNOw7z2Mr66MV9gZFXcYGmR5qg74AhrneMrUbnvurdfIMydVWr3JdLz23xZci5gaZKc76oHAz\nZBxVECUBYUiFBZEy1Mv0R1wkKiRaFA4bm81qRltpOmbqy4TDYfL5vFeLUa8z0LuboZ4cJVEUFmYR\nA5594DDpdJpzb7+DncngJOPoutfCtbFBlepmDYBWbJeipljRvXc/KtpabMHy9+es7be5PyunKFdq\nVMpVGlYaMNEi6yk/3HBp3TrVWN+usN4ywTZrY7ItXgjHcdhq9G9TQa+U+jrwdQBfo/9nSqkvi8gr\nbBOV6+TMDJfKRVRcw8EgJJ5/tmbbhDdovQcQ1g0cHa4YMKscBj//OdSVPr75v/6YX3j6GVYq8+Tt\nCJXqNIOZBJeKNZTScVyF7Vi4gLqGdvdaM0/wBFJhdg7HcejuG1j7zqcsMBAS6RTL0zNMT1zB6szQ\nkCoTozMYovPyyy/zla98ZbPLvG24XgzAVYLgUiwWObB/N2Lr7Mt9ikv5Pv+PAwAACFtJREFUAt96\n5RWGXniJ/I7dLEd15kzBcoXmlW9eF81V1Ks1pucXcF2X5GAffbsHvZTHYolsNOZdV1mz/kUER8
CI\nhHFR1C0bTTfR6411j2gz0GcqqGsAguM45E2h9/GHmb08RvHSOL17DrJYrRPVdBxdo9FSubgao9B0\ndg4OUPtgZrsubYBNkNNKq1lfovuUHs7Hy+DWWvoqVKtVpqen0XWdZDJJT08Pji83LcsiEo3Q39+P\nFY2utj28FbBlfUsMaZmY3Y0qr8X1Wmg66+WXstZbTaLWT6ZXzYpO0wK+VtBvTY25mTz6f8E2UbmG\nE3H+48vf5sUnnuazhw4zkErg4pn1xnU072bOzKIBiDArGvbwHnJPHuOn5TpPRFJQXiYTSlBfXKKm\ncijbwFY64GxtHhSFK2Boct2UEUsg6kJVICqeBosoRt95G/DapL3zzjscO3ZsKyPeUjQtFA3PhNaw\nqRgu6aP7+av6IukYhMJCCRdjg7lZdJ2l+QXCCqJdnayUqxiGVznreaS8NorNO6bhXZ+wgrE3/p+X\nllRYxHxgHz1796Ffp8OQiyfsNQWWv++CAbuGhxi/fIWxEyeJPfsolnv9uJ6twFFw7sxZrMbWiNQC\n3CRc0yuKU17CA58gJjU+Pr762bZtOjo66O3tpVgssrS0RDLWufpdOJyis7OTRTtMoVDYxhPxoLHW\nrH1dOK/lwTOafZhpcpl4EAzE2aCxuL1B3GAjl5PdIvx9QV82tJZVzpZrRO6IylgRKQLn230c24xO\nPk4A+u7A3XpOQ0qprrYMnE2rX3/uM1dXxm6gNWzsutmitraB66ZZ9HPD3xkbuHw2+J2przVusa5l\npWuBE9bXbRdx1wRTM360UFxrBTnlZ/SYpkkmk1mlLLjuWE6YigZWJMQHE2M89fPPYUX7GB8f58qV\nK1iWRTqdps9wNu3nABu7cRYqFWZmZigtr3jxMkPHCemrxwkQjqz9rlwuUywW6Yt490tEVtNa9Q2G\nN931K5W+Pg4yOlVgaGiI3t5estks8XicZWPtmbAsi3/7G99k7PL43VEZC5xXSn263QexnRCRt4Nz\nCnA/oTW5YSP98eJFjz660RKj6OvzirE6Ojqo1+tX+ei3ikuXLjE2NkZPTw+FQoFCoUBf1+aUBdeD\nrutomsbzzz+Po4lXnBdeoxABsFqcOfl8nosXL9IbUjSJUhtWHZSGqa93yRhqA0G/Qf+IHb1dVJZr\nnJu7zAMPRMjtHaDSUtil6TG2mjl4p2j095wACc4pALTdWm2XBdZOy+9+G3tL1uqdotEHCHCvom3W\narsm5nYqBPfr2Jvh4xGy3zr8TrsP4BYgOKcAAQLcEbgjBL2fU39PITinAAEC3Cm4IwR9gAD3MNo5\nObZr7PvxnNs99g3R9mCsiDyPV2mrA7+rlPpGWw9oCxCRHXhUzD14icK/o5T6lojkgO8Bw8Bl4BeV\nUgWfBO5bwBeACvD3lVLvtOPYN8MtpaMOECBAW9BWjd4XKv8FeAF4EPg7IvJgO49pi7CBX1NKPQg8\nCfyqf9xfA15TSu0DXvP/B+/89vnLP8Bj+bxT0aSjbqJJR70XKODRUEMLHTXwn/ztAgQIcAei3a6b\nx4ERpdQlpVQDT3N8qc3HtCmUUtNNjVwpVcQTjAN4x/4df7PvAH/T//wS8F3l4TgeT1Afdxha6Kh/\n1/+/SUf9R/4m155T81z/CPisBIxSqxCR50XkvIiMiMjXNv/Fx97/yyIyJyJnWtblRORVEbng/836\n60VE/rN/LKdF5JGbHHuHiLwuIh+IyFkR+ertGF9EIiJyQkTe88f91/76XSLylr//74lIyF8f9v8f\n8b8fvpnz9vepi8i7IvLnt3vsm0G7Bf0qpbGPVrrjuwL+DXwYeAvoUUpN+1/N4Ll24O45zyYddbNq\nZct01ECTjvq+x22yVP8b8Pw1626XRdkui7YOHFNKPQQcBZ4XkSe5vVbnXWnxtlvQ39UQkQTwx8A/\nVUqttH7nE7m1vxpti2ilo273sdwDuOWWqlLqJ3hcUq24LRZluyxa//dN6kfTXxS3yeq8my3edgv6\nJqVxE610x3c0RMTEE/L/Qyn1J/7q2eYD7P+d89ffDefZpKO+jCeYjtFCR+1vsxEdNTeio75P0S4L\n7rZblLfbovVdJ6fw3q1XgYvcPqvzrrV42y3ofwbs8/1cITymyz9r8zFtCn9m/jZwTin1my1fNSma\nYT118y/5vsongeWWF+KOgFLq60qpQaXUMN59+LFS6svA63h007AxHTVskY46wO3D7bAo22HRKqUc\npdRRPKXjceCB7R5jI9ztFm9bKRD8VoP/BPg/eOmVLyulzrbzmLaIzwB/D3jf1y4Afh34BvCHIvIr\nwBjwi/53P8BLrRzBS6/85dt7uDeFbaOjvo/QLgtuVkT6lFLTt9qivJFFezvGV0oticjrwFPcZBOk\nLeKWNGC6bVBKBUuwBMs2LngK1CVgFxAC3gMO3YJxhoEzLf9/E/ia//lrwH/wP38R+CFeL50ngRM3\nOa7g1VD81jXrb+n4QBeQ8T9Hgb8AXgReAb7kr/9t4B/7n38V+G3/85eAP9ym6/4c8Of+59s69ic+\n5nYOHizBcq8ueBbcR3g+5H95C/b/+8A0Xle/K3hZHh142S4XgB8BOX9bwcsCugi8D3z6Jsd+Bs8t\ncxo45S9fuNXjA0fwrMrTwBngX/nrdwMn8CzmV4Cwvz7i/z/if797m659q6C/rWN/0qXtlbEBAgQI\nEODWot3B2AABAgQIcIsRCPoAAQIEuMcRCPoAAQIEuMcRCPoAAQIEuMcRCPoAAQIEuMcRCPoAAQIE\nuMcRCPoAAQIEuMcRCPoAAQIEuMfx/wF1P/8QHNldogAAAABJRU5ErkJggg==\n", 853 | "text/plain": [ 854 | "" 855 | ] 856 | }, 857 | "metadata": {}, 858 | "output_type": "display_data" 859 | } 860 | ], 861 | "source": [ 862 | "img1 = cv2.imread(pics[0])\n", 863 | "img2 = cv2.imread(pics[1])\n", 864 | "print(img1.shape, img2.shape)\n", 865 | "f,ax = plt.subplots(ncols=2)\n", 866 | "ax[0].imshow(img1)\n", 867 | "ax[1].imshow(img2)" 868 | ] 869 | }, 870 | { 871 | "cell_type": "code", 872 | "execution_count": 6, 873 | "metadata": { 874 | "ExecuteTime": { 875 | "end_time": "2017-06-27T21:40:48.261697Z", 876 | "start_time": "2017-06-27T21:40:48.256252Z" 877 | }, 878 | "hidden": true, 879 | "scrolled": true 880 | }, 881 | "outputs": [], 882 | "source": 
[ 883 | "from label_data import map_characters\n", 884 | "for char in map_characters.values():\n", 885 | " to_remove = []\n", 886 | " print(char)\n", 887 | " print(len(glob.glob('./characters/%s/*.*' % char)))\n", 888 | " pics = glob.glob('./characters/%s/*.*'% char)\n", 889 | " pics = {pic.split('/')[3]: cv2.imread(pic) for pic in pics}\n", 890 | " for a, b in itertools.combinations(pics.items(), 2):\n", 891 | " err = mse(a[1], b[1])\n", 892 | " if err == 0:\n", 893 | " to_remove.append('./characters/%s/' % char + b[0])\n", 894 | " for e in set(to_remove):\n", 895 | " try:\n", 896 | " os.remove(e)\n", 897 | " except:\n", 898 | " pass\n", 899 | " print(len(glob.glob('./characters/%s/*.*'% char)))" 900 | ] 901 | }, 902 | { 903 | "cell_type": "markdown", 904 | "metadata": {}, 905 | "source": [ 906 | "#### Create bounding box" 907 | ] 908 | }, 909 | { 910 | "cell_type": "code", 911 | "execution_count": 2, 912 | "metadata": { 913 | "ExecuteTime": { 914 | "end_time": "2017-06-27T21:12:47.375086Z", 915 | "start_time": "2017-06-27T21:12:47.358529Z" 916 | }, 917 | "collapsed": true 918 | }, 919 | "outputs": [], 920 | "source": [ 921 | "with open('./annotation.txt') as f:\n", 922 | " already_labeled = [k.strip().split(',') for k in f.readlines()]\n", 923 | "pics, x1, y1, x2, y2, char = zip(*already_labeled)" 924 | ] 925 | }, 926 | { 927 | "cell_type": "code", 928 | "execution_count": 5, 929 | "metadata": { 930 | "ExecuteTime": { 931 | "end_time": "2017-06-27T21:40:37.359030Z", 932 | "start_time": "2017-06-27T21:40:37.355586Z" 933 | } 934 | }, 935 | "outputs": [], 936 | "source": [ 937 | "ind = np.random.choice(range(len(pics)), 3)\n", 938 | "f, ax = plt.subplots(ncols=3, figsize=(10,5))\n", 939 | "for k, i in enumerate(ind):\n", 940 | " img = cv2.imread(pics[i])\n", 941 | " img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n", 942 | " cv2.rectangle(img, (int(x1[i]),int(y1[i])),(int(x2[i]),int(y2[i])),(0,255,0),3)\n", 943 | " font = cv2.FONT_HERSHEY_SIMPLEX\n", 944 | " cv2.putText(img, char[i].split('_')[0], (int(x1[i]),int(y1[i]) - 5), font, 0.7, (0,255,0), 1, cv2.LINE_AA)\n", 945 | " ax[k].imshow(img)" 946 | ] 947 | }, 948 | { 949 | "cell_type": "code", 950 | "execution_count": 4, 951 | "metadata": { 952 | "ExecuteTime": { 953 | "end_time": "2017-06-27T21:13:20.666994Z", 954 | "start_time": "2017-06-27T21:13:15.289826Z" 955 | } 956 | }, 957 | "outputs": [ 958 | { 959 | "name": "stderr", 960 | "output_type": "stream", 961 | "text": [ 962 | "100%|██████████| 5324/5324 [00:05<00:00, 994.88it/s] \n" 963 | ] 964 | } 965 | ], 966 | "source": [ 967 | "from shutil import copyfile\n", 968 | "from tqdm import tqdm\n", 969 | "for p in tqdm(pics):\n", 970 | " char = p.split('/')[8]\n", 971 | " pic_name = p.split('/')[::-1][0]\n", 972 | " copyfile('./characters/%s/%s' % (char, pic_name), './char_transfers/%s/%s' % (char, pic_name))" 973 | ] 974 | }, 975 | { 976 | "cell_type": "code", 977 | "execution_count": null, 978 | "metadata": { 979 | "collapsed": true 980 | }, 981 | "outputs": [], 982 | "source": [] 983 | } 984 | ], 985 | "metadata": { 986 | "kernelspec": { 987 | "display_name": "workenv", 988 | "language": "python", 989 | "name": "workenv" 990 | }, 991 | "language_info": { 992 | "codemirror_mode": { 993 | "name": "ipython", 994 | "version": 3 995 | }, 996 | "file_extension": ".py", 997 | "mimetype": "text/x-python", 998 | "name": "python", 999 | "nbconvert_exporter": "python", 1000 | "pygments_lexer": "ipython3", 1001 | "version": "3.5.3" 1002 | }, 1003 | "nav_menu": {}, 1004 | "toc": { 1005 | "navigate_menu": 
true, 1006 | "number_sections": true, 1007 | "sideBar": true, 1008 | "threshold": 6, 1009 | "toc_cell": false, 1010 | "toc_section_display": "block", 1011 | "toc_window_display": false 1012 | } 1013 | }, 1014 | "nbformat": 4, 1015 | "nbformat_minor": 2 1016 | } 1017 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SimpsonRecognition 2 | 3 | Training a Convolutional Neural Network to recognize characters from The Simpsons TV show using Keras (TensorFlow backend). 4 | For more detailed explanations, see the blog articles on Medium: [Part 1](https://medium.com/alex-attia-blog/the-simpsons-character-recognition-using-keras-d8e1796eae36) and [Part 2](https://medium.com/alex-attia-blog/the-simpsons-characters-recognition-and-detection-part-2-c44f9d5abf37). If you like it, don't hesitate to recommend it, as well as the dataset on [Kaggle](https://www.kaggle.com/alexattia/the-simpsons-characters-dataset). 5 | 6 | ### Part 0 : Collecting data 7 | 8 | This part is about collecting and labeling Simpsons pictures. 9 | Most of the pictures come from Simpsons episodes, analyzed frame by frame. 10 | 11 | Run ``python3 label_data.py`` in a folder containing Simpsons episodes (.avi format) to analyze them and label frames. 12 | You crop each frame (left part, right part, full frame or nothing) and then label it. 13 | 14 | You can find the dataset on [Kaggle](https://www.kaggle.com/alexattia/the-simpsons-characters-dataset). 15 | 16 | ### Part 1 : Training with Keras 17 | 18 | The first part trains the classification model; it is described in [this post](https://medium.com/alex-attia-blog/the-simpsons-character-recognition-using-keras-d8e1796eae36). 19 | I aim to have 1000 pictures per class (for 20 classes); unfortunately, some characters are not often on screen, so I have fewer pictures for them. 20 | As you can see in the Jupyter notebook, I benchmark two models: neural networks with 4 and 6 convolutional layers. Because of the small number of pictures (approx. 1k per class), I use data augmentation. 21 | Currently, I reach 96% accuracy (F1-score) over 18 classes. 22 | 23 | ### Part 2 : Faster R-CNN 24 | 25 | The second part is described in this [Medium post](https://medium.com/alex-attia-blog/the-simpsons-characters-recognition-and-detection-part-2-c44f9d5abf37). 26 | It is about upgrading the deep learning model to detect and recognize characters. I have to annotate data to get bounding boxes for the characters in each picture in order to train a new model: [Faster R-CNN](https://arxiv.org/abs/1506.01497) (which is based on a Region Proposal Network). As usual, the annotation text file with the bounding box coordinates will be released soon. 27 | The Keras implementation of this network comes from [here](https://github.com/yhenon/keras-frcnn) by Yann Henon. I edited the code to remove the parts which are useless for my purpose. 28 | 29 | ### Files description 30 | 31 | 1. `label_data.py` : utility functions for the notebooks + a script to label characters from frames of .avi videos 32 | 2. `label_pointer.py` : point with mouse clicks to save bounding box coordinates to the annotation text file (for already labeled pictures) 33 | 3. `train.py` : training the simple convnet 34 | 4. `train_frcnn.py -p annotation.txt` : training Faster R-CNN with data from the annotation text file 35 | 5. 
`test_frcnn.py -p path/test_data/` : testing Faster R-CNN 36 | 37 | ![Lisa picture](https://github.com/alexattia/SimpsonRecognition/blob/master/pics/mapple_lisa.png) 38 | -------------------------------------------------------------------------------- /faster_rcnn/FixedBatchNormalization.py: -------------------------------------------------------------------------------- 1 | from keras.engine import Layer, InputSpec 2 | from keras import initializers, regularizers 3 | from keras import backend as K 4 | 5 | 6 | class FixedBatchNormalization(Layer): 7 | 8 | def __init__(self, epsilon=1e-3, axis=-1, 9 | weights=None, beta_init='zero', gamma_init='one', 10 | gamma_regularizer=None, beta_regularizer=None, **kwargs): 11 | 12 | self.supports_masking = True 13 | self.beta_init = initializers.get(beta_init) 14 | self.gamma_init = initializers.get(gamma_init) 15 | self.epsilon = epsilon 16 | self.axis = axis 17 | self.gamma_regularizer = regularizers.get(gamma_regularizer) 18 | self.beta_regularizer = regularizers.get(beta_regularizer) 19 | self.initial_weights = weights 20 | super(FixedBatchNormalization, self).__init__(**kwargs) 21 | 22 | def build(self, input_shape): 23 | self.input_spec = [InputSpec(shape=input_shape)] 24 | shape = (input_shape[self.axis],) 25 | 26 | self.gamma = self.add_weight(shape, 27 | initializer=self.gamma_init, 28 | regularizer=self.gamma_regularizer, 29 | name='{}_gamma'.format(self.name), 30 | trainable=False) 31 | self.beta = self.add_weight(shape, 32 | initializer=self.beta_init, 33 | regularizer=self.beta_regularizer, 34 | name='{}_beta'.format(self.name), 35 | trainable=False) 36 | self.running_mean = self.add_weight(shape, initializer='zero', 37 | name='{}_running_mean'.format(self.name), 38 | trainable=False) 39 | self.running_std = self.add_weight(shape, initializer='one', 40 | name='{}_running_std'.format(self.name), 41 | trainable=False) 42 | 43 | if self.initial_weights is not None: 44 | self.set_weights(self.initial_weights) 45 | del self.initial_weights 46 | 47 | self.built = True 48 | 49 | def call(self, x, mask=None): 50 | 51 | assert self.built, 'Layer must be built before being called' 52 | input_shape = K.int_shape(x) 53 | 54 | reduction_axes = list(range(len(input_shape))) 55 | del reduction_axes[self.axis] 56 | broadcast_shape = [1] * len(input_shape) 57 | broadcast_shape[self.axis] = input_shape[self.axis] 58 | 59 | if sorted(reduction_axes) == range(K.ndim(x))[:-1]: 60 | x_normed = K.batch_normalization( 61 | x, self.running_mean, self.running_std, 62 | self.beta, self.gamma, 63 | epsilon=self.epsilon) 64 | else: 65 | # need broadcasting 66 | broadcast_running_mean = K.reshape(self.running_mean, broadcast_shape) 67 | broadcast_running_std = K.reshape(self.running_std, broadcast_shape) 68 | broadcast_beta = K.reshape(self.beta, broadcast_shape) 69 | broadcast_gamma = K.reshape(self.gamma, broadcast_shape) 70 | x_normed = K.batch_normalization( 71 | x, broadcast_running_mean, broadcast_running_std, 72 | broadcast_beta, broadcast_gamma, 73 | epsilon=self.epsilon) 74 | 75 | return x_normed 76 | 77 | def get_config(self): 78 | config = {'epsilon': self.epsilon, 79 | 'axis': self.axis, 80 | 'gamma_regularizer': self.gamma_regularizer.get_config() if self.gamma_regularizer else None, 81 | 'beta_regularizer': self.beta_regularizer.get_config() if self.beta_regularizer else None} 82 | base_config = super(FixedBatchNormalization, self).get_config() 83 | return dict(list(base_config.items()) + list(config.items())) 
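A quick usage note on `FixedBatchNormalization`: it is a drop-in replacement for Keras' `BatchNormalization` whose gamma, beta and running statistics are all created with `trainable=False`, so the batch-norm parameters of the pretrained backbone stay frozen while the detector is trained. The sketch below is an illustration only, not a file from this repository; it assumes the repository root is on the Python path, TensorFlow dimension ordering, and a Keras version contemporary with this code (roughly 2.0.x). The layer names merely mirror how the ResNet backbone in `faster_rcnn/resnet.py` is expected to wire the layer in.

```python
# Sketch only (assumptions: repo root on PYTHONPATH, 'tf' dim ordering, Keras ~2.0.x).
from keras.layers import Input, Conv2D, Activation
from keras.models import Model

from faster_rcnn.FixedBatchNormalization import FixedBatchNormalization

img_input = Input(shape=(None, None, 3))  # variable-size RGB input
x = Conv2D(64, (7, 7), strides=(2, 2), padding='same', name='conv1')(img_input)
x = FixedBatchNormalization(axis=3, name='bn_conv1')(x)  # scale/offset and running stats stay fixed
x = Activation('relu')(x)

model = Model(img_input, x)
model.summary()  # bn_conv1 reports 256 non-trainable parameters (4 weight vectors x 64 channels)
```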
-------------------------------------------------------------------------------- /faster_rcnn/RoiPoolingConv.py: -------------------------------------------------------------------------------- 1 | from keras.engine.topology import Layer 2 | import keras.backend as K 3 | 4 | if K.backend() == 'tensorflow': 5 | import tensorflow as tf 6 | 7 | class RoiPoolingConv(Layer): 8 | '''ROI pooling layer for 2D inputs. 9 | See Spatial Pyramid Pooling in Deep Convolutional Networks for Visual Recognition, 10 | K. He, X. Zhang, S. Ren, J. Sun 11 | # Arguments 12 | pool_size: int 13 | Size of pooling region to use. pool_size = 7 will result in a 7x7 region. 14 | num_rois: number of regions of interest to be used 15 | # Input shape 16 | list of two 4D tensors [X_img,X_roi] with shape: 17 | X_img: 18 | `(1, channels, rows, cols)` if dim_ordering='th' 19 | or 4D tensor with shape: 20 | `(1, rows, cols, channels)` if dim_ordering='tf'. 21 | X_roi: 22 | `(1,num_rois,4)` list of rois, with ordering (x,y,w,h) 23 | # Output shape 24 | 3D tensor with shape: 25 | `(1, num_rois, channels, pool_size, pool_size)` 26 | ''' 27 | def __init__(self, pool_size, num_rois, **kwargs): 28 | 29 | self.dim_ordering = K.image_dim_ordering() 30 | assert self.dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}' 31 | 32 | self.pool_size = pool_size 33 | self.num_rois = num_rois 34 | 35 | super(RoiPoolingConv, self).__init__(**kwargs) 36 | 37 | def build(self, input_shape): 38 | if self.dim_ordering == 'th': 39 | self.nb_channels = input_shape[0][1] 40 | elif self.dim_ordering == 'tf': 41 | self.nb_channels = input_shape[0][3] 42 | 43 | def compute_output_shape(self, input_shape): 44 | if self.dim_ordering == 'th': 45 | return None, self.num_rois, self.nb_channels, self.pool_size, self.pool_size 46 | else: 47 | return None, self.num_rois, self.pool_size, self.pool_size, self.nb_channels 48 | 49 | def call(self, x, mask=None): 50 | 51 | assert(len(x) == 2) 52 | 53 | img = x[0] 54 | rois = x[1] 55 | 56 | input_shape = K.shape(img) 57 | 58 | outputs = [] 59 | 60 | for roi_idx in range(self.num_rois): 61 | 62 | x = rois[0, roi_idx, 0] 63 | y = rois[0, roi_idx, 1] 64 | w = rois[0, roi_idx, 2] 65 | h = rois[0, roi_idx, 3] 66 | 67 | row_length = w / float(self.pool_size) 68 | col_length = h / float(self.pool_size) 69 | 70 | num_pool_regions = self.pool_size 71 | 72 | #NOTE: the RoiPooling implementation differs between theano and tensorflow due to the lack of a resize op 73 | # in theano. 
The theano implementation is much less efficient and leads to long compile times 74 | 75 | if self.dim_ordering == 'th': 76 | for jy in range(num_pool_regions): 77 | for ix in range(num_pool_regions): 78 | x1 = x + ix * row_length 79 | x2 = x1 + row_length 80 | y1 = y + jy * col_length 81 | y2 = y1 + col_length 82 | 83 | x1 = K.cast(x1, 'int32') 84 | x2 = K.cast(x2, 'int32') 85 | y1 = K.cast(y1, 'int32') 86 | y2 = K.cast(y2, 'int32') 87 | 88 | x2 = x1 + K.maximum(1,x2-x1) 89 | y2 = y1 + K.maximum(1,y2-y1) 90 | 91 | new_shape = [input_shape[0], input_shape[1], 92 | y2 - y1, x2 - x1] 93 | 94 | x_crop = img[:, :, y1:y2, x1:x2] 95 | xm = K.reshape(x_crop, new_shape) 96 | pooled_val = K.max(xm, axis=(2, 3)) 97 | outputs.append(pooled_val) 98 | 99 | elif self.dim_ordering == 'tf': 100 | x = K.cast(x, 'int32') 101 | y = K.cast(y, 'int32') 102 | w = K.cast(w, 'int32') 103 | h = K.cast(h, 'int32') 104 | 105 | rs = tf.image.resize_images(img[:, y:y+h, x:x+w, :], (self.pool_size, self.pool_size)) 106 | outputs.append(rs) 107 | 108 | final_output = K.concatenate(outputs, axis=0) 109 | final_output = K.reshape(final_output, (1, self.num_rois, self.pool_size, self.pool_size, self.nb_channels)) 110 | 111 | if self.dim_ordering == 'th': 112 | final_output = K.permute_dimensions(final_output, (0, 1, 4, 2, 3)) 113 | else: 114 | final_output = K.permute_dimensions(final_output, (0, 1, 2, 3, 4)) 115 | 116 | return final_output 117 | -------------------------------------------------------------------------------- /faster_rcnn/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexattia/SimpsonRecognition/fa65cc3124ed606e0ad6456ae49c734a2685db52/faster_rcnn/__init__.py -------------------------------------------------------------------------------- /faster_rcnn/config.py: -------------------------------------------------------------------------------- 1 | from keras import backend as K 2 | 3 | 4 | class Config: 5 | 6 | def __init__(self): 7 | 8 | self.verbose = True 9 | 10 | # setting for data augmentation 11 | self.use_horizontal_flips = False 12 | self.use_vertical_flips = False 13 | self.rot_90 = False 14 | 15 | # Number of ROIs per iteration. Higher means more memory use. 16 | self.num_rois = 32 17 | 18 | # Output path for weights. 19 | self.model_path = './model_frcnn.hdf5' 20 | 21 | # Location to store all the metadata related to the training (to be used when testing). 
22 | self.config_filename = "config.pickle" 23 | 24 | # anchor box scales 25 | self.anchor_box_scales = [64, 128, 256, 512] 26 | 27 | # anchor box ratios 28 | self.anchor_box_ratios = [[1, 1], [1, 2], [2, 1]] 29 | 30 | # size to resize the smallest side of the image 31 | self.im_size = 300 32 | 33 | # image channel-wise mean to subtract 34 | self.img_channel_mean = [103.939, 116.779, 123.68] 35 | self.img_scaling_factor = 1.0 36 | 37 | # number of ROIs at once 38 | self.num_rois = 4 39 | 40 | # stride at the RPN (this depends on the network configuration) 41 | self.rpn_stride = 16 42 | 43 | self.balanced_classes = False 44 | 45 | # scaling the stdev 46 | self.std_scaling = 4.0 47 | self.classifier_regr_std = [8.0, 8.0, 4.0, 4.0] 48 | 49 | # overlaps for RPN 50 | self.rpn_min_overlap = 0.3 51 | self.rpn_max_overlap = 0.7 52 | 53 | # overlaps for classifier ROIs 54 | self.classifier_min_overlap = 0.1 55 | self.classifier_max_overlap = 0.5 56 | 57 | # placeholder for the class mapping, automatically generated by the parser 58 | self.class_mapping = None 59 | 60 | #location of pretrained weights for the base network 61 | # weight files can be found at: 62 | # https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_th_dim_ordering_th_kernels_notop.h5 63 | # https://github.com/fchollet/deep-learning-models/releases/download/v0.2/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5 64 | if K.image_dim_ordering() == 'th': 65 | self.base_net_weights = 'resnet50_weights_th_dim_ordering_th_kernels_notop.h5' 66 | else: 67 | self.base_net_weights = 'resnet50_weights_tf_dim_ordering_tf_kernels.h5' 68 | -------------------------------------------------------------------------------- /faster_rcnn/data_augment.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import copy 4 | 5 | 6 | def augment(img_data, config, augment=True): 7 | assert 'filepath' in img_data 8 | assert 'bboxes' in img_data 9 | assert 'width' in img_data 10 | assert 'height' in img_data 11 | 12 | img_data_aug = copy.deepcopy(img_data) 13 | 14 | img = cv2.imread(img_data_aug['filepath']) 15 | 16 | if augment: 17 | rows, cols = img.shape[:2] 18 | 19 | if config.use_horizontal_flips and np.random.randint(0, 2) == 0: 20 | img = cv2.flip(img, 1) 21 | for bbox in img_data_aug['bboxes']: 22 | x1 = bbox['x1'] 23 | x2 = bbox['x2'] 24 | bbox['x2'] = cols - x1 25 | bbox['x1'] = cols - x2 26 | 27 | if config.use_vertical_flips and np.random.randint(0, 2) == 0: 28 | img = cv2.flip(img, 0) 29 | for bbox in img_data_aug['bboxes']: 30 | y1 = bbox['y1'] 31 | y2 = bbox['y2'] 32 | bbox['y2'] = rows - y1 33 | bbox['y1'] = rows - y2 34 | 35 | if config.rot_90: 36 | angle = np.random.choice([0,90,180,270],1)[0] 37 | if angle == 270: 38 | img = np.transpose(img, (1,0,2)) 39 | img = cv2.flip(img, 0) 40 | elif angle == 180: 41 | img = cv2.flip(img, -1) 42 | elif angle == 90: 43 | img = np.transpose(img, (1,0,2)) 44 | img = cv2.flip(img, 1) 45 | elif angle == 0: 46 | pass 47 | 48 | for bbox in img_data_aug['bboxes']: 49 | x1 = bbox['x1'] 50 | x2 = bbox['x2'] 51 | y1 = bbox['y1'] 52 | y2 = bbox['y2'] 53 | if angle == 270: 54 | bbox['x1'] = y1 55 | bbox['x2'] = y2 56 | bbox['y1'] = cols - x2 57 | bbox['y2'] = cols - x1 58 | elif angle == 180: 59 | bbox['x2'] = cols - x1 60 | bbox['x1'] = cols - x2 61 | bbox['y2'] = rows - y1 62 | bbox['y1'] = rows - y2 63 | elif angle == 90: 64 | bbox['x1'] = rows - y2 65 | bbox['x2'] = rows - y1 66 | bbox['y1'] = x1 67 | 
bbox['y2'] = x2 68 | elif angle == 0: 69 | pass 70 | 71 | img_data_aug['width'] = img.shape[1] 72 | img_data_aug['height'] = img.shape[0] 73 | return img_data_aug, img 74 | -------------------------------------------------------------------------------- /faster_rcnn/data_generators.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import numpy as np 3 | import cv2 4 | import random 5 | import copy 6 | from . import data_augment 7 | import threading 8 | import itertools 9 | 10 | def get_img_output_length(width, height): 11 | def get_output_length(input_length): 12 | # zero_pad 13 | input_length += 6 14 | # apply 4 strided convolutions 15 | filter_sizes = [7, 3, 1, 1] 16 | stride = 2 17 | for filter_size in filter_sizes: 18 | input_length = (input_length - filter_size + stride) // stride 19 | return input_length 20 | 21 | return get_output_length(width), get_output_length(height) 22 | 23 | 24 | def union(au, bu, area_intersection): 25 | area_a = (au[2] - au[0]) * (au[3] - au[1]) 26 | area_b = (bu[2] - bu[0]) * (bu[3] - bu[1]) 27 | area_union = area_a + area_b - area_intersection 28 | return area_union 29 | 30 | 31 | def intersection(ai, bi): 32 | x = max(ai[0], bi[0]) 33 | y = max(ai[1], bi[1]) 34 | w = min(ai[2], bi[2]) - x 35 | h = min(ai[3], bi[3]) - y 36 | if w < 0 or h < 0: 37 | return 0 38 | return w*h 39 | 40 | def normalize_img(x_img, C): 41 | # Zero-center by mean pixel, and preprocess image 42 | x_img = x_img[:,:, (2, 1, 0)] # BGR -> RGB 43 | x_img = x_img.astype(np.float32) 44 | x_img[:, :, 0] -= C.img_channel_mean[0] 45 | x_img[:, :, 1] -= C.img_channel_mean[1] 46 | x_img[:, :, 2] -= C.img_channel_mean[2] 47 | x_img /= C.img_scaling_factor 48 | 49 | x_img = np.transpose(x_img, (2, 0, 1)) 50 | x_img = np.expand_dims(x_img, axis=0) 51 | return x_img 52 | 53 | def iou(a, b): 54 | # a and b should be (x1,y1,x2,y2) 55 | 56 | if a[0] >= a[2] or a[1] >= a[3] or b[0] >= b[2] or b[1] >= b[3]: 57 | return 0.0 58 | 59 | area_i = intersection(a, b) 60 | area_u = union(a, b, area_i) 61 | 62 | return float(area_i) / float(area_u) 63 | 64 | 65 | def get_new_img_size(width, height, img_min_side=600): 66 | """ 67 | Get the resized shape, keeping the same ratio 68 | """ 69 | if width <= height: 70 | f = float(img_min_side) / width 71 | resized_height = int(f * height) 72 | resized_width = img_min_side 73 | else: 74 | f = float(img_min_side) / height 75 | resized_width = int(f * width) 76 | resized_height = img_min_side 77 | 78 | return resized_width, resized_height, f 79 | 80 | 81 | class SampleSelector: 82 | def __init__(self, class_count): 83 | # ignore classes that have zero samples 84 | self.classes = [b for b in class_count.keys() if class_count[b] > 0] 85 | self.class_cycle = itertools.cycle(self.classes) 86 | self.curr_class = next(self.class_cycle) 87 | 88 | def skip_sample_for_balanced_class(self, img_data): 89 | 90 | class_in_img = False 91 | 92 | for bbox in img_data['bboxes']: 93 | 94 | cls_name = bbox['class'] 95 | 96 | if cls_name == self.curr_class: 97 | class_in_img = True 98 | self.curr_class = next(self.class_cycle) 99 | break 100 | 101 | if class_in_img: 102 | return False 103 | else: 104 | return True 105 | 106 | 107 | def calc_rpn(C, img_data, width, height, resized_width, resized_height): 108 | 109 | downscale = float(C.rpn_stride) 110 | anchor_sizes = C.anchor_box_scales 111 | anchor_ratios = C.anchor_box_ratios 112 | num_anchors = len(anchor_sizes) * len(anchor_ratios) 113 | 114 | # calculate 
the output map size based on the network architecture 115 | (output_width, output_height) = get_img_output_length(resized_width, resized_height) 116 | 117 | n_anchratios = len(anchor_ratios) 118 | 119 | # initialise empty output objectives 120 | y_rpn_overlap = np.zeros((output_height, output_width, num_anchors)) 121 | y_is_box_valid = np.zeros((output_height, output_width, num_anchors)) 122 | y_rpn_regr = np.zeros((output_height, output_width, num_anchors * 4)) 123 | 124 | num_bboxes = len(img_data['bboxes']) 125 | 126 | num_anchors_for_bbox = np.zeros(num_bboxes).astype(int) 127 | best_anchor_for_bbox = -1*np.ones((num_bboxes, 4)).astype(int) 128 | best_iou_for_bbox = np.zeros(num_bboxes).astype(np.float32) 129 | best_x_for_bbox = np.zeros((num_bboxes, 4)).astype(int) 130 | best_dx_for_bbox = np.zeros((num_bboxes, 4)).astype(np.float32) 131 | 132 | # get the GT box coordinates, and resize to account for image resizing 133 | gta = np.zeros((num_bboxes, 4)) 134 | for bbox_num, bbox in enumerate(img_data['bboxes']): 135 | # get the GT box coordinates, and resize to account for image resizing 136 | gta[bbox_num, 0] = bbox['x1'] * (resized_width / float(width)) 137 | gta[bbox_num, 1] = bbox['x2'] * (resized_width / float(width)) 138 | gta[bbox_num, 2] = bbox['y1'] * (resized_height / float(height)) 139 | gta[bbox_num, 3] = bbox['y2'] * (resized_height / float(height)) 140 | 141 | # rpn ground truth 142 | for anchor_size_idx in range(len(anchor_sizes)): 143 | for anchor_ratio_idx in range(n_anchratios): 144 | anchor_x = anchor_sizes[anchor_size_idx] * anchor_ratios[anchor_ratio_idx][0] 145 | anchor_y = anchor_sizes[anchor_size_idx] * anchor_ratios[anchor_ratio_idx][1] 146 | 147 | for ix in range(output_width): 148 | # x-coordinates of the current anchor box 149 | x1_anc = downscale * (ix + 0.5) - anchor_x / 2 150 | x2_anc = downscale * (ix + 0.5) + anchor_x / 2 151 | 152 | # ignore boxes that go across image boundaries 153 | if x1_anc < 0 or x2_anc > resized_width: 154 | continue 155 | 156 | for jy in range(output_height): 157 | 158 | # y-coordinates of the current anchor box 159 | y1_anc = downscale * (jy + 0.5) - anchor_y / 2 160 | y2_anc = downscale * (jy + 0.5) + anchor_y / 2 161 | 162 | # ignore boxes that go across image boundaries 163 | if y1_anc < 0 or y2_anc > resized_height: 164 | continue 165 | 166 | # bbox_type indicates whether an anchor should be a target 167 | bbox_type = 'neg' 168 | 169 | # this is the best IOU for the (x,y) coord and the current anchor 170 | # note that this is different from the best IOU for a GT bbox 171 | best_iou_for_loc = 0.0 172 | 173 | for bbox_num in range(num_bboxes): 174 | 175 | # get IOU of the current GT box and the current anchor box 176 | curr_iou = iou([gta[bbox_num, 0], gta[bbox_num, 2], gta[bbox_num, 1], gta[bbox_num, 3]], [x1_anc, y1_anc, x2_anc, y2_anc]) 177 | # calculate the regression targets if they will be needed 178 | if curr_iou > best_iou_for_bbox[bbox_num] or curr_iou > C.rpn_max_overlap: 179 | cx = (gta[bbox_num, 0] + gta[bbox_num, 1]) / 2.0 180 | cy = (gta[bbox_num, 2] + gta[bbox_num, 3]) / 2.0 181 | cxa = (x1_anc + x2_anc)/2.0 182 | cya = (y1_anc + y2_anc)/2.0 183 | 184 | tx = (cx - cxa) / (x2_anc - x1_anc) 185 | ty = (cy - cya) / (y2_anc - y1_anc) 186 | tw = np.log((gta[bbox_num, 1] - gta[bbox_num, 0]) / (x2_anc - x1_anc)) 187 | th = np.log((gta[bbox_num, 3] - gta[bbox_num, 2]) / (y2_anc - y1_anc)) 188 | 189 | if img_data['bboxes'][bbox_num]['class'] != 'bg': 190 | 191 | # all GT boxes should be mapped to an anchor box, so we 
keep track of which anchor box was best 192 | if curr_iou > best_iou_for_bbox[bbox_num]: 193 | best_anchor_for_bbox[bbox_num] = [jy, ix, anchor_ratio_idx, anchor_size_idx] 194 | best_iou_for_bbox[bbox_num] = curr_iou 195 | best_x_for_bbox[bbox_num,:] = [x1_anc, x2_anc, y1_anc, y2_anc] 196 | best_dx_for_bbox[bbox_num,:] = [tx, ty, tw, th] 197 | 198 | # we set the anchor to positive if the IOU is >0.7 (it does not matter if there was another better box, it just indicates overlap) 199 | if curr_iou > C.rpn_max_overlap: 200 | bbox_type = 'pos' 201 | num_anchors_for_bbox[bbox_num] += 1 202 | # we update the regression layer target if this IOU is the best for the current (x,y) and anchor position 203 | if curr_iou > best_iou_for_loc: 204 | best_iou_for_loc = curr_iou 205 | best_regr = (tx, ty, tw, th) 206 | 207 | # if the IOU is >0.3 and <0.7, it is ambiguous and no included in the objective 208 | if C.rpn_min_overlap < curr_iou < C.rpn_max_overlap: 209 | # gray zone between neg and pos 210 | if bbox_type != 'pos': 211 | bbox_type = 'neutral' 212 | 213 | # turn on or off outputs depending on IOUs 214 | if bbox_type == 'neg': 215 | y_is_box_valid[jy, ix, anchor_ratio_idx + n_anchratios * anchor_size_idx] = 1 216 | y_rpn_overlap[jy, ix, anchor_ratio_idx + n_anchratios * anchor_size_idx] = 0 217 | elif bbox_type == 'neutral': 218 | y_is_box_valid[jy, ix, anchor_ratio_idx + n_anchratios * anchor_size_idx] = 0 219 | y_rpn_overlap[jy, ix, anchor_ratio_idx + n_anchratios * anchor_size_idx] = 0 220 | elif bbox_type == 'pos': 221 | y_is_box_valid[jy, ix, anchor_ratio_idx + n_anchratios * anchor_size_idx] = 1 222 | y_rpn_overlap[jy, ix, anchor_ratio_idx + n_anchratios * anchor_size_idx] = 1 223 | start = 4 * (anchor_ratio_idx + n_anchratios * anchor_size_idx) 224 | y_rpn_regr[jy, ix, start:start+4] = best_regr 225 | 226 | # we ensure that every bbox has at least one positive RPN region 227 | for idx in range(num_anchors_for_bbox.shape[0]): 228 | if num_anchors_for_bbox[idx] == 0: 229 | # no box with an IOU greater than zero ... 230 | if best_anchor_for_bbox[idx, 0] == -1: 231 | continue 232 | y_is_box_valid[ 233 | best_anchor_for_bbox[idx,0], best_anchor_for_bbox[idx,1], best_anchor_for_bbox[idx,2] + n_anchratios * 234 | best_anchor_for_bbox[idx,3]] = 1 235 | y_rpn_overlap[ 236 | best_anchor_for_bbox[idx,0], best_anchor_for_bbox[idx,1], best_anchor_for_bbox[idx,2] + n_anchratios * 237 | best_anchor_for_bbox[idx,3]] = 1 238 | start = 4 * (best_anchor_for_bbox[idx,2] + n_anchratios * best_anchor_for_bbox[idx,3]) 239 | y_rpn_regr[ 240 | best_anchor_for_bbox[idx,0], best_anchor_for_bbox[idx,1], start:start+4] = best_dx_for_bbox[idx, :] 241 | 242 | y_rpn_overlap = np.transpose(y_rpn_overlap, (2, 0, 1)) 243 | y_rpn_overlap = np.expand_dims(y_rpn_overlap, axis=0) 244 | 245 | y_is_box_valid = np.transpose(y_is_box_valid, (2, 0, 1)) 246 | y_is_box_valid = np.expand_dims(y_is_box_valid, axis=0) 247 | 248 | y_rpn_regr = np.transpose(y_rpn_regr, (2, 0, 1)) 249 | y_rpn_regr = np.expand_dims(y_rpn_regr, axis=0) 250 | 251 | pos_locs = np.where(np.logical_and(y_rpn_overlap[0, :, :, :] == 1, y_is_box_valid[0, :, :, :] == 1)) 252 | neg_locs = np.where(np.logical_and(y_rpn_overlap[0, :, :, :] == 0, y_is_box_valid[0, :, :, :] == 1)) 253 | 254 | num_pos = len(pos_locs[0]) 255 | 256 | # one issue is that the RPN has many more negative than positive regions, so we turn off some of the negative 257 | # regions. We also limit it to 256 regions. 
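# Illustration (assuming the usual setting of 3 anchor scales x 3 ratios, i.e. 9 anchors per
# feature-map cell): a 600x800 resized image (height x width) maps to a 38x50 feature grid
# through get_img_output_length, hence 38*50*9 = 17100 candidate anchors, of which at most 256
# (and at most 128 positives) are kept valid for the RPN classification loss by the subsampling below.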
258 | num_regions = 256 259 | 260 | if len(pos_locs[0]) > num_regions//2: 261 | val_locs = random.sample(range(len(pos_locs[0])), len(pos_locs[0]) - num_regions//2) 262 | y_is_box_valid[0, pos_locs[0][val_locs], pos_locs[1][val_locs], pos_locs[2][val_locs]] = 0 263 | num_pos = num_regions//2 264 | 265 | if len(neg_locs[0]) + num_pos > num_regions: 266 | val_locs = random.sample(range(len(neg_locs[0])), len(neg_locs[0]) - num_pos) 267 | y_is_box_valid[0, neg_locs[0][val_locs], neg_locs[1][val_locs], neg_locs[2][val_locs]] = 0 268 | 269 | y_rpn_cls = np.concatenate([y_is_box_valid, y_rpn_overlap], axis=1) 270 | y_rpn_regr = np.concatenate([np.repeat(y_rpn_overlap, 4, axis=1), y_rpn_regr], axis=1) 271 | 272 | return np.copy(y_rpn_cls), np.copy(y_rpn_regr) 273 | 274 | 275 | class threadsafe_iter: 276 | """Takes an iterator/generator and makes it thread-safe by 277 | serializing calls to the `next` method of the given iterator/generator. 278 | """ 279 | def __init__(self, it): 280 | self.it = it 281 | self.lock = threading.Lock() 282 | 283 | def __iter__(self): 284 | return self 285 | 286 | def next(self): 287 | with self.lock: 288 | return next(self.it) 289 | 290 | 291 | def threadsafe_generator(f): 292 | """A decorator that takes a generator function and makes it thread-safe. 293 | """ 294 | def g(*a, **kw): 295 | return threadsafe_iter(f(*a, **kw)) 296 | return g 297 | 298 | def get_anchor_gt(all_img_data, class_count, C, backend, mode='train'): 299 | sample_selector = SampleSelector(class_count) 300 | while True: 301 | if mode == 'train': 302 | random.shuffle(all_img_data) 303 | 304 | for img_data in all_img_data: 305 | try: 306 | 307 | if C.balanced_classes and sample_selector.skip_sample_for_balanced_class(img_data): 308 | continue 309 | 310 | # read in image, and optionally add augmentation 311 | if mode == 'train': 312 | img_data_aug, x_img = data_augment.augment(img_data, C, augment=True) 313 | else: 314 | img_data_aug, x_img = data_augment.augment(img_data, C, augment=False) 315 | 316 | (width, height) = (img_data_aug['width'], img_data_aug['height']) 317 | (rows, cols, _) = x_img.shape 318 | 319 | assert cols == width 320 | assert rows == height 321 | 322 | # get image dimensions for resizing 323 | resized_width, resized_height, _ = get_new_img_size(width, height, C.im_size) 324 | 325 | # resize the image so that the smallest side is 600px (C.im_size) 326 | x_img = cv2.resize(x_img, (resized_width, resized_height), interpolation=cv2.INTER_CUBIC) 327 | 328 | try: 329 | y_rpn_cls, y_rpn_regr = calc_rpn(C, img_data_aug, width, height, resized_width, resized_height) 330 | except: 331 | continue 332 | x_img = normalize_img(x_img, C) 333 | 334 | y_rpn_regr[:, y_rpn_regr.shape[1]//2:, :, :] *= C.std_scaling 335 | 336 | if backend == 'tf': 337 | x_img = np.transpose(x_img, (0, 2, 3, 1)) 338 | y_rpn_cls = np.transpose(y_rpn_cls, (0, 2, 3, 1)) 339 | y_rpn_regr = np.transpose(y_rpn_regr, (0, 2, 3, 1)) 340 | 341 | yield np.copy(x_img), [np.copy(y_rpn_cls), np.copy(y_rpn_regr)], img_data_aug 342 | 343 | except Exception as e: 344 | print(e) 345 | continue 346 | -------------------------------------------------------------------------------- /faster_rcnn/losses.py: -------------------------------------------------------------------------------- 1 | from keras import backend as K 2 | from keras.objectives import categorical_crossentropy 3 | 4 | if K.image_dim_ordering() == 'tf': 5 | import tensorflow as tf 6 | 7 | lambda_rpn_regr = 1.0 8 | lambda_rpn_class = 1.0 9 | 10 | lambda_cls_regr = 1.0 11 | lambda_cls_class
= 1.0 12 | 13 | epsilon = 1e-4 14 | 15 | 16 | def rpn_loss_regr(num_anchors): 17 | def rpn_loss_regr_fixed_num(y_true, y_pred): 18 | if K.image_dim_ordering() == 'th': 19 | x = y_true[:, 4 * num_anchors:, :, :] - y_pred 20 | x_abs = K.abs(x) 21 | x_bool = K.less_equal(x_abs, 1.0) 22 | return lambda_rpn_regr * K.sum( 23 | y_true[:, :4 * num_anchors, :, :] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) / K.sum(epsilon + y_true[:, :4 * num_anchors, :, :]) 24 | else: 25 | x = y_true[:, :, :, 4 * num_anchors:] - y_pred 26 | x_abs = K.abs(x) 27 | x_bool = K.cast(K.less_equal(x_abs, 1.0), tf.float32) 28 | 29 | return lambda_rpn_regr * K.sum( 30 | y_true[:, :, :, :4 * num_anchors] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) / K.sum(epsilon + y_true[:, :, :, :4 * num_anchors]) 31 | 32 | return rpn_loss_regr_fixed_num 33 | 34 | 35 | def rpn_loss_cls(num_anchors): 36 | def rpn_loss_cls_fixed_num(y_true, y_pred): 37 | if K.image_dim_ordering() == 'tf': 38 | return lambda_rpn_class * K.sum(y_true[:, :, :, :num_anchors] * K.binary_crossentropy(y_pred[:, :, :, :], y_true[:, :, :, num_anchors:])) / K.sum(epsilon + y_true[:, :, :, :num_anchors]) 39 | else: 40 | return lambda_rpn_class * K.sum(y_true[:, :num_anchors, :, :] * K.binary_crossentropy(y_pred[:, :, :, :], y_true[:, num_anchors:, :, :])) / K.sum(epsilon + y_true[:, :num_anchors, :, :]) 41 | 42 | return rpn_loss_cls_fixed_num 43 | 44 | 45 | def class_loss_regr(num_classes): 46 | def class_loss_regr_fixed_num(y_true, y_pred): 47 | x = y_true[:, :, 4*num_classes:] - y_pred 48 | x_abs = K.abs(x) 49 | x_bool = K.cast(K.less_equal(x_abs, 1.0), 'float32') 50 | return lambda_cls_regr * K.sum(y_true[:, :, :4*num_classes] * (x_bool * (0.5 * x * x) + (1 - x_bool) * (x_abs - 0.5))) / K.sum(epsilon + y_true[:, :, :4*num_classes]) 51 | return class_loss_regr_fixed_num 52 | 53 | 54 | def class_loss_cls(y_true, y_pred): 55 | return lambda_cls_class * K.mean(categorical_crossentropy(y_true[0, :, :], y_pred[0, :, :])) 56 | -------------------------------------------------------------------------------- /faster_rcnn/parser.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import random 4 | import pprint 5 | 6 | def get_data(input_path): 7 | all_imgs = {} 8 | classes_count = {} 9 | class_mapping = {} 10 | with open(input_path,'r') as f: 11 | print('Parsing annotation files') 12 | for line in f: 13 | line_split = line.strip().split(',') 14 | (filename,x1,y1,x2,y2,class_name) = line_split 15 | 16 | if class_name not in classes_count: 17 | classes_count[class_name] = 1 18 | else: 19 | classes_count[class_name] += 1 20 | 21 | if class_name not in class_mapping: 22 | class_mapping[class_name] = len(class_mapping) 23 | 24 | if filename not in all_imgs: 25 | all_imgs[filename] = {} 26 | 27 | img = cv2.imread(filename) 28 | (rows,cols) = img.shape[:2] 29 | all_imgs[filename]['filepath'] = filename 30 | all_imgs[filename]['width'] = cols 31 | all_imgs[filename]['height'] = rows 32 | all_imgs[filename]['bboxes'] = [] 33 | if np.random.randint(0,6) > 0: 34 | all_imgs[filename]['imageset'] = 'trainval' 35 | else: 36 | all_imgs[filename]['imageset'] = 'test' 37 | 38 | all_imgs[filename]['bboxes'].append({'class': class_name, 'x1': int(x1), 'x2': int(x2), 'y1': int(y1), 'y2': int(y2)}) 39 | 40 | all_data = [] 41 | for key in all_imgs: 42 | all_data.append(all_imgs[key]) 43 | 44 | classes_count['bg'] = 0 45 | class_mapping['bg'] = len(class_mapping) 46 | random.shuffle(all_data) 
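# Each entry of all_data now describes one image (filepath, width, height, imageset flag)
# plus its list of bounding boxes, parsed from annotation lines of the form
# "filepath,x1,y1,x2,y2,class_name"; the np.random.randint(0,6) > 0 draw above sends
# roughly 5/6 of the images to 'trainval' and 1/6 to 'test'.
# Hypothetical annotation line: ./characters/homer_simpson/pic_0012.jpg,84,35,215,190,homer_simpson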
47 | print('Training images per class ({} classes) :'.format(len(classes_count))) 48 | pprint.pprint(classes_count) 49 | return all_data, classes_count, class_mapping 50 | 51 | 52 | -------------------------------------------------------------------------------- /faster_rcnn/resnet.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | '''ResNet50 model for Keras. 3 | # Reference: 4 | - [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) 5 | Adapted from code contributed by BigMoyan. 6 | ''' 7 | 8 | from __future__ import print_function 9 | from __future__ import absolute_import 10 | 11 | from keras.layers import Input, Add, Dense, Activation, Flatten, Convolution2D, MaxPooling2D, ZeroPadding2D, \ 12 | AveragePooling2D, TimeDistributed 13 | 14 | from keras import backend as K 15 | 16 | from faster_rcnn.RoiPoolingConv import RoiPoolingConv 17 | from faster_rcnn.FixedBatchNormalization import FixedBatchNormalization 18 | 19 | def identity_block(input_tensor, kernel_size, filters, stage, block, trainable=True): 20 | 21 | nb_filter1, nb_filter2, nb_filter3 = filters 22 | 23 | if K.image_dim_ordering() == 'tf': 24 | bn_axis = 3 25 | else: 26 | bn_axis = 1 27 | 28 | conv_name_base = 'res' + str(stage) + block + '_branch' 29 | bn_name_base = 'bn' + str(stage) + block + '_branch' 30 | 31 | x = Convolution2D(nb_filter1, (1, 1), name=conv_name_base + '2a', trainable=trainable)(input_tensor) 32 | x = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x) 33 | x = Activation('relu')(x) 34 | 35 | x = Convolution2D(nb_filter2, (kernel_size, kernel_size), padding='same', name=conv_name_base + '2b', trainable=trainable)(x) 36 | x = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x) 37 | x = Activation('relu')(x) 38 | 39 | x = Convolution2D(nb_filter3, (1, 1), name=conv_name_base + '2c', trainable=trainable)(x) 40 | x = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x) 41 | 42 | x = Add()([x, input_tensor]) 43 | x = Activation('relu')(x) 44 | return x 45 | 46 | 47 | def identity_block_td(input_tensor, kernel_size, filters, stage, block, trainable=True): 48 | 49 | # identity block time distributed 50 | 51 | nb_filter1, nb_filter2, nb_filter3 = filters 52 | if K.image_dim_ordering() == 'tf': 53 | bn_axis = 3 54 | else: 55 | bn_axis = 1 56 | 57 | conv_name_base = 'res' + str(stage) + block + '_branch' 58 | bn_name_base = 'bn' + str(stage) + block + '_branch' 59 | 60 | x = TimeDistributed(Convolution2D(nb_filter1, (1, 1), trainable=trainable, kernel_initializer='normal'), name=conv_name_base + '2a')(input_tensor) 61 | x = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '2a')(x) 62 | x = Activation('relu')(x) 63 | 64 | x = TimeDistributed(Convolution2D(nb_filter2, (kernel_size, kernel_size), trainable=trainable, kernel_initializer='normal',padding='same'), name=conv_name_base + '2b')(x) 65 | x = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '2b')(x) 66 | x = Activation('relu')(x) 67 | 68 | x = TimeDistributed(Convolution2D(nb_filter3, (1, 1), trainable=trainable, kernel_initializer='normal'), name=conv_name_base + '2c')(x) 69 | x = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '2c')(x) 70 | 71 | x = Add()([x, input_tensor]) 72 | x = Activation('relu')(x) 73 | 74 | return x 75 | 76 | def conv_block(input_tensor, kernel_size, filters, stage, block, strides=(2, 2), trainable=True): 77 | 78 | 
nb_filter1, nb_filter2, nb_filter3 = filters 79 | if K.image_dim_ordering() == 'tf': 80 | bn_axis = 3 81 | else: 82 | bn_axis = 1 83 | 84 | conv_name_base = 'res' + str(stage) + block + '_branch' 85 | bn_name_base = 'bn' + str(stage) + block + '_branch' 86 | 87 | x = Convolution2D(nb_filter1, (1, 1), strides=strides, name=conv_name_base + '2a', trainable=trainable)(input_tensor) 88 | x = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x) 89 | x = Activation('relu')(x) 90 | 91 | x = Convolution2D(nb_filter2, (kernel_size, kernel_size), padding='same', name=conv_name_base + '2b', trainable=trainable)(x) 92 | x = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x) 93 | x = Activation('relu')(x) 94 | 95 | x = Convolution2D(nb_filter3, (1, 1), name=conv_name_base + '2c', trainable=trainable)(x) 96 | x = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x) 97 | 98 | shortcut = Convolution2D(nb_filter3, (1, 1), strides=strides, name=conv_name_base + '1', trainable=trainable)(input_tensor) 99 | shortcut = FixedBatchNormalization(axis=bn_axis, name=bn_name_base + '1')(shortcut) 100 | 101 | x = Add()([x, shortcut]) 102 | x = Activation('relu')(x) 103 | return x 104 | 105 | 106 | def conv_block_td(input_tensor, kernel_size, filters, stage, block, input_shape, strides=(2, 2), trainable=True): 107 | 108 | # conv block time distributed 109 | 110 | nb_filter1, nb_filter2, nb_filter3 = filters 111 | if K.image_dim_ordering() == 'tf': 112 | bn_axis = 3 113 | else: 114 | bn_axis = 1 115 | 116 | conv_name_base = 'res' + str(stage) + block + '_branch' 117 | bn_name_base = 'bn' + str(stage) + block + '_branch' 118 | 119 | x = TimeDistributed(Convolution2D(nb_filter1, (1, 1), strides=strides, trainable=trainable, kernel_initializer='normal'), input_shape=input_shape, name=conv_name_base + '2a')(input_tensor) 120 | x = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '2a')(x) 121 | x = Activation('relu')(x) 122 | 123 | x = TimeDistributed(Convolution2D(nb_filter2, (kernel_size, kernel_size), padding='same', trainable=trainable, kernel_initializer='normal'), name=conv_name_base + '2b')(x) 124 | x = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '2b')(x) 125 | x = Activation('relu')(x) 126 | 127 | x = TimeDistributed(Convolution2D(nb_filter3, (1, 1), kernel_initializer='normal'), name=conv_name_base + '2c', trainable=trainable)(x) 128 | x = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '2c')(x) 129 | 130 | shortcut = TimeDistributed(Convolution2D(nb_filter3, (1, 1), strides=strides, trainable=trainable, kernel_initializer='normal'), name=conv_name_base + '1')(input_tensor) 131 | shortcut = TimeDistributed(FixedBatchNormalization(axis=bn_axis), name=bn_name_base + '1')(shortcut) 132 | 133 | x = Add()([x, shortcut]) 134 | x = Activation('relu')(x) 135 | return x 136 | 137 | def nn_base(input_tensor=None, trainable=False): 138 | 139 | # Determine proper input shape 140 | if K.image_dim_ordering() == 'th': 141 | input_shape = (3, None, None) 142 | else: 143 | input_shape = (None, None, 3) 144 | 145 | if input_tensor is None: 146 | img_input = Input(shape=input_shape) 147 | else: 148 | if not K.is_keras_tensor(input_tensor): 149 | img_input = Input(tensor=input_tensor, shape=input_shape) 150 | else: 151 | img_input = input_tensor 152 | 153 | if K.image_dim_ordering() == 'tf': 154 | bn_axis = 3 155 | else: 156 | bn_axis = 1 157 | 158 | x = ZeroPadding2D((3, 3))(img_input) 159 | 160 | x = 
Convolution2D(64, (7, 7), strides=(2, 2), name='conv1', trainable = trainable)(x) 161 | x = FixedBatchNormalization(axis=bn_axis, name='bn_conv1')(x) 162 | x = Activation('relu')(x) 163 | x = MaxPooling2D((3, 3), strides=(2, 2))(x) 164 | 165 | x = conv_block(x, 3, [64, 64, 256], stage=2, block='a', strides=(1, 1), trainable = trainable) 166 | x = identity_block(x, 3, [64, 64, 256], stage=2, block='b', trainable = trainable) 167 | x = identity_block(x, 3, [64, 64, 256], stage=2, block='c', trainable = trainable) 168 | 169 | x = conv_block(x, 3, [128, 128, 512], stage=3, block='a', trainable = trainable) 170 | x = identity_block(x, 3, [128, 128, 512], stage=3, block='b', trainable = trainable) 171 | x = identity_block(x, 3, [128, 128, 512], stage=3, block='c', trainable = trainable) 172 | x = identity_block(x, 3, [128, 128, 512], stage=3, block='d', trainable = trainable) 173 | 174 | x = conv_block(x, 3, [256, 256, 1024], stage=4, block='a', trainable = trainable) 175 | x = identity_block(x, 3, [256, 256, 1024], stage=4, block='b', trainable = trainable) 176 | x = identity_block(x, 3, [256, 256, 1024], stage=4, block='c', trainable = trainable) 177 | x = identity_block(x, 3, [256, 256, 1024], stage=4, block='d', trainable = trainable) 178 | x = identity_block(x, 3, [256, 256, 1024], stage=4, block='e', trainable = trainable) 179 | x = identity_block(x, 3, [256, 256, 1024], stage=4, block='f', trainable = trainable) 180 | 181 | return x 182 | 183 | 184 | def classifier_layers(x, input_shape, trainable=False): 185 | 186 | x = conv_block_td(x, 3, [512, 512, 2048], stage=5, block='a', input_shape=input_shape, strides=(2, 2), trainable=trainable) 187 | x = identity_block_td(x, 3, [512, 512, 2048], stage=5, block='b', trainable=trainable) 188 | x = identity_block_td(x, 3, [512, 512, 2048], stage=5, block='c', trainable=trainable) 189 | x = TimeDistributed(AveragePooling2D((7, 7)), name='avg_pool')(x) 190 | 191 | return x 192 | 193 | 194 | def rpn(base_layers,num_anchors): 195 | 196 | x = Convolution2D(512, (3, 3), padding='same', activation='relu', kernel_initializer='normal', name='rpn_conv1')(base_layers) 197 | 198 | x_class = Convolution2D(num_anchors, (1, 1), activation='sigmoid', kernel_initializer='uniform', name='rpn_out_class')(x) 199 | x_regr = Convolution2D(num_anchors * 4, (1, 1), activation='linear', kernel_initializer='zero', name='rpn_out_regress')(x) 200 | 201 | return [x_class, x_regr, base_layers] 202 | 203 | def classifier(base_layers, input_rois, num_rois, nb_classes = 21, trainable=False): 204 | 205 | pooling_regions = 14 206 | input_shape = (num_rois,14,14,1024) 207 | out_roi_pool = RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois]) 208 | out = classifier_layers(out_roi_pool, input_shape=input_shape, trainable=True) 209 | 210 | out = TimeDistributed(Flatten())(out) 211 | 212 | out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'), name='dense_class_{}'.format(nb_classes))(out) 213 | # note: no regression target for bg class 214 | out_regr = TimeDistributed(Dense(4 * (nb_classes-1), activation='linear', kernel_initializer='zero'), name='dense_regress_{}'.format(nb_classes))(out) 215 | return [out_class, out_regr] 216 | 217 | -------------------------------------------------------------------------------- /faster_rcnn/roi_helpers.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pdb 3 | import math 4 | from . 
import data_generators 5 | import copy 6 | 7 | 8 | def calc_iou(R, img_data, C, class_mapping): 9 | 10 | bboxes = img_data['bboxes'] 11 | (width, height) = (img_data['width'], img_data['height']) 12 | # get image dimensions for resizing 13 | resized_width, resized_height, _ = data_generators.get_new_img_size(width, height, C.im_size) 14 | 15 | gta = np.zeros((len(bboxes), 4)) 16 | 17 | for bbox_num, bbox in enumerate(bboxes): 18 | # get the GT box coordinates, and resize to account for image resizing 19 | gta[bbox_num, 0] = int(round(bbox['x1'] * (resized_width / float(width))/C.rpn_stride)) 20 | gta[bbox_num, 1] = int(round(bbox['x2'] * (resized_width / float(width))/C.rpn_stride)) 21 | gta[bbox_num, 2] = int(round(bbox['y1'] * (resized_height / float(height))/C.rpn_stride)) 22 | gta[bbox_num, 3] = int(round(bbox['y2'] * (resized_height / float(height))/C.rpn_stride)) 23 | 24 | x_roi = [] 25 | y_class_num = [] 26 | y_class_regr_coords = [] 27 | y_class_regr_label = [] 28 | 29 | for ix in range(R.shape[0]): 30 | (x1, y1, x2, y2) = R[ix, :] 31 | x1 = int(round(x1)) 32 | y1 = int(round(y1)) 33 | x2 = int(round(x2)) 34 | y2 = int(round(y2)) 35 | 36 | best_iou = 0.0 37 | best_bbox = -1 38 | for bbox_num in range(len(bboxes)): 39 | curr_iou = data_generators.iou([gta[bbox_num, 0], gta[bbox_num, 2], gta[bbox_num, 1], gta[bbox_num, 3]], [x1, y1, x2, y2]) 40 | if curr_iou > best_iou: 41 | best_iou = curr_iou 42 | best_bbox = bbox_num 43 | 44 | if best_iou < C.classifier_min_overlap: 45 | continue 46 | else: 47 | w = x2 - x1 48 | h = y2 - y1 49 | x_roi.append([x1, y1, w, h]) 50 | 51 | if C.classifier_min_overlap <= best_iou < C.classifier_max_overlap: 52 | # hard negative example 53 | cls_name = 'bg' 54 | elif C.classifier_max_overlap <= best_iou: 55 | cls_name = bboxes[best_bbox]['class'] 56 | cxg = (gta[best_bbox, 0] + gta[best_bbox, 1]) / 2.0 57 | cyg = (gta[best_bbox, 2] + gta[best_bbox, 3]) / 2.0 58 | 59 | cx = x1 + w / 2.0 60 | cy = y1 + h / 2.0 61 | 62 | tx = (cxg - cx) / float(w) 63 | ty = (cyg - cy) / float(h) 64 | tw = np.log((gta[best_bbox, 1] - gta[best_bbox, 0]) / float(w)) 65 | th = np.log((gta[best_bbox, 3] - gta[best_bbox, 2]) / float(h)) 66 | else: 67 | print('roi = {}'.format(best_iou)) 68 | raise RuntimeError 69 | 70 | class_num = class_mapping[cls_name] 71 | class_label = len(class_mapping) * [0] 72 | class_label[class_num] = 1 73 | y_class_num.append(copy.deepcopy(class_label)) 74 | coords = [0] * 4 * (len(class_mapping) - 1) 75 | labels = [0] * 4 * (len(class_mapping) - 1) 76 | if cls_name != 'bg': 77 | label_pos = 4 * class_num 78 | sx, sy, sw, sh = C.classifier_regr_std 79 | coords[label_pos:4+label_pos] = [sx*tx, sy*ty, sw*tw, sh*th] 80 | labels[label_pos:4+label_pos] = [1, 1, 1, 1] 81 | y_class_regr_coords.append(copy.deepcopy(coords)) 82 | y_class_regr_label.append(copy.deepcopy(labels)) 83 | else: 84 | y_class_regr_coords.append(copy.deepcopy(coords)) 85 | y_class_regr_label.append(copy.deepcopy(labels)) 86 | 87 | if len(x_roi) == 0: 88 | return None, None, None 89 | 90 | X = np.array(x_roi) 91 | Y1 = np.array(y_class_num) 92 | Y2 = np.concatenate([np.array(y_class_regr_label),np.array(y_class_regr_coords)],axis=1) 93 | 94 | return np.expand_dims(X, axis=0), np.expand_dims(Y1, axis=0), np.expand_dims(Y2, axis=0) 95 | 96 | def apply_regr(x, y, w, h, tx, ty, tw, th): 97 | try: 98 | cx = x + w/2. 99 | cy = y + h/2. 100 | cx1 = tx * w + cx 101 | cy1 = ty * h + cy 102 | w1 = math.exp(tw) * w 103 | h1 = math.exp(th) * h 104 | x1 = cx1 - w1/2. 105 | y1 = cy1 - h1/2. 
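# The expressions above invert the parameterisation built in calc_rpn (data_generators.py):
# (tx, ty) shift the proposal centre relative to its width/height and (tw, th) rescale it
# through an exponential. Worked example (made-up numbers): x, y, w, h = 10, 10, 20, 20 with
# tx, ty, tw, th = 0.1, 0.0, 0.2, 0.0 gives centre (22.0, 20.0) and w1 = exp(0.2)*20 ~ 24.4,
# so after the rounding below the box becomes (10, 10, 24, 20).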
106 | x1 = int(round(x1)) 107 | y1 = int(round(y1)) 108 | w1 = int(round(w1)) 109 | h1 = int(round(h1)) 110 | 111 | return x1, y1, w1, h1 112 | 113 | except ValueError: 114 | return x, y, w, h 115 | except OverflowError: 116 | return x, y, w, h 117 | except Exception as e: 118 | print(e) 119 | return x, y, w, h 120 | 121 | def apply_regr_np(X, T): 122 | try: 123 | x = X[0, :, :] 124 | y = X[1, :, :] 125 | w = X[2, :, :] 126 | h = X[3, :, :] 127 | 128 | tx = T[0, :, :] 129 | ty = T[1, :, :] 130 | tw = T[2, :, :] 131 | th = T[3, :, :] 132 | 133 | cx = x + w/2. 134 | cy = y + h/2. 135 | cx1 = tx * w + cx 136 | cy1 = ty * h + cy 137 | 138 | w1 = np.exp(tw.astype(np.float64)) * w 139 | h1 = np.exp(th.astype(np.float64)) * h 140 | x1 = cx1 - w1/2. 141 | y1 = cy1 - h1/2. 142 | 143 | x1 = np.round(x1) 144 | y1 = np.round(y1) 145 | w1 = np.round(w1) 146 | h1 = np.round(h1) 147 | return np.stack([x1, y1, w1, h1]) 148 | except Exception as e: 149 | print(e) 150 | return X 151 | 152 | def non_max_suppression_fast(boxes, probs, overlap_thresh=0.9, max_boxes=300): 153 | """ 154 | Eliminating redundant object detection windows with a faster non maximum suppression method 155 | Greedily select high-scoring detections and skip detections that are significantly covered by 156 | a previously selected detection. 157 | :param boxes: list of boxes 158 | :param probs: list of probabilities relatives to the boxes 159 | """ 160 | 161 | # code used from here: http://www.pyimagesearch.com/2015/02/16/faster-non-maximum-suppression-python/ 162 | # if there are no boxes, return an empty list 163 | if len(boxes) == 0: 164 | return [] 165 | 166 | # grab the coordinates of the bounding boxes 167 | x1 = boxes[:, 0] 168 | y1 = boxes[:, 1] 169 | x2 = boxes[:, 2] 170 | y2 = boxes[:, 3] 171 | 172 | np.testing.assert_array_less(x1, x2) 173 | np.testing.assert_array_less(y1, y2) 174 | 175 | # if the bounding boxes integers, convert them to floats -- 176 | # this is important since we'll be doing a bunch of divisions 177 | if boxes.dtype.kind == "i": 178 | boxes = boxes.astype("float") 179 | 180 | # initialize the list of picked indexes 181 | pick = [] 182 | 183 | # calculate the areas 184 | area = (x2 - x1 + 1) * (y2 - y1 + 1) 185 | 186 | # sort the bounding boxes 187 | idxs = np.argsort(probs) 188 | 189 | # keep looping while some indexes still remain in the indexes 190 | # list 191 | while len(idxs) > 0: 192 | # grab the last index in the indexes list and add the 193 | # index value to the list of picked indexes 194 | last = len(idxs) - 1 195 | i = idxs[last] 196 | pick.append(i) 197 | 198 | # find the intersection 199 | 200 | xx1_int = np.maximum(x1[i], x1[idxs[:last]]) 201 | yy1_int = np.maximum(y1[i], y1[idxs[:last]]) 202 | xx2_int = np.minimum(x2[i], x2[idxs[:last]]) 203 | yy2_int = np.minimum(y2[i], y2[idxs[:last]]) 204 | 205 | ww_int = np.maximum(0, xx2_int - xx1_int + 0.5) 206 | hh_int = np.maximum(0, yy2_int - yy1_int + 0.5) 207 | 208 | area_int = ww_int * hh_int 209 | 210 | # find the union 211 | area_union = area[i] + area[idxs[:last]] - area_int 212 | 213 | # compute the ratio of overlap 214 | overlap = area_int / (area_union + 1e-6) 215 | 216 | # delete all indexes from the index list that have 217 | idxs = np.delete(idxs, np.concatenate(([last], 218 | np.where(overlap > overlap_thresh)[0]))) 219 | 220 | if len(pick) >= max_boxes: 221 | break 222 | 223 | # return only the bounding boxes that were picked using the integer data type 224 | boxes = boxes[pick].astype("int") 225 | probs = probs[pick] 226 | return boxes, 
probs 227 | 228 | import time 229 | def rpn_to_roi(rpn_layer, regr_layer, C, dim_ordering, use_regr=True, max_boxes=300,overlap_thresh=0.9): 230 | 231 | regr_layer = regr_layer / C.std_scaling 232 | 233 | anchor_sizes = C.anchor_box_scales 234 | anchor_ratios = C.anchor_box_ratios 235 | 236 | assert rpn_layer.shape[0] == 1 237 | 238 | if dim_ordering == 'th': 239 | (rows,cols) = rpn_layer.shape[2:] 240 | 241 | elif dim_ordering == 'tf': 242 | (rows, cols) = rpn_layer.shape[1:3] 243 | 244 | curr_layer = 0 245 | if dim_ordering == 'tf': 246 | A = np.zeros((4, rpn_layer.shape[1], rpn_layer.shape[2], rpn_layer.shape[3])) 247 | elif dim_ordering == 'th': 248 | A = np.zeros((4, rpn_layer.shape[2], rpn_layer.shape[3], rpn_layer.shape[1])) 249 | 250 | for anchor_size in anchor_sizes: 251 | for anchor_ratio in anchor_ratios: 252 | 253 | anchor_x = (anchor_size * anchor_ratio[0])/C.rpn_stride 254 | anchor_y = (anchor_size * anchor_ratio[1])/C.rpn_stride 255 | if dim_ordering == 'th': 256 | regr = regr_layer[0, 4 * curr_layer:4 * curr_layer + 4, :, :] 257 | else: 258 | regr = regr_layer[0, :, :, 4 * curr_layer:4 * curr_layer + 4] 259 | regr = np.transpose(regr, (2, 0, 1)) 260 | 261 | X, Y = np.meshgrid(np.arange(cols),np. arange(rows)) 262 | 263 | A[0, :, :, curr_layer] = X - anchor_x/2 264 | A[1, :, :, curr_layer] = Y - anchor_y/2 265 | A[2, :, :, curr_layer] = anchor_x 266 | A[3, :, :, curr_layer] = anchor_y 267 | 268 | if use_regr: 269 | A[:, :, :, curr_layer] = apply_regr_np(A[:, :, :, curr_layer], regr) 270 | 271 | A[2, :, :, curr_layer] = np.maximum(1, A[2, :, :, curr_layer]) 272 | A[3, :, :, curr_layer] = np.maximum(1, A[3, :, :, curr_layer]) 273 | A[2, :, :, curr_layer] += A[0, :, :, curr_layer] 274 | A[3, :, :, curr_layer] += A[1, :, :, curr_layer] 275 | 276 | A[0, :, :, curr_layer] = np.maximum(0, A[0, :, :, curr_layer]) 277 | A[1, :, :, curr_layer] = np.maximum(0, A[1, :, :, curr_layer]) 278 | A[2, :, :, curr_layer] = np.minimum(cols-1, A[2, :, :, curr_layer]) 279 | A[3, :, :, curr_layer] = np.minimum(rows-1, A[3, :, :, curr_layer]) 280 | 281 | curr_layer += 1 282 | 283 | all_boxes = np.reshape(A.transpose((0, 3, 1,2)), (4, -1)).transpose((1, 0)) 284 | all_probs = rpn_layer.transpose((0, 3, 1, 2)).reshape((-1)) 285 | 286 | x1 = all_boxes[:, 0] 287 | y1 = all_boxes[:, 1] 288 | x2 = all_boxes[:, 2] 289 | y2 = all_boxes[:, 3] 290 | 291 | idxs = np.where((x1 - x2 >= 0) | (y1 - y2 >= 0)) 292 | 293 | all_boxes = np.delete(all_boxes, idxs, 0) 294 | all_probs = np.delete(all_probs, idxs, 0) 295 | 296 | result = non_max_suppression_fast(all_boxes, all_probs, overlap_thresh=overlap_thresh, max_boxes=max_boxes)[0] 297 | 298 | return result 299 | -------------------------------------------------------------------------------- /label_data.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import numpy as np 4 | from selenium import webdriver 5 | import glob 6 | import urllib 7 | import train 8 | import time 9 | import keras 10 | import cv2 11 | import ffmpy 12 | from random import shuffle 13 | import matplotlib.pyplot as plt 14 | 15 | map_characters = {0: 'abraham_grampa_simpson', 1: 'apu_nahasapeemapetilon', 2: 'bart_simpson', 16 | 3: 'charles_montgomery_burns', 4: 'chief_wiggum', 5: 'comic_book_guy', 6: 'edna_krabappel', 17 | 7: 'homer_simpson', 8: 'kent_brockman', 9: 'krusty_the_clown', 10: 'lisa_simpson', 18 | 11: 'marge_simpson', 12: 'milhouse_van_houten', 13: 'moe_szyslak', 19 | 14: 'ned_flanders', 15: 'nelson_muntz', 16: 
'principal_skinner', 17: 'sideshow_bob'} 20 | pic_size = 64 21 | 22 | 23 | def get_character_name(name): 24 | """ 25 | Get the character name from just a part of it, comparing to saved characters 26 | :param name: part of the character name 27 | :return: full name 28 | """ 29 | chars = [k.split('/')[2] for k in glob.glob('./characters/*')] 30 | char_name = [k for k in chars if name.lower().replace(' ', '_') in k] 31 | if len(char_name) > 0: 32 | return char_name[0] 33 | else: 34 | print('FAKE NAME') 35 | return 'ERROR' 36 | 37 | def labelized_data(to_shuffle=False, interactive=False): 38 | """ 39 | Interactive labeling data with the possibility to crop the picture shown : full picture, 40 | left part, right part. Manually labeling data from .avi videos in the same folder. Analzying 41 | frame (randomly chosen) of each video and then save the picture into the right character 42 | folder. 43 | :param interactive: boolean to label from terminal 44 | """ 45 | movies = glob.glob('./*.avi') 46 | if to_shuffle: 47 | shuffle(movies) 48 | for fname in movies[::-1]: 49 | try: 50 | m,s = np.random.randint(0,3), np.random.randint(0,59) 51 | cap = cv2.VideoCapture(fname) #video_name is the video being called 52 | fps = cap.get(cv2.CAP_PROP_FPS) 53 | cap.set(1, fps*(m*60+s)) # Where frame_no is the frame you want 54 | i = 0 55 | while True: 56 | i+=1 57 | ret, frame = cap.read() # Read the frame 58 | # Resizing HD pictures (we don't need HD) 59 | if np.min(frame.shape[:2]) > 900: 60 | frame = cv2.resize(frame, (int(frame.shape[1]/2), int(frame.shape[0]/2))) 61 | if i % np.random.randint(100, 250) == 0: 62 | if interactive: 63 | f = plt.ion() 64 | plt.imshow(frame) 65 | plt.show() 66 | where = input('Where is the character ?[No,Right,Left,Full] ') 67 | if where.lower() == 'stop': 68 | # os.remove(fname) 69 | raise 70 | 71 | elif where.lower() in ['left', 'l']: 72 | plt.close() 73 | plt.imshow(frame[:,:int(frame.shape[1]/2)]) 74 | plt.show() 75 | name = input('Name ?[Name or No] ') 76 | plt.close() 77 | if name.lower() not in ['no','n','']: 78 | name_char = get_character_name(name) 79 | name_new_pic = 'pic_{:04d}.jpg'.format(len(glob.glob('./characters/%s/*' % name_char))) 80 | title = './characters/%s/%s' % (name_char, name_new_pic) 81 | cv2.imwrite(title, frame[:,:int(frame.shape[1]/2)]) 82 | print('Saved at %s' % title) 83 | print('%s : %d photos labeled' % (name_char, len(glob.glob('./characters/%s/*' % name_char)))) 84 | 85 | elif where.lower() in ['right', 'r']: 86 | plt.close() 87 | plt.imshow(frame[:,int(frame.shape[1]/2):]) 88 | plt.show() 89 | name = input('Name ?[Name or No] ') 90 | plt.close() 91 | if name.lower() not in ['no','n','']: 92 | name_char = get_character_name(name) 93 | name_new_pic = 'pic_{:04d}.jpg'.format(len(glob.glob('./characters/%s/*'% name_char))) 94 | title = './characters/%s/%s' % (name_char, name_new_pic) 95 | cv2.imwrite(title, frame[:,int(frame.shape[1]/2):]) 96 | print('Saved at %s' % title) 97 | print('%s : %d photos labeled' % (name_char, len(glob.glob('./characters/%s/*' % name_char)))) 98 | 99 | elif where.lower() in ['full', 'f']: 100 | name = input('Name ?[Name or No] ') 101 | plt.close() 102 | if name.lower() not in ['no','n','']: 103 | name_char = get_character_name(name) 104 | name_new_pic = 'pic_{:04d}.jpg'.format(len(glob.glob('./characters/%s/*'% name_char))) 105 | title = './characters/%s/%s' % (name_char, name_new_pic) 106 | cv2.imwrite(title, frame) 107 | print('Saved at %s' % title) 108 | print('%s : %d photos labeled' % (name_char, 
len(glob.glob('./characters/%s/*'% name_char)))) 109 | except KeyboardInterrupt: 110 | return 111 | except Exception: 112 | continue 113 | 114 | 115 | def generate_pic_from_videos(): 116 | """ 117 | Randomly generate pictures from videos: get the full picture, the right part, the left part. 118 | So, three pictures are saved for each analyzed frame (chosen randomly). 119 | """ 120 | for k, fname in enumerate(glob.glob('./*.avi')): 121 | m,s = np.random.randint(0,3), np.random.randint(0,59) 122 | cap = cv2.VideoCapture(fname) 123 | fps = cap.get(cv2.CAP_PROP_FPS) 124 | cap.set(1, fps*(m*60+s)) # jump to the frame located m minutes and s seconds into the video 125 | i = 0 126 | while i < cap.get(cv2.CAP_PROP_FRAME_COUNT): 127 | try: 128 | i+=1 129 | ret, frame = cap.read() # Read the frame 130 | if i % np.random.randint(400, 700) == 0: 131 | pics = {'pic_%s_r_%d_%d.jpg' % (fname.split('/')[1].split('.')[0], 132 | i, np.random.randint(10000)):frame[:,:int(frame.shape[1]/2)], 133 | 'pic_%s_l_%d_%d.jpg' % (fname.split('/')[1].split('.')[0], 134 | i, np.random.randint(10000)): frame[:,int(frame.shape[1]/2):], 135 | 'pic_%s_f_%d_%d.jpg' % (fname.split('/')[1].split('.')[0], 136 | i, np.random.randint(10000)): frame} 137 | for name, img in pics.items(): 138 | cv2.imwrite('./autogenerate/' + name, img) 139 | except: 140 | pass 141 | print('\r%d/%d' % (k+1, len(glob.glob('./*.avi'))), end='') 142 | 143 | def classify_pics(): 144 | """ 145 | Use a Keras saved model to classify pictures and move them into the right character folder. 146 | """ 147 | l = glob.glob('./autogenerate/*.jpg') 148 | model = train.load_model_from_checkpoint('./models/weights.best_6conv2.hdf5', six_conv=True) 149 | d = len(l) 150 | for i, p in enumerate(l): 151 | img = cv2.imread(p) 152 | img = cv2.resize(img, (pic_size, pic_size)).astype('float32') / 255. 153 | a = model.predict(img.reshape((-1, pic_size, pic_size, 3)), verbose=0)[0] 154 | if np.max(a) > 0.6: 155 | char = map_characters[np.argmax(a)] 156 | os.rename(p, './autogenerate/%s/%s' % (char, p.split('/')[2])) 157 | else: 158 | os.remove(p) 159 | print('\r%d/%d'%(i+1, d), end='') 160 | 161 | if __name__ == '__main__': 162 | labelized_data(interactive=True) 163 | 164 | -------------------------------------------------------------------------------- /label_pointer.py: -------------------------------------------------------------------------------- 1 | """ 2 | Label pictures and get bounding box coordinates (upper left and lower right). 3 | Using mouse clicks we can get those coordinates.
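Each pair of clicks appends one line to annotation.txt in the form
picture_path,x1,y1,x2,y2,character_name, which is the same comma-separated format
read back by faster_rcnn/parser.py.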
4 | """ 5 | import numpy as np 6 | import matplotlib.pyplot as plt 7 | from collections import Counter 8 | import cv2 9 | import sys 10 | import glob 11 | from random import shuffle 12 | import os 13 | from train import map_characters 14 | 15 | # List of already bounded pictures 16 | with open('./annotation.txt') as f: 17 | already_labeled = [k.strip().split(',')[0] for k in f.readlines()] 18 | 19 | # List of characters 20 | characters = list(map_characters.values()) 21 | shuffle(characters) 22 | 23 | for char in characters: 24 | print('Working on %s' % char.replace('_', ' ').title()) 25 | # all labeled (just name, no bounding box) pictures of the character 26 | pics = glob.glob('./characters/%s/*.*' % char) 27 | shuffle(pics) 28 | i = 0 29 | for p in pics: 30 | if p not in already_labeled: 31 | try: 32 | im = cv2.imread(p) 33 | im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB) 34 | ax = plt.gca() 35 | fig = plt.gcf() 36 | 37 | implot = ax.imshow(im) 38 | position = [] 39 | def onclick(event): 40 | """ 41 | If click, add the mouse position to the list. 42 | Closing the plotted picture after 2 clicks (= 2 corners.) 43 | Write the position for each picture into the text file. 44 | """ 45 | if event.xdata != None and event.ydata != None: 46 | position.append((event.xdata, event.ydata)) 47 | n_clicks = len(position) 48 | if n_clicks == 2: 49 | if position[0] == position[1]: 50 | r = input('Delete this picture[Y/n] ? ') 51 | if r.lower() in ['yes','y']: 52 | os.remove(p) 53 | plt.close() 54 | return 55 | line = '{0},{1},{2},{3}'.format(p, 56 | ','.join([str(int(k)) for k in position[0]]), 57 | ','.join([str(int(k)) for k in position[1]]), 58 | char) 59 | 60 | # Open the annotations file to continue to write 61 | target = open('annotation.txt', 'a') 62 | # Write picture and coordinates 63 | target.write(line) 64 | target.write("\n") 65 | plt.close() 66 | fig.canvas.set_window_title('%s pictures labeled' % i) 67 | cid = fig.canvas.mpl_connect('button_press_event', onclick) 68 | plt.show() 69 | i += 1 70 | # Common errors, just pass and close the plotting window 71 | except UnicodeDecodeError: 72 | plt.close() 73 | continue 74 | # When process is interrupted, juste print the number of labeled pictures 75 | except KeyboardInterrupt: 76 | plt.close() 77 | print('\nNumber of pictures with bounding box :') 78 | with open('./annotation.txt') as f: 79 | already_labeled = [k.strip().split(',')[5] for k in f.readlines()] 80 | nb_pic_tot = {p:len([k for k in glob.glob('./characters/%s/*.*' % p)]) for p in characters} 81 | 82 | print('\n'.join(['%s : %d/%d' % (char, nb, nb_pic_tot[char]) for char, nb in sorted(Counter(already_labeled).items(), 83 | key =lambda x:x[1], reverse=True)])) 84 | t = np.sum(list(nb_pic_tot.values())) 85 | sys.exit("Total {}/{} ({}%)" .format(len(already_labeled), 86 | t, 87 | round(100*len(already_labeled)/t))) 88 | 89 | plt.close() 90 | 91 | -------------------------------------------------------------------------------- /pics/mapple_lisa.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alexattia/SimpsonRecognition/fa65cc3124ed606e0ad6456ae49c734a2685db52/pics/mapple_lisa.png -------------------------------------------------------------------------------- /test_frcnn.py: -------------------------------------------------------------------------------- 1 | import os 2 | import cv2 3 | import numpy as np 4 | import sys 5 | import pickle 6 | from optparse import OptionParser 7 | import time 8 | from faster_rcnn import 
config, data_generators 9 | import faster_rcnn.resnet as nn 10 | from keras import backend as K 11 | from keras.layers import Input 12 | from keras.models import Model 13 | from faster_rcnn import roi_helpers 14 | overlap_thresh = 0.2 15 | bbox_threshold = 0.5 16 | 17 | def format_img(img, C): 18 | """ 19 | Normalize and resize image to have the smallest side equal to 600 20 | """ 21 | img_min_side = float(C.im_size) 22 | (height,width,_) = img.shape 23 | (resized_width, resized_height, ratio) = data_generators.get_new_img_size(width, height, C.im_size) 24 | img = cv2.resize(img, (resized_width, resized_height), interpolation=cv2.INTER_CUBIC) 25 | img = data_generators.normalize_img(img, C) 26 | return img, ratio 27 | 28 | def get_real_coordinates(ratio, x1, y1, x2, y2): 29 | """ 30 | Method to transform the coordinates of the bounding box to its original size 31 | """ 32 | real_x1 = int(round(x1 // ratio)) 33 | real_y1 = int(round(y1 // ratio)) 34 | real_x2 = int(round(x2 // ratio)) 35 | real_y2 = int(round(y2 // ratio)) 36 | 37 | return (real_x1, real_y1, real_x2, real_y2) 38 | 39 | def get_models(C): 40 | """ 41 | Create models : rpn, classifier and classifier only 42 | :param C: config object 43 | :return: models 44 | """ 45 | img_input = Input(shape=(None, None, 3)) 46 | roi_input = Input(shape=(C.num_rois, 4)) 47 | feature_map_input = Input(shape=(None, None, 1024)) 48 | 49 | # define the base network (resnet here) 50 | shared_layers = nn.nn_base(img_input, trainable=True) 51 | 52 | # define the RPN, built on the base layers 53 | num_anchors = len(C.anchor_box_scales) * len(C.anchor_box_ratios) 54 | rpn_layers = nn.rpn(shared_layers, num_anchors) 55 | 56 | # define the classifer, built on the feature map 57 | classifier = nn.classifier(feature_map_input, roi_input, C.num_rois, nb_classes=len(C.class_mapping), trainable=True) 58 | 59 | model_rpn = Model(img_input, rpn_layers) 60 | model_classifier_only = Model([feature_map_input, roi_input], classifier) 61 | 62 | model_classifier = Model([feature_map_input, roi_input], classifier) 63 | 64 | model_rpn.load_weights(C.model_path, by_name=True) 65 | model_classifier.load_weights(C.model_path, by_name=True) 66 | 67 | model_rpn.compile(optimizer='sgd', loss='mse') 68 | model_classifier.compile(optimizer='sgd', loss='mse') 69 | return model_rpn, model_classifier, model_classifier_only 70 | 71 | def detect_predict(pic, C, model_rpn, model_classifier, model_classifier_only, class_mapping, class_to_color, print_dets=False, export=False): 72 | """ 73 | Detect and predict object in the picture 74 | :param pic: picture numpy array 75 | :param C: config object 76 | :params model_*: models from get_models function 77 | :params class_*: mapping and colors, need to be loaded to keep the same colors/classes 78 | :return: picture with bounding boxes 79 | """ 80 | img = pic 81 | X, ratio = format_img(img, C) 82 | 83 | img_scaled = np.transpose(X.copy()[0, (2, 1, 0), :, :], (1, 2, 0)).copy() 84 | img_scaled[:, :, 0] += 123.68 85 | img_scaled[:, :, 1] += 116.779 86 | img_scaled[:, :, 2] += 103.939 87 | img_scaled = img_scaled.astype(np.uint8) 88 | 89 | if K.image_dim_ordering() == 'tf': 90 | X = np.transpose(X, (0, 2, 3, 1)) 91 | 92 | # get the feature maps and output from the RPN 93 | [Y1, Y2, F] = model_rpn.predict(X) 94 | 95 | 96 | R = roi_helpers.rpn_to_roi(Y1, Y2, C, K.image_dim_ordering(), overlap_thresh=0.7) 97 | 98 | # convert from (x1,y1,x2,y2) to (x,y,w,h) 99 | R[:, 2] -= R[:, 0] 100 | R[:, 3] -= R[:, 1] 101 | 102 | # apply the spatial pyramid 
pooling to the proposed regions 103 | bboxes = {} 104 | probs = {} 105 | # print(class_mapping) 106 | for jk in range(R.shape[0]//C.num_rois + 1): 107 | ROIs = np.expand_dims(R[C.num_rois*jk:C.num_rois*(jk+1), :], axis=0) 108 | if ROIs.shape[1] == 0: 109 | break 110 | 111 | if jk == R.shape[0]//C.num_rois: 112 | #pad R 113 | curr_shape = ROIs.shape 114 | target_shape = (curr_shape[0],C.num_rois,curr_shape[2]) 115 | ROIs_padded = np.zeros(target_shape).astype(ROIs.dtype) 116 | ROIs_padded[:, :curr_shape[1], :] = ROIs 117 | ROIs_padded[0, curr_shape[1]:, :] = ROIs[0, 0, :] 118 | ROIs = ROIs_padded 119 | 120 | [P_cls, P_regr] = model_classifier_only.predict([F, ROIs]) 121 | 122 | for ii in range(P_cls.shape[1]): 123 | 124 | if np.max(P_cls[0, ii, :]) < bbox_threshold or np.argmax(P_cls[0, ii, :]) == (P_cls.shape[2] - 1): 125 | continue 126 | 127 | cls_name = class_mapping[np.argmax(P_cls[0, ii, :])] 128 | 129 | if cls_name not in bboxes: 130 | bboxes[cls_name] = [] 131 | probs[cls_name] = [] 132 | 133 | (x, y, w, h) = ROIs[0, ii, :] 134 | 135 | cls_num = np.argmax(P_cls[0, ii, :]) 136 | try: 137 | (tx, ty, tw, th) = P_regr[0, ii, 4*cls_num:4*(cls_num+1)] 138 | tx /= C.classifier_regr_std[0] 139 | ty /= C.classifier_regr_std[1] 140 | tw /= C.classifier_regr_std[2] 141 | th /= C.classifier_regr_std[3] 142 | x, y, w, h = roi_helpers.apply_regr(x, y, w, h, tx, ty, tw, th) 143 | except: 144 | pass 145 | bboxes[cls_name].append([C.rpn_stride*x, C.rpn_stride*y, C.rpn_stride*(x+w), C.rpn_stride*(y+h)]) 146 | probs[cls_name].append(np.max(P_cls[0, ii, :])) 147 | 148 | all_dets = [] 149 | boxes_export = {} 150 | for key in bboxes: 151 | bbox = np.array(bboxes[key]) 152 | # Eliminating redundant object detection windows 153 | new_boxes, new_probs = roi_helpers.non_max_suppression_fast(bbox, np.array(probs[key]), overlap_thresh=overlap_thresh) 154 | 155 | # Keep only the best prediction per character 156 | jk = np.argmax(new_probs) 157 | 158 | # Threshold for best prediction 159 | if new_probs[jk] > 0.55: 160 | (x1, y1, x2, y2) = new_boxes[jk,:] 161 | 162 | # Convert predicted picture box coordinates to real-size picture coordinates 163 | (real_x1, real_y1, real_x2, real_y2) = get_real_coordinates(ratio, x1, y1, x2, y2) 164 | 165 | # Exporting box coordinates instead of draw on the picture 166 | if export: 167 | boxes_export[key] = [(real_x1, real_y1, real_x2, real_y2), int(100*new_probs[jk])] 168 | 169 | else: 170 | cv2.rectangle(img,(real_x1, real_y1), (real_x2, real_y2), (int(class_to_color[key][0]), int(class_to_color[key][1]), int(class_to_color[key][2])),2) 171 | 172 | textLabel = '{}: {}%'.format(key,int(100*new_probs[jk])) 173 | all_dets.append((key,100*new_probs[jk])) 174 | 175 | (retval,baseLine) = cv2.getTextSize(textLabel,cv2.FONT_HERSHEY_COMPLEX,1,1) 176 | 177 | # To avoid putting text outside the frame 178 | # replace the legende if the box is outside the image 179 | if real_y1 < 20 and real_y2 < img.shape[0]: 180 | textOrg = (real_x1, real_y2+5) 181 | 182 | elif real_y1 < 20 and real_y2 > img.shape[0]: 183 | textOrg = (real_x1, img.shape[0]-10) 184 | else: 185 | textOrg = (real_x1, real_y1+5) 186 | 187 | cv2.rectangle(img, (textOrg[0] - 5, textOrg[1]+baseLine - 5), (textOrg[0]+retval[0] + 5, textOrg[1]-retval[1] - 5), (0, 0, 0), 2) 188 | cv2.rectangle(img, (textOrg[0] - 5,textOrg[1]+baseLine - 5), (textOrg[0]+retval[0] + 5, textOrg[1]-retval[1] - 5), (255, 255, 255), -1) 189 | cv2.putText(img, textLabel, textOrg, cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 0), 1) 190 | 191 | 192 | if print_dets: 
193 | print(all_dets) 194 | if export: 195 | return boxes_export 196 | else: 197 | return img 198 | 199 | if __name__ == "__main__": 200 | parser = OptionParser() 201 | parser.add_option("-p", "--path", dest="test_path", help="Path to test data.") 202 | (options, args) = parser.parse_args() 203 | if not options.test_path: # if filename is not given 204 | parser.error('Error: path to test data must be specified. Pass --path to command line') 205 | 206 | ## Load config object 207 | with open('./config.pickle', 'rb') as f_in: 208 | C = pickle.load(f_in) 209 | 210 | # turn off any data augmentation at test time 211 | C.use_horizontal_flips = False 212 | C.use_vertical_flips = False 213 | C.rot_90 = False 214 | model_rpn, model_classifier, model_classifier_only = get_models(C) 215 | class_mapping = C.class_mapping 216 | if 'bg' not in class_mapping: 217 | class_mapping['bg'] = len(class_mapping) 218 | class_mapping = {v: k for k, v in class_mapping.items()} 219 | class_to_color = {class_mapping[v]: np.random.randint(0, 255, 3) for v in class_mapping} 220 | 221 | for idx, img_name in enumerate(sorted(os.listdir(options.test_path))): 222 | if not img_name.lower().endswith(('.bmp', '.jpeg', '.jpg', '.png', '.tif', '.tiff')): 223 | continue 224 | print(img_name) 225 | filepath = os.path.join(options.test_path,img_name) 226 | img = cv2.imread(filepath) 227 | st = time.time() 228 | img = detect_predict(img, C, model_rpn, model_classifier, model_classifier_only, class_mapping, class_to_color, True) 229 | print('Elapsed time = {}'.format(time.time() - st)) 230 | cv2.imwrite('./results_test/result_{}.png'.format(img_name.replace('.png', '')),img) 231 | -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | import matplotlib.pyplot as plt 4 | import pickle 5 | import h5py 6 | import glob 7 | import time 8 | from random import shuffle 9 | from collections import Counter 10 | 11 | from sklearn.model_selection import train_test_split 12 | 13 | import keras 14 | from keras.preprocessing.image import ImageDataGenerator 15 | from keras.callbacks import LearningRateScheduler, ModelCheckpoint 16 | from keras.models import Sequential 17 | from keras.layers import Dense, Dropout, Activation, Flatten 18 | from keras.layers import Conv2D, MaxPooling2D 19 | from keras.optimizers import SGD, Adam 20 | 21 | map_characters = {0: 'abraham_grampa_simpson', 1: 'apu_nahasapeemapetilon', 2: 'bart_simpson', 22 | 3: 'charles_montgomery_burns', 4: 'chief_wiggum', 5: 'comic_book_guy', 6: 'edna_krabappel', 23 | 7: 'homer_simpson', 8: 'kent_brockman', 9: 'krusty_the_clown', 10: 'lisa_simpson', 24 | 11: 'marge_simpson', 12: 'milhouse_van_houten', 13: 'moe_szyslak', 25 | 14: 'ned_flanders', 15: 'nelson_muntz', 16: 'principal_skinner', 17: 'sideshow_bob'} 26 | 27 | pic_size = 64 28 | batch_size = 32 29 | epochs = 200 30 | num_classes = len(map_characters) 31 | pictures_per_class = 1000 32 | test_size = 0.15 33 | 34 | def load_pictures(BGR): 35 | """ 36 | Load pictures from folders for characters from the map_characters dict and create a numpy dataset and 37 | a numpy labels set. Pictures are re-sized into picture_size square. 
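Pictures are read from the ./characters/<character_name>/ folders, one folder per entry of map_characters.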
38 | :param BGR: boolean to use true color for the picture (RGB instead of BGR for plt) 39 | :return: dataset, labels set 40 | """ 41 | pics = [] 42 | labels = [] 43 | for k, char in map_characters.items(): 44 | pictures = [k for k in glob.glob('./characters/%s/*' % char)] 45 | nb_pic = round(pictures_per_class/(1-test_size)) if round(pictures_per_class/(1-test_size)) 0: 115 | neg_samples = neg_samples[0] 116 | else: 117 | neg_samples = [] 118 | 119 | if len(pos_samples) > 0: 120 | pos_samples = pos_samples[0] 121 | else: 122 | pos_samples = [] 123 | 124 | rpn_accuracy_rpn_monitor.append(len(pos_samples)) 125 | rpn_accuracy_for_epoch.append((len(pos_samples))) 126 | 127 | if len(pos_samples) < C.num_rois//2: 128 | selected_pos_samples = pos_samples.tolist() 129 | else: 130 | selected_pos_samples = np.random.choice(pos_samples, C.num_rois//2, replace=False).tolist() 131 | try: 132 | selected_neg_samples = np.random.choice(neg_samples, C.num_rois - len(selected_pos_samples), replace=False).tolist() 133 | except: 134 | selected_neg_samples = np.random.choice(neg_samples, C.num_rois - len(selected_pos_samples), replace=True).tolist() 135 | 136 | sel_samples = selected_pos_samples + selected_neg_samples 137 | 138 | loss_class = model_classifier.train_on_batch([X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]]) 139 | 140 | losses[iter_num, 0] = loss_rpn[1] 141 | losses[iter_num, 1] = loss_rpn[2] 142 | 143 | losses[iter_num, 2] = loss_class[1] 144 | losses[iter_num, 3] = loss_class[2] 145 | losses[iter_num, 4] = loss_class[3] 146 | 147 | iter_num += 1 148 | 149 | progbar.update(iter_num, [('rpn_cls', np.mean(losses[:iter_num, 0])), ('rpn_regr', np.mean(losses[:iter_num, 1])), 150 | ('detector_cls', np.mean(losses[:iter_num, 2])), ('detector_regr', np.mean(losses[:iter_num, 3]))]) 151 | 152 | if iter_num == epoch_length: 153 | loss_rpn_cls = np.mean(losses[:, 0]) 154 | loss_rpn_regr = np.mean(losses[:, 1]) 155 | loss_class_cls = np.mean(losses[:, 2]) 156 | loss_class_regr = np.mean(losses[:, 3]) 157 | class_acc = np.mean(losses[:, 4]) 158 | 159 | mean_overlapping_bboxes = float(sum(rpn_accuracy_for_epoch)) / len(rpn_accuracy_for_epoch) 160 | rpn_accuracy_for_epoch = [] 161 | 162 | if C.verbose: 163 | print('Mean number of bounding boxes from RPN overlapping ground truth boxes: {}'.format(mean_overlapping_bboxes)) 164 | print('Classifier accuracy for bounding boxes from RPN: {}'.format(class_acc)) 165 | print('Loss RPN classifier: {}'.format(loss_rpn_cls)) 166 | print('Loss RPN regression: {}'.format(loss_rpn_regr)) 167 | print('Loss Detector classifier: {}'.format(loss_class_cls)) 168 | print('Loss Detector regression: {}'.format(loss_class_regr)) 169 | print('Elapsed time: {}'.format(time.time() - start_time)) 170 | 171 | target_text_file = open('out.csv', 'a') 172 | target_text_file.write('{},{},{},{},{},{}'.format(class_acc, loss_rpn_cls, 173 | loss_rpn_regr, loss_class_cls, loss_class_regr, 174 | loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr)) 175 | target_text_file.write('\t') 176 | 177 | curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr 178 | iter_num = 0 179 | start_time = time.time() 180 | 181 | if curr_loss < best_loss: 182 | if C.verbose: 183 | print('Total loss decreased from {} to {}, saving weights'.format(best_loss,curr_loss)) 184 | best_loss = curr_loss 185 | model_all.save_weights(C.model_path) 186 | 187 | break 188 | 189 | except Exception as e: 190 | print('Exception: {}'.format(e)) 191 | continue 192 | except 
KeyboardInterrupt: 193 | t1 = time.time() 194 | print('\nIt took {:.2f}s'.format(t1-t0)) 195 | sys.exit('Keyboard Interrupt') --------------------------------------------------------------------------------