├── .gitattributes ├── .gitignore ├── README.md ├── Violence_Detection.py ├── app.py ├── camerawriter.py ├── destination_path.eps ├── destination_path1.eps ├── download.py ├── infer.py ├── infer_cam.py ├── ip_cam.py ├── model sandbox ├── BuildModel.py ├── BuildModel_basic.py ├── DatasetBuilder.py ├── __init__.py ├── results │ ├── hockey.csv │ ├── movies.csv │ ├── results.csv │ └── violentflow.csv └── run.py ├── requirements.txt ├── templates └── index.html └── violence_detection.ipynb /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.zip 2 | *.wmv 3 | *.mpg 4 | *.mpeg 5 | *.mp4 6 | *.mov 7 | *.flv 8 | *.avi 9 | *.ogv 10 | *.ogg 11 | *.webm 12 | *.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Violence Detection using CNN-LSTM 2 | Violence rates have fallen by about 57% over the past four decades, yet this does not change the fact that acts of violence still occur, unseen by the law. Violence can sometimes be broadly contained by higher authorities; however, to keep everything in check one would have to "micro-govern" every movement occurring on every road of every square. To address this butterfly-effect impact in our setting, I built a model and a hypothesized system to tackle the problem using deep learning. The model takes CCTV video feeds as input and, after drawing inference, recognizes whether a violent action is taking place. The hypothesized architecture aims at probability-driven computation over the video feeds, reducing the overhead of naively processing every CCTV feed. 
3 | 4 | [Link to the paper](https://arxiv.org/abs/2107.07578) 5 | 6 | ## Citation 7 | ``` 8 | @misc{patel2021realtime, 9 | title={Real-Time Violence Detection Using CNN-LSTM}, 10 | author={Mann Patel}, 11 | year={2021}, 12 | eprint={2107.07578}, 13 | archivePrefix={arXiv}, 14 | primaryClass={cs.CV} 15 | } 16 | ``` 17 | -------------------------------------------------------------------------------- /Violence_Detection.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Primero instalar openCV package para importar cv2 4 | 5 | import cv2 6 | import os 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | 10 | 11 | # Para descargar los datasets 12 | import download 13 | 14 | from random import shuffle 15 | 16 | from keras.applications import VGG16 17 | 18 | from keras import backend as K 19 | from keras.models import Model, Sequential 20 | from keras.layers import Input 21 | from keras.layers import LSTM 22 | from keras.layers import Dense, Activation 23 | import sys 24 | 25 | 26 | import h5py 27 | 28 | 29 | def print_progress(count, max_count): 30 | # Percentage completion. 31 | pct_complete = count / max_count 32 | 33 | # Status-message. Note the \r which means the line should 34 | # overwrite itself. 35 | msg = "\r- Progress: {0:.1%}".format(pct_complete) 36 | 37 | # Print it. 
38 | sys.stdout.write(msg) 39 | sys.stdout.flush() 40 | 41 | 42 | 43 | 44 | # Directorio donde vamos a poner todos los videos 45 | in_dir = "data" 46 | 47 | # Tamanyo de cada imagen 48 | img_size = 224 49 | 50 | img_size_touple = (img_size, img_size) 51 | 52 | 53 | # Donde se van a almacenar todas las imagene 54 | #images = [] 55 | 56 | # Numero de canales 57 | num_channels = 3 58 | 59 | # Tamanyo imagen cuando se aplana en vector 1 dimension 60 | img_size_flat = img_size * img_size * num_channels 61 | 62 | # Numero de clases 63 | num_classes = 2 64 | 65 | # Numero de videos para entreno 66 | _num_files_train = 1 67 | 68 | # Numero de frames por video 69 | _images_per_file = 20 70 | 71 | # Numero de imagenes total en el training-set 72 | _num_images_train = _num_files_train * _images_per_file 73 | 74 | # Extension de video 75 | video_exts = ".avi" 76 | 77 | # Url de descarga directa 78 | url_hockey = "http://visilab.etsii.uclm.es/personas/oscar/FightDetection/HockeyFights.zip" 79 | 80 | url_movies = "http://visilab.etsii.uclm.es/personas/oscar/FightDetection/Peliculas.rar" 81 | 82 | in_dir = "data" 83 | 84 | 85 | # Funcion para descargar los datos 86 | def download_data(in_dir, url): 87 | 88 | # Si la carpeta no existe la creamos 89 | if not os.path.exists(in_dir): 90 | os.makedirs(in_dir) 91 | 92 | # Para descargar del link directo y extraer los archivos 93 | download.maybe_download_and_extract(url,in_dir) 94 | 95 | 96 | #def label_vid(vid_name): 97 | # 98 | # word_label = 99 | # 100 | download_data(in_dir,url_hockey) 101 | 102 | 103 | 104 | def get_frames(current_dir, file_name): 105 | 106 | in_file = os.path.join(current_dir, file_name) 107 | 108 | images = [] 109 | 110 | vidcap = cv2.VideoCapture(in_file) 111 | 112 | success,image = vidcap.read() 113 | 114 | count = 0 115 | 116 | while count<_images_per_file: 117 | 118 | RGB_img = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) 119 | 120 | res = cv2.resize(RGB_img, dsize=(img_size, img_size), 121 | 
interpolation=cv2.INTER_CUBIC) 122 | 123 | # Convertir imagen en un vector y añadirlo 124 | #images.append(res.flatten()) 125 | 126 | images.append(res) 127 | 128 | success,image = vidcap.read() 129 | 130 | count += 1 131 | 132 | resul = np.array(images) 133 | 134 | # Mirar esto alomejor no va despues 135 | 136 | resul = (resul / 255.).astype(np.float16) 137 | 138 | return resul 139 | 140 | 141 | 142 | 143 | image_model = VGG16(include_top=True, weights='imagenet') 144 | 145 | 146 | image_model.summary() 147 | 148 | 149 | # We will use the output of the layer prior to the final 150 | # classification-layer which is named fc2. This is a fully-connected (or dense) layer. 151 | transfer_layer = image_model.get_layer('fc2') 152 | 153 | image_model_transfer = Model(inputs=image_model.input, 154 | outputs=transfer_layer.output) 155 | 156 | transfer_values_size = K.int_shape(transfer_layer.output)[1] 157 | 158 | 159 | print("La entrada de la red dimensiones:",K.int_shape(image_model.input)[1:3]) 160 | 161 | print("La salida de la red dimensiones: ", transfer_values_size) 162 | 163 | 164 | def get_transfer_values(current_dir, file_name): 165 | 166 | 167 | # Pre-allocate input-batch-array for images. 168 | shape = (_images_per_file,) + img_size_touple + (3,) 169 | 170 | image_batch = np.zeros(shape=shape, dtype=np.float16) 171 | 172 | image_batch = get_frames(current_dir, file_name) 173 | 174 | # Arreglar esto para obtener los valores de los filtros despues de pooling 175 | 176 | 177 | # Pre-allocate output-array for transfer-values. 178 | # Note that we use 16-bit floating-points to save memory. 
179 | shape = (_images_per_file, transfer_values_size) 180 | transfer_values = np.zeros(shape=shape, dtype=np.float16) 181 | 182 | transfer_values = \ 183 | image_model_transfer.predict(image_batch) 184 | 185 | return transfer_values 186 | 187 | in_dir_prueba = 'data' 188 | 189 | def proces_transfer(vid_names, in_dir, labels): 190 | 191 | 192 | count = 0 193 | 194 | tam = len(vid_names) 195 | 196 | # Pre-allocate input-batch-array for images. 197 | shape = (_images_per_file,) + img_size_touple + (3,) 198 | 199 | while count/', methods=["GET"]) 33 | def video_feed(id): 34 | 35 | """Video streaming route. Put this in the src attribute of an img tag.""" 36 | return Response(gen_frames(id), 37 | mimetype='multipart/x-mixed-replace; boundary=frame') 38 | 39 | 40 | @app.route('/', methods=["GET"]) 41 | def index(): 42 | return render_template('index.html') 43 | 44 | 45 | if __name__ == '__main__': 46 | app.run() 47 | -------------------------------------------------------------------------------- /camerawriter.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | cap = cv2.VideoCapture(0) 5 | 6 | # Define the codec and create VideoWriter object 7 | #fourcc = cv2.cv.CV_FOURCC(*'DIVX') 8 | #out = cv2.VideoWriter('output.avi',fourcc, 20.0, (640,480)) 9 | out = cv2.VideoWriter('output.avi', -1, 20.0, (640,480)) 10 | 11 | while(cap.isOpened()): 12 | ret, frame = cap.read() 13 | if ret==True: 14 | frame = cv2.flip(frame,0) 15 | 16 | # write the flipped frame 17 | out.write(frame) 18 | 19 | cv2.imshow('frame',frame) 20 | if cv2.waitKey(1) & 0xFF == ord('q'): 21 | break 22 | else: 23 | break 24 | 25 | # Release everything if job is finished 26 | cap.release() 27 | out.release() 28 | cv2.destroyAllWindows() -------------------------------------------------------------------------------- /destination_path.eps: -------------------------------------------------------------------------------- 1 | 
%!PS-Adobe-3.0 EPSF-3.0 2 | %%Title: destination_path.eps 3 | %%Creator: matplotlib version 3.3.2, http://matplotlib.org/ 4 | %%CreationDate: Mon Oct 26 19:22:24 2020 5 | %%Orientation: portrait 6 | %%BoundingBox: 90.0 252.0 522.0 540.0 7 | %%EndComments 8 | %%BeginProlog 9 | /mpldict 8 dict def 10 | mpldict begin 11 | /m { moveto } bind def 12 | /l { lineto } bind def 13 | /r { rlineto } bind def 14 | /c { curveto } bind def 15 | /cl { closepath } bind def 16 | /box { 17 | m 18 | 1 index 0 r 19 | 0 exch r 20 | neg 0 r 21 | cl 22 | } bind def 23 | /clipbox { 24 | box 25 | clip 26 | newpath 27 | } bind def 28 | %!PS-Adobe-3.0 Resource-Font 29 | %%Title: DejaVu Sans 30 | %%Copyright: Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved. DejaVu changes are in public domain 31 | %%Creator: Converted from TrueType to type 3 by PPR 32 | 25 dict begin 33 | /_d{bind def}bind def 34 | /_m{moveto}_d 35 | /_l{lineto}_d 36 | /_cl{closepath eofill}_d 37 | /_c{curveto}_d 38 | /_sc{7 -1 roll{setcachedevice}{pop pop pop pop pop pop}ifelse}_d 39 | /_e{exec}_d 40 | /FontName /DejaVuSans def 41 | /PaintType 0 def 42 | /FontMatrix[.001 0 0 .001 0 0]def 43 | /FontBBox[-1021 -463 1793 1232]def 44 | /FontType 3 def 45 | /Encoding [ /space /period /zero /one /two /five /six /seven /eight /nine /a /c /d /e /h /i /l /m /n /o /p /r /t /u /v /y ] def 46 | /FontInfo 10 dict dup begin 47 | /FamilyName (DejaVu Sans) def 48 | /FullName (DejaVu Sans) def 49 | /Notice (Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved. 
DejaVu changes are in public domain ) def 50 | /Weight (Book) def 51 | /Version (Version 2.35) def 52 | /ItalicAngle 0.0 def 53 | /isFixedPitch false def 54 | /UnderlinePosition -130 def 55 | /UnderlineThickness 90 def 56 | end readonly def 57 | /CharStrings 27 dict dup begin 58 | /.notdef 0 def 59 | /space{318 0 0 0 0 0 _sc 60 | }_d 61 | /period{318 0 107 0 210 124 _sc 62 | 107 124 _m 63 | 210 124 _l 64 | 210 0 _l 65 | 107 0 _l 66 | 107 124 _l 67 | _cl}_d 68 | /zero{636 0 66 -13 570 742 _sc 69 | 318 664 _m 70 | 267 664 229 639 203 589 _c 71 | 177 539 165 464 165 364 _c 72 | 165 264 177 189 203 139 _c 73 | 229 89 267 64 318 64 _c 74 | 369 64 407 89 433 139 _c 75 | 458 189 471 264 471 364 _c 76 | 471 464 458 539 433 589 _c 77 | 407 639 369 664 318 664 _c 78 | 318 742 _m 79 | 399 742 461 709 505 645 _c 80 | 548 580 570 486 570 364 _c 81 | 570 241 548 147 505 83 _c 82 | 461 19 399 -13 318 -13 _c 83 | 236 -13 173 19 130 83 _c 84 | 87 147 66 241 66 364 _c 85 | 66 486 87 580 130 645 _c 86 | 173 709 236 742 318 742 _c 87 | _cl}_d 88 | /one{636 0 110 0 544 729 _sc 89 | 124 83 _m 90 | 285 83 _l 91 | 285 639 _l 92 | 110 604 _l 93 | 110 694 _l 94 | 284 729 _l 95 | 383 729 _l 96 | 383 83 _l 97 | 544 83 _l 98 | 544 0 _l 99 | 124 0 _l 100 | 124 83 _l 101 | _cl}_d 102 | /two{{636 0 73 0 536 742 _sc 103 | 192 83 _m 104 | 536 83 _l 105 | 536 0 _l 106 | 73 0 _l 107 | 73 83 _l 108 | 110 121 161 173 226 239 _c 109 | 290 304 331 346 348 365 _c 110 | 380 400 402 430 414 455 _c 111 | 426 479 433 504 433 528 _c 112 | 433 566 419 598 392 622 _c 113 | 365 646 330 659 286 659 _c 114 | 255 659 222 653 188 643 _c 115 | 154 632 117 616 78 594 _c 116 | 78 694 _l 117 | 118 710 155 722 189 730 _c 118 | 223 738 255 742 284 742 _c 119 | }_e{359 742 419 723 464 685 _c 120 | 509 647 532 597 532 534 _c 121 | 532 504 526 475 515 449 _c 122 | 504 422 484 390 454 354 _c 123 | 446 344 420 317 376 272 _c 124 | 332 227 271 164 192 83 _c 125 | _cl}_e}_d 126 | /five{{636 0 77 -13 549 729 _sc 127 | 108 729 _m 
128 | 495 729 _l 129 | 495 646 _l 130 | 198 646 _l 131 | 198 467 _l 132 | 212 472 227 476 241 478 _c 133 | 255 480 270 482 284 482 _c 134 | 365 482 429 459 477 415 _c 135 | 525 370 549 310 549 234 _c 136 | 549 155 524 94 475 51 _c 137 | 426 8 357 -13 269 -13 _c 138 | 238 -13 207 -10 175 -6 _c 139 | 143 -1 111 6 77 17 _c 140 | 77 116 _l 141 | 106 100 136 88 168 80 _c 142 | 199 72 232 69 267 69 _c 143 | }_e{323 69 368 83 401 113 _c 144 | 433 143 450 183 450 234 _c 145 | 450 284 433 324 401 354 _c 146 | 368 384 323 399 267 399 _c 147 | 241 399 214 396 188 390 _c 148 | 162 384 135 375 108 363 _c 149 | 108 729 _l 150 | _cl}_e}_d 151 | /six{{636 0 70 -13 573 742 _sc 152 | 330 404 _m 153 | 286 404 251 388 225 358 _c 154 | 199 328 186 286 186 234 _c 155 | 186 181 199 139 225 109 _c 156 | 251 79 286 64 330 64 _c 157 | 374 64 409 79 435 109 _c 158 | 461 139 474 181 474 234 _c 159 | 474 286 461 328 435 358 _c 160 | 409 388 374 404 330 404 _c 161 | 526 713 _m 162 | 526 623 _l 163 | 501 635 476 644 451 650 _c 164 | 425 656 400 659 376 659 _c 165 | 310 659 260 637 226 593 _c 166 | }_e{192 549 172 482 168 394 _c 167 | 187 422 211 444 240 459 _c 168 | 269 474 301 482 336 482 _c 169 | 409 482 467 459 509 415 _c 170 | 551 371 573 310 573 234 _c 171 | 573 159 550 99 506 54 _c 172 | 462 9 403 -13 330 -13 _c 173 | 246 -13 181 19 137 83 _c 174 | 92 147 70 241 70 364 _c 175 | 70 479 97 571 152 639 _c 176 | 206 707 280 742 372 742 _c 177 | 396 742 421 739 447 735 _c 178 | 472 730 498 723 526 713 _c 179 | _cl}_e}_d 180 | /seven{636 0 82 0 551 729 _sc 181 | 82 729 _m 182 | 551 729 _l 183 | 551 687 _l 184 | 286 0 _l 185 | 183 0 _l 186 | 432 646 _l 187 | 82 646 _l 188 | 82 729 _l 189 | _cl}_d 190 | /eight{{636 0 68 -13 568 742 _sc 191 | 318 346 _m 192 | 271 346 234 333 207 308 _c 193 | 180 283 167 249 167 205 _c 194 | 167 161 180 126 207 101 _c 195 | 234 76 271 64 318 64 _c 196 | 364 64 401 76 428 102 _c 197 | 455 127 469 161 469 205 _c 198 | 469 249 455 283 429 308 _c 199 | 402 333 365 346 
318 346 _c 200 | 219 388 _m 201 | 177 398 144 418 120 447 _c 202 | 96 476 85 511 85 553 _c 203 | 85 611 105 657 147 691 _c 204 | 188 725 245 742 318 742 _c 205 | }_e{390 742 447 725 489 691 _c 206 | 530 657 551 611 551 553 _c 207 | 551 511 539 476 515 447 _c 208 | 491 418 459 398 417 388 _c 209 | 464 377 501 355 528 323 _c 210 | 554 291 568 251 568 205 _c 211 | 568 134 546 80 503 43 _c 212 | 459 5 398 -13 318 -13 _c 213 | 237 -13 175 5 132 43 _c 214 | 89 80 68 134 68 205 _c 215 | 68 251 81 291 108 323 _c 216 | 134 355 171 377 219 388 _c 217 | 183 544 _m 218 | 183 506 194 476 218 455 _c 219 | }_e{242 434 275 424 318 424 _c 220 | 360 424 393 434 417 455 _c 221 | 441 476 453 506 453 544 _c 222 | 453 582 441 611 417 632 _c 223 | 393 653 360 664 318 664 _c 224 | 275 664 242 653 218 632 _c 225 | 194 611 183 582 183 544 _c 226 | _cl}_e}_d 227 | /nine{{636 0 63 -13 566 742 _sc 228 | 110 15 _m 229 | 110 105 _l 230 | 134 93 159 84 185 78 _c 231 | 210 72 235 69 260 69 _c 232 | 324 69 374 90 408 134 _c 233 | 442 178 462 244 468 334 _c 234 | 448 306 424 284 396 269 _c 235 | 367 254 335 247 300 247 _c 236 | 226 247 168 269 126 313 _c 237 | 84 357 63 417 63 494 _c 238 | 63 568 85 628 129 674 _c 239 | 173 719 232 742 306 742 _c 240 | 390 742 455 709 499 645 _c 241 | 543 580 566 486 566 364 _c 242 | }_e{566 248 538 157 484 89 _c 243 | 429 21 356 -13 264 -13 _c 244 | 239 -13 214 -10 189 -6 _c 245 | 163 -2 137 5 110 15 _c 246 | 306 324 _m 247 | 350 324 385 339 411 369 _c 248 | 437 399 450 441 450 494 _c 249 | 450 546 437 588 411 618 _c 250 | 385 648 350 664 306 664 _c 251 | 262 664 227 648 201 618 _c 252 | 175 588 162 546 162 494 _c 253 | 162 441 175 399 201 369 _c 254 | 227 339 262 324 306 324 _c 255 | _cl}_e}_d 256 | /a{{613 0 60 -13 522 560 _sc 257 | 343 275 _m 258 | 270 275 220 266 192 250 _c 259 | 164 233 150 205 150 165 _c 260 | 150 133 160 107 181 89 _c 261 | 202 70 231 61 267 61 _c 262 | 317 61 357 78 387 114 _c 263 | 417 149 432 196 432 255 _c 264 | 432 275 _l 265 | 343 275 
_l 266 | 522 312 _m 267 | 522 0 _l 268 | 432 0 _l 269 | 432 83 _l 270 | 411 49 385 25 355 10 _c 271 | 325 -5 287 -13 243 -13 _c 272 | 187 -13 142 2 109 33 _c 273 | 76 64 60 106 60 159 _c 274 | }_e{60 220 80 266 122 298 _c 275 | 163 329 224 345 306 345 _c 276 | 432 345 _l 277 | 432 354 _l 278 | 432 395 418 427 391 450 _c 279 | 364 472 326 484 277 484 _c 280 | 245 484 215 480 185 472 _c 281 | 155 464 127 453 100 439 _c 282 | 100 522 _l 283 | 132 534 164 544 195 550 _c 284 | 226 556 256 560 286 560 _c 285 | 365 560 424 539 463 498 _c 286 | 502 457 522 395 522 312 _c 287 | _cl}_e}_d 288 | /c{{550 0 55 -13 488 560 _sc 289 | 488 526 _m 290 | 488 442 _l 291 | 462 456 437 466 411 473 _c 292 | 385 480 360 484 334 484 _c 293 | 276 484 230 465 198 428 _c 294 | 166 391 150 339 150 273 _c 295 | 150 206 166 154 198 117 _c 296 | 230 80 276 62 334 62 _c 297 | 360 62 385 65 411 72 _c 298 | 437 79 462 90 488 104 _c 299 | 488 21 _l 300 | 462 9 436 0 410 -5 _c 301 | 383 -10 354 -13 324 -13 _c 302 | 242 -13 176 12 128 64 _c 303 | }_e{79 115 55 185 55 273 _c 304 | 55 362 79 432 128 483 _c 305 | 177 534 244 560 330 560 _c 306 | 358 560 385 557 411 551 _c 307 | 437 545 463 537 488 526 _c 308 | _cl}_e}_d 309 | /d{{635 0 55 -13 544 760 _sc 310 | 454 464 _m 311 | 454 760 _l 312 | 544 760 _l 313 | 544 0 _l 314 | 454 0 _l 315 | 454 82 _l 316 | 435 49 411 25 382 10 _c 317 | 353 -5 319 -13 279 -13 _c 318 | 213 -13 159 13 117 65 _c 319 | 75 117 55 187 55 273 _c 320 | 55 359 75 428 117 481 _c 321 | 159 533 213 560 279 560 _c 322 | 319 560 353 552 382 536 _c 323 | 411 520 435 496 454 464 _c 324 | 148 273 _m 325 | 148 207 161 155 188 117 _c 326 | 215 79 253 61 301 61 _c 327 | }_e{348 61 385 79 413 117 _c 328 | 440 155 454 207 454 273 _c 329 | 454 339 440 390 413 428 _c 330 | 385 466 348 485 301 485 _c 331 | 253 485 215 466 188 428 _c 332 | 161 390 148 339 148 273 _c 333 | _cl}_e}_d 334 | /e{{615 0 55 -13 562 560 _sc 335 | 562 296 _m 336 | 562 252 _l 337 | 149 252 _l 338 | 153 190 171 142 205 110 _c 
339 | 238 78 284 62 344 62 _c 340 | 378 62 412 66 444 74 _c 341 | 476 82 509 95 541 113 _c 342 | 541 28 _l 343 | 509 14 476 3 442 -3 _c 344 | 408 -9 373 -13 339 -13 _c 345 | 251 -13 182 12 131 62 _c 346 | 80 112 55 181 55 268 _c 347 | 55 357 79 428 127 481 _c 348 | 175 533 241 560 323 560 _c 349 | 397 560 455 536 498 489 _c 350 | }_e{540 441 562 377 562 296 _c 351 | 472 322 _m 352 | 471 371 457 410 431 440 _c 353 | 404 469 368 484 324 484 _c 354 | 274 484 234 469 204 441 _c 355 | 174 413 156 373 152 322 _c 356 | 472 322 _l 357 | _cl}_e}_d 358 | /h{634 0 91 0 549 760 _sc 359 | 549 330 _m 360 | 549 0 _l 361 | 459 0 _l 362 | 459 327 _l 363 | 459 379 448 417 428 443 _c 364 | 408 469 378 482 338 482 _c 365 | 289 482 251 466 223 435 _c 366 | 195 404 181 362 181 309 _c 367 | 181 0 _l 368 | 91 0 _l 369 | 91 760 _l 370 | 181 760 _l 371 | 181 462 _l 372 | 202 494 227 519 257 535 _c 373 | 286 551 320 560 358 560 _c 374 | 420 560 468 540 500 501 _c 375 | 532 462 549 405 549 330 _c 376 | _cl}_d 377 | /i{278 0 94 0 184 760 _sc 378 | 94 547 _m 379 | 184 547 _l 380 | 184 0 _l 381 | 94 0 _l 382 | 94 547 _l 383 | 94 760 _m 384 | 184 760 _l 385 | 184 646 _l 386 | 94 646 _l 387 | 94 760 _l 388 | _cl}_d 389 | /l{278 0 94 0 184 760 _sc 390 | 94 760 _m 391 | 184 760 _l 392 | 184 0 _l 393 | 94 0 _l 394 | 94 760 _l 395 | _cl}_d 396 | /m{{974 0 91 0 889 560 _sc 397 | 520 442 _m 398 | 542 482 569 511 600 531 _c 399 | 631 550 668 560 711 560 _c 400 | 767 560 811 540 842 500 _c 401 | 873 460 889 403 889 330 _c 402 | 889 0 _l 403 | 799 0 _l 404 | 799 327 _l 405 | 799 379 789 418 771 444 _c 406 | 752 469 724 482 686 482 _c 407 | 639 482 602 466 575 435 _c 408 | 548 404 535 362 535 309 _c 409 | 535 0 _l 410 | 445 0 _l 411 | 445 327 _l 412 | 445 379 435 418 417 444 _c 413 | 398 469 369 482 331 482 _c 414 | }_e{285 482 248 466 221 435 _c 415 | 194 404 181 362 181 309 _c 416 | 181 0 _l 417 | 91 0 _l 418 | 91 547 _l 419 | 181 547 _l 420 | 181 462 _l 421 | 201 495 226 520 255 536 _c 422 | 283 552 317 
560 357 560 _c 423 | 397 560 430 550 458 530 _c 424 | 486 510 506 480 520 442 _c 425 | _cl}_e}_d 426 | /n{634 0 91 0 549 560 _sc 427 | 549 330 _m 428 | 549 0 _l 429 | 459 0 _l 430 | 459 327 _l 431 | 459 379 448 417 428 443 _c 432 | 408 469 378 482 338 482 _c 433 | 289 482 251 466 223 435 _c 434 | 195 404 181 362 181 309 _c 435 | 181 0 _l 436 | 91 0 _l 437 | 91 547 _l 438 | 181 547 _l 439 | 181 462 _l 440 | 202 494 227 519 257 535 _c 441 | 286 551 320 560 358 560 _c 442 | 420 560 468 540 500 501 _c 443 | 532 462 549 405 549 330 _c 444 | _cl}_d 445 | /o{612 0 55 -13 557 560 _sc 446 | 306 484 _m 447 | 258 484 220 465 192 427 _c 448 | 164 389 150 338 150 273 _c 449 | 150 207 163 156 191 118 _c 450 | 219 80 257 62 306 62 _c 451 | 354 62 392 80 420 118 _c 452 | 448 156 462 207 462 273 _c 453 | 462 337 448 389 420 427 _c 454 | 392 465 354 484 306 484 _c 455 | 306 560 _m 456 | 384 560 445 534 490 484 _c 457 | 534 433 557 363 557 273 _c 458 | 557 183 534 113 490 63 _c 459 | 445 12 384 -13 306 -13 _c 460 | 227 -13 165 12 121 63 _c 461 | 77 113 55 183 55 273 _c 462 | 55 363 77 433 121 484 _c 463 | 165 534 227 560 306 560 _c 464 | _cl}_d 465 | /p{{635 0 91 -207 580 560 _sc 466 | 181 82 _m 467 | 181 -207 _l 468 | 91 -207 _l 469 | 91 547 _l 470 | 181 547 _l 471 | 181 464 _l 472 | 199 496 223 520 252 536 _c 473 | 281 552 316 560 356 560 _c 474 | 422 560 476 533 518 481 _c 475 | 559 428 580 359 580 273 _c 476 | 580 187 559 117 518 65 _c 477 | 476 13 422 -13 356 -13 _c 478 | 316 -13 281 -5 252 10 _c 479 | 223 25 199 49 181 82 _c 480 | 487 273 _m 481 | 487 339 473 390 446 428 _c 482 | 418 466 381 485 334 485 _c 483 | }_e{286 485 249 466 222 428 _c 484 | 194 390 181 339 181 273 _c 485 | 181 207 194 155 222 117 _c 486 | 249 79 286 61 334 61 _c 487 | 381 61 418 79 446 117 _c 488 | 473 155 487 207 487 273 _c 489 | _cl}_e}_d 490 | /r{411 0 91 0 411 560 _sc 491 | 411 463 _m 492 | 401 469 390 473 378 476 _c 493 | 366 478 353 480 339 480 _c 494 | 288 480 249 463 222 430 _c 495 | 194 397 181 
350 181 288 _c 496 | 181 0 _l 497 | 91 0 _l 498 | 91 547 _l 499 | 181 547 _l 500 | 181 462 _l 501 | 199 495 224 520 254 536 _c 502 | 284 552 321 560 365 560 _c 503 | 371 560 378 559 386 559 _c 504 | 393 558 401 557 411 555 _c 505 | 411 463 _l 506 | _cl}_d 507 | /t{392 0 27 0 368 702 _sc 508 | 183 702 _m 509 | 183 547 _l 510 | 368 547 _l 511 | 368 477 _l 512 | 183 477 _l 513 | 183 180 _l 514 | 183 135 189 106 201 94 _c 515 | 213 81 238 75 276 75 _c 516 | 368 75 _l 517 | 368 0 _l 518 | 276 0 _l 519 | 206 0 158 13 132 39 _c 520 | 106 65 93 112 93 180 _c 521 | 93 477 _l 522 | 27 477 _l 523 | 27 547 _l 524 | 93 547 _l 525 | 93 702 _l 526 | 183 702 _l 527 | _cl}_d 528 | /u{634 0 85 -13 543 560 _sc 529 | 85 216 _m 530 | 85 547 _l 531 | 175 547 _l 532 | 175 219 _l 533 | 175 167 185 129 205 103 _c 534 | 225 77 255 64 296 64 _c 535 | 344 64 383 79 411 110 _c 536 | 439 141 453 183 453 237 _c 537 | 453 547 _l 538 | 543 547 _l 539 | 543 0 _l 540 | 453 0 _l 541 | 453 84 _l 542 | 431 50 405 26 377 10 _c 543 | 348 -5 315 -13 277 -13 _c 544 | 214 -13 166 6 134 45 _c 545 | 101 83 85 140 85 216 _c 546 | 311 560 _m 547 | 311 560 _l 548 | _cl}_d 549 | /v{592 0 30 0 562 547 _sc 550 | 30 547 _m 551 | 125 547 _l 552 | 296 88 _l 553 | 467 547 _l 554 | 562 547 _l 555 | 357 0 _l 556 | 235 0 _l 557 | 30 547 _l 558 | _cl}_d 559 | /y{592 0 30 -207 562 547 _sc 560 | 322 -50 _m 561 | 296 -114 271 -157 247 -177 _c 562 | 223 -197 191 -207 151 -207 _c 563 | 79 -207 _l 564 | 79 -132 _l 565 | 132 -132 _l 566 | 156 -132 175 -126 189 -114 _c 567 | 203 -102 218 -75 235 -31 _c 568 | 251 9 _l 569 | 30 547 _l 570 | 125 547 _l 571 | 296 119 _l 572 | 467 547 _l 573 | 562 547 _l 574 | 322 -50 _l 575 | _cl}_d 576 | end readonly def 577 | 578 | /BuildGlyph 579 | {exch begin 580 | CharStrings exch 581 | 2 copy known not{pop /.notdef}if 582 | true 3 1 roll get exec 583 | end}_d 584 | 585 | /BuildChar { 586 | 1 index /Encoding get exch get 587 | 1 index /BuildGlyph get exec 588 | }_d 589 | 590 | FontName 
currentdict end definefont pop 591 | end 592 | %%EndProlog 593 | mpldict begin 594 | 90 252 translate 595 | 432 288 0 0 clipbox 596 | gsave 597 | 0 0 m 598 | 432 0 l 599 | 432 288 l 600 | 0 288 l 601 | cl 602 | grestore 603 | gsave 604 | 54 36 m 605 | 388.8 36 l 606 | 388.8 253.44 l 607 | 54 253.44 l 608 | cl 609 | 1.000 setgray 610 | fill 611 | grestore 612 | 0.800 setlinewidth 613 | 1 setlinejoin 614 | 0 setlinecap 615 | [] 0 setdash 616 | 0.000 setgray 617 | gsave 618 | /o { 619 | gsave 620 | newpath 621 | translate 622 | 0.8 setlinewidth 623 | 1 setlinejoin 624 | 0 setlinecap 625 | 0 0 m 626 | 0 -3.5 l 627 | 628 | gsave 629 | 0.000 setgray 630 | fill 631 | grestore 632 | stroke 633 | grestore 634 | } bind def 635 | 69.2182 36 o 636 | grestore 637 | /DejaVuSans findfont 638 | 10.000 scalefont 639 | setfont 640 | gsave 641 | 66.038494 21.406250 translate 642 | 0.000000 rotate 643 | 0.000000 0 m /zero glyphshow 644 | grestore 645 | gsave 646 | /o { 647 | gsave 648 | newpath 649 | translate 650 | 0.8 setlinewidth 651 | 1 setlinejoin 652 | 0 setlinecap 653 | 0 0 m 654 | 0 -3.5 l 655 | 656 | gsave 657 | 0.000 setgray 658 | fill 659 | grestore 660 | stroke 661 | grestore 662 | } bind def 663 | 107.455 36 o 664 | grestore 665 | gsave 666 | 101.095445 21.406250 translate 667 | 0.000000 rotate 668 | 0.000000 0 m /two glyphshow 669 | 6.362305 0 m /five glyphshow 670 | grestore 671 | gsave 672 | /o { 673 | gsave 674 | newpath 675 | translate 676 | 0.8 setlinewidth 677 | 1 setlinejoin 678 | 0 setlinecap 679 | 0 0 m 680 | 0 -3.5 l 681 | 682 | gsave 683 | 0.000 setgray 684 | fill 685 | grestore 686 | stroke 687 | grestore 688 | } bind def 689 | 145.691 36 o 690 | grestore 691 | gsave 692 | 139.332082 21.406250 translate 693 | 0.000000 rotate 694 | 0.000000 0 m /five glyphshow 695 | 6.362305 0 m /zero glyphshow 696 | grestore 697 | gsave 698 | /o { 699 | gsave 700 | newpath 701 | translate 702 | 0.8 setlinewidth 703 | 1 setlinejoin 704 | 0 setlinecap 705 | 0 0 m 706 | 0 -3.5 l 
707 | 708 | gsave 709 | 0.000 setgray 710 | fill 711 | grestore 712 | stroke 713 | grestore 714 | } bind def 715 | 183.928 36 o 716 | grestore 717 | gsave 718 | 177.568720 21.406250 translate 719 | 0.000000 rotate 720 | 0.000000 0 m /seven glyphshow 721 | 6.362305 0 m /five glyphshow 722 | grestore 723 | gsave 724 | /o { 725 | gsave 726 | newpath 727 | translate 728 | 0.8 setlinewidth 729 | 1 setlinejoin 730 | 0 setlinecap 731 | 0 0 m 732 | 0 -3.5 l 733 | 734 | gsave 735 | 0.000 setgray 736 | fill 737 | grestore 738 | stroke 739 | grestore 740 | } bind def 741 | 222.165 36 o 742 | grestore 743 | gsave 744 | 212.625670 21.406250 translate 745 | 0.000000 rotate 746 | 0.000000 0 m /one glyphshow 747 | 6.362305 0 m /zero glyphshow 748 | 12.724609 0 m /zero glyphshow 749 | grestore 750 | gsave 751 | /o { 752 | gsave 753 | newpath 754 | translate 755 | 0.8 setlinewidth 756 | 1 setlinejoin 757 | 0 setlinecap 758 | 0 0 m 759 | 0 -3.5 l 760 | 761 | gsave 762 | 0.000 setgray 763 | fill 764 | grestore 765 | stroke 766 | grestore 767 | } bind def 768 | 260.401 36 o 769 | grestore 770 | gsave 771 | 250.862308 21.406250 translate 772 | 0.000000 rotate 773 | 0.000000 0 m /one glyphshow 774 | 6.362305 0 m /two glyphshow 775 | 12.724609 0 m /five glyphshow 776 | grestore 777 | gsave 778 | /o { 779 | gsave 780 | newpath 781 | translate 782 | 0.8 setlinewidth 783 | 1 setlinejoin 784 | 0 setlinecap 785 | 0 0 m 786 | 0 -3.5 l 787 | 788 | gsave 789 | 0.000 setgray 790 | fill 791 | grestore 792 | stroke 793 | grestore 794 | } bind def 795 | 298.638 36 o 796 | grestore 797 | gsave 798 | 289.098946 21.406250 translate 799 | 0.000000 rotate 800 | 0.000000 0 m /one glyphshow 801 | 6.362305 0 m /five glyphshow 802 | 12.724609 0 m /zero glyphshow 803 | grestore 804 | gsave 805 | /o { 806 | gsave 807 | newpath 808 | translate 809 | 0.8 setlinewidth 810 | 1 setlinejoin 811 | 0 setlinecap 812 | 0 0 m 813 | 0 -3.5 l 814 | 815 | gsave 816 | 0.000 setgray 817 | fill 818 | grestore 819 | stroke 820 | 
grestore 821 | } bind def 822 | 336.875 36 o 823 | grestore 824 | gsave 825 | 327.335583 21.406250 translate 826 | 0.000000 rotate 827 | 0.000000 0 m /one glyphshow 828 | 6.362305 0 m /seven glyphshow 829 | 12.724609 0 m /five glyphshow 830 | grestore 831 | gsave 832 | /o { 833 | gsave 834 | newpath 835 | translate 836 | 0.8 setlinewidth 837 | 1 setlinejoin 838 | 0 setlinecap 839 | 0 0 m 840 | 0 -3.5 l 841 | 842 | gsave 843 | 0.000 setgray 844 | fill 845 | grestore 846 | stroke 847 | grestore 848 | } bind def 849 | 375.111 36 o 850 | grestore 851 | gsave 852 | 365.572221 21.406250 translate 853 | 0.000000 rotate 854 | 0.000000 0 m /two glyphshow 855 | 6.362305 0 m /zero glyphshow 856 | 12.724609 0 m /zero glyphshow 857 | grestore 858 | gsave 859 | 206.165625 7.734375 translate 860 | 0.000000 rotate 861 | 0.000000 0 m /e glyphshow 862 | 6.152344 0 m /p glyphshow 863 | 12.500000 0 m /o glyphshow 864 | 18.618164 0 m /c glyphshow 865 | 24.116211 0 m /h glyphshow 866 | grestore 867 | gsave 868 | /o { 869 | gsave 870 | newpath 871 | translate 872 | 0.8 setlinewidth 873 | 1 setlinejoin 874 | 0 setlinecap 875 | -0 0 m 876 | -3.5 0 l 877 | 878 | gsave 879 | 0.000 setgray 880 | fill 881 | grestore 882 | stroke 883 | grestore 884 | } bind def 885 | 54 61.4484 o 886 | grestore 887 | gsave 888 | 31.093750 57.651540 translate 889 | 0.000000 rotate 890 | 0.000000 0 m /zero glyphshow 891 | 6.362305 0 m /period glyphshow 892 | 9.541016 0 m /five glyphshow 893 | grestore 894 | gsave 895 | /o { 896 | gsave 897 | newpath 898 | translate 899 | 0.8 setlinewidth 900 | 1 setlinejoin 901 | 0 setlinecap 902 | -0 0 m 903 | -3.5 0 l 904 | 905 | gsave 906 | 0.000 setgray 907 | fill 908 | grestore 909 | stroke 910 | grestore 911 | } bind def 912 | 54 100.36 o 913 | grestore 914 | gsave 915 | 31.093750 96.563495 translate 916 | 0.000000 rotate 917 | 0.000000 0 m /zero glyphshow 918 | 6.362305 0 m /period glyphshow 919 | 9.541016 0 m /six glyphshow 920 | grestore 921 | gsave 922 | /o { 923 | 
gsave 924 | newpath 925 | translate 926 | 0.8 setlinewidth 927 | 1 setlinejoin 928 | 0 setlinecap 929 | -0 0 m 930 | -3.5 0 l 931 | 932 | gsave 933 | 0.000 setgray 934 | fill 935 | grestore 936 | stroke 937 | grestore 938 | } bind def 939 | 54 139.272 o 940 | grestore 941 | gsave 942 | 31.093750 135.475451 translate 943 | 0.000000 rotate 944 | 0.000000 0 m /zero glyphshow 945 | 6.362305 0 m /period glyphshow 946 | 9.541016 0 m /seven glyphshow 947 | grestore 948 | gsave 949 | /o { 950 | gsave 951 | newpath 952 | translate 953 | 0.8 setlinewidth 954 | 1 setlinejoin 955 | 0 setlinecap 956 | -0 0 m 957 | -3.5 0 l 958 | 959 | gsave 960 | 0.000 setgray 961 | fill 962 | grestore 963 | stroke 964 | grestore 965 | } bind def 966 | 54 178.184 o 967 | grestore 968 | gsave 969 | 31.093750 174.387406 translate 970 | 0.000000 rotate 971 | 0.000000 0 m /zero glyphshow 972 | 6.362305 0 m /period glyphshow 973 | 9.541016 0 m /eight glyphshow 974 | grestore 975 | gsave 976 | /o { 977 | gsave 978 | newpath 979 | translate 980 | 0.8 setlinewidth 981 | 1 setlinejoin 982 | 0 setlinecap 983 | -0 0 m 984 | -3.5 0 l 985 | 986 | gsave 987 | 0.000 setgray 988 | fill 989 | grestore 990 | stroke 991 | grestore 992 | } bind def 993 | 54 217.096 o 994 | grestore 995 | gsave 996 | 31.093750 213.299361 translate 997 | 0.000000 rotate 998 | 0.000000 0 m /zero glyphshow 999 | 6.362305 0 m /period glyphshow 1000 | 9.541016 0 m /nine glyphshow 1001 | grestore 1002 | gsave 1003 | 25.015625 122.157500 translate 1004 | 90.000000 rotate 1005 | 0.000000 0 m /a glyphshow 1006 | 6.127930 0 m /c glyphshow 1007 | 11.625977 0 m /c glyphshow 1008 | 17.124023 0 m /u glyphshow 1009 | 23.461914 0 m /r glyphshow 1010 | 27.573242 0 m /a glyphshow 1011 | 33.701172 0 m /c glyphshow 1012 | 39.199219 0 m /y glyphshow 1013 | grestore 1014 | 1.500 setlinewidth 1015 | 2 setlinecap 1016 | 0.122 0.467 0.706 setrgbcolor 1017 | gsave 1018 | 334.8 217.4 54 36 clipbox 1019 | 69.218182 64.042543 m 1020 | 70.747647 69.230799 l 
1021 | 72.277113 65.08019 l 1022 | 73.806578 66.117859 l 1023 | 75.336044 57.816629 l 1024 | 76.865509 64.042543 l 1025 | 78.394975 65.08019 l 1026 | 79.92444 65.08019 l 1027 | 81.453906 146.535893 l 1028 | 82.983371 161.581849 l 1029 | 84.512837 110.218056 l 1030 | 86.042302 210.35149 l 1031 | 87.571768 216.577415 l 1032 | 89.101233 218.652708 l 1033 | 90.630699 217.615062 l 1034 | 92.160164 208.276197 l 1035 | 93.68963 217.615062 l 1036 | 95.219095 210.870325 l 1037 | 96.748561 205.163234 l 1038 | 98.278026 201.012625 l 1039 | 99.807492 210.870325 l 1040 | 101.336958 215.539769 l 1041 | 102.866423 216.05858 l 1042 | 104.395889 213.983287 l 1043 | 105.925354 220.20919 l 1044 | 107.45482 215.539769 l 1045 | 108.984285 220.20919 l 1046 | 110.513751 218.133897 l 1047 | 112.043216 219.690378 l 1048 | 113.572682 224.359799 l 1049 | 115.102147 223.840964 l 1050 | 116.631613 220.728024 l 1051 | 118.161078 224.878634 l 1052 | 119.690544 223.322152 l 1053 | 121.220009 222.284506 l 1054 | 122.749475 224.359799 l 1055 | 124.27894 224.878634 l 1056 | 125.808406 224.878634 l 1057 | 127.337871 226.435115 l 1058 | 128.867337 226.435115 l 1059 | 130.396802 227.472762 l 1060 | 134.985199 227.472762 l 1061 | 138.04413 229.548055 l 1062 | 139.573595 221.246836 l 1063 | 141.103061 229.029243 l 1064 | 142.632526 224.878634 l 1065 | 144.161992 227.472762 l 1066 | 145.691457 230.585724 l 1067 | 147.220923 224.359799 l 1068 | 148.750388 222.284506 l 1069 | 150.279854 223.322152 l 1070 | 151.809319 229.548055 l 1071 | 153.338785 227.991573 l 1072 | 154.86825 229.029243 l 1073 | 156.397716 230.585724 l 1074 | 157.927181 229.029243 l 1075 | 159.456647 223.840964 l 1076 | 162.515578 227.991573 l 1077 | 164.045043 223.322152 l 1078 | 165.574509 223.840964 l 1079 | 167.103974 225.91628 l 1080 | 168.63344 225.91628 l 1081 | 170.162905 220.728024 l 1082 | 171.692371 217.096227 l 1083 | 173.221836 227.472762 l 1084 | 174.751302 230.066889 l 1085 | 176.280767 223.322152 l 1086 | 177.810233 
227.472762 l 1087 | 179.339698 229.029243 l 1088 | 180.869164 227.991573 l 1089 | 182.39863 229.029243 l 1090 | 183.928095 224.878634 l 1091 | 185.457561 229.029243 l 1092 | 186.987026 231.623371 l 1093 | 188.516492 230.585724 l 1094 | 190.045957 233.698664 l 1095 | 191.575423 231.104536 l 1096 | 193.104888 233.698664 l 1097 | 194.634354 234.217499 l 1098 | 196.163819 233.698664 l 1099 | 197.693285 234.217499 l 1100 | 199.22275 234.217499 l 1101 | 202.281681 233.179852 l 1102 | 203.811147 234.217499 l 1103 | 206.870078 234.217499 l 1104 | 208.399543 237.330461 l 1105 | 209.929009 236.811627 l 1106 | 211.458474 235.77398 l 1107 | 212.98794 236.292792 l 1108 | 214.517405 236.292792 l 1109 | 216.046871 235.77398 l 1110 | 217.576336 237.330461 l 1111 | 220.635267 236.292792 l 1112 | 222.164733 237.330461 l 1113 | 223.694198 237.849273 l 1114 | 225.223664 237.849273 l 1115 | 226.753129 238.88692 l 1116 | 228.282595 236.292792 l 1117 | 229.81206 238.88692 l 1118 | 231.341526 237.849273 l 1119 | 232.870991 238.368108 l 1120 | 234.400457 236.811627 l 1121 | 235.929922 237.330461 l 1122 | 237.459388 239.405754 l 1123 | 238.988853 238.368108 l 1124 | 240.518319 238.88692 l 1125 | 242.047784 238.88692 l 1126 | 243.57725 239.405754 l 1127 | 246.636181 238.368108 l 1128 | 248.165646 238.368108 l 1129 | 249.695112 233.698664 l 1130 | 251.224577 225.397445 l 1131 | 252.754043 234.736334 l 1132 | 254.283508 237.849273 l 1133 | 255.812974 235.255145 l 1134 | 257.342439 235.255145 l 1135 | 258.871905 237.330461 l 1136 | 260.40137 237.849273 l 1137 | 261.930836 239.405754 l 1138 | 263.460302 239.405754 l 1139 | 264.989767 240.443401 l 1140 | 266.519233 238.88692 l 1141 | 268.048698 237.849273 l 1142 | 269.578164 239.405754 l 1143 | 272.637095 234.217499 l 1144 | 274.16656 237.849273 l 1145 | 275.696026 240.962236 l 1146 | 277.225491 239.405754 l 1147 | 278.754957 238.88692 l 1148 | 280.284422 240.443401 l 1149 | 281.813888 239.924589 l 1150 | 283.343353 240.962236 l 1151 | 286.402284 
239.924589 l 1152 | 287.93175 239.924589 l 1153 | 289.461215 241.481071 l 1154 | 290.990681 236.811627 l 1155 | 292.520146 241.481071 l 1156 | 294.049612 238.368108 l 1157 | 295.579077 239.405754 l 1158 | 297.108543 239.924589 l 1159 | 298.638008 237.330461 l 1160 | 300.167474 238.368108 l 1161 | 301.696939 241.481071 l 1162 | 303.226405 240.962236 l 1163 | 304.75587 238.88692 l 1164 | 306.285336 240.443401 l 1165 | 307.814801 239.924589 l 1166 | 309.344267 242.518717 l 1167 | 310.873732 242.518717 l 1168 | 312.403198 240.962236 l 1169 | 313.932663 241.999882 l 1170 | 315.462129 240.443401 l 1171 | 316.991594 239.924589 l 1172 | 318.52106 239.924589 l 1173 | 320.050525 237.330461 l 1174 | 321.579991 226.953927 l 1175 | 323.109456 238.368108 l 1176 | 324.638922 240.962236 l 1177 | 326.168387 238.88692 l 1178 | 327.697853 238.88692 l 1179 | 329.227318 239.924589 l 1180 | 330.756784 241.481071 l 1181 | 332.286249 240.443401 l 1182 | 333.815715 241.481071 l 1183 | 335.34518 231.104536 l 1184 | 336.874646 236.292792 l 1185 | 338.404111 239.924589 l 1186 | 339.933577 239.924589 l 1187 | 341.463042 242.518717 l 1188 | 347.580905 242.518717 l 1189 | 349.11037 237.330461 l 1190 | 350.639836 229.548055 l 1191 | 352.169301 229.548055 l 1192 | 353.698767 242.518717 l 1193 | 355.228232 242.518717 l 1194 | 356.757698 243.556364 l 1195 | 358.287163 241.999882 l 1196 | 359.816629 241.999882 l 1197 | 361.346094 240.443401 l 1198 | 362.87556 242.518717 l 1199 | 364.405025 243.037529 l 1200 | 365.934491 243.037529 l 1201 | 367.463956 241.999882 l 1202 | 368.993422 243.556364 l 1203 | 373.581818 243.556364 l 1204 | 373.581818 243.556364 l 1205 | stroke 1206 | grestore 1207 | 1.000 0.498 0.055 setrgbcolor 1208 | gsave 1209 | 334.8 217.4 54 36 clipbox 1210 | 69.218182 45.883636 m 1211 | 70.747647 77.013206 l 1212 | 72.277113 77.013206 l 1213 | 73.806578 45.883636 l 1214 | 75.336044 45.883636 l 1215 | 76.865509 77.013206 l 1216 | 78.394975 77.013206 l 1217 | 79.92444 170.401879 l 1218 | 
81.453906 170.401879 l 1219 | 82.983371 77.013206 l 1220 | 84.512837 193.749053 l 1221 | 86.042302 193.749053 l 1222 | 89.101233 209.313843 l 1223 | 90.630699 170.401879 l 1224 | 92.160164 209.313843 l 1225 | 93.68963 170.401879 l 1226 | 95.219095 162.619495 l 1227 | 96.748561 170.401879 l 1228 | 98.278026 201.53146 l 1229 | 99.807492 201.53146 l 1230 | 101.336958 185.966669 l 1231 | 104.395889 217.096227 l 1232 | 105.925354 185.966669 l 1233 | 107.45482 209.313843 l 1234 | 110.513751 193.749053 l 1235 | 112.043216 217.096227 l 1236 | 113.572682 201.53146 l 1237 | 115.102147 217.096227 l 1238 | 118.161078 201.53146 l 1239 | 119.690544 209.313843 l 1240 | 121.220009 209.313843 l 1241 | 122.749475 201.53146 l 1242 | 124.27894 217.096227 l 1243 | 134.985199 217.096227 l 1244 | 136.514664 224.878634 l 1245 | 138.04413 201.53146 l 1246 | 139.573595 224.878634 l 1247 | 141.103061 217.096227 l 1248 | 142.632526 217.096227 l 1249 | 144.161992 224.878634 l 1250 | 145.691457 201.53146 l 1251 | 148.750388 201.53146 l 1252 | 150.279854 224.878634 l 1253 | 151.809319 217.096227 l 1254 | 153.338785 224.878634 l 1255 | 154.86825 224.878634 l 1256 | 156.397716 201.53146 l 1257 | 162.515578 201.53146 l 1258 | 164.045043 224.878634 l 1259 | 165.574509 201.53146 l 1260 | 167.103974 201.53146 l 1261 | 168.63344 185.966669 l 1262 | 170.162905 185.966669 l 1263 | 171.692371 224.878634 l 1264 | 173.221836 217.096227 l 1265 | 174.751302 201.53146 l 1266 | 176.280767 217.096227 l 1267 | 177.810233 224.878634 l 1268 | 179.339698 201.53146 l 1269 | 180.869164 209.313843 l 1270 | 182.39863 201.53146 l 1271 | 183.928095 201.53146 l 1272 | 185.457561 217.096227 l 1273 | 186.987026 201.53146 l 1274 | 188.516492 224.878634 l 1275 | 190.045957 224.878634 l 1276 | 191.575423 217.096227 l 1277 | 193.104888 224.878634 l 1278 | 202.281681 224.878634 l 1279 | 205.340612 209.313843 l 1280 | 206.870078 224.878634 l 1281 | 208.399543 209.313843 l 1282 | 209.929009 224.878634 l 1283 | 211.458474 209.313843 
l 1284 | 212.98794 224.878634 l 1285 | 214.517405 209.313843 l 1286 | 216.046871 224.878634 l 1287 | 217.576336 224.878634 l 1288 | 219.105802 209.313843 l 1289 | 220.635267 224.878634 l 1290 | 222.164733 224.878634 l 1291 | 223.694198 217.096227 l 1292 | 225.223664 224.878634 l 1293 | 226.753129 209.313843 l 1294 | 228.282595 224.878634 l 1295 | 229.81206 209.313843 l 1296 | 231.341526 224.878634 l 1297 | 232.870991 209.313843 l 1298 | 234.400457 224.878634 l 1299 | 235.929922 217.096227 l 1300 | 237.459388 224.878634 l 1301 | 238.988853 217.096227 l 1302 | 240.518319 217.096227 l 1303 | 242.047784 224.878634 l 1304 | 243.57725 217.096227 l 1305 | 245.106715 224.878634 l 1306 | 246.636181 217.096227 l 1307 | 248.165646 193.749053 l 1308 | 251.224577 209.313843 l 1309 | 252.754043 224.878634 l 1310 | 254.283508 217.096227 l 1311 | 255.812974 224.878634 l 1312 | 257.342439 217.096227 l 1313 | 260.40137 217.096227 l 1314 | 261.930836 224.878634 l 1315 | 263.460302 217.096227 l 1316 | 264.989767 217.096227 l 1317 | 266.519233 209.313843 l 1318 | 268.048698 217.096227 l 1319 | 269.578164 209.313843 l 1320 | 272.637095 209.313843 l 1321 | 274.16656 217.096227 l 1322 | 275.696026 217.096227 l 1323 | 277.225491 224.878634 l 1324 | 278.754957 224.878634 l 1325 | 280.284422 217.096227 l 1326 | 281.813888 224.878634 l 1327 | 283.343353 224.878634 l 1328 | 284.872819 217.096227 l 1329 | 287.93175 217.096227 l 1330 | 289.461215 209.313843 l 1331 | 290.990681 217.096227 l 1332 | 292.520146 209.313843 l 1333 | 298.638008 209.313843 l 1334 | 300.167474 217.096227 l 1335 | 301.696939 217.096227 l 1336 | 303.226405 209.313843 l 1337 | 304.75587 217.096227 l 1338 | 307.814801 217.096227 l 1339 | 309.344267 224.878634 l 1340 | 310.873732 209.313843 l 1341 | 312.403198 209.313843 l 1342 | 313.932663 217.096227 l 1343 | 315.462129 217.096227 l 1344 | 318.52106 201.53146 l 1345 | 320.050525 185.966669 l 1346 | 321.579991 209.313843 l 1347 | 323.109456 209.313843 l 1348 | 324.638922 
201.53146 l 1349 | 326.168387 209.313843 l 1350 | 332.286249 209.313843 l 1351 | 333.815715 193.749053 l 1352 | 335.34518 209.313843 l 1353 | 336.874646 209.313843 l 1354 | 338.404111 217.096227 l 1355 | 346.051439 217.096227 l 1356 | 347.580905 201.53146 l 1357 | 349.11037 193.749053 l 1358 | 350.639836 201.53146 l 1359 | 352.169301 217.096227 l 1360 | 355.228232 217.096227 l 1361 | 356.757698 209.313843 l 1362 | 358.287163 217.096227 l 1363 | 359.816629 209.313843 l 1364 | 361.346094 217.096227 l 1365 | 364.405025 217.096227 l 1366 | 365.934491 209.313843 l 1367 | 367.463956 224.878634 l 1368 | 368.993422 217.096227 l 1369 | 372.052353 217.096227 l 1370 | 373.581818 224.878634 l 1371 | 373.581818 224.878634 l 1372 | stroke 1373 | grestore 1374 | 0.800 setlinewidth 1375 | 0 setlinejoin 1376 | 0.000 setgray 1377 | gsave 1378 | 54 36 m 1379 | 54 253.44 l 1380 | stroke 1381 | grestore 1382 | gsave 1383 | 388.8 36 m 1384 | 388.8 253.44 l 1385 | stroke 1386 | grestore 1387 | gsave 1388 | 54 36 m 1389 | 388.8 36 l 1390 | stroke 1391 | grestore 1392 | gsave 1393 | 54 253.44 m 1394 | 388.8 253.44 l 1395 | stroke 1396 | grestore 1397 | /DejaVuSans findfont 1398 | 12.000 scalefont 1399 | setfont 1400 | gsave 1401 | 173.728125 259.440000 translate 1402 | 0.000000 rotate 1403 | 0.000000 0 m /m glyphshow 1404 | 11.689453 0 m /o glyphshow 1405 | 19.031250 0 m /d glyphshow 1406 | 26.648438 0 m /e glyphshow 1407 | 34.031250 0 m /l glyphshow 1408 | 37.365234 0 m /space glyphshow 1409 | 41.179688 0 m /a glyphshow 1410 | 48.533203 0 m /c glyphshow 1411 | 55.130859 0 m /c glyphshow 1412 | 61.728516 0 m /u glyphshow 1413 | 69.333984 0 m /r glyphshow 1414 | 74.267578 0 m /a glyphshow 1415 | 81.621094 0 m /c glyphshow 1416 | 88.218750 0 m /y glyphshow 1417 | grestore 1418 | 1.000 setlinewidth 1419 | 0 setlinecap 1420 | 0.800 setgray 1421 | gsave 1422 | 61 216.09625 m 1423 | 142.25 216.09625 l 1424 | 143.583333 216.09625 144.25 216.762917 144.25 218.09625 c 1425 | 144.25 246.44 l 1426 | 
144.25 247.773333 143.583333 248.44 142.25 248.44 c 1427 | 61 248.44 l 1428 | 59.666667 248.44 59 247.773333 59 246.44 c 1429 | 59 218.09625 l 1430 | 59 216.762917 59.666667 216.09625 61 216.09625 c 1431 | cl 1432 | gsave 1433 | 1.000 setgray 1434 | fill 1435 | grestore 1436 | stroke 1437 | grestore 1438 | 1.500 setlinewidth 1439 | 1 setlinejoin 1440 | 2 setlinecap 1441 | 0.122 0.467 0.706 setrgbcolor 1442 | gsave 1443 | 63 240.34625 m 1444 | 83 240.34625 l 1445 | stroke 1446 | grestore 1447 | 0.000 setgray 1448 | /DejaVuSans findfont 1449 | 10.000 scalefont 1450 | setfont 1451 | gsave 1452 | 91.000000 236.846250 translate 1453 | 0.000000 rotate 1454 | 0.000000 0 m /t glyphshow 1455 | 3.920898 0 m /r glyphshow 1456 | 8.032227 0 m /a glyphshow 1457 | 14.160156 0 m /i glyphshow 1458 | 16.938477 0 m /n glyphshow 1459 | grestore 1460 | 1.000 0.498 0.055 setrgbcolor 1461 | gsave 1462 | 63 225.674375 m 1463 | 83 225.674375 l 1464 | stroke 1465 | grestore 1466 | 0.000 setgray 1467 | gsave 1468 | 91.000000 222.174375 translate 1469 | 0.000000 rotate 1470 | 0.000000 0 m /v glyphshow 1471 | 5.917969 0 m /a glyphshow 1472 | 12.045898 0 m /l glyphshow 1473 | 14.824219 0 m /i glyphshow 1474 | 17.602539 0 m /d glyphshow 1475 | 23.950195 0 m /a glyphshow 1476 | 30.078125 0 m /t glyphshow 1477 | 33.999023 0 m /i glyphshow 1478 | 36.777344 0 m /o glyphshow 1479 | 42.895508 0 m /n glyphshow 1480 | grestore 1481 | 1482 | end 1483 | showpage 1484 | -------------------------------------------------------------------------------- /destination_path1.eps: -------------------------------------------------------------------------------- 1 | %!PS-Adobe-3.0 EPSF-3.0 2 | %%Title: destination_path1.eps 3 | %%Creator: matplotlib version 3.3.2, http://matplotlib.org/ 4 | %%CreationDate: Mon Oct 26 19:22:24 2020 5 | %%Orientation: portrait 6 | %%BoundingBox: 90.0 252.0 522.0 540.0 7 | %%EndComments 8 | %%BeginProlog 9 | /mpldict 8 dict def 10 | mpldict begin 11 | /m { moveto } bind def 12 | /l { 
lineto } bind def 13 | /r { rlineto } bind def 14 | /c { curveto } bind def 15 | /cl { closepath } bind def 16 | /box { 17 | m 18 | 1 index 0 r 19 | 0 exch r 20 | neg 0 r 21 | cl 22 | } bind def 23 | /clipbox { 24 | box 25 | clip 26 | newpath 27 | } bind def 28 | %!PS-Adobe-3.0 Resource-Font 29 | %%Title: DejaVu Sans 30 | %%Copyright: Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved. DejaVu changes are in public domain 31 | %%Creator: Converted from TrueType to type 3 by PPR 32 | 25 dict begin 33 | /_d{bind def}bind def 34 | /_m{moveto}_d 35 | /_l{lineto}_d 36 | /_cl{closepath eofill}_d 37 | /_c{curveto}_d 38 | /_sc{7 -1 roll{setcachedevice}{pop pop pop pop pop pop}ifelse}_d 39 | /_e{exec}_d 40 | /FontName /DejaVuSans def 41 | /PaintType 0 def 42 | /FontMatrix[.001 0 0 .001 0 0]def 43 | /FontBBox[-1021 -463 1793 1232]def 44 | /FontType 3 def 45 | /Encoding [ /space /period /zero /one /two /three /five /seven /a /c /d /e /h /i /l /m /n /o /p /r /s /t /v ] def 46 | /FontInfo 10 dict dup begin 47 | /FamilyName (DejaVu Sans) def 48 | /FullName (DejaVu Sans) def 49 | /Notice (Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved. 
DejaVu changes are in public domain ) def 50 | /Weight (Book) def 51 | /Version (Version 2.35) def 52 | /ItalicAngle 0.0 def 53 | /isFixedPitch false def 54 | /UnderlinePosition -130 def 55 | /UnderlineThickness 90 def 56 | end readonly def 57 | /CharStrings 24 dict dup begin 58 | /.notdef 0 def 59 | /space{318 0 0 0 0 0 _sc 60 | }_d 61 | /period{318 0 107 0 210 124 _sc 62 | 107 124 _m 63 | 210 124 _l 64 | 210 0 _l 65 | 107 0 _l 66 | 107 124 _l 67 | _cl}_d 68 | /zero{636 0 66 -13 570 742 _sc 69 | 318 664 _m 70 | 267 664 229 639 203 589 _c 71 | 177 539 165 464 165 364 _c 72 | 165 264 177 189 203 139 _c 73 | 229 89 267 64 318 64 _c 74 | 369 64 407 89 433 139 _c 75 | 458 189 471 264 471 364 _c 76 | 471 464 458 539 433 589 _c 77 | 407 639 369 664 318 664 _c 78 | 318 742 _m 79 | 399 742 461 709 505 645 _c 80 | 548 580 570 486 570 364 _c 81 | 570 241 548 147 505 83 _c 82 | 461 19 399 -13 318 -13 _c 83 | 236 -13 173 19 130 83 _c 84 | 87 147 66 241 66 364 _c 85 | 66 486 87 580 130 645 _c 86 | 173 709 236 742 318 742 _c 87 | _cl}_d 88 | /one{636 0 110 0 544 729 _sc 89 | 124 83 _m 90 | 285 83 _l 91 | 285 639 _l 92 | 110 604 _l 93 | 110 694 _l 94 | 284 729 _l 95 | 383 729 _l 96 | 383 83 _l 97 | 544 83 _l 98 | 544 0 _l 99 | 124 0 _l 100 | 124 83 _l 101 | _cl}_d 102 | /two{{636 0 73 0 536 742 _sc 103 | 192 83 _m 104 | 536 83 _l 105 | 536 0 _l 106 | 73 0 _l 107 | 73 83 _l 108 | 110 121 161 173 226 239 _c 109 | 290 304 331 346 348 365 _c 110 | 380 400 402 430 414 455 _c 111 | 426 479 433 504 433 528 _c 112 | 433 566 419 598 392 622 _c 113 | 365 646 330 659 286 659 _c 114 | 255 659 222 653 188 643 _c 115 | 154 632 117 616 78 594 _c 116 | 78 694 _l 117 | 118 710 155 722 189 730 _c 118 | 223 738 255 742 284 742 _c 119 | }_e{359 742 419 723 464 685 _c 120 | 509 647 532 597 532 534 _c 121 | 532 504 526 475 515 449 _c 122 | 504 422 484 390 454 354 _c 123 | 446 344 420 317 376 272 _c 124 | 332 227 271 164 192 83 _c 125 | _cl}_e}_d 126 | /three{{636 0 76 -13 556 742 _sc 127 | 406 393 _m 
128 | 453 383 490 362 516 330 _c 129 | 542 298 556 258 556 212 _c 130 | 556 140 531 84 482 45 _c 131 | 432 6 362 -13 271 -13 _c 132 | 240 -13 208 -10 176 -4 _c 133 | 144 1 110 10 76 22 _c 134 | 76 117 _l 135 | 103 101 133 89 166 81 _c 136 | 198 73 232 69 268 69 _c 137 | 330 69 377 81 409 105 _c 138 | 441 129 458 165 458 212 _c 139 | 458 254 443 288 413 312 _c 140 | 383 336 341 349 287 349 _c 141 | }_e{202 349 _l 142 | 202 430 _l 143 | 291 430 _l 144 | 339 430 376 439 402 459 _c 145 | 428 478 441 506 441 543 _c 146 | 441 580 427 609 401 629 _c 147 | 374 649 336 659 287 659 _c 148 | 260 659 231 656 200 650 _c 149 | 169 644 135 635 98 623 _c 150 | 98 711 _l 151 | 135 721 170 729 203 734 _c 152 | 235 739 266 742 296 742 _c 153 | 370 742 429 725 473 691 _c 154 | 517 657 539 611 539 553 _c 155 | 539 513 527 479 504 451 _c 156 | 481 423 448 403 406 393 _c 157 | _cl}_e}_d 158 | /five{{636 0 77 -13 549 729 _sc 159 | 108 729 _m 160 | 495 729 _l 161 | 495 646 _l 162 | 198 646 _l 163 | 198 467 _l 164 | 212 472 227 476 241 478 _c 165 | 255 480 270 482 284 482 _c 166 | 365 482 429 459 477 415 _c 167 | 525 370 549 310 549 234 _c 168 | 549 155 524 94 475 51 _c 169 | 426 8 357 -13 269 -13 _c 170 | 238 -13 207 -10 175 -6 _c 171 | 143 -1 111 6 77 17 _c 172 | 77 116 _l 173 | 106 100 136 88 168 80 _c 174 | 199 72 232 69 267 69 _c 175 | }_e{323 69 368 83 401 113 _c 176 | 433 143 450 183 450 234 _c 177 | 450 284 433 324 401 354 _c 178 | 368 384 323 399 267 399 _c 179 | 241 399 214 396 188 390 _c 180 | 162 384 135 375 108 363 _c 181 | 108 729 _l 182 | _cl}_e}_d 183 | /seven{636 0 82 0 551 729 _sc 184 | 82 729 _m 185 | 551 729 _l 186 | 551 687 _l 187 | 286 0 _l 188 | 183 0 _l 189 | 432 646 _l 190 | 82 646 _l 191 | 82 729 _l 192 | _cl}_d 193 | /a{{613 0 60 -13 522 560 _sc 194 | 343 275 _m 195 | 270 275 220 266 192 250 _c 196 | 164 233 150 205 150 165 _c 197 | 150 133 160 107 181 89 _c 198 | 202 70 231 61 267 61 _c 199 | 317 61 357 78 387 114 _c 200 | 417 149 432 196 432 255 _c 201 | 432 275 
_l 202 | 343 275 _l 203 | 522 312 _m 204 | 522 0 _l 205 | 432 0 _l 206 | 432 83 _l 207 | 411 49 385 25 355 10 _c 208 | 325 -5 287 -13 243 -13 _c 209 | 187 -13 142 2 109 33 _c 210 | 76 64 60 106 60 159 _c 211 | }_e{60 220 80 266 122 298 _c 212 | 163 329 224 345 306 345 _c 213 | 432 345 _l 214 | 432 354 _l 215 | 432 395 418 427 391 450 _c 216 | 364 472 326 484 277 484 _c 217 | 245 484 215 480 185 472 _c 218 | 155 464 127 453 100 439 _c 219 | 100 522 _l 220 | 132 534 164 544 195 550 _c 221 | 226 556 256 560 286 560 _c 222 | 365 560 424 539 463 498 _c 223 | 502 457 522 395 522 312 _c 224 | _cl}_e}_d 225 | /c{{550 0 55 -13 488 560 _sc 226 | 488 526 _m 227 | 488 442 _l 228 | 462 456 437 466 411 473 _c 229 | 385 480 360 484 334 484 _c 230 | 276 484 230 465 198 428 _c 231 | 166 391 150 339 150 273 _c 232 | 150 206 166 154 198 117 _c 233 | 230 80 276 62 334 62 _c 234 | 360 62 385 65 411 72 _c 235 | 437 79 462 90 488 104 _c 236 | 488 21 _l 237 | 462 9 436 0 410 -5 _c 238 | 383 -10 354 -13 324 -13 _c 239 | 242 -13 176 12 128 64 _c 240 | }_e{79 115 55 185 55 273 _c 241 | 55 362 79 432 128 483 _c 242 | 177 534 244 560 330 560 _c 243 | 358 560 385 557 411 551 _c 244 | 437 545 463 537 488 526 _c 245 | _cl}_e}_d 246 | /d{{635 0 55 -13 544 760 _sc 247 | 454 464 _m 248 | 454 760 _l 249 | 544 760 _l 250 | 544 0 _l 251 | 454 0 _l 252 | 454 82 _l 253 | 435 49 411 25 382 10 _c 254 | 353 -5 319 -13 279 -13 _c 255 | 213 -13 159 13 117 65 _c 256 | 75 117 55 187 55 273 _c 257 | 55 359 75 428 117 481 _c 258 | 159 533 213 560 279 560 _c 259 | 319 560 353 552 382 536 _c 260 | 411 520 435 496 454 464 _c 261 | 148 273 _m 262 | 148 207 161 155 188 117 _c 263 | 215 79 253 61 301 61 _c 264 | }_e{348 61 385 79 413 117 _c 265 | 440 155 454 207 454 273 _c 266 | 454 339 440 390 413 428 _c 267 | 385 466 348 485 301 485 _c 268 | 253 485 215 466 188 428 _c 269 | 161 390 148 339 148 273 _c 270 | _cl}_e}_d 271 | /e{{615 0 55 -13 562 560 _sc 272 | 562 296 _m 273 | 562 252 _l 274 | 149 252 _l 275 | 153 190 
171 142 205 110 _c 276 | 238 78 284 62 344 62 _c 277 | 378 62 412 66 444 74 _c 278 | 476 82 509 95 541 113 _c 279 | 541 28 _l 280 | 509 14 476 3 442 -3 _c 281 | 408 -9 373 -13 339 -13 _c 282 | 251 -13 182 12 131 62 _c 283 | 80 112 55 181 55 268 _c 284 | 55 357 79 428 127 481 _c 285 | 175 533 241 560 323 560 _c 286 | 397 560 455 536 498 489 _c 287 | }_e{540 441 562 377 562 296 _c 288 | 472 322 _m 289 | 471 371 457 410 431 440 _c 290 | 404 469 368 484 324 484 _c 291 | 274 484 234 469 204 441 _c 292 | 174 413 156 373 152 322 _c 293 | 472 322 _l 294 | _cl}_e}_d 295 | /h{634 0 91 0 549 760 _sc 296 | 549 330 _m 297 | 549 0 _l 298 | 459 0 _l 299 | 459 327 _l 300 | 459 379 448 417 428 443 _c 301 | 408 469 378 482 338 482 _c 302 | 289 482 251 466 223 435 _c 303 | 195 404 181 362 181 309 _c 304 | 181 0 _l 305 | 91 0 _l 306 | 91 760 _l 307 | 181 760 _l 308 | 181 462 _l 309 | 202 494 227 519 257 535 _c 310 | 286 551 320 560 358 560 _c 311 | 420 560 468 540 500 501 _c 312 | 532 462 549 405 549 330 _c 313 | _cl}_d 314 | /i{278 0 94 0 184 760 _sc 315 | 94 547 _m 316 | 184 547 _l 317 | 184 0 _l 318 | 94 0 _l 319 | 94 547 _l 320 | 94 760 _m 321 | 184 760 _l 322 | 184 646 _l 323 | 94 646 _l 324 | 94 760 _l 325 | _cl}_d 326 | /l{278 0 94 0 184 760 _sc 327 | 94 760 _m 328 | 184 760 _l 329 | 184 0 _l 330 | 94 0 _l 331 | 94 760 _l 332 | _cl}_d 333 | /m{{974 0 91 0 889 560 _sc 334 | 520 442 _m 335 | 542 482 569 511 600 531 _c 336 | 631 550 668 560 711 560 _c 337 | 767 560 811 540 842 500 _c 338 | 873 460 889 403 889 330 _c 339 | 889 0 _l 340 | 799 0 _l 341 | 799 327 _l 342 | 799 379 789 418 771 444 _c 343 | 752 469 724 482 686 482 _c 344 | 639 482 602 466 575 435 _c 345 | 548 404 535 362 535 309 _c 346 | 535 0 _l 347 | 445 0 _l 348 | 445 327 _l 349 | 445 379 435 418 417 444 _c 350 | 398 469 369 482 331 482 _c 351 | }_e{285 482 248 466 221 435 _c 352 | 194 404 181 362 181 309 _c 353 | 181 0 _l 354 | 91 0 _l 355 | 91 547 _l 356 | 181 547 _l 357 | 181 462 _l 358 | 201 495 226 520 255 536 _c 
359 | 283 552 317 560 357 560 _c 360 | 397 560 430 550 458 530 _c 361 | 486 510 506 480 520 442 _c 362 | _cl}_e}_d 363 | /n{634 0 91 0 549 560 _sc 364 | 549 330 _m 365 | 549 0 _l 366 | 459 0 _l 367 | 459 327 _l 368 | 459 379 448 417 428 443 _c 369 | 408 469 378 482 338 482 _c 370 | 289 482 251 466 223 435 _c 371 | 195 404 181 362 181 309 _c 372 | 181 0 _l 373 | 91 0 _l 374 | 91 547 _l 375 | 181 547 _l 376 | 181 462 _l 377 | 202 494 227 519 257 535 _c 378 | 286 551 320 560 358 560 _c 379 | 420 560 468 540 500 501 _c 380 | 532 462 549 405 549 330 _c 381 | _cl}_d 382 | /o{612 0 55 -13 557 560 _sc 383 | 306 484 _m 384 | 258 484 220 465 192 427 _c 385 | 164 389 150 338 150 273 _c 386 | 150 207 163 156 191 118 _c 387 | 219 80 257 62 306 62 _c 388 | 354 62 392 80 420 118 _c 389 | 448 156 462 207 462 273 _c 390 | 462 337 448 389 420 427 _c 391 | 392 465 354 484 306 484 _c 392 | 306 560 _m 393 | 384 560 445 534 490 484 _c 394 | 534 433 557 363 557 273 _c 395 | 557 183 534 113 490 63 _c 396 | 445 12 384 -13 306 -13 _c 397 | 227 -13 165 12 121 63 _c 398 | 77 113 55 183 55 273 _c 399 | 55 363 77 433 121 484 _c 400 | 165 534 227 560 306 560 _c 401 | _cl}_d 402 | /p{{635 0 91 -207 580 560 _sc 403 | 181 82 _m 404 | 181 -207 _l 405 | 91 -207 _l 406 | 91 547 _l 407 | 181 547 _l 408 | 181 464 _l 409 | 199 496 223 520 252 536 _c 410 | 281 552 316 560 356 560 _c 411 | 422 560 476 533 518 481 _c 412 | 559 428 580 359 580 273 _c 413 | 580 187 559 117 518 65 _c 414 | 476 13 422 -13 356 -13 _c 415 | 316 -13 281 -5 252 10 _c 416 | 223 25 199 49 181 82 _c 417 | 487 273 _m 418 | 487 339 473 390 446 428 _c 419 | 418 466 381 485 334 485 _c 420 | }_e{286 485 249 466 222 428 _c 421 | 194 390 181 339 181 273 _c 422 | 181 207 194 155 222 117 _c 423 | 249 79 286 61 334 61 _c 424 | 381 61 418 79 446 117 _c 425 | 473 155 487 207 487 273 _c 426 | _cl}_e}_d 427 | /r{411 0 91 0 411 560 _sc 428 | 411 463 _m 429 | 401 469 390 473 378 476 _c 430 | 366 478 353 480 339 480 _c 431 | 288 480 249 463 222 430 _c 
432 | 194 397 181 350 181 288 _c 433 | 181 0 _l 434 | 91 0 _l 435 | 91 547 _l 436 | 181 547 _l 437 | 181 462 _l 438 | 199 495 224 520 254 536 _c 439 | 284 552 321 560 365 560 _c 440 | 371 560 378 559 386 559 _c 441 | 393 558 401 557 411 555 _c 442 | 411 463 _l 443 | _cl}_d 444 | /s{{521 0 54 -13 472 560 _sc 445 | 443 531 _m 446 | 443 446 _l 447 | 417 458 391 468 364 475 _c 448 | 336 481 308 485 279 485 _c 449 | 234 485 200 478 178 464 _c 450 | 156 450 145 430 145 403 _c 451 | 145 382 153 366 169 354 _c 452 | 185 342 217 330 265 320 _c 453 | 296 313 _l 454 | 360 299 405 279 432 255 _c 455 | 458 230 472 195 472 151 _c 456 | 472 100 452 60 412 31 _c 457 | 372 1 316 -13 246 -13 _c 458 | 216 -13 186 -10 154 -5 _c 459 | }_e{122 0 89 8 54 20 _c 460 | 54 113 _l 461 | 87 95 120 82 152 74 _c 462 | 184 65 216 61 248 61 _c 463 | 290 61 323 68 346 82 _c 464 | 368 96 380 117 380 144 _c 465 | 380 168 371 187 355 200 _c 466 | 339 213 303 226 247 238 _c 467 | 216 245 _l 468 | 160 257 119 275 95 299 _c 469 | 70 323 58 356 58 399 _c 470 | 58 450 76 490 112 518 _c 471 | 148 546 200 560 268 560 _c 472 | 301 560 332 557 362 552 _c 473 | 391 547 418 540 443 531 _c 474 | }_e{_cl}_e}_d 475 | /t{392 0 27 0 368 702 _sc 476 | 183 702 _m 477 | 183 547 _l 478 | 368 547 _l 479 | 368 477 _l 480 | 183 477 _l 481 | 183 180 _l 482 | 183 135 189 106 201 94 _c 483 | 213 81 238 75 276 75 _c 484 | 368 75 _l 485 | 368 0 _l 486 | 276 0 _l 487 | 206 0 158 13 132 39 _c 488 | 106 65 93 112 93 180 _c 489 | 93 477 _l 490 | 27 477 _l 491 | 27 547 _l 492 | 93 547 _l 493 | 93 702 _l 494 | 183 702 _l 495 | _cl}_d 496 | /v{592 0 30 0 562 547 _sc 497 | 30 547 _m 498 | 125 547 _l 499 | 296 88 _l 500 | 467 547 _l 501 | 562 547 _l 502 | 357 0 _l 503 | 235 0 _l 504 | 30 547 _l 505 | _cl}_d 506 | end readonly def 507 | 508 | /BuildGlyph 509 | {exch begin 510 | CharStrings exch 511 | 2 copy known not{pop /.notdef}if 512 | true 3 1 roll get exec 513 | end}_d 514 | 515 | /BuildChar { 516 | 1 index /Encoding get exch get 517 
| 1 index /BuildGlyph get exec 518 | }_d 519 | 520 | FontName currentdict end definefont pop 521 | end 522 | %%EndProlog 523 | mpldict begin 524 | 90 252 translate 525 | 432 288 0 0 clipbox 526 | gsave 527 | 0 0 m 528 | 432 0 l 529 | 432 288 l 530 | 0 288 l 531 | cl 532 | grestore 533 | gsave 534 | 54 36 m 535 | 388.8 36 l 536 | 388.8 253.44 l 537 | 54 253.44 l 538 | cl 539 | 1.000 setgray 540 | fill 541 | grestore 542 | 0.800 setlinewidth 543 | 1 setlinejoin 544 | 0 setlinecap 545 | [] 0 setdash 546 | 0.000 setgray 547 | gsave 548 | /o { 549 | gsave 550 | newpath 551 | translate 552 | 0.8 setlinewidth 553 | 1 setlinejoin 554 | 0 setlinecap 555 | 0 0 m 556 | 0 -3.5 l 557 | 558 | gsave 559 | 0.000 setgray 560 | fill 561 | grestore 562 | stroke 563 | grestore 564 | } bind def 565 | 69.2182 36 o 566 | grestore 567 | /DejaVuSans findfont 568 | 10.000 scalefont 569 | setfont 570 | gsave 571 | 66.038494 21.406250 translate 572 | 0.000000 rotate 573 | 0.000000 0 m /zero glyphshow 574 | grestore 575 | gsave 576 | /o { 577 | gsave 578 | newpath 579 | translate 580 | 0.8 setlinewidth 581 | 1 setlinejoin 582 | 0 setlinecap 583 | 0 0 m 584 | 0 -3.5 l 585 | 586 | gsave 587 | 0.000 setgray 588 | fill 589 | grestore 590 | stroke 591 | grestore 592 | } bind def 593 | 107.455 36 o 594 | grestore 595 | gsave 596 | 101.095445 21.406250 translate 597 | 0.000000 rotate 598 | 0.000000 0 m /two glyphshow 599 | 6.362305 0 m /five glyphshow 600 | grestore 601 | gsave 602 | /o { 603 | gsave 604 | newpath 605 | translate 606 | 0.8 setlinewidth 607 | 1 setlinejoin 608 | 0 setlinecap 609 | 0 0 m 610 | 0 -3.5 l 611 | 612 | gsave 613 | 0.000 setgray 614 | fill 615 | grestore 616 | stroke 617 | grestore 618 | } bind def 619 | 145.691 36 o 620 | grestore 621 | gsave 622 | 139.332082 21.406250 translate 623 | 0.000000 rotate 624 | 0.000000 0 m /five glyphshow 625 | 6.362305 0 m /zero glyphshow 626 | grestore 627 | gsave 628 | /o { 629 | gsave 630 | newpath 631 | translate 632 | 0.8 setlinewidth 633 
| 1 setlinejoin 634 | 0 setlinecap 635 | 0 0 m 636 | 0 -3.5 l 637 | 638 | gsave 639 | 0.000 setgray 640 | fill 641 | grestore 642 | stroke 643 | grestore 644 | } bind def 645 | 183.928 36 o 646 | grestore 647 | gsave 648 | 177.568720 21.406250 translate 649 | 0.000000 rotate 650 | 0.000000 0 m /seven glyphshow 651 | 6.362305 0 m /five glyphshow 652 | grestore 653 | gsave 654 | /o { 655 | gsave 656 | newpath 657 | translate 658 | 0.8 setlinewidth 659 | 1 setlinejoin 660 | 0 setlinecap 661 | 0 0 m 662 | 0 -3.5 l 663 | 664 | gsave 665 | 0.000 setgray 666 | fill 667 | grestore 668 | stroke 669 | grestore 670 | } bind def 671 | 222.165 36 o 672 | grestore 673 | gsave 674 | 212.625670 21.406250 translate 675 | 0.000000 rotate 676 | 0.000000 0 m /one glyphshow 677 | 6.362305 0 m /zero glyphshow 678 | 12.724609 0 m /zero glyphshow 679 | grestore 680 | gsave 681 | /o { 682 | gsave 683 | newpath 684 | translate 685 | 0.8 setlinewidth 686 | 1 setlinejoin 687 | 0 setlinecap 688 | 0 0 m 689 | 0 -3.5 l 690 | 691 | gsave 692 | 0.000 setgray 693 | fill 694 | grestore 695 | stroke 696 | grestore 697 | } bind def 698 | 260.401 36 o 699 | grestore 700 | gsave 701 | 250.862308 21.406250 translate 702 | 0.000000 rotate 703 | 0.000000 0 m /one glyphshow 704 | 6.362305 0 m /two glyphshow 705 | 12.724609 0 m /five glyphshow 706 | grestore 707 | gsave 708 | /o { 709 | gsave 710 | newpath 711 | translate 712 | 0.8 setlinewidth 713 | 1 setlinejoin 714 | 0 setlinecap 715 | 0 0 m 716 | 0 -3.5 l 717 | 718 | gsave 719 | 0.000 setgray 720 | fill 721 | grestore 722 | stroke 723 | grestore 724 | } bind def 725 | 298.638 36 o 726 | grestore 727 | gsave 728 | 289.098946 21.406250 translate 729 | 0.000000 rotate 730 | 0.000000 0 m /one glyphshow 731 | 6.362305 0 m /five glyphshow 732 | 12.724609 0 m /zero glyphshow 733 | grestore 734 | gsave 735 | /o { 736 | gsave 737 | newpath 738 | translate 739 | 0.8 setlinewidth 740 | 1 setlinejoin 741 | 0 setlinecap 742 | 0 0 m 743 | 0 -3.5 l 744 | 745 | gsave 
746 | 0.000 setgray 747 | fill 748 | grestore 749 | stroke 750 | grestore 751 | } bind def 752 | 336.875 36 o 753 | grestore 754 | gsave 755 | 327.335583 21.406250 translate 756 | 0.000000 rotate 757 | 0.000000 0 m /one glyphshow 758 | 6.362305 0 m /seven glyphshow 759 | 12.724609 0 m /five glyphshow 760 | grestore 761 | gsave 762 | /o { 763 | gsave 764 | newpath 765 | translate 766 | 0.8 setlinewidth 767 | 1 setlinejoin 768 | 0 setlinecap 769 | 0 0 m 770 | 0 -3.5 l 771 | 772 | gsave 773 | 0.000 setgray 774 | fill 775 | grestore 776 | stroke 777 | grestore 778 | } bind def 779 | 375.111 36 o 780 | grestore 781 | gsave 782 | 365.572221 21.406250 translate 783 | 0.000000 rotate 784 | 0.000000 0 m /two glyphshow 785 | 6.362305 0 m /zero glyphshow 786 | 12.724609 0 m /zero glyphshow 787 | grestore 788 | gsave 789 | 206.165625 7.734375 translate 790 | 0.000000 rotate 791 | 0.000000 0 m /e glyphshow 792 | 6.152344 0 m /p glyphshow 793 | 12.500000 0 m /o glyphshow 794 | 18.618164 0 m /c glyphshow 795 | 24.116211 0 m /h glyphshow 796 | grestore 797 | gsave 798 | /o { 799 | gsave 800 | newpath 801 | translate 802 | 0.8 setlinewidth 803 | 1 setlinejoin 804 | 0 setlinecap 805 | -0 0 m 806 | -3.5 0 l 807 | 808 | gsave 809 | 0.000 setgray 810 | fill 811 | grestore 812 | stroke 813 | grestore 814 | } bind def 815 | 54 57.6469 o 816 | grestore 817 | gsave 818 | 24.734375 53.850002 translate 819 | 0.000000 rotate 820 | 0.000000 0 m /zero glyphshow 821 | 6.362305 0 m /period glyphshow 822 | 9.541016 0 m /zero glyphshow 823 | 15.903320 0 m /five glyphshow 824 | grestore 825 | gsave 826 | /o { 827 | gsave 828 | newpath 829 | translate 830 | 0.8 setlinewidth 831 | 1 setlinejoin 832 | 0 setlinecap 833 | -0 0 m 834 | -3.5 0 l 835 | 836 | gsave 837 | 0.000 setgray 838 | fill 839 | grestore 840 | stroke 841 | grestore 842 | } bind def 843 | 54 90.947 o 844 | grestore 845 | gsave 846 | 24.734375 87.150140 translate 847 | 0.000000 rotate 848 | 0.000000 0 m /zero glyphshow 849 | 6.362305 0 m 
/period glyphshow 850 | 9.541016 0 m /one glyphshow 851 | 15.903320 0 m /zero glyphshow 852 | grestore 853 | gsave 854 | /o { 855 | gsave 856 | newpath 857 | translate 858 | 0.8 setlinewidth 859 | 1 setlinejoin 860 | 0 setlinecap 861 | -0 0 m 862 | -3.5 0 l 863 | 864 | gsave 865 | 0.000 setgray 866 | fill 867 | grestore 868 | stroke 869 | grestore 870 | } bind def 871 | 54 124.247 o 872 | grestore 873 | gsave 874 | 24.734375 120.450279 translate 875 | 0.000000 rotate 876 | 0.000000 0 m /zero glyphshow 877 | 6.362305 0 m /period glyphshow 878 | 9.541016 0 m /one glyphshow 879 | 15.903320 0 m /five glyphshow 880 | grestore 881 | gsave 882 | /o { 883 | gsave 884 | newpath 885 | translate 886 | 0.8 setlinewidth 887 | 1 setlinejoin 888 | 0 setlinecap 889 | -0 0 m 890 | -3.5 0 l 891 | 892 | gsave 893 | 0.000 setgray 894 | fill 895 | grestore 896 | stroke 897 | grestore 898 | } bind def 899 | 54 157.547 o 900 | grestore 901 | gsave 902 | 24.734375 153.750417 translate 903 | 0.000000 rotate 904 | 0.000000 0 m /zero glyphshow 905 | 6.362305 0 m /period glyphshow 906 | 9.541016 0 m /two glyphshow 907 | 15.903320 0 m /zero glyphshow 908 | grestore 909 | gsave 910 | /o { 911 | gsave 912 | newpath 913 | translate 914 | 0.8 setlinewidth 915 | 1 setlinejoin 916 | 0 setlinecap 917 | -0 0 m 918 | -3.5 0 l 919 | 920 | gsave 921 | 0.000 setgray 922 | fill 923 | grestore 924 | stroke 925 | grestore 926 | } bind def 927 | 54 190.847 o 928 | grestore 929 | gsave 930 | 24.734375 187.050556 translate 931 | 0.000000 rotate 932 | 0.000000 0 m /zero glyphshow 933 | 6.362305 0 m /period glyphshow 934 | 9.541016 0 m /two glyphshow 935 | 15.903320 0 m /five glyphshow 936 | grestore 937 | gsave 938 | /o { 939 | gsave 940 | newpath 941 | translate 942 | 0.8 setlinewidth 943 | 1 setlinejoin 944 | 0 setlinecap 945 | -0 0 m 946 | -3.5 0 l 947 | 948 | gsave 949 | 0.000 setgray 950 | fill 951 | grestore 952 | stroke 953 | grestore 954 | } bind def 955 | 54 224.148 o 956 | grestore 957 | gsave 958 | 
24.734375 220.350694 translate 959 | 0.000000 rotate 960 | 0.000000 0 m /zero glyphshow 961 | 6.362305 0 m /period glyphshow 962 | 9.541016 0 m /three glyphshow 963 | 15.903320 0 m /zero glyphshow 964 | grestore 965 | gsave 966 | 18.656250 135.048125 translate 967 | 90.000000 rotate 968 | 0.000000 0 m /l glyphshow 969 | 2.778320 0 m /o glyphshow 970 | 8.896484 0 m /s glyphshow 971 | 14.106445 0 m /s glyphshow 972 | grestore 973 | 1.500 setlinewidth 974 | 2 setlinecap 975 | 0.122 0.467 0.706 setrgbcolor 976 | gsave 977 | 334.8 217.4 54 36 clipbox 978 | 69.218182 243.556364 m 979 | 70.747647 203.763518 l 980 | 72.277113 204.804568 l 981 | 73.806578 190.030102 l 982 | 75.336044 193.866079 l 983 | 76.865509 188.732839 l 984 | 78.394975 189.517745 l 985 | 79.92444 186.393255 l 986 | 81.453906 184.613664 l 987 | 82.983371 179.866853 l 988 | 84.512837 174.187036 l 989 | 86.042302 164.547855 l 990 | 87.571768 151.58427 l 991 | 89.101233 135.848843 l 992 | 90.630699 116.597772 l 993 | 92.160164 103.057461 l 994 | 93.68963 88.335901 l 995 | 95.219095 88.013364 l 996 | 96.748561 91.433075 l 997 | 98.278026 93.019302 l 998 | 99.807492 83.955978 l 999 | 101.336958 77.881416 l 1000 | 102.866423 79.278398 l 1001 | 104.395889 81.117811 l 1002 | 105.925354 75.413983 l 1003 | 107.45482 80.212594 l 1004 | 108.984285 75.678607 l 1005 | 110.513751 76.552504 l 1006 | 112.043216 75.841285 l 1007 | 113.572682 73.135086 l 1008 | 116.631613 72.713609 l 1009 | 118.161078 71.808635 l 1010 | 119.690544 71.610036 l 1011 | 121.220009 71.659707 l 1012 | 122.749475 70.176479 l 1013 | 124.27894 71.182908 l 1014 | 125.808406 69.457286 l 1015 | 127.337871 68.716061 l 1016 | 128.867337 68.112207 l 1017 | 136.514664 65.898951 l 1018 | 138.04413 66.108523 l 1019 | 139.573595 68.824463 l 1020 | 141.103061 66.615179 l 1021 | 142.632526 66.232665 l 1022 | 145.691457 64.182643 l 1023 | 147.220923 65.678907 l 1024 | 148.750388 71.487825 l 1025 | 150.279854 68.073677 l 1026 | 151.809319 63.32364 l 1027 | 
153.338785 64.701294 l 1028 | 154.86825 63.234835 l 1029 | 156.397716 62.094178 l 1030 | 157.927181 62.742845 l 1031 | 159.456647 68.314314 l 1032 | 160.986112 65.957705 l 1033 | 162.515578 66.016476 l 1034 | 164.045043 69.621045 l 1035 | 165.574509 67.712147 l 1036 | 167.103974 66.288593 l 1037 | 168.63344 66.889847 l 1038 | 170.162905 72.664033 l 1039 | 171.692371 77.029392 l 1040 | 173.221836 64.008698 l 1041 | 174.751302 62.734938 l 1042 | 176.280767 68.889234 l 1043 | 179.339698 64.439474 l 1044 | 180.869164 65.376764 l 1045 | 182.39863 64.912149 l 1046 | 183.928095 66.725159 l 1047 | 185.457561 63.156834 l 1048 | 186.987026 61.111562 l 1049 | 188.516492 60.915909 l 1050 | 190.045957 58.584879 l 1051 | 191.575423 60.097831 l 1052 | 193.104888 58.156989 l 1053 | 194.634354 57.571292 l 1054 | 196.163819 57.959421 l 1055 | 197.693285 57.21866 l 1056 | 199.22275 57.337306 l 1057 | 200.752216 57.230251 l 1058 | 202.281681 57.61871 l 1059 | 203.811147 58.445831 l 1060 | 205.340612 56.668913 l 1061 | 206.870078 57.696565 l 1062 | 208.399543 55.156569 l 1063 | 209.929009 55.302441 l 1064 | 211.458474 56.236733 l 1065 | 214.517405 55.403707 l 1066 | 216.046871 55.55065 l 1067 | 217.576336 54.341742 l 1068 | 219.105802 53.946691 l 1069 | 220.635267 55.76571 l 1070 | 222.164733 53.707321 l 1071 | 223.694198 53.445967 l 1072 | 226.753129 53.917147 l 1073 | 228.282595 54.456061 l 1074 | 229.81206 53.971512 l 1075 | 231.341526 53.310159 l 1076 | 232.870991 53.126021 l 1077 | 234.400457 54.096152 l 1078 | 235.929922 54.493498 l 1079 | 237.459388 52.194792 l 1080 | 238.988853 52.269526 l 1081 | 245.106715 51.729321 l 1082 | 246.636181 52.222441 l 1083 | 248.165646 53.325544 l 1084 | 249.695112 56.818709 l 1085 | 251.224577 64.51039 l 1086 | 252.754043 55.725006 l 1087 | 255.812974 54.233114 l 1088 | 257.342439 55.568444 l 1089 | 258.871905 53.059727 l 1090 | 260.40137 51.850689 l 1091 | 261.930836 51.792303 l 1092 | 263.460302 51.374565 l 1093 | 264.989767 50.557267 l 1094 | 
266.519233 52.255779 l 1095 | 268.048698 51.665727 l 1096 | 269.578164 51.752363 l 1097 | 271.107629 54.266241 l 1098 | 272.637095 56.157506 l 1099 | 274.16656 51.996295 l 1100 | 275.696026 51.066101 l 1101 | 277.225491 50.376529 l 1102 | 278.754957 51.221943 l 1103 | 280.284422 49.777667 l 1104 | 281.813888 51.471446 l 1105 | 283.343353 49.716048 l 1106 | 284.872819 49.449402 l 1107 | 286.402284 49.468987 l 1108 | 287.93175 50.623933 l 1109 | 289.461215 48.902697 l 1110 | 290.990681 54.859692 l 1111 | 292.520146 50.194105 l 1112 | 294.049612 51.845608 l 1113 | 295.579077 51.930857 l 1114 | 297.108543 50.163677 l 1115 | 298.638008 52.935329 l 1116 | 300.167474 51.179071 l 1117 | 301.696939 48.582405 l 1118 | 303.226405 48.550471 l 1119 | 304.75587 51.280047 l 1120 | 306.285336 49.8651 l 1121 | 307.814801 49.247114 l 1122 | 309.344267 48.131528 l 1123 | 310.873732 48.009743 l 1124 | 313.932663 50.319668 l 1125 | 315.462129 49.723955 l 1126 | 318.52106 51.301717 l 1127 | 320.050525 51.794107 l 1128 | 321.579991 64.05895 l 1129 | 323.109456 51.611466 l 1130 | 324.638922 48.979852 l 1131 | 326.168387 51.668198 l 1132 | 327.697853 51.265518 l 1133 | 329.227318 49.185165 l 1134 | 330.756784 48.885352 l 1135 | 332.286249 49.875272 l 1136 | 333.815715 48.80437 l 1137 | 335.34518 56.784039 l 1138 | 336.874646 53.498546 l 1139 | 338.404111 48.919975 l 1140 | 339.933577 49.140746 l 1141 | 341.463042 47.005137 l 1142 | 342.992508 46.900672 l 1143 | 344.521974 47.114204 l 1144 | 346.051439 47.082323 l 1145 | 347.580905 48.215926 l 1146 | 349.11037 51.937869 l 1147 | 350.639836 60.761947 l 1148 | 352.169301 58.012652 l 1149 | 353.698767 47.8147 l 1150 | 356.757698 46.558788 l 1151 | 358.287163 47.725275 l 1152 | 359.816629 47.230305 l 1153 | 361.346094 48.611815 l 1154 | 362.87556 46.881251 l 1155 | 364.405025 46.54673 l 1156 | 365.934491 46.560711 l 1157 | 367.463956 47.237255 l 1158 | 368.993422 46.060339 l 1159 | 372.052353 45.883636 l 1160 | 373.581818 46.049433 l 1161 | 
373.581818 46.049433 l 1162 | stroke 1163 | grestore 1164 | 1.000 0.498 0.055 setrgbcolor 1165 | gsave 1166 | 334.8 217.4 54 36 clipbox 1167 | 69.218182 221.896745 m 1168 | 70.747647 201.717363 l 1169 | 72.277113 189.554911 l 1170 | 73.806578 198.056122 l 1171 | 75.336044 190.513779 l 1172 | 76.865509 187.77016 l 1173 | 78.394975 186.144008 l 1174 | 79.92444 185.32657 l 1175 | 81.453906 182.89873 l 1176 | 84.512837 170.843975 l 1177 | 86.042302 160.122152 l 1178 | 87.571768 148.145143 l 1179 | 89.101233 132.903207 l 1180 | 90.630699 126.045753 l 1181 | 92.160164 110.654418 l 1182 | 93.68963 122.592345 l 1183 | 95.219095 118.573832 l 1184 | 96.748561 135.726963 l 1185 | 98.278026 102.867129 l 1186 | 99.807492 99.743746 l 1187 | 101.336958 118.103285 l 1188 | 102.866423 101.317974 l 1189 | 104.395889 95.4336 l 1190 | 105.925354 118.500799 l 1191 | 107.45482 93.68218 l 1192 | 108.984285 95.811311 l 1193 | 110.513751 105.367486 l 1194 | 112.043216 92.709507 l 1195 | 113.572682 93.003869 l 1196 | 115.102147 95.925647 l 1197 | 116.631613 91.814844 l 1198 | 118.161078 92.067857 l 1199 | 119.690544 95.660582 l 1200 | 121.220009 90.394739 l 1201 | 122.749475 91.456213 l 1202 | 124.27894 91.035734 l 1203 | 125.808406 88.966461 l 1204 | 130.396802 87.721031 l 1205 | 131.926268 87.097677 l 1206 | 133.455733 86.842609 l 1207 | 134.985199 86.355752 l 1208 | 136.514664 86.111775 l 1209 | 138.04413 93.19037 l 1210 | 139.573595 86.033637 l 1211 | 141.103061 87.064143 l 1212 | 142.632526 84.825597 l 1213 | 144.161992 83.857316 l 1214 | 145.691457 96.211931 l 1215 | 147.220923 100.826622 l 1216 | 148.750388 99.247813 l 1217 | 150.279854 83.231679 l 1218 | 151.809319 86.922464 l 1219 | 153.338785 83.386998 l 1220 | 154.86825 82.82164 l 1221 | 156.397716 93.855213 l 1222 | 157.927181 97.594657 l 1223 | 159.456647 101.760108 l 1224 | 160.986112 94.30678 l 1225 | 162.515578 100.459163 l 1226 | 164.045043 82.766318 l 1227 | 165.574509 87.067239 l 1228 | 167.103974 102.053423 l 1229 | 
168.63344 112.307911 l 1230 | 170.162905 119.322058 l 1231 | 171.692371 81.976057 l 1232 | 173.221836 86.870055 l 1233 | 174.751302 109.501492 l 1234 | 176.280767 86.234865 l 1235 | 177.810233 82.299711 l 1236 | 179.339698 104.490617 l 1237 | 180.869164 89.759321 l 1238 | 182.39863 92.194589 l 1239 | 183.928095 93.761534 l 1240 | 185.457561 86.710473 l 1241 | 186.987026 91.017821 l 1242 | 188.516492 83.880678 l 1243 | 190.045957 83.612808 l 1244 | 191.575423 84.812036 l 1245 | 193.104888 82.022582 l 1246 | 194.634354 82.040073 l 1247 | 196.163819 81.61098 l 1248 | 197.693285 82.035498 l 1249 | 199.22275 83.097757 l 1250 | 202.281681 81.855235 l 1251 | 205.340612 87.699565 l 1252 | 206.870078 82.206914 l 1253 | 208.399543 86.751371 l 1254 | 209.929009 83.184623 l 1255 | 211.458474 88.132008 l 1256 | 212.98794 81.949282 l 1257 | 214.517405 87.239766 l 1258 | 216.046871 81.948652 l 1259 | 217.576336 81.946339 l 1260 | 219.105802 89.505713 l 1261 | 220.635267 82.456434 l 1262 | 222.164733 82.408768 l 1263 | 223.694198 83.773903 l 1264 | 225.223664 82.484108 l 1265 | 226.753129 88.847033 l 1266 | 228.282595 82.98472 l 1267 | 229.81206 86.797831 l 1268 | 231.341526 82.521666 l 1269 | 232.870991 89.327767 l 1270 | 234.400457 83.745356 l 1271 | 235.929922 84.170515 l 1272 | 237.459388 82.867014 l 1273 | 240.518319 84.870445 l 1274 | 242.047784 83.04428 l 1275 | 243.57725 83.648109 l 1276 | 245.106715 83.393146 l 1277 | 246.636181 86.332133 l 1278 | 248.165646 103.795689 l 1279 | 249.695112 98.47822 l 1280 | 251.224577 89.693692 l 1281 | 252.754043 83.354734 l 1282 | 254.283508 86.755083 l 1283 | 255.812974 83.016785 l 1284 | 257.342439 85.563189 l 1285 | 258.871905 83.410191 l 1286 | 260.40137 85.090281 l 1287 | 261.930836 82.555646 l 1288 | 263.460302 83.322644 l 1289 | 264.989767 85.122108 l 1290 | 266.519233 89.048072 l 1291 | 268.048698 85.647788 l 1292 | 269.578164 95.689794 l 1293 | 271.107629 94.378641 l 1294 | 272.637095 90.455847 l 1295 | 274.16656 83.663015 l 
1296 | 275.696026 84.325521 l 1297 | 277.225491 82.981465 l 1298 | 278.754957 82.808694 l 1299 | 280.284422 84.152353 l 1300 | 281.813888 82.796165 l 1301 | 283.343353 83.124373 l 1302 | 284.872819 84.627281 l 1303 | 286.402284 84.548582 l 1304 | 287.93175 84.637474 l 1305 | 289.461215 88.939382 l 1306 | 290.990681 84.060688 l 1307 | 292.520146 90.168764 l 1308 | 294.049612 89.950829 l 1309 | 295.579077 91.039813 l 1310 | 297.108543 91.906212 l 1311 | 298.638008 89.472025 l 1312 | 300.167474 83.694817 l 1313 | 301.696939 85.152258 l 1314 | 303.226405 86.861689 l 1315 | 304.75587 84.991202 l 1316 | 306.285336 87.29427 l 1317 | 307.814801 83.817828 l 1318 | 309.344267 84.098842 l 1319 | 310.873732 91.191172 l 1320 | 312.403198 88.289714 l 1321 | 313.932663 86.768203 l 1322 | 315.462129 87.898933 l 1323 | 316.991594 93.071299 l 1324 | 318.52106 102.865929 l 1325 | 320.050525 115.778468 l 1326 | 321.579991 99.022241 l 1327 | 323.109456 90.002559 l 1328 | 324.638922 100.360943 l 1329 | 326.168387 93.458994 l 1330 | 327.697853 94.227982 l 1331 | 329.227318 88.383438 l 1332 | 330.756784 96.231462 l 1333 | 332.286249 86.212675 l 1334 | 333.815715 109.196173 l 1335 | 335.34518 96.782792 l 1336 | 336.874646 93.41042 l 1337 | 338.404111 84.90118 l 1338 | 339.933577 83.524979 l 1339 | 341.463042 83.643092 l 1340 | 342.992508 84.0867 l 1341 | 344.521974 86.164245 l 1342 | 346.051439 85.231889 l 1343 | 347.580905 104.537132 l 1344 | 349.11037 112.610768 l 1345 | 350.639836 110.813394 l 1346 | 352.169301 84.397337 l 1347 | 353.698767 88.947699 l 1348 | 355.228232 83.258291 l 1349 | 356.757698 92.317059 l 1350 | 358.287163 84.473203 l 1351 | 359.816629 94.46258 l 1352 | 361.346094 83.724317 l 1353 | 362.87556 88.598739 l 1354 | 364.405025 83.099851 l 1355 | 365.934491 91.501507 l 1356 | 367.463956 82.855898 l 1357 | 368.993422 85.261349 l 1358 | 370.522887 83.36515 l 1359 | 372.052353 83.500883 l 1360 | 373.581818 83.457222 l 1361 | 373.581818 83.457222 l 1362 | stroke 1363 | 
grestore 1364 | 0.800 setlinewidth 1365 | 0 setlinejoin 1366 | 0.000 setgray 1367 | gsave 1368 | 54 36 m 1369 | 54 253.44 l 1370 | stroke 1371 | grestore 1372 | gsave 1373 | 388.8 36 m 1374 | 388.8 253.44 l 1375 | stroke 1376 | grestore 1377 | gsave 1378 | 54 36 m 1379 | 388.8 36 l 1380 | stroke 1381 | grestore 1382 | gsave 1383 | 54 253.44 m 1384 | 388.8 253.44 l 1385 | stroke 1386 | grestore 1387 | /DejaVuSans findfont 1388 | 12.000 scalefont 1389 | setfont 1390 | gsave 1391 | 189.220313 259.440000 translate 1392 | 0.000000 rotate 1393 | 0.000000 0 m /m glyphshow 1394 | 11.689453 0 m /o glyphshow 1395 | 19.031250 0 m /d glyphshow 1396 | 26.648438 0 m /e glyphshow 1397 | 34.031250 0 m /l glyphshow 1398 | 37.365234 0 m /space glyphshow 1399 | 41.179688 0 m /l glyphshow 1400 | 44.513672 0 m /o glyphshow 1401 | 51.855469 0 m /s glyphshow 1402 | 58.107422 0 m /s glyphshow 1403 | grestore 1404 | 1.000 setlinewidth 1405 | 0 setlinecap 1406 | 0.800 setgray 1407 | gsave 1408 | 61 216.09625 m 1409 | 142.25 216.09625 l 1410 | 143.583333 216.09625 144.25 216.762917 144.25 218.09625 c 1411 | 144.25 246.44 l 1412 | 144.25 247.773333 143.583333 248.44 142.25 248.44 c 1413 | 61 248.44 l 1414 | 59.666667 248.44 59 247.773333 59 246.44 c 1415 | 59 218.09625 l 1416 | 59 216.762917 59.666667 216.09625 61 216.09625 c 1417 | cl 1418 | gsave 1419 | 1.000 setgray 1420 | fill 1421 | grestore 1422 | stroke 1423 | grestore 1424 | 1.500 setlinewidth 1425 | 1 setlinejoin 1426 | 2 setlinecap 1427 | 0.122 0.467 0.706 setrgbcolor 1428 | gsave 1429 | 63 240.34625 m 1430 | 83 240.34625 l 1431 | stroke 1432 | grestore 1433 | 0.000 setgray 1434 | /DejaVuSans findfont 1435 | 10.000 scalefont 1436 | setfont 1437 | gsave 1438 | 91.000000 236.846250 translate 1439 | 0.000000 rotate 1440 | 0.000000 0 m /t glyphshow 1441 | 3.920898 0 m /r glyphshow 1442 | 8.032227 0 m /a glyphshow 1443 | 14.160156 0 m /i glyphshow 1444 | 16.938477 0 m /n glyphshow 1445 | grestore 1446 | 1.000 0.498 0.055 setrgbcolor 1447 
| gsave 1448 | 63 225.674375 m 1449 | 83 225.674375 l 1450 | stroke 1451 | grestore 1452 | 0.000 setgray 1453 | gsave 1454 | 91.000000 222.174375 translate 1455 | 0.000000 rotate 1456 | 0.000000 0 m /v glyphshow 1457 | 5.917969 0 m /a glyphshow 1458 | 12.045898 0 m /l glyphshow 1459 | 14.824219 0 m /i glyphshow 1460 | 17.602539 0 m /d glyphshow 1461 | 23.950195 0 m /a glyphshow 1462 | 30.078125 0 m /t glyphshow 1463 | 33.999023 0 m /i glyphshow 1464 | 36.777344 0 m /o glyphshow 1465 | 42.895508 0 m /n glyphshow 1466 | grestore 1467 | 1468 | end 1469 | showpage 1470 | -------------------------------------------------------------------------------- /download.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import urllib.request 4 | import tarfile 5 | import zipfile 6 | 7 | def _print_download_progress(count, block_size, total_size): 8 | # Percentage completion. 9 | pct_complete = float(count * block_size) / total_size 10 | 11 | # Status-message. Note the \r which means the line should overwrite itself. 12 | msg = "\r- Download progress: {0:.1%}".format(pct_complete) 13 | 14 | # Print it. 15 | sys.stdout.write(msg) 16 | sys.stdout.flush() 17 | 18 | 19 | 20 | 21 | 22 | def maybe_download_and_extract(url, download_dir): 23 | 24 | # Filename for saving the file downloaded from the internet. 25 | # Use the filename from the URL and add it to the download_dir. 26 | filename = url.split('/')[-1] 27 | file_path = os.path.join(download_dir, filename) 28 | 29 | if not os.path.exists(file_path): 30 | # Check if the download directory exists, otherwise create it. 31 | if not os.path.exists(download_dir): 32 | os.makedirs(download_dir) 33 | 34 | # Download the file from the internet. 35 | file_path, _ = urllib.request.urlretrieve(url=url, 36 | filename=file_path, 37 | reporthook=_print_download_progress) 38 | 39 | print() 40 | print("Download finished. 
Extracting files.") 41 | 42 | if file_path.endswith(".zip"): 43 | # Unpack the zip-file. 44 | zipfile.ZipFile(file=file_path, mode="r").extractall(download_dir) 45 | elif file_path.endswith((".tar.gz", ".tgz")): 46 | # Unpack the tar-ball. 47 | tarfile.open(name=file_path, mode="r:gz").extractall(download_dir) 48 | 49 | print("Done.") 50 | else: 51 | print("Data has apparently already been downloaded and unpacked.") 52 | 53 | 54 | -------------------------------------------------------------------------------- /infer.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import os 3 | import numpy as np 4 | import keras 5 | from keras.applications import VGG16 6 | from keras import backend as K 7 | from keras.models import Model 8 | import sys 9 | import time 10 | import multiprocessing 11 | from termcolor import colored 12 | 13 | model = keras.models.load_model('model/vlstm_92.h5') 14 | image_model = VGG16(include_top=True, weights='imagenet') 15 | #We will use the output of the layer prior to the final 16 | # classification-layer which is named fc2. This is a fully-connected (or dense) layer. 
17 | transfer_layer = image_model.get_layer('fc2') 18 | image_model_transfer = Model(inputs=image_model.input,outputs=transfer_layer.output) 19 | transfer_values_size = K.int_shape(transfer_layer.output)[1] 20 | 21 | # Frame size 22 | img_size = 224 23 | 24 | img_size_touple = (img_size, img_size) 25 | 26 | # Number of channels (RGB) 27 | num_channels = 3 28 | 29 | # Flat frame size 30 | img_size_flat = img_size * img_size * num_channels 31 | 32 | # Number of classes for classification (Violence-No Violence) 33 | num_classes = 2 34 | 35 | # Number of files to train 36 | _num_files_train = 1 37 | 38 | # Number of frames per video 39 | _images_per_file = 20 40 | 41 | # Number of frames per training set 42 | _num_images_train = _num_files_train * _images_per_file 43 | 44 | # Video extension 45 | video_exts = ".avi" 46 | 47 | in_dir = "data" 48 | 49 | def get_frames(current_dir, file_name): 50 | in_file = os.path.join(current_dir, file_name) 51 | images = [] 52 | vidcap = cv2.VideoCapture(in_file) 53 | success,image = vidcap.read() 54 | count = 0 55 | while count<_images_per_file: 56 | RGB_img = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) 57 | res = cv2.resize(RGB_img, dsize=(img_size, img_size),interpolation=cv2.INTER_CUBIC) 58 | images.append(res) 59 | success,image = vidcap.read() 60 | count += 1 61 | resul = np.array(images) 62 | resul = (resul / 255.).astype(np.float16) 63 | return resul 64 | 65 | 66 | def get_transfer_values(current_dir, file_name): 67 | 68 | # Pre-allocate input-batch-array for images. 69 | shape = (_images_per_file,) + img_size_touple + (3,) 70 | image_batch = np.zeros(shape=shape, dtype=np.float16) 71 | image_batch = get_frames(current_dir, file_name) 72 | # Pre-allocate output-array for transfer-values. 73 | # Note that we use 16-bit floating-points to save memory. 
74 | shape = (_images_per_file, transfer_values_size) 75 | transfer_values = np.zeros(shape=shape, dtype=np.float16) 76 | 77 | transfer_values = \ 78 | image_model_transfer.predict(image_batch) 79 | 80 | return transfer_values 81 | 82 | 83 | def infer(curr_dir,file_name): 84 | tr = get_transfer_values(curr_dir,file_name) 85 | tr = tr[np.newaxis,...] 86 | pred = model.predict(np.array(tr)) 87 | res = np.argmax(pred[0]) 88 | if res == 0: 89 | print("\n\n"+ colored('VIOLENT','red')+" Video with confidence: "+str(round(pred[0][res]*100,2))+" %") 90 | else: 91 | print("\n\n" + colored('NON-VIOLENT','green') +" Video with confidence: "+str(round(pred[0][res]*100,2))+" %") 92 | 93 | 94 | if __name__ == "__main__": 95 | arg = sys.argv 96 | start_time = time.time() 97 | infer(in_dir,arg[1]) 98 | 99 | end_time = time.time() 100 | delta = round(end_time-start_time,2) 101 | fps = round(20/delta,2) 102 | print("Inferrence time: "+str(delta)+" s") 103 | print(str(fps)+" fps ^_^") 104 | vpath = in_dir + '\\' + arg[1] 105 | cap = cv2.VideoCapture(vpath) 106 | while(cap.isOpened()): 107 | ret, frame = cap.read() 108 | time.sleep(0.05) 109 | try: 110 | cv2.imshow('frame', frame) 111 | except: 112 | break 113 | if cv2.waitKey(1) & 0xFF == ord('q'): 114 | break 115 | cap.release() 116 | cv2.destroyAllWindows() 117 | -------------------------------------------------------------------------------- /infer_cam.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import os 3 | import numpy as np 4 | import keras 5 | from keras.applications import VGG16 6 | from keras import backend as K 7 | from keras.models import Model 8 | import sys 9 | import time 10 | import multiprocessing 11 | from termcolor import colored 12 | os.environ["CUDA_VISIBLE_DEVICES"] = "-1" 13 | model = keras.models.load_model('model/vlstm_92.h5') 14 | image_model = VGG16(include_top=True, weights='imagenet') 15 | model.summary() 16 | #We will use the output of the layer 
prior to the final 17 | # classification-layer which is named fc2. This is a fully-connected (or dense) layer. 18 | transfer_layer = image_model.get_layer('fc2') 19 | image_model_transfer = Model(inputs=image_model.input,outputs=transfer_layer.output) 20 | transfer_values_size = K.int_shape(transfer_layer.output)[1] 21 | 22 | # Frame size 23 | img_size = 224 24 | 25 | img_size_touple = (img_size, img_size) 26 | 27 | # Number of channels (RGB) 28 | num_channels = 3 29 | 30 | # Flat frame size 31 | img_size_flat = img_size * img_size * num_channels 32 | 33 | # Number of classes for classification (Violence-No Violence) 34 | num_classes = 2 35 | 36 | # Number of files to train 37 | _num_files_train = 1 38 | 39 | # Number of frames per video 40 | _images_per_file = 20 41 | 42 | # Number of frames per training set 43 | _num_images_train = _num_files_train * _images_per_file 44 | 45 | # Video extension 46 | video_exts = ".avi" 47 | 48 | in_dir = "data" 49 | 50 | #url of video stream 51 | url = 'http://26.146.143.10:8080/video' 52 | # url = 'http://26.72.110.56:8080/video' 53 | 54 | 55 | if __name__ == "__main__": 56 | cap = cv2.VideoCapture(url) 57 | count = 0 58 | images=[] 59 | shape = (_images_per_file,) + img_size_touple + (3,) 60 | image_batch = np.zeros(shape=shape, dtype=np.float16) 61 | while(True): 62 | ret, frame = cap.read() 63 | count+=1 64 | 65 | if count <= _images_per_file: 66 | RGB_img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) 67 | res = cv2.resize(RGB_img, dsize=(img_size, img_size),interpolation=cv2.INTER_CUBIC) 68 | images.append(res) 69 | else: 70 | resul = np.array(images) 71 | resul = (resul / 255.).astype(np.float16) 72 | transfer_shape = (_images_per_file, transfer_values_size) 73 | transfer_values = np.zeros(shape=transfer_shape, dtype=np.float16) 74 | transfer_values = image_model_transfer.predict(image_batch) 75 | print(transfer_values.shape) 76 | inp = np.array(transfer_values) 77 | inp = inp[np.newaxis,...] 
78 | print(inp.shape) 79 | pred = model.predict(inp) 80 | res = np.argmax(pred[0]) 81 | count = 0 82 | images = [] 83 | shape = (_images_per_file,) + img_size_touple + (3,) 84 | image_batch = np.zeros(shape=shape, dtype=np.float16) 85 | 86 | #print result 87 | if res == 0: 88 | print("\n\n"+ colored('VIOLENT','red')+" Video with confidence: "+str(round(pred[0][res]*100,2))+" %") 89 | else: 90 | print("\n\n" + colored('NON-VIOLENT','green') +" Video with confidence: "+str(round(pred[0][res]*100,2))+" %") 91 | # showing the video stream 92 | if frame is not None: 93 | cv2.imshow('frame',frame) 94 | q = cv2.waitKey(1) 95 | if q == ord("q"): 96 | break 97 | cv2.destroyAllWindows() 98 | 99 | 100 | -------------------------------------------------------------------------------- /ip_cam.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import time 3 | import numpy as np 4 | url = 'http://26.146.143.10:8080/video' 5 | cap = cv2.VideoCapture(url) 6 | # cap = cv2.VideoCapture(0) 7 | _frames_per_batch_ = 20 8 | 9 | 10 | show = 0 11 | while(True): 12 | images = [] 13 | for count in range(0,20): 14 | if count <=_frames_per_batch_ and show == 0 : 15 | ret, frame = cap.read() 16 | images.append(frame) 17 | print('frame:',count,'/20') 18 | else: 19 | for frame in images: 20 | if frame is not None: 21 | cv2.imshow('frame',frame) 22 | time.sleep(0.05) 23 | q = cv2.waitKey(1) 24 | if q == ord("q"): 25 | break 26 | count = 0 27 | images = [] 28 | cv2.destroyAllWindows() -------------------------------------------------------------------------------- /model sandbox/BuildModel.py: -------------------------------------------------------------------------------- 1 | from keras.layers import Dense, Flatten, Dropout, ZeroPadding3D 2 | from keras.layers.recurrent import LSTM 3 | from keras.models import Sequential, load_model 4 | from keras.optimizers import Adam, RMSprop 5 | from keras.layers.wrappers import TimeDistributed 6 | from 
keras.layers.convolutional import (Conv2D, MaxPooling3D, Conv3D, 7 | MaxPooling2D) 8 | from collections import deque 9 | import sys 10 | import logging 11 | from keras.applications import Xception, ResNet50, InceptionV3 12 | from keras.layers import Dense, GlobalAveragePooling2D 13 | from keras.models import Model 14 | 15 | 16 | logger = logging.getLogger('Builder_moudle') 17 | logger.setLevel(logging.DEBUG) 18 | ch = logging.StreamHandler() 19 | ch.setLevel(logging.DEBUG) 20 | formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') 21 | ch.setFormatter(formatter) 22 | logger.addHandler(ch) 23 | 24 | 25 | epoch = 10 26 | learning_rate = 0.0004 27 | batch_size = 16 28 | optimizer ='RMSprop' 29 | initial_weights = 'Xavier' 30 | 31 | default_values = dict(epoch=10,\ 32 | learning_rate=0.0004,\ 33 | batch_size=16,\ 34 | optimizer=Adam,\ 35 | initial_weights=0,\ 36 | cnn_class=Xception,\ 37 | pre_weights='Xavier',\ 38 | lstm_conf=(LSTM,dict(units = 256)),\ 39 | cnn_train_type='static' 40 | ) 41 | 42 | 43 | def build(epoch = default_values["epoch"],\ 44 | learning_rate = default_values["learning_rate"], \ 45 | batch_size = default_values["batch_size"],\ 46 | optimizer = default_values["optimizer"],\ 47 | initial_weights = default_values["initial_weights"],\ 48 | cnn_class = default_values["cnn_class"],\ 49 | pre_weights = default_values["pre_weights"], \ 50 | lstm_conf = default_values["lstm_conf"], \ 51 | cnn_train_type=default_values["cnn_train_type"]): 52 | 53 | model=0 54 | #Create CNN 55 | if(cnn_train_type!='train'): 56 | logger.info("CNN Created with Pre-weights:{}".format(pre_weights)) 57 | base_model = cnn_class(weights=pre_weights,include_top=False) 58 | else: 59 | logger.info("CNN Created with no Pre-weights") 60 | base_model = cnn_class() 61 | 62 | #control Train_able of CNNN 63 | if(cnn_train_type=='static'): 64 | logger.info("CNN set to NOT-Train") 65 | for layer in base_model.layers: 66 | layer.trainable = False 67 | 
if(cnn_train_type=='retrain'): 68 | logger.info("CNN set to retrain") 69 | for layer in base_model.layers: 70 | layer.trainable = True 71 | 72 | # print(base_model.summary()) 73 | # add a global spatial average pooling layer 74 | x = base_model.output 75 | logger.info("base_model.output: {}".format(base_model.output)) 76 | x = GlobalAveragePooling2D()(x) 77 | # let's add a fully-connected layer 78 | x = Dense(1024, activation='relu')(x) 79 | # and a logistic layer -- let's say we have 200 classes 80 | predictions = Dense(100 , activation='softmax')(x) 81 | 82 | model = Model(inputs=base_model.input, outputs=predictions) 83 | model.compile(optimizer=optimizer, loss='categorical_crossentropy') 84 | 85 | model.summary() 86 | print("Commit update2") 87 | print("Commit update3") 88 | return model -------------------------------------------------------------------------------- /model sandbox/BuildModel_basic.py: -------------------------------------------------------------------------------- 1 | from keras import Input 2 | from keras.callbacks import Callback 3 | from keras.layers import Dense, Flatten, Dropout, ZeroPadding3D, ConvLSTM2D, Reshape, BatchNormalization, Activation 4 | from keras.layers.recurrent import LSTM 5 | from keras.models import Sequential, load_model 6 | from keras.optimizers import Adam, RMSprop 7 | from keras.layers.wrappers import TimeDistributed 8 | from keras.layers.convolutional import (Conv2D, MaxPooling3D, Conv3D, 9 | MaxPooling2D) 10 | from collections import deque 11 | import sys 12 | import logging 13 | from keras.applications import Xception, ResNet50, InceptionV3 14 | from keras.layers import Dense, GlobalAveragePooling2D 15 | from keras.models import Model 16 | 17 | 18 | def build(size, seq_len , learning_rate , 19 | optimizer_class ,\ 20 | initial_weights ,\ 21 | cnn_class ,\ 22 | pre_weights , \ 23 | lstm_conf , \ 24 | cnn_train_type, classes = 1, dropout = 0.0): 25 | input_layer = Input(shape=(seq_len, size, size, 3)) 26 | 
if(cnn_train_type!='train'): 27 | if cnn_class.__name__ == "ResNet50": 28 | cnn = cnn_class(weights=pre_weights, include_top=False,input_shape =(size, size, 3)) 29 | else: 30 | cnn = cnn_class(weights=pre_weights,include_top=False) 31 | else: 32 | cnn = cnn_class(include_top=False) 33 | 34 | #control Train_able of CNNN 35 | if(cnn_train_type=='static'): 36 | for layer in cnn.layers: 37 | layer.trainable = False 38 | if(cnn_train_type=='retrain'): 39 | for layer in cnn.layers: 40 | layer.trainable = True 41 | 42 | cnn = TimeDistributed(cnn)(input_layer) 43 | #the resnet output shape is 1,1,20148 and need to be reshape for the ConvLSTM filters 44 | # if cnn_class.__name__ == "ResNet50": 45 | # cnn = Reshape((seq_len,4, 4, 128), input_shape=(seq_len,1, 1, 2048))(cnn) 46 | lstm = lstm_conf[0](**lstm_conf[1])(cnn) 47 | lstm = MaxPooling2D(pool_size=(2, 2))(lstm) 48 | flat = Flatten()(lstm) 49 | 50 | flat = BatchNormalization()(flat) 51 | flat = Dropout(dropout)(flat) 52 | linear = Dense(1000)(flat) 53 | 54 | relu = Activation('relu')(linear) 55 | linear = Dense(256)(relu) 56 | linear = Dropout(dropout)(linear) 57 | relu = Activation('relu')(linear) 58 | linear = Dense(10)(relu) 59 | linear = Dropout(dropout)(linear) 60 | relu = Activation('relu')(linear) 61 | 62 | activation = 'sigmoid' 63 | loss_func = 'binary_crossentropy' 64 | 65 | if classes > 1: 66 | activation = 'softmax' 67 | loss_func = 'categorical_crossentropy' 68 | predictions = Dense(classes, activation=activation)(relu) 69 | 70 | model = Model(inputs=input_layer, outputs=predictions) 71 | optimizer = optimizer_class[0](lr=learning_rate, **optimizer_class[1]) 72 | model.compile(optimizer=optimizer, loss=loss_func,metrics=['acc']) 73 | 74 | print(model.summary()) 75 | 76 | return model -------------------------------------------------------------------------------- /model sandbox/DatasetBuilder.py: -------------------------------------------------------------------------------- 1 | import scipy 2 | import os 
import cv2
import pickle
import glob
import numpy as np
import re
import random
from collections import defaultdict

from keras.preprocessing.image import load_img, img_to_array
from keras.preprocessing.sequence import pad_sequences
from keras.utils import to_categorical
from keras.preprocessing import image
from sklearn.model_selection import train_test_split

# Crop anchor names accepted by crop_img().
corner_keys = ["Center", "Left_up", "Left_down", "Right_up", "Right_down"]

# When True, every augmented frame is also written to disk for visual inspection.
Debug_Print_AUG = False


def save_figures_from_video(dataset_video_path, video_filename, suffix, figures_path,
                            skip_frames=25, apply_norm=True, apply_diff=True, fix_len=None):
    """Extract every ``skip_frames``-th frame of one video to JPEG files.

    Frames are written under ``figures_path/video_filename/frame_<i>.jpg``.
    When ``fix_len`` is given, ``skip_frames`` is recomputed so that roughly
    ``fix_len`` frames are sampled evenly across the whole clip.

    Returns a dict with keys: ``images_path``, ``name``, ``images_files``,
    ``sequence_length`` and ``label`` (always 0 here; the caller relabels).
    ``apply_norm`` / ``apply_diff`` are currently unused — kept for interface
    compatibility with callers.
    """
    seq_len = 0
    video_figures_path = os.path.join(figures_path, video_filename)
    if not os.path.exists(video_figures_path):
        os.makedirs(video_figures_path)

    video_file = os.path.join(dataset_video_path, video_filename + suffix)
    label = 0
    print('Extracting frames from video: ', video_file)

    videoCapture = cv2.VideoCapture(video_file)
    if fix_len is not None:
        vid_len = int(videoCapture.get(cv2.CAP_PROP_FRAME_COUNT))
        # BUGFIX: clips shorter than fix_len produced skip_frames == 0 and a
        # ZeroDivisionError in the modulo below; clamp to at least 1.
        skip_frames = max(1, int(float(vid_len) / float(fix_len)))
    # NOTE(review): CAP_PROP_POS_MSEC expects milliseconds but seq_len*skip_frames
    # is a frame count; seq_len is 0 at this point so the call is a no-op — confirm intent.
    videoCapture.set(cv2.CAP_PROP_POS_MSEC, (seq_len * skip_frames))
    success, figure_ = videoCapture.read()
    success = True
    files = []
    while success:
        success, figure = videoCapture.read()
        if seq_len % skip_frames == 0:
            if success:
                figure_curr = figure
                image_file = os.path.join(video_figures_path, "frame_%d.jpg" % seq_len)
                files.append(image_file)
                cv2.imwrite(image_file, figure_curr)
        seq_len += 1
    video_images = dict(images_path=video_figures_path, name=video_filename,
                        images_files=files, sequence_length=seq_len, label=label)

    return video_images


def createDataset(datasets_video_path, figure_output_path, fix_len, force=False):
    """Extract (or reload cached) frame sets for every video of every dataset.

    ``datasets_video_path`` maps dataset name -> directory of .avi/.mpg files.
    Per-video summaries are pickled next to the frames so repeated runs are
    cheap unless ``force`` is set. Labels: 1 = violent, inferred from the
    filename convention of each dataset ("fi..." for hockey/movies,
    "violence" substring for violent-flow).

    Returns train/valid/test path lists, their label lists, and the average
    sequence length across all videos.
    """
    videos_seq_length = []
    datasets_images = {}
    videos_frames_paths = []
    videos_labels = []
    for dataset_name, dataset_video_path in datasets_video_path.items():
        dataset_figures_path = os.path.join(figure_output_path, dataset_name)
        if not os.path.exists(dataset_figures_path):
            os.makedirs(dataset_figures_path)
        dataset_images = []
        for filename in os.listdir(dataset_video_path):
            if filename.endswith(".avi") or filename.endswith(".mpg"):
                video_images_file = os.path.join(dataset_figures_path, filename[:-4], 'video_summary.pkl')
                if os.path.isfile(video_images_file) and not force:
                    with open(video_images_file, 'rb') as f:
                        video_images = pickle.load(f)
                else:
                    video_images = save_figures_from_video(dataset_video_path, filename[:-4], filename[-4:],
                                                           dataset_figures_path, fix_len=fix_len)
                    # NOTE(review): "hocky" (sic) is the key used throughout the
                    # project and its results CSVs — do not "fix" the spelling.
                    if dataset_name == "hocky":
                        if filename.startswith("fi"):
                            video_images['label'] = 1
                    elif dataset_name == "violentflow":
                        if "violence" in filename:
                            video_images['label'] = 1
                    elif dataset_name == "movies":
                        if "fi" in filename:
                            video_images['label'] = 1
                    with open(video_images_file, 'wb') as f:
                        pickle.dump(video_images, f, pickle.HIGHEST_PROTOCOL)
                dataset_images.append(video_images)
                videos_seq_length.append(video_images['sequence_length'])
                videos_frames_paths.append(video_images['images_path'])
                videos_labels.append(video_images['label'])
        datasets_images[dataset_name] = dataset_images
    avg_length = int(float(sum(videos_seq_length)) / max(len(videos_seq_length), 1))

    # 80/20 train/test split, then 80/20 train/validation split of the remainder.
    train_path, test_path, train_y, test_y = train_test_split(videos_frames_paths, videos_labels,
                                                              test_size=0.20, random_state=42)
    train_path, valid_path, train_y, valid_y = train_test_split(train_path, train_y,
                                                                test_size=0.20, random_state=42)
    return train_path, valid_path, test_path, \
        train_y, valid_y, test_y, \
        avg_length


def frame_loader(frames, figure_shape, to_norm=True):
    """Load image files, resize to (figure_shape, figure_shape) and, when
    ``to_norm`` is True, scale to [0, 1] and apply ImageNet mean/std
    normalization.

    BUGFIX: ``to_norm`` was previously ignored and normalization always ran,
    although the (legacy) augmentation path calls with ``to_norm=False``.
    """
    output_frames = []
    for frame in frames:
        loaded = load_img(frame, target_size=(figure_shape, figure_shape), interpolation='bilinear')
        figure = img_to_array(loaded)
        if to_norm:
            figure = (figure / 255.).astype(np.float32)
            # ImageNet channel statistics — match the pretrained CNN backbones.
            mean = [0.485, 0.456, 0.406]
            std = [0.229, 0.224, 0.225]
            figure = (figure - mean) / std
        output_frames.append(figure)
    return output_frames


def data_generator(data_paths, labels, batch_size, figure_shape, seq_length,
                   use_aug, use_crop, crop_x_y, classes=1):
    """Infinite generator yielding (X, y) batches; each step samples
    ``batch_size`` videos uniformly without replacement."""
    while True:
        indexes = np.arange(len(data_paths))
        np.random.shuffle(indexes)
        select_indexes = indexes[:batch_size]
        data_paths_batch = [data_paths[i] for i in select_indexes]
        labels_batch = [labels[i] for i in select_indexes]

        X, y = get_sequences(data_paths_batch, labels_batch, figure_shape, seq_length, classes,
                             use_augmentation=use_aug, use_crop=use_crop, crop_x_y=crop_x_y)

        yield X, y


def data_generator_files(data, labels, batch_size):
    """Infinite generator over pre-loaded arrays; yields random batches as lists."""
    while True:
        indexes = np.arange(len(data))
        np.random.shuffle(indexes)
        select_indexes = indexes[:batch_size]
        X = [data[i] for i in select_indexes]
        y = [labels[i] for i in select_indexes]
        yield X, y


def crop_img__remove_Dark(img, x_crop, y_crop, x, y, figure_size):
    """Cut ``x_crop``/``y_crop`` pixels off every border (removing dark
    letterbox margins) and resize back to (figure_size, figure_size)."""
    x_start = x_crop
    x_end = x - x_crop
    y_start = y_crop
    y_end = y - y_crop
    return cv2.resize(img[y_start:y_end, x_start:x_end, :], (figure_size, figure_size))


def crop_img(img, figure_shape, percentage=0.8, corner="Left_up"):
    """Crop a square window covering ``percentage`` of each side, anchored at
    ``corner`` (random corner when ``corner`` is None), then resize back to
    (figure_shape, figure_shape). Returns float32."""
    if corner is None:
        corner = random.choice(corner_keys)

    if corner not in corner_keys:
        raise ValueError(
            'Invalid corner method {} specified. Supported '
            'corners are {}'.format(
                corner,
                ", ".join(corner_keys)))

    resize = int(figure_shape * percentage)

    # Image coordinates: x grows rightwards, y grows downwards (y=0 is the top row).
    if corner == "Left_up":
        x_start, x_end = 0, resize
        y_start, y_end = 0, resize
    if corner == "Right_down":
        x_start, x_end = figure_shape - resize, figure_shape
        y_start, y_end = figure_shape - resize, figure_shape
    if corner == "Right_up":
        # BUGFIX: x and y were swapped here and in Left_down, so "Right_up"
        # actually cropped the bottom-left region (and vice versa).
        x_start, x_end = figure_shape - resize, figure_shape
        y_start, y_end = 0, resize
    if corner == "Left_down":
        x_start, x_end = 0, resize
        y_start, y_end = figure_shape - resize, figure_shape
    if corner == "Center":
        half = int(figure_shape * (1 - percentage))
        x_start, x_end = half, figure_shape - half
        y_start, y_end = half, figure_shape - half

    img = cv2.resize(img[y_start:y_end, x_start:x_end, :], (figure_shape, figure_shape)).astype(np.float32)
    return img


def get_sequences(data_paths, labels, figure_shape, seq_length, classes=1,
                  use_augmentation=False, use_crop=True, crop_x_y=None):
    """Load one frame sequence per path, optionally augment (random corner
    crop + transpose, each with probability 0.5), replace frames with
    consecutive-frame differences, and pad/truncate to ``seq_length``.

    Returns (X, y) as numpy arrays; y is one-hot encoded when classes > 1.
    """
    X, y = [], []
    for data_path, label in zip(data_paths, labels):
        frames = sorted(glob.glob(os.path.join(data_path, '*jpg')))
        x = frame_loader(frames, figure_shape)
        if crop_x_y:
            x = [crop_img__remove_Dark(x_, crop_x_y[0], crop_x_y[1], x_.shape[0], x_.shape[1], figure_shape)
                 for x_ in x]
        corner = ""
        # scipy.random.random() was an alias of numpy's generator and is gone
        # from modern scipy; call numpy directly (same distribution).
        if use_augmentation and np.random.random() > 0.5:
            if use_crop:
                corner = random.choice(corner_keys)
                x = [crop_img(x_, figure_shape, 0.7, corner) for x_ in x]
            x = [frame.transpose(1, 0, 2) for frame in x]
            if Debug_Print_AUG:
                to_write = [list(a) for a in zip(frames, x)]
                [cv2.imwrite(x_[0] + "_" + corner, x_[1] * 255) for x_ in to_write]

        # Temporal differencing: the model consumes frame deltas, not raw frames.
        x = [x[i] - x[i + 1] for i in range(len(x) - 1)]
        X.append(x)
        y.append(label)
    X = pad_sequences(X, maxlen=seq_length, padding='pre', truncating='pre')
    # BUGFIX: the original encoded the stale loop variable `x_` and discarded
    # the result; the *labels* must be one-hot encoded for categorical losses.
    if classes > 1:
        y = to_categorical(y, classes)
    return np.array(X), np.array(y)


def natural_sort(l):
    """Sort strings so embedded integers compare numerically
    ("frame_2" before "frame_10")."""
    convert = lambda text: int(text) if text.isdigit() else text.lower()
    alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
    return sorted(l, key=alphanum_key)


# NOTE(review): large commented-out legacy blocks (generate_augmentations and
# two older load_data/data_generator variants) were removed; recover them from
# version control if ever needed.
# x, y = 
get_sequences(data_paths,labels) 265 | # for select_index in range(len(data_paths)): 266 | # 267 | # frames = sorted(glob.glob(os.path.join(data_paths[select_index], '*jpg'))) 268 | # x = frame_loader(frames, figure_shape) 269 | # X.append(x) 270 | # y.append(labels[select_index]) 271 | # X = pad_sequences(X,maxlen = seq_length, padding = 'pre' , truncating = 'pre' ) 272 | # return np.array(X), np.array(y) 273 | # 274 | # def data_generator(data_paths,labels,batch_size,figure_shape,seq_length): 275 | # while True: 276 | # X, y = [], [] 277 | # indexes = np.arange(len(data_paths)) 278 | # np.random.shuffle(indexes) 279 | # select_indexes = indexes[:batch_size] 280 | # for select_index in select_indexes: 281 | # frames = sorted(glob.glob(os.path.join(data_paths[select_index], '*jpg'))) 282 | # x = frame_loader(frames, figure_shape) 283 | # X.append(x) 284 | # y.append(labels[select_index]) 285 | # X = pad_sequences(X,maxlen = seq_length, padding = 'pre' , truncating = 'pre' ) 286 | # yield np.array(X), np.array(y) -------------------------------------------------------------------------------- /model sandbox/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manncodes/Violence-Detection/f10b835acbeeb21952f027f4ea2fc232630fc045/model sandbox/__init__.py -------------------------------------------------------------------------------- /model sandbox/results/hockey.csv: -------------------------------------------------------------------------------- 1 | ,acc,loss,lr,test accuracy,test loss,val_acc,val_loss 2 | 0,0.65390625,0.628498470026534,9.999999747378752e-05,0.845,0.4229634567350149,0.8,0.45697646965272726 3 | 1,0.70859375,0.5803124972997467,9.999999747378752e-05,0.835,0.4340670491755009,0.85625,0.38297461257316173 4 | 2,0.7265625,0.5631825152837336,9.999999747378752e-05,0.555,0.9296135601727292,0.4625,0.9253231020877137 5 | 
3,0.775,0.5146091145850733,9.999999747378752e-05,0.835,0.4698491769397515,0.8375,0.551018491000832 6 | 4,0.78125,0.5070158517970412,9.999999747378752e-05,0.785,0.49887427921989,0.6875,0.569624262326397 7 | 5,0.78515625,0.46719541498805484,9.999999747378752e-05,0.845,0.51200651055653,0.84375,0.33584659325060784 8 | 6,0.81171875,0.4755434098640762,9.999999747378752e-05,0.88,0.5298987866253007,0.89375,0.6229385426354577 9 | 7,0.7859375,0.5117452629483523,9.999999747378752e-05,0.885,0.3783056869840948,0.9,0.37724031565348926 10 | 8,0.78125,0.4739710941236126,9.999999747378752e-05,0.805,0.7189141000701238,0.75625,0.5447268501269036 11 | 9,0.796875,0.47306456344681463,9.999999747378752e-05,0.86,0.6208398227052385,0.9,0.3589332878819732 12 | 10,0.7796875,0.468733240639267,9.999999747378752e-05,0.82,0.5660249486423709,0.85625,0.5364099857781184 13 | 11,0.828125,0.41110187881257615,4.999999873689376e-05,0.87,0.6420557245593596,0.91875,0.5711989348025555 14 | 12,0.8375,0.39578957735811854,4.999999873689376e-05,0.825,0.8064801371915155,0.85,0.3573613037276232 15 | 13,0.853125,0.3721691128204611,4.999999873689376e-05,0.825,0.9945481130982127,0.8625,0.49836775447261117 16 | 14,0.859375,0.36505632064299787,4.999999873689376e-05,0.88,0.7000003915025304,0.91875,0.3438793812286981 17 | 15,0.84921875,0.38930277433362337,4.999999873689376e-05,0.86,0.6768989297718201,0.8875,0.702947949869084 18 | 16,0.859375,0.35263649539469955,2.499999936844688e-05,0.865,0.837176462085828,0.88125,0.4473774912980602 19 | 17,0.83671875,0.370429134919209,2.499999936844688e-05,0.84,0.6224650741604815,0.90625,0.5392263160763378 20 | 18,0.88828125,0.29557020980947685,2.499999936844688e-05,0.855,0.7394702425414585,0.93125,0.31794939217270707 21 | 19,0.8921875,0.2795070054731298,2.499999936844688e-05,0.835,0.720113527314819,0.90625,0.20051546123163674 22 | 20,0.8765625,0.30299898718261603,2.499999936844688e-05,0.85,0.7741244592238068,0.90625,0.5152298986731475 23 | 
21,0.89921875,0.2688651757811641,2.499999936844688e-05,0.865,0.6987209809124468,0.91875,0.40195642375017665 24 | 22,0.88359375,0.27566154445640445,2.499999936844688e-05,0.87,0.7271929185476502,0.875,0.2868674289924128 25 | 23,0.8890625,0.27947449008494446,2.499999936844688e-05,0.88,0.7220095617749774,0.89375,0.9244293222150317 26 | 24,0.884375,0.28916259292566904,2.499999936844688e-05,0.89,0.8219689577413116,0.9125,0.6499926353308693 27 | 25,0.90078125,0.2606161100909531,1.249999968422344e-05,0.865,0.8401218839539497,0.9,0.5022125956637377 28 | 26,0.915625,0.23518742145768384,1.249999968422344e-05,0.865,0.8803631140227517,0.925,0.3896032612015487 29 | 27,0.9046875,0.23767020811402623,1.249999968422344e-05,0.875,0.7568330459612372,0.9125,0.3821533807784183 30 | 28,0.90234375,0.24311015922525464,1.249999968422344e-05,0.875,0.8343652631862549,0.94375,0.49931025909753757 31 | 29,0.9140625,0.24354507833944625,1.249999968422344e-05,0.89,0.756587501940287,0.925,0.5791990106399778 32 | 30,0.909375,0.22386080060077376,6.24999984211172e-06,0.875,0.7665939102946113,0.94375,0.5765175205072139 33 | 31,0.9265625,0.20265535428789846,6.24999984211172e-06,0.89,0.7438882400382123,0.925,0.4760127097493411 34 | 32,0.921875,0.23669615720451417,6.24999984211172e-06,0.885,0.7169637572121665,0.94375,0.34957201581222214 35 | 33,0.921875,0.2000237230590624,6.24999984211172e-06,0.885,0.7830563173479035,0.9,0.5670125462474628 36 | 34,0.9328125,0.20470205899601163,6.24999984211172e-06,0.875,0.7917592332885769,0.8875,0.8491452741297953 37 | -------------------------------------------------------------------------------- /model sandbox/results/movies.csv: -------------------------------------------------------------------------------- 1 | ,acc,loss,lr,test accuracy,test loss,val_acc,val_loss 2 | 0,0.69140625,0.575351041770773,9.999999747378752e-05,0.926829268292683,0.26830394934045104,0.9375,0.19586299930233508 3 | 
1,0.765625,0.4359811238646216,9.999999747378752e-05,0.7317073170731707,0.4523469006721271,0.90625,0.3812125417734933 4 | 2,0.828125,0.40550058890949003,9.999999747378752e-05,0.8292682926829268,0.3643115602899343,0.8125,0.5257298822543817 5 | 3,0.8203125,0.43059778588212794,9.999999747378752e-05,0.8780487804878049,0.3214016154737825,0.96875,0.07357089433935471 6 | 4,0.79296875,0.43105395785096334,9.999999747378752e-05,0.7804878048780488,0.4405821608386582,0.84375,0.3636026953608962 7 | 5,0.84765625,0.36113351379572123,9.999999747378752e-05,0.9512195121951219,0.21641131239892097,0.875,0.2436661793399253 8 | 6,0.8359375,0.3838258190316992,9.999999747378752e-05,0.8292682926829268,0.41602619167164584,0.96875,0.08448946280259406 9 | 7,0.859375,0.2943663818462028,9.999999747378752e-05,0.926829268292683,0.1819756609989648,0.9375,0.09827375906752422 10 | 8,0.91015625,0.24519988830944328,9.999999747378752e-05,0.8292682926829268,0.4046201146549897,0.96875,0.05940929188000155 11 | 9,0.91796875,0.29666949376223783,9.999999747378752e-05,0.9512195121951219,0.1126690048308441,0.90625,0.1867786030870775 12 | 10,0.9296875,0.21220806872096887,9.999999747378752e-05,0.9024390243902439,0.483933416699207,0.96875,0.1081563377265411 13 | 11,0.89453125,0.29659346823609667,9.999999747378752e-05,0.9512195121951219,0.11181254791633209,1.0,0.017409152498430558 14 | 12,0.921875,0.20709187534157536,9.999999747378752e-05,0.8536585365853658,0.6493930687186301,1.0,0.030610509577172706 15 | 13,0.91015625,0.22216209156813704,9.999999747378752e-05,0.9512195121951219,0.18361433952321912,1.0,0.03665328604410689 16 | 14,0.9453125,0.1592306455406458,9.999999747378752e-05,0.8780487804878049,0.2897349158656895,0.96875,0.07687783159815353 17 | 15,0.90234375,0.23767460843814092,9.999999747378752e-05,0.9512195121951219,0.1583021169330062,1.0,0.00685132970798108 18 | 16,0.9296875,0.18257168632734988,9.999999747378752e-05,0.9512195121951219,0.13404268758184534,1.0,0.006683263018487651 19 | 
17,0.91796875,0.20187876979829866,9.999999747378752e-05,0.9512195121951219,0.12306001807608428,1.0,0.004457137918739917 20 | 18,0.95703125,0.15250381388602818,9.999999747378752e-05,0.8536585365853658,0.4916311452394527,0.9375,0.19819343498966457 21 | 19,0.94140625,0.1674219495180651,9.999999747378752e-05,0.8292682926829268,0.7561546672952183,0.84375,0.8084677233114261 22 | 20,0.9765625,0.08294466389625565,9.999999747378752e-05,0.8536585365853658,0.6905145420733454,1.0,0.0004791129197059263 23 | 21,0.9765625,0.06585682979190555,9.999999747378752e-05,0.975609756097561,0.18268219045258047,0.9375,0.10407301944906333 24 | 22,0.97265625,0.09579963576284456,9.999999747378752e-05,0.975609756097561,0.07174894515741895,1.0,0.0009487769823008918 25 | 23,0.96875,0.14014676920712787,9.999999747378752e-05,0.926829268292683,0.35869651697600297,0.96875,0.32526746845063226 26 | 24,0.98828125,0.01673601094517796,9.999999747378752e-05,0.975609756097561,0.04551445106850923,1.0,0.0002914537147455931 27 | 25,0.9765625,0.09885051443288773,9.999999747378752e-05,0.975609756097561,0.08220030310238466,1.0,0.0008127532378372493 28 | 26,0.9921875,0.0322465288525311,9.999999747378752e-05,0.926829268292683,0.4348985180318319,0.96875,0.26292209919793574 29 | 27,0.984375,0.05473275692864521,9.999999747378752e-05,1.0,0.01116307523484715,1.0,0.006184449810705317 30 | 28,0.98828125,0.05918119769163366,9.999999747378752e-05,0.975609756097561,0.04222219478896354,1.0,5.534904312654021e-07 31 | 29,0.99609375,0.0063623541473372125,9.999999747378752e-05,1.0,0.006881581815264326,1.0,1.9321346822920304e-05 32 | 30,0.9921875,0.01771718187169541,9.999999747378752e-05,0.975609756097561,0.047569441648970666,1.0,2.9203695011581488e-05 33 | 31,0.9921875,0.017852412112497162,9.999999747378752e-05,0.975609756097561,0.0173485528738688,1.0,0.00019781843201016613 34 | 32,0.98828125,0.05172509863329933,9.999999747378752e-05,1.0,3.501543140667464e-05,1.0,1.5705735146198663e-05 35 | 
33,0.98046875,0.10086774594530629,9.999999747378752e-05,1.0,1.0929667336252284e-05,1.0,3.4368895285297185e-07 36 | 34,0.9921875,0.02845179297655298,4.999999873689376e-05,1.0,4.397636763958251e-05,1.0,2.8058784700135675e-06 37 | 35,1.0,0.0028562314982295,4.999999873689376e-05,1.0,0.00013287839992172604,1.0,4.7117538892749167e-07 38 | -------------------------------------------------------------------------------- /model sandbox/results/results.csv: -------------------------------------------------------------------------------- 1 | ,batch_size,cnn,cnn_train,dataset,dropout,epochs,final lr,initial_weights,last test accuracy,last test loss,last train accuracy,last train loss,last validation accuracy,last validation loss,learning_rate,lstm,optimizer,seq_len,test accuracy,test loss,total epochs,train accuracy,train loss,use_aug,validation accuracy,validation loss 2 | 0,2,VGG19,retrain,hocky,0.0,30,1.249999968422344e-05,glorot_uniform,0.665,0.6623996884631924,0.5984375,0.6639593929052353,0.59375,0.7350801134947688,0.0001,ConvLSTM2D,RMSprop,20,0.73,2.6303282265155574,16,0.6453125,0.7008159848861396,True,0.71875,2.381328369817493 3 | 1,2,ResNet50,retrain,hocky,0.0,30,2.499999936844688e-05,glorot_uniform,0.825,0.4361591333097749,0.825,0.42313834191445493,0.91875,0.2968980913316045,0.0001,ConvLSTM2D,RMSprop,20,0.88,0.5274826264381409,11,0.825,0.6445476333727129,True,0.91875,0.5457762692822143 4 | 2,2,InceptionV3,retrain,hocky,0.0,30,2.499999936844688e-05,glorot_uniform,0.87,0.5285369161661931,0.8046875,0.41072887116242784,0.89375,0.47432550635044735,0.0001,ConvLSTM2D,RMSprop,20,0.89,1.2064353383697424,13,0.8046875,0.6900985388783738,True,0.93125,1.0049526831863647 5 | 3,2,ResNet50,retrain,hocky,0.0,30,0.0002500000118743628,glorot_uniform,0.465,0.6957012832164764,0.50625,0.6932699428871274,0.40625,0.6992311403155327,0.001,ConvLSTM2D,RMSprop,20,0.465,0.6957704949378968,11,0.559375,0.7655063812620938,True,0.56875,0.6992311403155327 6 | 
4,2,ResNet50,retrain,hocky,0.0,30,2.499999936844688e-05,glorot_uniform,0.865,0.39227431337727464,0.846875,0.3572263820024091,0.8875,0.35594155848318676,0.0001,ConvLSTM2D,RMSprop,10,0.9,0.5105031683295965,14,0.846875,0.6318815861595795,True,0.9375,0.5654271571569552 7 | 5,2,ResNet50,retrain,hocky,0.0,30,4.999999873689376e-05,glorot_uniform,0.84,0.7851044760050968,0.8828125,0.290943922896804,0.9125,0.36198942071514467,0.0001,ConvLSTM2D,RMSprop,10,0.855,0.7851044760050968,7,0.8828125,0.5802455507044215,False,0.9125,0.5448796475073323 8 | 6,2,ResNet50,retrain,hocky,0.5,30,2.499999936844688e-05,glorot_uniform,0.845,0.3947314548864961,0.628125,0.7323622916796012,0.8375,0.34618440526683114,0.0001,ConvLSTM2D,RMSprop,10,0.86,0.6947716012969614,14,0.665625,0.8651324649748858,True,0.8625,0.6366888475604355 9 | 7,2,ResNet50,static,hocky,0.0,30,4.999999873689376e-05,glorot_uniform,0.545,1.4971693067997693,0.8171875,0.38817677383194676,0.45625,1.681309802434407,0.0001,ConvLSTM2D,RMSprop,10,0.59,1.6541376049257814,7,0.8171875,0.6123376628675032,True,0.59375,1.7930195541586726 10 | 8,2,ResNet50,retrain,movies,0.0,50,4.999999873689376e-05,glorot_uniform,1.0,0.00013287839992172604,1.0,0.0028562314982295,1.0,4.7117538892749167e-07,0.0001,ConvLSTM2D,RMSprop,10,1.0,0.7561546672952183,36,1.0,0.575351041770773,,1.0,0.8084677233114261 11 | 9,2,ResNet50,retrain,violentflow,0.0,50,2.499999936844688e-05,glorot_uniform,0.86,0.34080726272883255,0.8108974358974359,0.39505125439684025,0.725,0.7569769555469975,0.0001,ConvLSTM2D,RMSprop,10,0.92,0.543154970407486,21,0.8205128205128205,0.7039875780733732,,0.925,1.149793932889588 12 | 10,2,ResNet50,retrain,hocky,0.0,50,6.24999984211172e-06,glorot_uniform,0.875,0.7917592332885769,0.9328125,0.20470205899601163,0.8875,0.8491452741297953,0.0001,ConvLSTM2D,RMSprop,10,0.89,0.9945481130982127,35,0.9328125,0.628498470026534,,0.94375,0.9253231020877137 13 | -------------------------------------------------------------------------------- /model 
sandbox/results/violentflow.csv: -------------------------------------------------------------------------------- 1 | ,acc,loss,lr,test accuracy,test loss,val_acc,val_loss 2 | 0,0.6153846153846154,0.7039875780733732,9.999999747378752e-05,0.74,0.543154970407486,0.725,0.5973348237574101 3 | 1,0.6410256410256411,0.6869423218214741,9.999999747378752e-05,0.88,0.38715401783585546,0.825,0.5324596613645554 4 | 2,0.6442307692307693,0.6590900865789407,9.999999747378752e-05,0.82,0.3594546498171985,0.75,0.5071514111012221 5 | 3,0.7051282051282052,0.6243903655081223,9.999999747378752e-05,0.8,0.39604503780603406,0.8,0.4725066116079688 6 | 4,0.7115384615384616,0.6089017940923954,9.999999747378752e-05,0.86,0.3714778922870755,0.75,0.5206363730132579 7 | 5,0.6955128205128205,0.6010543434188153,9.999999747378752e-05,0.9,0.30731326957058624,0.925,0.3261756956577301 8 | 6,0.6794871794871795,0.5935524802965423,9.999999747378752e-05,0.82,0.36681083860981745,0.825,0.4411558711901307 9 | 7,0.7403846153846154,0.5631745435440769,9.999999747378752e-05,0.8,0.3168901742431626,0.725,0.5464424151927233 10 | 8,0.7435897435897436,0.5556985907590924,9.999999747378752e-05,0.78,0.5279765690818021,0.675,1.1397417853208025 11 | 9,0.7243589743589743,0.5589170948697779,9.999999747378752e-05,0.92,0.2253295890055597,0.825,0.334544581733644 12 | 10,0.6538461538461539,0.6243392573908354,9.999999747378752e-05,0.84,0.3381636653025635,0.825,0.4477896451950073 13 | 11,0.7435897435897436,0.5190218183904504,4.999999873689376e-05,0.88,0.28169388072467655,0.85,0.368922227807343 14 | 12,0.7628205128205128,0.509199502113729,4.999999873689376e-05,0.86,0.38195332580736246,0.625,0.5657830887008458 15 | 13,0.8012820512820513,0.44300355710751876,4.999999873689376e-05,0.84,0.3403030275062159,0.8,0.5642439979623305 16 | 14,0.7596153846153846,0.4715807143276414,4.999999873689376e-05,0.84,0.30886200551902876,0.825,0.4265915058553219 17 | 
15,0.7980769230769231,0.44622543029403555,4.999999873689376e-05,0.86,0.41074035389272895,0.85,0.3979616748788203 18 | 16,0.7660256410256411,0.4669350335696855,2.499999936844688e-05,0.9,0.3173244527891046,0.75,0.9147557567572221 19 | 17,0.7628205128205128,0.44687916026808894,2.499999936844688e-05,0.92,0.28970112764279465,0.85,0.6827180323190987 20 | 18,0.8205128205128205,0.4023574453193503,2.499999936844688e-05,0.82,0.3028110137660221,0.7,1.0741228700615466 21 | 19,0.7980769230769231,0.3919905330746984,2.499999936844688e-05,0.92,0.22044710259598616,0.725,1.149793932889588 22 | 20,0.8108974358974359,0.39505125439684025,2.499999936844688e-05,0.86,0.34080726272883255,0.725,0.7569769555469975 23 | -------------------------------------------------------------------------------- /model sandbox/run.py: -------------------------------------------------------------------------------- 1 | import os 2 | from itertools import chain 3 | 4 | from keras.callbacks import EarlyStopping, ReduceLROnPlateau, Callback 5 | from keras.optimizers import RMSprop, Adam 6 | 7 | import pandas as pd 8 | from keras.applications import Xception, ResNet50, InceptionV3, MobileNet, VGG19, DenseNet121, InceptionResNetV2, VGG16 9 | from keras.layers import LSTM, ConvLSTM2D 10 | from . import BuildModel_basic 11 | from . 
import DatasetBuilder 12 | 13 | from numpy.random import seed, shuffle 14 | 15 | from tensorflow.random import set_seed as set_random_seed 16 | from collections import defaultdict 17 | 18 |
# NOTE(review): Keras callback that evaluates the model on a fixed held-out
# test set after every epoch and records the loss/accuracy series, so the
# training routine below can report per-epoch test metrics alongside
# train/validation metrics.
19 | class TestCallback(Callback): 20 | def __init__(self, test_data): 21 | self.test_data = test_data 22 | self.test_loss = [] 23 | self.test_acc = [] 24 |
# NOTE(review): logs={} is a mutable default argument; it is never mutated
# here so it is harmless, but logs=None is the idiomatic form.
25 | def on_epoch_end(self, epoch, logs={}): 26 | x, y = self.test_data 27 | loss, acc = self.model.evaluate(x, y, batch_size=2, verbose=0) 28 | self.test_loss.append(loss) 29 | self.test_acc.append(acc) 30 | print('\nTesting loss: {}, acc: {}\n'.format(loss, acc)) 31 | 32 |
# NOTE(review): builds one CNN+LSTM configuration via BuildModel_basic,
# trains it from generators with EarlyStopping / ReduceLROnPlateau plus the
# TestCallback above, writes the per-epoch history to a CSV under the
# module-level global res_path, and returns a flat dict of summary metrics.
# Model.fit_generator is deprecated in TF 2.x (Model.fit accepts generators
# directly) — noted for a future upgrade; behavior unchanged here.
33 | def train_eval_network(dataset_name, train_gen, validate_gen, test_x, test_y, seq_len, epochs, batch_size, 34 | batch_epoch_ratio, initial_weights, size, cnn_arch, learning_rate, 35 | optimizer, cnn_train_type, pre_weights, lstm_conf, len_train, len_valid, dropout, classes, 36 | patience_es=15, patience_lr=5): 37 | """Build, compile, fit and evaluate one architecture configuration on a dataset.""" 38 | set_random_seed(2) 39 | seed(1) 40 | result = dict(dataset=dataset_name, cnn_train=cnn_train_type, 41 | cnn=cnn_arch.__name__, lstm=lstm_conf[0].__name__, epochs=epochs, 42 | learning_rate=learning_rate, batch_size=batch_size, dropout=dropout, 43 | optimizer=optimizer[0].__name__, initial_weights=initial_weights, seq_len=seq_len) 44 | print("run experimnt " + str(result)) 45 | model = BuildModel_basic.build(size=size, seq_len=seq_len, learning_rate=learning_rate, 46 | optimizer_class=optimizer, initial_weights=initial_weights, 47 | cnn_class=cnn_arch, pre_weights=pre_weights, lstm_conf=lstm_conf, 48 | cnn_train_type=cnn_train_type, dropout=dropout, classes=classes) 49 | 50 | # the network is trained from data generators; callbacks fire when the validation loss stops improving: 51 | # 1. early-stop training after n stalled epochs 52 | # 2. reduce the learning rate after k stalled epochs, where k < n 53 | test_history = TestCallback((test_x, test_y)) 54 | history = model.fit_generator( 55 | steps_per_epoch=int(float(len_train) / float(batch_size * batch_epoch_ratio)), 56 | generator=train_gen, 57 | epochs=epochs, 58 | validation_data=validate_gen, 59 | validation_steps=int(float(len_valid) / float(batch_size)), 60 | callbacks=[EarlyStopping(monitor='val_loss', min_delta=0.001, patience=patience_es, ), 61 | ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=patience_lr, min_lr=1e-8, verbose=1), 62 | test_history 63 | ] 64 | ) 65 | history_to_save = history.history 66 | history_to_save['test accuracy'] = test_history.test_acc 67 | history_to_save['test loss'] = test_history.test_loss 68 |
# encode the full configuration into the results file name ('.' -> 'd' keeps
# the name filesystem-safe); res_path is a module-level global defined later
# in this file.
69 | model_name = "" 70 | for k, v in result.items(): 71 | model_name = model_name + "_" + str(k) + "-" + str(v).replace(".", "d") 72 | model_path = os.path.join(res_path, model_name) 73 | pd.DataFrame(history_to_save).to_csv(model_path + "_train_results.csv") 74 | result['validation loss'] = min(history.history['val_loss']) 75 | result['validation accuracy'] = max(history.history['val_acc']) 76 | result['last validation loss'] = history.history['val_loss'][-1] 77 | result['last validation accuracy'] = history.history['val_acc'][-1] 78 | 79 | result['train accuracy'] = max(history.history['acc']) 80 | result['train loss'] = min(history.history['loss']) 81 | result['last train accuracy'] = history.history['acc'][-1] 82 | result['last train loss'] = history.history['loss'][-1] 83 | 84 | result['test accuracy'] = max(test_history.test_acc) 85 | result['test loss'] = min(test_history.test_loss) 86 | result['last test accuracy'] = test_history.test_acc[-1] 87 | result['last test loss'] = test_history.test_loss[-1] 88 | 89 | result['final lr'] = history.history['lr'][-1] 90 | result['total epochs'] = len(history.history['lr']) 91 | return result 92 | 93 | 94 | def get_generators(dataset_name, dataset_videos,
datasets_frames, fix_len, figure_size, force, classes=1, use_aug=False, 95 | use_crop=True, crop_dark=None): 96 |
# NOTE(review): builds train/validation batch generators plus an in-memory
# test set via DatasetBuilder. avg_length (frames per clip) is overridden by
# fix_len when one is given; batch_size is read from the module-level global.
train_path, valid_path, test_path, \ 97 | train_y, valid_y, test_y, \ 98 | avg_length = DatasetBuilder.createDataset(dataset_videos, datasets_frames, fix_len, force=force) 99 | 100 | if fix_len is not None: 101 | avg_length = fix_len 102 | crop_x_y = None 103 | if (crop_dark): 104 | crop_x_y = crop_dark[dataset_name] 105 | 106 | len_train, len_valid = len(train_path), len(valid_path) 107 | train_gen = DatasetBuilder.data_generator(train_path, train_y, batch_size, figure_size, avg_length, use_aug=use_aug, 108 | use_crop=use_crop, crop_x_y=crop_x_y, classes=classes) 109 | validate_gen = DatasetBuilder.data_generator(valid_path, valid_y, batch_size, figure_size, avg_length, 110 | use_aug=False, use_crop=False, crop_x_y=crop_x_y, classes=classes) 111 | test_x, test_y = DatasetBuilder.get_sequences(test_path, test_y, figure_size, avg_length, crop_x_y=crop_x_y, 112 | classes=classes) 113 | 114 | return train_gen, validate_gen, test_x, test_y, avg_length, len_train, len_valid 115 | 116 |
# NOTE(review): greedy, coordinate-descent-style search — one hyperparameter
# is swept at a time (in exp_params_order) while the best value found so far
# is kept for all the others. Depends on module-level globals: weights,
# datasets_videos, datasets_frames, force, batch_size.
117 | def hyper_tune_network(dataset_name, epochs, batch_size, batch_epoch_ratio, figure_size, initial_weights, lstm, 118 | cnns_arch, 119 | learning_rates, optimizers, cnn_train_types, dropouts, classes, use_augs, fix_lens): 120 | """Train several network parameter settings in a loop and select the best architecture for the final evaluation.""" 121 | results = [] 122 | 123 | best_accuracy = 0.0 124 | best_loss = 10.0 125 | # parameters held fixed during the tuning sweep 126 | params_to_train = dict(dataset_name=dataset_name, epochs=epochs, batch_size=batch_size, 127 | batch_epoch_ratio=batch_epoch_ratio, initial_weights=initial_weights, size=figure_size, 128 | pre_weights=weights, lstm_conf=lstm, classes=classes, patience_es=5, patience_lr=3) 129 | 130 | # the tuning does not evaluate all possible combinations: 131 | # following the importance order of the hyperparameters, at each step we choose the best performing parameters 132 | exp_params_order = ['cnn_arch', 'learning_rate', 'seq_len', 'use_aug', 'dropout', 133 | 'cnn_train_type'] # 'cnn_arch','learning_rate','fix_len','use_aug','dropout', 'optimizer','optimizer', 134 | # 135 | cnns_arch_values = list(cnns_arch.values()) 136 | best_params_train = dict(optimizer=optimizers[0], learning_rate=learning_rates[0], 137 | cnn_train_type=cnn_train_types[0], cnn_arch=cnns_arch_values[0], 138 | dropout=dropouts[0]) 139 |
# NOTE(review): cnn_arch here is the full cnns_arch.values() (not sliced
# [1:] like the other entries), so the first architecture is trained a
# second time during its sweep — confirm whether that is intentional.
exp_params_train = dict(optimizer=optimizers[1:], learning_rate=learning_rates[1:], 140 | cnn_train_type=cnn_train_types[1:], dropout=dropouts[1:], 141 | cnn_arch=cnns_arch.values()) 142 | 143 | best_params_data = dict(use_aug=use_augs[0], seq_len=fix_lens[0]) 144 | exp_params_data = dict(use_aug=use_augs[1:], seq_len=fix_lens[1:]) 145 | 146 | for exp_param in exp_params_order: 147 | temp_param = dict(best_params_train) 148 | temp_param_data = dict(best_params_data) 149 | if exp_param in exp_params_data: 150 | exp_params_ = exp_params_data 151 | else: 152 | exp_params_ = exp_params_train 153 | for param in exp_params_[exp_param]: 154 | if exp_param in best_params_data: 155 | temp_param_data[exp_param] = param 156 | else: 157 | temp_param[exp_param] = param 158 | 159 | print(temp_param_data) 160 | print(temp_param) 161 | params_to_train['train_gen'], params_to_train['validate_gen'], params_to_train['test_x'], \ 162 | params_to_train['test_y'], params_to_train['seq_len'], params_to_train['len_train'], \ 163 | params_to_train['len_valid'] = get_generators(dataset_name, datasets_videos[dataset_name], datasets_frames, 164 | temp_param_data['seq_len'], 165 | figure_size, use_aug=temp_param_data['use_aug'], force=force, 166 | classes=classes) 167 | 168 | params_to_train.update(temp_param) 169 | result = train_eval_network(**params_to_train) 170 | result.update(temp_param_data) 171 | print(result) 172 | results.append(result) 173 |
# NOTE(review): '>=' means a tie favors the later candidate; the loss
# condition in the trailing comment was deliberately disabled.
if result['test accuracy'] >= best_accuracy: # and result['test
loss'] <= best_loss : 174 | best_accuracy = result['test accuracy'] 175 | best_loss = result['test loss'] 176 | if exp_param in best_params_data: 177 | best_params_data[exp_param] = param 178 | else: 179 | best_params_train[exp_param] = param 180 | print("best accuracy update " + str(best_accuracy)) 181 | best_params_train.update(best_params_data) 182 | return best_params_train, results 183 | 184 |
# --- module-level configuration and driver: everything below runs at import time ---
185 | # static parameters for the network 186 | datasets_videos = dict( 187 | hocky=dict(hocky="data/raw_videos/HockeyFights"), 188 | violentflow=dict(violentflow="data/raw_videos/violentflow"), 189 | movies=dict(movies="data/raw_videos/movies") 190 | ) 191 | 192 | crop_dark = dict( 193 | hocky=(11, 38), 194 | violentflow=None, 195 | movies=None 196 | ) 197 | 198 | datasets_frames = "data/raw_frames" 199 | res_path = "results" 200 |
# NOTE(review): 244 is an unusual input size; 224 is the conventional
# ImageNet input for ResNet50/VGG19 — confirm whether 244 is intentional
# before changing anything.
figure_size = 244 201 | # split_ratio = 0.1 202 | batch_size = 2 203 | # batch_epoch_ratio = 0.5 #double the size because we use augmentation 204 | fix_len = 20 205 | initial_weights = 'glorot_uniform' 206 | weights = 'imagenet' 207 | force = True 208 | lstm = (ConvLSTM2D, dict(filters=256, kernel_size=(3, 3), padding='same', return_sequences=False)) 209 | classes = 1 210 | 211 | # hyperparameter search space for tuning the network 212 | cnns_arch = dict(ResNet50=ResNet50, InceptionV3=InceptionV3, VGG19=VGG19) # 213 | learning_rates = [1e-4, 1e-3] 214 | use_augs = [True, False, ] 215 | fix_lens = [20, 10] 216 | optimizers = [(RMSprop, {}), (Adam, {})] 217 | dropouts = [0.0, 0.5] 218 | cnn_train_types = ['retrain', 'static'] 219 | 220 | apply_hyper = True 221 | 222 | if apply_hyper: 223 | # the hyper-tuning simulates the architecture behavior on the 'hocky' dataset 224 | # batch_epoch_ratio can be reduced to make hyper-tuning faster with shorter epochs 225 | hyper, results = hyper_tune_network(dataset_name='hocky', epochs=30, 226 | batch_size=batch_size, batch_epoch_ratio=1, figure_size=figure_size, 227 | initial_weights=initial_weights, lstm=lstm, 228 | cnns_arch=cnns_arch, learning_rates=learning_rates, 229 | optimizers=optimizers, cnn_train_types=cnn_train_types, dropouts=dropouts, 230 | classes=classes, use_augs=use_augs, fix_lens=fix_lens) 231 | 232 | pd.DataFrame(results).to_csv("results_hyper.csv") 233 |
# NOTE(review): the trailing comma after hyper['seq_len'] is harmless — the
# right-hand side is still a 7-tuple matching the 7 targets.
cnn_arch, learning_rate, optimizer, cnn_train_type, dropout, use_aug, fix_len = hyper['cnn_arch'], \ 234 | hyper['learning_rate'], \ 235 | hyper['optimizer'], \ 236 | hyper['cnn_train_type'], \ 237 | hyper['dropout'], hyper['use_aug'], \ 238 | hyper['seq_len'], 239 | else: 240 | results = [] 241 | cnn_arch, learning_rate, optimizer, cnn_train_type, dropout, use_aug, fix_len = ResNet50, 0.0001, ( 242 | RMSprop, {}), 'retrain', 0.0, True, 20 243 | 244 | # apply the best architecture on all datasets with more epochs; results are re-written to CSV after each dataset 245 | for dataset_name, dataset_videos in datasets_videos.items(): 246 | train_gen, validate_gen, test_x, test_y, seq_len, len_train, len_valid = get_generators(dataset_name, 247 | dataset_videos, 248 | datasets_frames, fix_len, 249 | figure_size, 250 | force=force, 251 | classes=classes, 252 | use_aug=use_aug, 253 | use_crop=True, 254 | crop_dark=crop_dark) 255 | result = train_eval_network(epochs=50, dataset_name=dataset_name, train_gen=train_gen, validate_gen=validate_gen, 256 | test_x=test_x, test_y=test_y, seq_len=seq_len, batch_size=batch_size, 257 | batch_epoch_ratio=0.5, initial_weights=initial_weights, size=figure_size, 258 | cnn_arch=cnn_arch, learning_rate=learning_rate, 259 | optimizer=optimizer, cnn_train_type=cnn_train_type, 260 | pre_weights=weights, lstm_conf=lstm, len_train=len_train, len_valid=len_valid, 261 | dropout=dropout, classes=classes) 262 | results.append(result) 263 | pd.DataFrame(results).to_csv("results_datasets.csv") 264 | print(result) 265 | pd.DataFrame(results).to_csv("results.csv") 266 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1
| Flask==1.1.2 2 | matplotlib==3.3.2 3 | h5py==2.10.0 4 | pandas==1.1.3 5 | termcolor==1.1.0 6 | tensorflow==2.3.1 7 | numpy==1.19.2 8 | scipy==1.5.3 9 | opencv_python==4.4.0.44 10 | Keras==2.4.3 11 | scikit_learn==0.23.2 12 | -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 11 | 12 | Multiple Live Streaming 13 | 14 | 15 |
16 |
17 | 18 |
19 |

Multiple Live Streaming

20 | 21 |
22 | 23 | 24 |
25 |
26 | 27 | 28 | --------------------------------------------------------------------------------