└── Automatic Number Plate Detection.ipynb /Automatic Number Plate Detection.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "id": "QUANWN3rpfC9" 7 | }, 8 | "source": [ 9 | "# 0. Setup Paths" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": { 16 | "id": "146BB11JpfDA" 17 | }, 18 | "outputs": [], 19 | "source": [ 20 | "import os" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "metadata": { 27 | "id": "42hJEdo_pfDB" 28 | }, 29 | "outputs": [], 30 | "source": [ 31 | "CUSTOM_MODEL_NAME = 'my_ssd_mobnet' \n", 32 | "PRETRAINED_MODEL_NAME = 'ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8'\n", 33 | "PRETRAINED_MODEL_URL = 'http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8.tar.gz'\n", 34 | "TF_RECORD_SCRIPT_NAME = 'generate_tfrecord.py'\n", 35 | "LABEL_MAP_NAME = 'label_map.pbtxt'" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "metadata": { 42 | "id": "hbPhYVy_pfDB" 43 | }, 44 | "outputs": [], 45 | "source": [ 46 | "paths = {\n", 47 | " 'WORKSPACE_PATH': os.path.join('Tensorflow', 'workspace'),\n", 48 | " 'SCRIPTS_PATH': os.path.join('Tensorflow','scripts'),\n", 49 | " 'APIMODEL_PATH': os.path.join('Tensorflow','models'),\n", 50 | " 'ANNOTATION_PATH': os.path.join('Tensorflow', 'workspace','annotations'),\n", 51 | " 'IMAGE_PATH': os.path.join('Tensorflow', 'workspace','images'),\n", 52 | " 'MODEL_PATH': os.path.join('Tensorflow', 'workspace','models'),\n", 53 | " 'PRETRAINED_MODEL_PATH': os.path.join('Tensorflow', 'workspace','pre-trained-models'),\n", 54 | " 'CHECKPOINT_PATH': os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME), \n", 55 | " 'OUTPUT_PATH': os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME, 'export'), \n", 56 | " 'TFJS_PATH':os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME, 'tfjsexport'), \n", 57 | " 'TFLITE_PATH':os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME, 'tfliteexport'), \n", 58 | " 'PROTOC_PATH':os.path.join('Tensorflow','protoc')\n", 59 | " }" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": null, 65 | "metadata": { 66 | "id": "LwhWZMI0pfDC" 67 | }, 68 | "outputs": [], 69 | "source": [ 70 | "files = {\n", 71 | " 'PIPELINE_CONFIG':os.path.join('Tensorflow', 'workspace','models', CUSTOM_MODEL_NAME, 'pipeline.config'),\n", 72 | " 'TF_RECORD_SCRIPT': os.path.join(paths['SCRIPTS_PATH'], TF_RECORD_SCRIPT_NAME), \n", 73 | " 'LABELMAP': os.path.join(paths['ANNOTATION_PATH'], LABEL_MAP_NAME)\n", 74 | "}" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": { 81 | "id": "HR-TfDGrpfDC" 82 | }, 83 | "outputs": [], 84 | "source": [ 85 | "for path in paths.values():\n", 86 | " if not os.path.exists(path):\n", 87 | " if os.name == 'posix':\n", 88 | " !mkdir -p {path}\n", 89 | " if os.name == 'nt':\n", 90 | " !mkdir {path}" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": { 96 | "id": "OLU-rs_ipfDE" 97 | }, 98 | "source": [ 99 | "# 1. Download TF Models Pretrained Models from Tensorflow Model Zoo and Install TFOD" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "# https://www.tensorflow.org/install/source_windows" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": null, 114 | "metadata": { 115 | "id": "K-Cmz2edpfDE", 116 | "scrolled": true 117 | }, 118 | "outputs": [], 119 | "source": [ 120 | "if os.name=='nt':\n", 121 | " !pip install wget\n", 122 | " import wget" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": { 129 | "id": "iA1DIq5OpfDE" 130 | }, 131 | "outputs": [], 132 | "source": [ 133 | "if not os.path.exists(os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection')):\n", 134 | " !git clone https://github.com/tensorflow/models {paths['APIMODEL_PATH']}" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": { 141 | "id": "rJjMHbnDs3Tv" 142 | }, 143 | "outputs": [], 144 | "source": [ 145 | "# Install Tensorflow Object Detection \n", 146 | "if os.name=='posix': \n", 147 | " !apt-get install protobuf-compiler\n", 148 | " !cd Tensorflow/models/research && protoc object_detection/protos/*.proto --python_out=. && cp object_detection/packages/tf2/setup.py . && python -m pip install . \n", 149 | " \n", 150 | "if os.name=='nt':\n", 151 | " url=\"https://github.com/protocolbuffers/protobuf/releases/download/v3.15.6/protoc-3.15.6-win64.zip\"\n", 152 | " wget.download(url)\n", 153 | " !move protoc-3.15.6-win64.zip {paths['PROTOC_PATH']}\n", 154 | " !cd {paths['PROTOC_PATH']} && tar -xf protoc-3.15.6-win64.zip\n", 155 | " os.environ['PATH'] += os.pathsep + os.path.abspath(os.path.join(paths['PROTOC_PATH'], 'bin')) \n", 156 | " !cd Tensorflow/models/research && protoc object_detection/protos/*.proto --python_out=. && copy object_detection\\\\packages\\\\tf2\\\\setup.py setup.py && python setup.py build && python setup.py install\n", 157 | " !cd Tensorflow/models/research/slim && pip install -e . " 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "metadata": { 164 | "scrolled": true 165 | }, 166 | "outputs": [], 167 | "source": [ 168 | "!pip list" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": { 175 | "scrolled": true 176 | }, 177 | "outputs": [], 178 | "source": [ 179 | "VERIFICATION_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'builders', 'model_builder_tf2_test.py')\n", 180 | "# Verify Installation\n", 181 | "!python {VERIFICATION_SCRIPT}" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": null, 187 | "metadata": {}, 188 | "outputs": [], 189 | "source": [ 190 | "!pip install tensorflow==2.4.1 tensorflow-gpu==2.4.1 --upgrade" 191 | ] 192 | }, 193 | { 194 | "cell_type": "code", 195 | "execution_count": null, 196 | "metadata": {}, 197 | "outputs": [], 198 | "source": [ 199 | "!pip uninstall protobuf matplotlib -y\n", 200 | "!pip install protobuf matplotlib==3.2" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": null, 206 | "metadata": {}, 207 | "outputs": [], 208 | "source": [ 209 | "!pip install Pillow" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "metadata": {}, 216 | "outputs": [], 217 | "source": [ 218 | "!pip install pyyaml" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": null, 224 | "metadata": {}, 225 | "outputs": [], 226 | "source": [ 227 | "!pip list" 228 | ] 229 | }, 230 | { 231 | "cell_type": "code", 232 | "execution_count": null, 233 | "metadata": {}, 234 | "outputs": [], 235 | "source": [ 236 | "import object_detection" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": null, 242 | "metadata": { 243 | "colab": { 244 | "base_uri": "https://localhost:8080/" 245 | }, 246 | "id": "csofht2npfDE", 247 | "outputId": "ff5471b2-bed2-43f2-959c-327a706527b6" 248 | }, 249 | "outputs": [], 250 | "source": [ 251 | "if os.name =='posix':\n", 252 | " !wget {PRETRAINED_MODEL_URL}\n", 253 | " !mv {PRETRAINED_MODEL_NAME+'.tar.gz'} {paths['PRETRAINED_MODEL_PATH']}\n", 254 | " !cd {paths['PRETRAINED_MODEL_PATH']} && tar -zxvf {PRETRAINED_MODEL_NAME+'.tar.gz'}\n", 255 | "if os.name == 'nt':\n", 256 | " wget.download(PRETRAINED_MODEL_URL)\n", 257 | " !move {PRETRAINED_MODEL_NAME+'.tar.gz'} {paths['PRETRAINED_MODEL_PATH']}\n", 258 | " !cd {paths['PRETRAINED_MODEL_PATH']} && tar -zxvf {PRETRAINED_MODEL_NAME+'.tar.gz'}" 259 | ] 260 | }, 261 | { 262 | "cell_type": "markdown", 263 | "metadata": { 264 | "id": "M5KJTnkfpfDC" 265 | }, 266 | "source": [ 267 | "# 2. Create Label Map" 268 | ] 269 | }, 270 | { 271 | "cell_type": "code", 272 | "execution_count": null, 273 | "metadata": { 274 | "id": "p1BVDWo7pfDC" 275 | }, 276 | "outputs": [], 277 | "source": [ 278 | "labels = [{'name':'licence', 'id':1}]\n", 279 | "\n", 280 | "with open(files['LABELMAP'], 'w') as f:\n", 281 | " for label in labels:\n", 282 | " f.write('item { \\n')\n", 283 | " f.write('\\tname:\\'{}\\'\\n'.format(label['name']))\n", 284 | " f.write('\\tid:{}\\n'.format(label['id']))\n", 285 | " f.write('}\\n')" 286 | ] 287 | }, 288 | { 289 | "cell_type": "markdown", 290 | "metadata": { 291 | "id": "C88zyVELpfDC" 292 | }, 293 | "source": [ 294 | "# 3. Create TF records" 295 | ] 296 | }, 297 | { 298 | "cell_type": "code", 299 | "execution_count": null, 300 | "metadata": { 301 | "colab": { 302 | "base_uri": "https://localhost:8080/" 303 | }, 304 | "id": "kvf5WccwrFGq", 305 | "outputId": "49902aeb-0bd7-4298-e1a0-5b4a64eb2064" 306 | }, 307 | "outputs": [], 308 | "source": [ 309 | "# OPTIONAL IF RUNNING ON COLAB\n", 310 | "ARCHIVE_FILES = os.path.join(paths['IMAGE_PATH'], 'archive.tar.gz')\n", 311 | "if os.path.exists(ARCHIVE_FILES):\n", 312 | " !tar -zxvf {ARCHIVE_FILES}" 313 | ] 314 | }, 315 | { 316 | "cell_type": "code", 317 | "execution_count": null, 318 | "metadata": { 319 | "colab": { 320 | "base_uri": "https://localhost:8080/" 321 | }, 322 | "id": "KWpb_BVUpfDD", 323 | "outputId": "56ce2a3f-3933-4ee6-8a9d-d5ec65f7d73c" 324 | }, 325 | "outputs": [], 326 | "source": [ 327 | "if not os.path.exists(files['TF_RECORD_SCRIPT']):\n", 328 | " !git clone https://github.com/nicknochnack/GenerateTFRecord {paths['SCRIPTS_PATH']}" 329 | ] 330 | }, 331 | { 332 | "cell_type": "code", 333 | "execution_count": null, 334 | "metadata": {}, 335 | "outputs": [], 336 | "source": [ 337 | "!pip install pytz" 338 | ] 339 | }, 340 | { 341 | "cell_type": "code", 342 | "execution_count": null, 343 | "metadata": { 344 | "colab": { 345 | "base_uri": "https://localhost:8080/" 346 | }, 347 | "id": "UPFToGZqpfDD", 348 | "outputId": "0ebb456f-aadc-4a1f-96e6-fbfec1923e1c" 349 | }, 350 | "outputs": [], 351 | "source": [ 352 | "!python {files['TF_RECORD_SCRIPT']} -x {os.path.join(paths['IMAGE_PATH'], 'train')} -l {files['LABELMAP']} -o {os.path.join(paths['ANNOTATION_PATH'], 'train.record')} \n", 353 | "!python {files['TF_RECORD_SCRIPT']} -x {os.path.join(paths['IMAGE_PATH'], 'test')} -l {files['LABELMAP']} -o {os.path.join(paths['ANNOTATION_PATH'], 'test.record')} " 354 | ] 355 | }, 356 | { 357 | "cell_type": "markdown", 358 | "metadata": { 359 | "id": "qT4QU7pLpfDE" 360 | }, 361 | "source": [ 362 | "# 4. Copy Model Config to Training Folder" 363 | ] 364 | }, 365 | { 366 | "cell_type": "code", 367 | "execution_count": null, 368 | "metadata": { 369 | "id": "cOjuTFbwpfDF" 370 | }, 371 | "outputs": [], 372 | "source": [ 373 | "if os.name =='posix':\n", 374 | " !cp {os.path.join(paths['PRETRAINED_MODEL_PATH'], PRETRAINED_MODEL_NAME, 'pipeline.config')} {os.path.join(paths['CHECKPOINT_PATH'])}\n", 375 | "if os.name == 'nt':\n", 376 | " !copy {os.path.join(paths['PRETRAINED_MODEL_PATH'], PRETRAINED_MODEL_NAME, 'pipeline.config')} {os.path.join(paths['CHECKPOINT_PATH'])}" 377 | ] 378 | }, 379 | { 380 | "cell_type": "markdown", 381 | "metadata": { 382 | "id": "Ga8gpNslpfDF" 383 | }, 384 | "source": [ 385 | "# 5. Update Config For Transfer Learning" 386 | ] 387 | }, 388 | { 389 | "cell_type": "code", 390 | "execution_count": null, 391 | "metadata": { 392 | "id": "Z9hRrO_ppfDF" 393 | }, 394 | "outputs": [], 395 | "source": [ 396 | "import tensorflow as tf\n", 397 | "from object_detection.utils import config_util\n", 398 | "from object_detection.protos import pipeline_pb2\n", 399 | "from google.protobuf import text_format" 400 | ] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "execution_count": null, 405 | "metadata": { 406 | "id": "c2A0mn4ipfDF" 407 | }, 408 | "outputs": [], 409 | "source": [ 410 | "config = config_util.get_configs_from_pipeline_file(files['PIPELINE_CONFIG'])" 411 | ] 412 | }, 413 | { 414 | "cell_type": "code", 415 | "execution_count": null, 416 | "metadata": { 417 | "colab": { 418 | "base_uri": "https://localhost:8080/" 419 | }, 420 | "id": "uQA13-afpfDF", 421 | "outputId": "907496a4-a39d-4b13-8c2c-e5978ecb1f10" 422 | }, 423 | "outputs": [], 424 | "source": [ 425 | "config" 426 | ] 427 | }, 428 | { 429 | "cell_type": "code", 430 | "execution_count": null, 431 | "metadata": { 432 | "id": "9vK5lotDpfDF" 433 | }, 434 | "outputs": [], 435 | "source": [ 436 | "pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()\n", 437 | "with tf.io.gfile.GFile(files['PIPELINE_CONFIG'], \"r\") as f: \n", 438 | " proto_str = f.read() \n", 439 | " text_format.Merge(proto_str, pipeline_config) " 440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": null, 445 | "metadata": { 446 | "id": "rP43Ph0JpfDG" 447 | }, 448 | "outputs": [], 449 | "source": [ 450 | "pipeline_config.model.ssd.num_classes = len(labels)\n", 451 | "pipeline_config.train_config.batch_size = 4\n", 452 | "pipeline_config.train_config.fine_tune_checkpoint = os.path.join(paths['PRETRAINED_MODEL_PATH'], PRETRAINED_MODEL_NAME, 'checkpoint', 'ckpt-0')\n", 453 | "pipeline_config.train_config.fine_tune_checkpoint_type = \"detection\"\n", 454 | "pipeline_config.train_input_reader.label_map_path= files['LABELMAP']\n", 455 | "pipeline_config.train_input_reader.tf_record_input_reader.input_path[:] = [os.path.join(paths['ANNOTATION_PATH'], 'train.record')]\n", 456 | "pipeline_config.eval_input_reader[0].label_map_path = files['LABELMAP']\n", 457 | "pipeline_config.eval_input_reader[0].tf_record_input_reader.input_path[:] = [os.path.join(paths['ANNOTATION_PATH'], 'test.record')]" 458 | ] 459 | }, 460 | { 461 | "cell_type": "code", 462 | "execution_count": null, 463 | "metadata": { 464 | "id": "oJvfgwWqpfDG" 465 | }, 466 | "outputs": [], 467 | "source": [ 468 | "config_text = text_format.MessageToString(pipeline_config) \n", 469 | "with tf.io.gfile.GFile(files['PIPELINE_CONFIG'], \"wb\") as f: \n", 470 | " f.write(config_text) " 471 | ] 472 | }, 473 | { 474 | "cell_type": "markdown", 475 | "metadata": { 476 | "id": "Zr3ON7xMpfDG" 477 | }, 478 | "source": [ 479 | "# 6. Train the model" 480 | ] 481 | }, 482 | { 483 | "cell_type": "code", 484 | "execution_count": null, 485 | "metadata": { 486 | "id": "B-Y2UQmQpfDG" 487 | }, 488 | "outputs": [], 489 | "source": [ 490 | "TRAINING_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'model_main_tf2.py')" 491 | ] 492 | }, 493 | { 494 | "cell_type": "code", 495 | "execution_count": null, 496 | "metadata": { 497 | "id": "jMP2XDfQpfDH" 498 | }, 499 | "outputs": [], 500 | "source": [ 501 | "command = \"python {} --model_dir={} --pipeline_config_path={} --num_train_steps=10000\".format(TRAINING_SCRIPT, paths['CHECKPOINT_PATH'],files['PIPELINE_CONFIG'])" 502 | ] 503 | }, 504 | { 505 | "cell_type": "code", 506 | "execution_count": null, 507 | "metadata": { 508 | "colab": { 509 | "base_uri": "https://localhost:8080/" 510 | }, 511 | "id": "A4OXXi-ApfDH", 512 | "outputId": "117a0e83-012b-466e-b7a6-ccaa349ac5ab" 513 | }, 514 | "outputs": [], 515 | "source": [ 516 | "print(command)" 517 | ] 518 | }, 519 | { 520 | "cell_type": "code", 521 | "execution_count": null, 522 | "metadata": { 523 | "colab": { 524 | "base_uri": "https://localhost:8080/" 525 | }, 526 | "id": "i3ZsJR-qpfDH", 527 | "outputId": "cabec5e1-45e6-4f2f-d9cf-297d9c1d0225" 528 | }, 529 | "outputs": [], 530 | "source": [ 531 | "!{command}" 532 | ] 533 | }, 534 | { 535 | "cell_type": "markdown", 536 | "metadata": { 537 | "id": "4_YRZu7npfDH" 538 | }, 539 | "source": [ 540 | "# 7. Evaluate the Model" 541 | ] 542 | }, 543 | { 544 | "cell_type": "code", 545 | "execution_count": null, 546 | "metadata": { 547 | "id": "80L7-fdPpfDH" 548 | }, 549 | "outputs": [], 550 | "source": [ 551 | "command = \"python {} --model_dir={} --pipeline_config_path={} --checkpoint_dir={}\".format(TRAINING_SCRIPT, paths['CHECKPOINT_PATH'],files['PIPELINE_CONFIG'], paths['CHECKPOINT_PATH'])" 552 | ] 553 | }, 554 | { 555 | "cell_type": "code", 556 | "execution_count": null, 557 | "metadata": { 558 | "colab": { 559 | "base_uri": "https://localhost:8080/" 560 | }, 561 | "id": "lYsgEPx9pfDH", 562 | "outputId": "8632d48b-91d2-45d9-bcb8-c1b172bf6eed" 563 | }, 564 | "outputs": [], 565 | "source": [ 566 | "print(command)" 567 | ] 568 | }, 569 | { 570 | "cell_type": "code", 571 | "execution_count": null, 572 | "metadata": { 573 | "id": "lqTV2jGBpfDH" 574 | }, 575 | "outputs": [], 576 | "source": [ 577 | "!{command}" 578 | ] 579 | }, 580 | { 581 | "cell_type": "markdown", 582 | "metadata": { 583 | "id": "orvRk02UpfDI" 584 | }, 585 | "source": [ 586 | "# 8. Load Train Model From Checkpoint" 587 | ] 588 | }, 589 | { 590 | "cell_type": "code", 591 | "execution_count": null, 592 | "metadata": { 593 | "id": "8TYk4_oIpfDI" 594 | }, 595 | "outputs": [], 596 | "source": [ 597 | "import os\n", 598 | "import tensorflow as tf\n", 599 | "from object_detection.utils import label_map_util\n", 600 | "from object_detection.utils import visualization_utils as viz_utils\n", 601 | "from object_detection.builders import model_builder\n", 602 | "from object_detection.utils import config_util" 603 | ] 604 | }, 605 | { 606 | "cell_type": "code", 607 | "execution_count": null, 608 | "metadata": {}, 609 | "outputs": [], 610 | "source": [ 611 | "# Prevent GPU complete consumption\n", 612 | "gpus = tf.config.list_physical_devices('GPU')\n", 613 | "if gpus:\n", 614 | " try: \n", 615 | " tf.config.experimental.set_virtual_device_configuration(\n", 616 | " gpus[0], [tf.config.experimental.VirtualDeviceConfiguration(memory_limit=5120)])\n", 617 | " except RunTimeError as e:\n", 618 | " print(e)" 619 | ] 620 | }, 621 | { 622 | "cell_type": "code", 623 | "execution_count": null, 624 | "metadata": { 625 | "id": "tDnQg-cYpfDI" 626 | }, 627 | "outputs": [], 628 | "source": [ 629 | "# Load pipeline config and build a detection model\n", 630 | "configs = config_util.get_configs_from_pipeline_file(files['PIPELINE_CONFIG'])\n", 631 | "detection_model = model_builder.build(model_config=configs['model'], is_training=False)\n", 632 | "\n", 633 | "# Restore checkpoint\n", 634 | "ckpt = tf.compat.v2.train.Checkpoint(model=detection_model)\n", 635 | "ckpt.restore(os.path.join(paths['CHECKPOINT_PATH'], 'ckpt-11')).expect_partial()\n", 636 | "\n", 637 | "@tf.function\n", 638 | "def detect_fn(image):\n", 639 | " image, shapes = detection_model.preprocess(image)\n", 640 | " prediction_dict = detection_model.predict(image, shapes)\n", 641 | " detections = detection_model.postprocess(prediction_dict, shapes)\n", 642 | " return detections" 643 | ] 644 | }, 645 | { 646 | "cell_type": "markdown", 647 | "metadata": { 648 | "id": "0EmsmbBZpfDI" 649 | }, 650 | "source": [ 651 | "# 9. Detect from an Image" 652 | ] 653 | }, 654 | { 655 | "cell_type": "code", 656 | "execution_count": null, 657 | "metadata": { 658 | "id": "Y_MKiuZ4pfDI" 659 | }, 660 | "outputs": [], 661 | "source": [ 662 | "import cv2 \n", 663 | "import numpy as np\n", 664 | "from matplotlib import pyplot as plt\n", 665 | "%matplotlib inline" 666 | ] 667 | }, 668 | { 669 | "cell_type": "code", 670 | "execution_count": null, 671 | "metadata": { 672 | "id": "cBDbIhNapfDI" 673 | }, 674 | "outputs": [], 675 | "source": [ 676 | "category_index = label_map_util.create_category_index_from_labelmap(files['LABELMAP'])" 677 | ] 678 | }, 679 | { 680 | "cell_type": "code", 681 | "execution_count": null, 682 | "metadata": { 683 | "id": "Lx3crOhOzITB" 684 | }, 685 | "outputs": [], 686 | "source": [ 687 | "IMAGE_PATH = os.path.join(paths['IMAGE_PATH'], 'testimg2.jpg')" 688 | ] 689 | }, 690 | { 691 | "cell_type": "code", 692 | "execution_count": null, 693 | "metadata": { 694 | "colab": { 695 | "base_uri": "https://localhost:8080/", 696 | "height": 269 697 | }, 698 | "id": "Tpzn1SMry1yK", 699 | "outputId": "c392a2c5-10fe-4fc4-9998-a1d4c7db2bd3" 700 | }, 701 | "outputs": [], 702 | "source": [ 703 | "img = cv2.imread(IMAGE_PATH)\n", 704 | "image_np = np.array(img)\n", 705 | "\n", 706 | "input_tensor = tf.convert_to_tensor(np.expand_dims(image_np, 0), dtype=tf.float32)\n", 707 | "detections = detect_fn(input_tensor)\n", 708 | "\n", 709 | "num_detections = int(detections.pop('num_detections'))\n", 710 | "detections = {key: value[0, :num_detections].numpy()\n", 711 | " for key, value in detections.items()}\n", 712 | "detections['num_detections'] = num_detections\n", 713 | "\n", 714 | "# detection_classes should be ints.\n", 715 | "detections['detection_classes'] = detections['detection_classes'].astype(np.int64)\n", 716 | "\n", 717 | "label_id_offset = 1\n", 718 | "image_np_with_detections = image_np.copy()\n", 719 | "\n", 720 | "viz_utils.visualize_boxes_and_labels_on_image_array(\n", 721 | " image_np_with_detections,\n", 722 | " detections['detection_boxes'],\n", 723 | " detections['detection_classes']+label_id_offset,\n", 724 | " detections['detection_scores'],\n", 725 | " category_index,\n", 726 | " use_normalized_coordinates=True,\n", 727 | " max_boxes_to_draw=5,\n", 728 | " min_score_thresh=.8,\n", 729 | " agnostic_mode=False)\n", 730 | "\n", 731 | "plt.imshow(cv2.cvtColor(image_np_with_detections, cv2.COLOR_BGR2RGB))\n", 732 | "plt.show()" 733 | ] 734 | }, 735 | { 736 | "cell_type": "code", 737 | "execution_count": null, 738 | "metadata": {}, 739 | "outputs": [], 740 | "source": [ 741 | "detections.keys()" 742 | ] 743 | }, 744 | { 745 | "cell_type": "markdown", 746 | "metadata": {}, 747 | "source": [ 748 | "# Apply OCR to Detection" 749 | ] 750 | }, 751 | { 752 | "cell_type": "code", 753 | "execution_count": null, 754 | "metadata": {}, 755 | "outputs": [], 756 | "source": [ 757 | "!pip install easyocr" 758 | ] 759 | }, 760 | { 761 | "cell_type": "code", 762 | "execution_count": null, 763 | "metadata": {}, 764 | "outputs": [], 765 | "source": [ 766 | "!pip install torch==1.8.1+cu111 torchvision==0.9.1+cu111 torchaudio===0.8.1 -f https://download.pytorch.org/whl/torch_stable.html" 767 | ] 768 | }, 769 | { 770 | "cell_type": "code", 771 | "execution_count": null, 772 | "metadata": {}, 773 | "outputs": [], 774 | "source": [ 775 | "import easyocr" 776 | ] 777 | }, 778 | { 779 | "cell_type": "code", 780 | "execution_count": null, 781 | "metadata": {}, 782 | "outputs": [], 783 | "source": [ 784 | "detection_threshold = 0.7" 785 | ] 786 | }, 787 | { 788 | "cell_type": "code", 789 | "execution_count": null, 790 | "metadata": {}, 791 | "outputs": [], 792 | "source": [ 793 | "image = image_np_with_detections\n", 794 | "scores = list(filter(lambda x: x> detection_threshold, detections['detection_scores']))\n", 795 | "boxes = detections['detection_boxes'][:len(scores)]\n", 796 | "classes = detections['detection_classes'][:len(scores)]" 797 | ] 798 | }, 799 | { 800 | "cell_type": "code", 801 | "execution_count": null, 802 | "metadata": {}, 803 | "outputs": [], 804 | "source": [ 805 | "width = image.shape[1]\n", 806 | "height = image.shape[0]" 807 | ] 808 | }, 809 | { 810 | "cell_type": "code", 811 | "execution_count": null, 812 | "metadata": {}, 813 | "outputs": [], 814 | "source": [ 815 | "# Apply ROI filtering and OCR\n", 816 | "for idx, box in enumerate(boxes):\n", 817 | " print(box)\n", 818 | " roi = box*[height, width, height, width]\n", 819 | " print(roi)\n", 820 | " region = image[int(roi[0]):int(roi[2]),int(roi[1]):int(roi[3])]\n", 821 | " reader = easyocr.Reader(['en'])\n", 822 | " ocr_result = reader.readtext(region)\n", 823 | " print(ocr_result)\n", 824 | " plt.imshow(cv2.cvtColor(region, cv2.COLOR_BGR2RGB))" 825 | ] 826 | }, 827 | { 828 | "cell_type": "code", 829 | "execution_count": null, 830 | "metadata": {}, 831 | "outputs": [], 832 | "source": [ 833 | "for result in ocr_result:\n", 834 | " print(np.sum(np.subtract(result[0][2],result[0][1])))\n", 835 | " print(result[1])" 836 | ] 837 | }, 838 | { 839 | "cell_type": "markdown", 840 | "metadata": {}, 841 | "source": [ 842 | "# OCR Filtering" 843 | ] 844 | }, 845 | { 846 | "cell_type": "code", 847 | "execution_count": null, 848 | "metadata": {}, 849 | "outputs": [], 850 | "source": [ 851 | "region_threshold = 0.05" 852 | ] 853 | }, 854 | { 855 | "cell_type": "code", 856 | "execution_count": null, 857 | "metadata": {}, 858 | "outputs": [], 859 | "source": [ 860 | "def filter_text(region, ocr_result, region_threshold):\n", 861 | " rectangle_size = region.shape[0]*region.shape[1]\n", 862 | " \n", 863 | " plate = [] \n", 864 | " for result in ocr_result:\n", 865 | " length = np.sum(np.subtract(result[0][1], result[0][0]))\n", 866 | " height = np.sum(np.subtract(result[0][2], result[0][1]))\n", 867 | " \n", 868 | " if length*height / rectangle_size > region_threshold:\n", 869 | " plate.append(result[1])\n", 870 | " return plate" 871 | ] 872 | }, 873 | { 874 | "cell_type": "code", 875 | "execution_count": null, 876 | "metadata": {}, 877 | "outputs": [], 878 | "source": [ 879 | "filter_text(region, ocr_result, region_threshold)" 880 | ] 881 | }, 882 | { 883 | "cell_type": "markdown", 884 | "metadata": {}, 885 | "source": [ 886 | "# Bring it Together" 887 | ] 888 | }, 889 | { 890 | "cell_type": "code", 891 | "execution_count": null, 892 | "metadata": {}, 893 | "outputs": [], 894 | "source": [ 895 | "region_threshold = 0.6" 896 | ] 897 | }, 898 | { 899 | "cell_type": "code", 900 | "execution_count": null, 901 | "metadata": {}, 902 | "outputs": [], 903 | "source": [ 904 | "def ocr_it(image, detections, detection_threshold, region_threshold):\n", 905 | " \n", 906 | " # Scores, boxes and classes above threhold\n", 907 | " scores = list(filter(lambda x: x> detection_threshold, detections['detection_scores']))\n", 908 | " boxes = detections['detection_boxes'][:len(scores)]\n", 909 | " classes = detections['detection_classes'][:len(scores)]\n", 910 | " \n", 911 | " # Full image dimensions\n", 912 | " width = image.shape[1]\n", 913 | " height = image.shape[0]\n", 914 | " \n", 915 | " # Apply ROI filtering and OCR\n", 916 | " for idx, box in enumerate(boxes):\n", 917 | " roi = box*[height, width, height, width]\n", 918 | " region = image[int(roi[0]):int(roi[2]),int(roi[1]):int(roi[3])]\n", 919 | " reader = easyocr.Reader(['en'])\n", 920 | " ocr_result = reader.readtext(region)\n", 921 | " \n", 922 | " text = filter_text(region, ocr_result, region_threshold)\n", 923 | " \n", 924 | " plt.imshow(cv2.cvtColor(region, cv2.COLOR_BGR2RGB))\n", 925 | " plt.show()\n", 926 | " print(text)\n", 927 | " return text, region" 928 | ] 929 | }, 930 | { 931 | "cell_type": "code", 932 | "execution_count": null, 933 | "metadata": { 934 | "scrolled": true 935 | }, 936 | "outputs": [], 937 | "source": [ 938 | "text, region = ocr_it(image_np_with_detections, detections, detection_threshold, region_threshold)" 939 | ] 940 | }, 941 | { 942 | "cell_type": "markdown", 943 | "metadata": {}, 944 | "source": [ 945 | "# Save Results" 946 | ] 947 | }, 948 | { 949 | "cell_type": "code", 950 | "execution_count": null, 951 | "metadata": {}, 952 | "outputs": [], 953 | "source": [ 954 | "import csv\n", 955 | "import uuid" 956 | ] 957 | }, 958 | { 959 | "cell_type": "code", 960 | "execution_count": null, 961 | "metadata": {}, 962 | "outputs": [], 963 | "source": [ 964 | "'{}.jpg'.format(uuid.uuid1())" 965 | ] 966 | }, 967 | { 968 | "cell_type": "code", 969 | "execution_count": null, 970 | "metadata": {}, 971 | "outputs": [], 972 | "source": [ 973 | "def save_results(text, region, csv_filename, folder_path):\n", 974 | " img_name = '{}.jpg'.format(uuid.uuid1())\n", 975 | " \n", 976 | " cv2.imwrite(os.path.join(folder_path, img_name), region)\n", 977 | " \n", 978 | " with open(csv_filename, mode='a', newline='') as f:\n", 979 | " csv_writer = csv.writer(f, delimiter=',', quotechar='\"', quoting=csv.QUOTE_MINIMAL)\n", 980 | " csv_writer.writerow([img_name, text])" 981 | ] 982 | }, 983 | { 984 | "cell_type": "code", 985 | "execution_count": null, 986 | "metadata": {}, 987 | "outputs": [], 988 | "source": [ 989 | "region" 990 | ] 991 | }, 992 | { 993 | "cell_type": "code", 994 | "execution_count": null, 995 | "metadata": {}, 996 | "outputs": [], 997 | "source": [ 998 | "save_results(text, region, 'detection_results.csv', 'Detection_Images')" 999 | ] 1000 | }, 1001 | { 1002 | "cell_type": "markdown", 1003 | "metadata": { 1004 | "id": "IsNAaYAo0WVL" 1005 | }, 1006 | "source": [ 1007 | "# 10. Real Time Detections from your Webcam" 1008 | ] 1009 | }, 1010 | { 1011 | "cell_type": "code", 1012 | "execution_count": null, 1013 | "metadata": {}, 1014 | "outputs": [], 1015 | "source": [ 1016 | "!pip uninstall opencv-python-headless -y" 1017 | ] 1018 | }, 1019 | { 1020 | "cell_type": "code", 1021 | "execution_count": null, 1022 | "metadata": { 1023 | "id": "o_grs6OGpfDJ" 1024 | }, 1025 | "outputs": [], 1026 | "source": [ 1027 | "cap = cv2.VideoCapture(0)\n", 1028 | "width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))\n", 1029 | "height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))\n", 1030 | "\n", 1031 | "while cap.isOpened(): \n", 1032 | " ret, frame = cap.read()\n", 1033 | " image_np = np.array(frame)\n", 1034 | " \n", 1035 | " input_tensor = tf.convert_to_tensor(np.expand_dims(image_np, 0), dtype=tf.float32)\n", 1036 | " detections = detect_fn(input_tensor)\n", 1037 | " \n", 1038 | " num_detections = int(detections.pop('num_detections'))\n", 1039 | " detections = {key: value[0, :num_detections].numpy()\n", 1040 | " for key, value in detections.items()}\n", 1041 | " detections['num_detections'] = num_detections\n", 1042 | "\n", 1043 | " # detection_classes should be ints.\n", 1044 | " detections['detection_classes'] = detections['detection_classes'].astype(np.int64)\n", 1045 | "\n", 1046 | " label_id_offset = 1\n", 1047 | " image_np_with_detections = image_np.copy()\n", 1048 | "\n", 1049 | " viz_utils.visualize_boxes_and_labels_on_image_array(\n", 1050 | " image_np_with_detections,\n", 1051 | " detections['detection_boxes'],\n", 1052 | " detections['detection_classes']+label_id_offset,\n", 1053 | " detections['detection_scores'],\n", 1054 | " category_index,\n", 1055 | " use_normalized_coordinates=True,\n", 1056 | " max_boxes_to_draw=5,\n", 1057 | " min_score_thresh=.8,\n", 1058 | " agnostic_mode=False)\n", 1059 | " \n", 1060 | " try: \n", 1061 | " text, region = ocr_it(image_np_with_detections, detections, detection_threshold, region_threshold)\n", 1062 | " save_results(text, region, 'realtimeresults.csv', 'Detection_Images')\n", 1063 | " except:\n", 1064 | " pass\n", 1065 | "\n", 1066 | " cv2.imshow('object detection', cv2.resize(image_np_with_detections, (800, 600)))\n", 1067 | " \n", 1068 | " if cv2.waitKey(10) & 0xFF == ord('q'):\n", 1069 | " cap.release()\n", 1070 | " cv2.destroyAllWindows()\n", 1071 | " break" 1072 | ] 1073 | }, 1074 | { 1075 | "cell_type": "markdown", 1076 | "metadata": { 1077 | "id": "rzlM4jt0pfDJ" 1078 | }, 1079 | "source": [ 1080 | "# 10. Freezing the Graph" 1081 | ] 1082 | }, 1083 | { 1084 | "cell_type": "code", 1085 | "execution_count": null, 1086 | "metadata": { 1087 | "id": "n4olHB2npfDJ" 1088 | }, 1089 | "outputs": [], 1090 | "source": [ 1091 | "FREEZE_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'exporter_main_v2.py ')" 1092 | ] 1093 | }, 1094 | { 1095 | "cell_type": "code", 1096 | "execution_count": null, 1097 | "metadata": { 1098 | "id": "0AjO93QDpfDJ" 1099 | }, 1100 | "outputs": [], 1101 | "source": [ 1102 | "command = \"python {} --input_type=image_tensor --pipeline_config_path={} --trained_checkpoint_dir={} --output_directory={}\".format(FREEZE_SCRIPT ,files['PIPELINE_CONFIG'], paths['CHECKPOINT_PATH'], paths['OUTPUT_PATH'])" 1103 | ] 1104 | }, 1105 | { 1106 | "cell_type": "code", 1107 | "execution_count": null, 1108 | "metadata": { 1109 | "colab": { 1110 | "base_uri": "https://localhost:8080/" 1111 | }, 1112 | "id": "F6Lsp3tCpfDJ", 1113 | "outputId": "c3828529-bf06-4df5-d7f3-145890ec3edd" 1114 | }, 1115 | "outputs": [], 1116 | "source": [ 1117 | "print(command)" 1118 | ] 1119 | }, 1120 | { 1121 | "cell_type": "code", 1122 | "execution_count": null, 1123 | "metadata": { 1124 | "colab": { 1125 | "base_uri": "https://localhost:8080/" 1126 | }, 1127 | "id": "1Sw1ULgHpfDJ", 1128 | "outputId": "6fd441e1-9fc9-4889-d072-3395c21e40b6" 1129 | }, 1130 | "outputs": [], 1131 | "source": [ 1132 | "!{command}" 1133 | ] 1134 | }, 1135 | { 1136 | "cell_type": "markdown", 1137 | "metadata": { 1138 | "id": "wTPmdqaXpfDK" 1139 | }, 1140 | "source": [ 1141 | "# 11. Conversion to TFJS" 1142 | ] 1143 | }, 1144 | { 1145 | "cell_type": "code", 1146 | "execution_count": null, 1147 | "metadata": { 1148 | "colab": { 1149 | "base_uri": "https://localhost:8080/" 1150 | }, 1151 | "id": "gZ6UzY_fpfDK", 1152 | "outputId": "0c84722e-1c2b-4002-d857-80827ade828a", 1153 | "scrolled": true 1154 | }, 1155 | "outputs": [], 1156 | "source": [ 1157 | "!pip install tensorflowjs" 1158 | ] 1159 | }, 1160 | { 1161 | "cell_type": "code", 1162 | "execution_count": null, 1163 | "metadata": { 1164 | "id": "0oxbVynHpfDK" 1165 | }, 1166 | "outputs": [], 1167 | "source": [ 1168 | "command = \"tensorflowjs_converter --input_format=tf_saved_model --output_node_names='detection_boxes,detection_classes,detection_features,detection_multiclass_scores,detection_scores,num_detections,raw_detection_boxes,raw_detection_scores' --output_format=tfjs_graph_model --signature_name=serving_default {} {}\".format(os.path.join(paths['OUTPUT_PATH'], 'saved_model'), paths['TFJS_PATH'])" 1169 | ] 1170 | }, 1171 | { 1172 | "cell_type": "code", 1173 | "execution_count": null, 1174 | "metadata": { 1175 | "colab": { 1176 | "base_uri": "https://localhost:8080/" 1177 | }, 1178 | "id": "DB2AGNmJpfDK", 1179 | "outputId": "fbc9f747-f511-47e8-df8f-5ea65cef0374" 1180 | }, 1181 | "outputs": [], 1182 | "source": [ 1183 | "print(command)" 1184 | ] 1185 | }, 1186 | { 1187 | "cell_type": "code", 1188 | "execution_count": null, 1189 | "metadata": { 1190 | "colab": { 1191 | "base_uri": "https://localhost:8080/" 1192 | }, 1193 | "id": "K7rfT4-hpfDK", 1194 | "outputId": "532707fd-6feb-4bc6-84a3-325b5d16303c" 1195 | }, 1196 | "outputs": [], 1197 | "source": [ 1198 | "!{command}" 1199 | ] 1200 | }, 1201 | { 1202 | "cell_type": "code", 1203 | "execution_count": null, 1204 | "metadata": { 1205 | "id": "o8_hm-itpfDK" 1206 | }, 1207 | "outputs": [], 1208 | "source": [ 1209 | "# Test Code: https://github.com/nicknochnack/RealTimeSignLanguageDetectionwithTFJS" 1210 | ] 1211 | }, 1212 | { 1213 | "cell_type": "markdown", 1214 | "metadata": { 1215 | "id": "VtUw73FHpfDK" 1216 | }, 1217 | "source": [ 1218 | "# 12. Conversion to TFLite" 1219 | ] 1220 | }, 1221 | { 1222 | "cell_type": "code", 1223 | "execution_count": null, 1224 | "metadata": { 1225 | "id": "XviMtewLpfDK" 1226 | }, 1227 | "outputs": [], 1228 | "source": [ 1229 | "TFLITE_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'export_tflite_graph_tf2.py ')" 1230 | ] 1231 | }, 1232 | { 1233 | "cell_type": "code", 1234 | "execution_count": null, 1235 | "metadata": { 1236 | "id": "us86cjC4pfDL" 1237 | }, 1238 | "outputs": [], 1239 | "source": [ 1240 | "command = \"python {} --pipeline_config_path={} --trained_checkpoint_dir={} --output_directory={}\".format(TFLITE_SCRIPT ,files['PIPELINE_CONFIG'], paths['CHECKPOINT_PATH'], paths['TFLITE_PATH'])" 1241 | ] 1242 | }, 1243 | { 1244 | "cell_type": "code", 1245 | "execution_count": null, 1246 | "metadata": { 1247 | "colab": { 1248 | "base_uri": "https://localhost:8080/" 1249 | }, 1250 | "id": "n1r5YO3rpfDL", 1251 | "outputId": "5fcdf7a4-eee2-4365-f1ca-1751968379ea" 1252 | }, 1253 | "outputs": [], 1254 | "source": [ 1255 | "print(command)" 1256 | ] 1257 | }, 1258 | { 1259 | "cell_type": "code", 1260 | "execution_count": null, 1261 | "metadata": { 1262 | "colab": { 1263 | "base_uri": "https://localhost:8080/" 1264 | }, 1265 | "id": "I-xWpHN8pfDL", 1266 | "outputId": "7f6bacd8-d077-43b5-c131-5b081fba24a4" 1267 | }, 1268 | "outputs": [], 1269 | "source": [ 1270 | "!{command}" 1271 | ] 1272 | }, 1273 | { 1274 | "cell_type": "code", 1275 | "execution_count": null, 1276 | "metadata": { 1277 | "id": "iJfYMbN6pfDL" 1278 | }, 1279 | "outputs": [], 1280 | "source": [ 1281 | "FROZEN_TFLITE_PATH = os.path.join(paths['TFLITE_PATH'], 'saved_model')\n", 1282 | "TFLITE_MODEL = os.path.join(paths['TFLITE_PATH'], 'saved_model', 'detect.tflite')" 1283 | ] 1284 | }, 1285 | { 1286 | "cell_type": "code", 1287 | "execution_count": null, 1288 | "metadata": {}, 1289 | "outputs": [], 1290 | "source": [ 1291 | "command = \"tflite_convert \\\n", 1292 | "--saved_model_dir={} \\\n", 1293 | "--output_file={} \\\n", 1294 | "--input_shapes=1,300,300,3 \\\n", 1295 | "--input_arrays=normalized_input_image_tensor \\\n", 1296 | "--output_arrays='TFLite_Detection_PostProcess','TFLite_Detection_PostProcess:1','TFLite_Detection_PostProcess:2','TFLite_Detection_PostProcess:3' \\\n", 1297 | "--inference_type=FLOAT \\\n", 1298 | "--allow_custom_ops\".format(FROZEN_TFLITE_PATH, TFLITE_MODEL, )" 1299 | ] 1300 | }, 1301 | { 1302 | "cell_type": "code", 1303 | "execution_count": null, 1304 | "metadata": { 1305 | "colab": { 1306 | "base_uri": "https://localhost:8080/" 1307 | }, 1308 | "id": "E8GwUeoFpfDL", 1309 | "outputId": "fac43ea4-cc85-471b-a362-e994b06fd583" 1310 | }, 1311 | "outputs": [], 1312 | "source": [ 1313 | "print(command)" 1314 | ] 1315 | }, 1316 | { 1317 | "cell_type": "code", 1318 | "execution_count": null, 1319 | "metadata": { 1320 | "colab": { 1321 | "base_uri": "https://localhost:8080/" 1322 | }, 1323 | "id": "Nbd7gqHMpfDL", 1324 | "outputId": "7c8fe6d5-2415-4641-8548-39d425c202f7" 1325 | }, 1326 | "outputs": [], 1327 | "source": [ 1328 | "!{command}" 1329 | ] 1330 | }, 1331 | { 1332 | "cell_type": "markdown", 1333 | "metadata": { 1334 | "id": "5NQqZRdA21Uc" 1335 | }, 1336 | "source": [ 1337 | "# 13. Zip and Export Models " 1338 | ] 1339 | }, 1340 | { 1341 | "cell_type": "code", 1342 | "execution_count": null, 1343 | "metadata": { 1344 | "id": "tTVTGCQp2ZJJ" 1345 | }, 1346 | "outputs": [], 1347 | "source": [ 1348 | "!tar -czf models.tar.gz {paths['CHECKPOINT_PATH']}" 1349 | ] 1350 | }, 1351 | { 1352 | "cell_type": "code", 1353 | "execution_count": null, 1354 | "metadata": { 1355 | "colab": { 1356 | "base_uri": "https://localhost:8080/" 1357 | }, 1358 | "id": "whShhB0x3PYJ", 1359 | "outputId": "b773201d-35c9-46a8-b893-4a76bd4d5d97" 1360 | }, 1361 | "outputs": [], 1362 | "source": [ 1363 | "from google.colab import drive\n", 1364 | "drive.mount('/content/drive')" 1365 | ] 1366 | } 1367 | ], 1368 | "metadata": { 1369 | "accelerator": "GPU", 1370 | "colab": { 1371 | "name": "3. Training and Detection.ipynb", 1372 | "provenance": [] 1373 | }, 1374 | "kernelspec": { 1375 | "display_name": "anprsys", 1376 | "language": "python", 1377 | "name": "anprsys" 1378 | }, 1379 | "language_info": { 1380 | "codemirror_mode": { 1381 | "name": "ipython", 1382 | "version": 3 1383 | }, 1384 | "file_extension": ".py", 1385 | "mimetype": "text/x-python", 1386 | "name": "python", 1387 | "nbconvert_exporter": "python", 1388 | "pygments_lexer": "ipython3", 1389 | "version": "3.7.3" 1390 | } 1391 | }, 1392 | "nbformat": 4, 1393 | "nbformat_minor": 1 1394 | } 1395 | --------------------------------------------------------------------------------