├── .idea
├── .gitignore
├── Computer-Pointer-Controller.iml
├── dictionaries
│ └── Bhadr.xml
├── inspectionProfiles
│ └── profiles_settings.xml
├── misc.xml
├── modules.xml
└── vcs.xml
├── .ipynb_checkpoints
└── Exercise_Heterogenous_plugin_and_the_DevCloud-checkpoint.ipynb
├── README.md
├── bin
├── demo.mp4
└── output_video.gif
├── imgs
├── cropped_image.jpg
├── fps.png
├── fps_a.png
├── image_classification_script_output_win.png
├── inference_time.png
├── inference_time_a.png
├── left_eye.jpg
├── model_loading_time.png
├── model_loading_time_a.png
├── pipeline.png
├── project_structure.png
└── right_eye.jpg
├── intel
├── README.md
├── face-detection-adas-binary-0001
│ └── FP32-INT1
│ │ ├── face-detection-adas-binary-0001.bin
│ │ └── face-detection-adas-binary-0001.xml
├── gaze-estimation-adas-0002
│ ├── FP16
│ │ ├── gaze-estimation-adas-0002.bin
│ │ └── gaze-estimation-adas-0002.xml
│ ├── FP32-INT8
│ │ ├── gaze-estimation-adas-0002.bin
│ │ └── gaze-estimation-adas-0002.xml
│ └── FP32
│ │ ├── gaze-estimation-adas-0002.bin
│ │ └── gaze-estimation-adas-0002.xml
├── head-pose-estimation-adas-0001
│ ├── FP16
│ │ ├── head-pose-estimation-adas-0001.bin
│ │ └── head-pose-estimation-adas-0001.xml
│ ├── FP32-INT8
│ │ ├── head-pose-estimation-adas-0001.bin
│ │ └── head-pose-estimation-adas-0001.xml
│ └── FP32
│ │ ├── head-pose-estimation-adas-0001.bin
│ │ └── head-pose-estimation-adas-0001.xml
└── landmarks-regression-retail-0009
│ ├── FP16
│ ├── landmarks-regression-retail-0009.bin
│ └── landmarks-regression-retail-0009.xml
│ ├── FP32-INT8
│ ├── landmarks-regression-retail-0009.bin
│ └── landmarks-regression-retail-0009.xml
│ └── FP32
│ ├── landmarks-regression-retail-0009.bin
│ └── landmarks-regression-retail-0009.xml
├── license.text
├── requirements.txt
└── src
├── .ipynb_checkpoints
├── PrecisionComparision-checkpoint.ipynb
└── banchmark-checkpoint.ipynb
├── PrecisionComparision.ipynb
├── __pycache__
├── face_detection_model.cpython-36.pyc
├── face_detection_model.cpython-37.pyc
├── gaze_estimation_model.cpython-36.pyc
├── gaze_estimation_model.cpython-37.pyc
├── head_pose_estimation_model.cpython-36.pyc
├── head_pose_estimation_model.cpython-37.pyc
├── input_feeder.cpython-36.pyc
├── input_feeder.cpython-37.pyc
├── landmark_detection_model.cpython-36.pyc
├── landmark_detection_model.cpython-37.pyc
├── mouse_controller.cpython-36.pyc
└── mouse_controller.cpython-37.pyc
├── banchmark.ipynb
├── computer_controller_job.sh
├── face_detection_model.py
├── gaze_estimation_model.py
├── head_pose_estimation_model.py
├── input_feeder.py
├── landmark_detection_model.py
├── main.py
├── model.py
├── mouse_controller.py
├── output_video.mp4
└── results
├── FP16
└── stats.txt
├── FP32-INT8
└── stats.txt
└── FP32
└── stats.txt
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /workspace.xml
3 |
--------------------------------------------------------------------------------
/.idea/Computer-Pointer-Controller.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/dictionaries/Bhadr.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | argparser
5 | seprated
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/Exercise_Heterogenous_plugin_and_the_DevCloud-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "graffitiCellId": "id_qwukc5b"
7 | },
8 | "source": [
 9 | "# Exercise: Heterogeneous Plugin and the DevCloud\n",
10 | "\n",
11 | "In this exercise, we will load a model using the hetero plugin on to the FPGA and CPU, and the GPU and CPU. We will then perform an inference on it and compare the time it takes to do the same for each device pair."
12 | ]
13 | },
14 | {
15 | "cell_type": "markdown",
16 | "metadata": {
17 | "graffitiCellId": "id_z8bfs11"
18 | },
19 | "source": [
20 | ""
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {
26 | "graffitiCellId": "id_0untint"
27 | },
28 | "source": [
29 | "\n",
30 | "\n",
31 | "#### Set up paths so we can run Dev Cloud utilities\n",
 32 | "You *must* run this every time you enter a Workspace session."
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": null,
38 | "metadata": {
39 | "graffitiCellId": "id_axn1sb2"
40 | },
41 | "outputs": [],
42 | "source": [
43 | "%env PATH=/opt/conda/bin:/opt/spark-2.4.3-bin-hadoop2.7/bin:/opt/conda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/intel_devcloud_support\n",
44 | "import os\n",
45 | "import sys\n",
46 | "sys.path.insert(0, os.path.abspath('/opt/intel_devcloud_support'))\n",
47 | "sys.path.insert(0, os.path.abspath('/opt/intel'))"
48 | ]
49 | },
50 | {
51 | "cell_type": "markdown",
52 | "metadata": {
53 | "graffitiCellId": "id_mhiayyz"
54 | },
55 | "source": [
56 | "## The model\n",
57 | "\n",
58 | "We will be using the `vehicle-license-plate-detection-barrier-0106` model for this exercise. Remember that to run a model using the HETERO Plugin, we need to use FP16 as the model precision.\n",
59 | "\n",
60 | "The model is present in the `/data/models/intel` folder."
61 | ]
62 | },
63 | {
64 | "cell_type": "markdown",
65 | "metadata": {
66 | "graffitiCellId": "id_ltf95ei"
67 | },
68 | "source": [
69 | "# Step 1: Creating a Python Script\n",
70 | "\n",
71 | "The first step is to create a python script that you can use to load the model and perform an inference. I have used the `writefile` magic to create a python file called `inference_on_device.py`. You will need to complete this file."
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "metadata": {
78 | "graffitiCellId": "id_bpywo8s"
79 | },
80 | "outputs": [],
81 | "source": [
82 | "%%writefile inference_on_device.py\n",
83 | "\n",
84 | "import time\n",
85 | "import numpy as np\n",
86 | "import cv2\n",
87 | "from openvino.inference_engine import IENetwork\n",
88 | "from openvino.inference_engine import IECore\n",
89 | "import argparse\n",
90 | "\n",
91 | "def main(args):\n",
92 | " model=args.model_path\n",
93 | " model_weights=model+'.bin'\n",
94 | " model_structure=model+'.xml'\n",
95 | " \n",
96 | " start=time.time()\n",
97 | " \n",
98 | " # TODO: Load the model on VPU\n",
99 | " \n",
100 | " print(f\"Time taken to load model = {time.time()-start} seconds\")\n",
101 | " \n",
102 | " # Reading and Preprocessing Image\n",
103 | " input_img=cv2.imread('car.png')\n",
104 | " input_img=cv2.resize(input_img, (300,300), interpolation = cv2.INTER_AREA)\n",
105 | " input_img=np.moveaxis(input_img, -1, 0)\n",
106 | "\n",
107 | " # TODO: Prepare the model for inference (create input dict etc.)\n",
108 | " \n",
109 | " start=time.time()\n",
110 | " for _ in range(100):\n",
111 | " # TODO: Run Inference in a Loop\n",
112 | " \n",
113 | " print(f\"Time Taken to run 100 Inference is = {time.time()-start} seconds\")\n",
114 | "\n",
115 | "if __name__=='__main__':\n",
116 | " parser=argparse.ArgumentParser()\n",
117 | " parser.add_argument('--model_path', required=True)\n",
118 | " parser.add_argument('--device', default=None)\n",
119 | " \n",
120 | " args=parser.parse_args() \n",
121 | " main(args)"
122 | ]
123 | },
124 | {
125 | "cell_type": "markdown",
126 | "metadata": {
127 | "graffitiCellId": "id_1rnmf5g"
128 | },
129 | "source": [
130 | ""
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {
137 | "graffitiCellId": "id_nmeqj1a"
138 | },
139 | "outputs": [],
140 | "source": [
141 | "%%writefile inference_on_device.py\n",
142 | "\n",
143 | "import time\n",
144 | "import cv2\n",
145 | "import numpy as np\n",
146 | "from openvino.inference_engine import IENetwork\n",
147 | "from openvino.inference_engine import IECore\n",
148 | "import argparse\n",
149 | "\n",
150 | "def main(args):\n",
151 | " model=args.model_path\n",
152 | " model_weights=model+'.bin'\n",
153 | " model_structure=model+'.xml'\n",
154 | " \n",
155 | " start=time.time()\n",
156 | " model=IENetwork(model_structure, model_weights)\n",
157 | "\n",
158 | " core = IECore()\n",
159 | " net = core.load_network(network=model, device_name=args.device, num_requests=1)\n",
160 | " load_time=time.time()-start\n",
161 | " print(f\"Time taken to load model = {load_time} seconds\")\n",
162 | " \n",
163 | " # Get the name of the input node\n",
164 | " input_name=next(iter(model.inputs))\n",
165 | "\n",
166 | " # Reading and Preprocessing Image\n",
167 | " input_img=cv2.imread('/data/resources/car.png')\n",
168 | " input_img=cv2.resize(input_img, (300,300), interpolation = cv2.INTER_AREA)\n",
169 | " input_img=np.moveaxis(input_img, -1, 0)\n",
170 | "\n",
171 | " # Running Inference in a loop on the same image\n",
172 | " input_dict={input_name:input_img}\n",
173 | "\n",
174 | " start=time.time()\n",
175 | " for _ in range(100):\n",
176 | " net.infer(input_dict)\n",
177 | " \n",
178 | " inference_time=time.time()-start\n",
179 | " fps=100/inference_time\n",
180 | " \n",
181 | " print(f\"Time Taken to run 100 Inference is = {inference_time} seconds\")\n",
182 | " \n",
183 | " with open(f\"/output/{args.path}.txt\", \"w\") as f:\n",
184 | " f.write(str(load_time)+'\\n')\n",
185 | " f.write(str(inference_time)+'\\n')\n",
186 | " f.write(str(fps)+'\\n')\n",
187 | "\n",
188 | "if __name__=='__main__':\n",
189 | " parser=argparse.ArgumentParser()\n",
190 | " parser.add_argument('--model_path', required=True)\n",
191 | " parser.add_argument('--device', default=None)\n",
192 | " parser.add_argument('--path', default=None)\n",
193 | " \n",
194 | " args=parser.parse_args() \n",
195 | " main(args)\n"
196 | ]
197 | },
198 | {
199 | "cell_type": "markdown",
200 | "metadata": {
201 | "graffitiCellId": "id_ufbi2ll"
202 | },
203 | "source": [
204 | "## Step 2: Creating a job submission script\n",
205 | "\n",
 206 | "To submit a job to the devcloud, we need to create a script. I have named the script as `inference_model_job.sh`.\n",
207 | "\n",
208 | "Can you write a script that will take the model path and device as a command line argument and then call the python file you created in the previous cell with the path to the model?"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "metadata": {
215 | "graffitiCellId": "id_5r13clu"
216 | },
217 | "outputs": [],
218 | "source": [
219 | "%%writefile inference_model_job.sh\n",
220 | "\n",
221 | "#TODO: Create job submission script"
222 | ]
223 | },
224 | {
225 | "cell_type": "markdown",
226 | "metadata": {
227 | "graffitiCellId": "id_f1nbmn9"
228 | },
229 | "source": [
230 | ""
231 | ]
232 | },
233 | {
234 | "cell_type": "code",
235 | "execution_count": 19,
236 | "metadata": {
237 | "graffitiCellId": "id_ia7yjlq"
238 | },
239 | "outputs": [
240 | {
241 | "name": "stdout",
242 | "output_type": "stream",
243 | "text": [
244 | "Overwriting inference_model_job.sh\n"
245 | ]
246 | }
247 | ],
248 | "source": [
249 | "%%writefile inference_model_job.sh\n",
250 | "#!/bin/bash\n",
251 | "\n",
252 | "exec 1>/output/stdout.log 2>/output/stderr.log\n",
253 | "\n",
254 | "mkdir -p /output\n",
255 | "\n",
256 | "DEVICE=$1\n",
257 | "MODELPATH=$2\n",
258 | "\n",
259 | "\n",
260 | "source /opt/intel/init_openvino.sh\n",
261 | "aocl program acl0 /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/2019R4_PL1_FP16_MobileNet_Clamp.aocx\n",
262 | "\n",
263 | "\n",
264 | "# Run the load model python script\n",
265 | "python3 inference_on_device.py --model_path ${MODELPATH} --device ${DEVICE}\n",
266 | "\n",
267 | "cd /output\n",
268 | "\n",
269 | "tar zcvf output.tgz *"
270 | ]
271 | },
272 | {
273 | "cell_type": "markdown",
274 | "metadata": {
275 | "graffitiCellId": "id_28fed2h"
276 | },
277 | "source": [
278 | "## Step 3a: Running on the FPGA and CPU\n",
279 | "\n",
 280 | "In the cell below, can you write the qsub command that will submit your job to the FPGA and CPU?"
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": null,
286 | "metadata": {
287 | "graffitiCellId": "id_6awpacu"
288 | },
289 | "outputs": [],
290 | "source": [
291 | "fpga_cpu_job = # TODO: Write qsub command\n",
292 | "print(fpga_cpu_job[0])"
293 | ]
294 | },
295 | {
296 | "cell_type": "markdown",
297 | "metadata": {
298 | "graffitiCellId": "id_cvp3lyi"
299 | },
300 | "source": [
301 | ""
302 | ]
303 | },
304 | {
305 | "cell_type": "code",
306 | "execution_count": 20,
307 | "metadata": {
308 | "graffitiCellId": "id_chmeh50"
309 | },
310 | "outputs": [
311 | {
312 | "name": "stdout",
313 | "output_type": "stream",
314 | "text": [
315 | "S4JnyfRTRLgnuYAELRGDGPPyWcBeLu5K\n"
316 | ]
317 | }
318 | ],
319 | "source": [
320 | "fpga_cpu_job = !qsub inference_model_job.sh -d . -l nodes=1:tank-870:i5-6500te:iei-mustang-f100-a10 -F \"HETERO:FPGA,CPU /data/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106 fpga_cpu_stats\" -N store_core \n",
321 | "print(fpga_cpu_job[0])"
322 | ]
323 | },
324 | {
325 | "cell_type": "markdown",
326 | "metadata": {
327 | "graffitiCellId": "id_io25c53"
328 | },
329 | "source": [
330 | "## Step 3b: Running on CPU and GPU"
331 | ]
332 | },
333 | {
334 | "cell_type": "code",
335 | "execution_count": null,
336 | "metadata": {
337 | "graffitiCellId": "id_v5klpi1"
338 | },
339 | "outputs": [],
340 | "source": [
341 | "fpga_gpu_job = # TODO: Write qsub command\n",
342 | "print(fpga_gpu_job[0])"
343 | ]
344 | },
345 | {
346 | "cell_type": "markdown",
347 | "metadata": {
348 | "graffitiCellId": "id_7k34s6u"
349 | },
350 | "source": [
351 | ""
352 | ]
353 | },
354 | {
355 | "cell_type": "code",
356 | "execution_count": 21,
357 | "metadata": {
358 | "graffitiCellId": "id_022l4bj"
359 | },
360 | "outputs": [
361 | {
362 | "name": "stdout",
363 | "output_type": "stream",
364 | "text": [
365 | "Eh6UyjrAVHGzoOtXzCduZQ2Xew2GnSJf\n"
366 | ]
367 | }
368 | ],
369 | "source": [
370 | "cpu_gpu_job = !qsub inference_model_job.sh -d . -l nodes=tank-870:i5-6500te:intel-hd-530 -F \"HETERO:CPU,GPU /data/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106 cpu_gpu_stats\" -N store_core \n",
371 | "print(cpu_gpu_job[0])"
372 | ]
373 | },
374 | {
375 | "cell_type": "markdown",
376 | "metadata": {
377 | "graffitiCellId": "id_io25c53"
378 | },
379 | "source": [
380 | "## Step 3c: Running on FPGA, GPU and CPU"
381 | ]
382 | },
383 | {
384 | "cell_type": "code",
385 | "execution_count": null,
386 | "metadata": {
387 | "graffitiCellId": "id_v5klpi1"
388 | },
389 | "outputs": [],
390 | "source": [
391 | "fpga_gpu_cpu_job = # TODO: Write qsub command\n",
392 | "print(fpga_gpu_cpu_job[0])"
393 | ]
394 | },
395 | {
396 | "cell_type": "markdown",
397 | "metadata": {
398 | "graffitiCellId": "id_mxh5ozv"
399 | },
400 | "source": [
401 | ""
402 | ]
403 | },
404 | {
405 | "cell_type": "code",
406 | "execution_count": 22,
407 | "metadata": {
408 | "graffitiCellId": "id_qicoukm"
409 | },
410 | "outputs": [
411 | {
412 | "name": "stdout",
413 | "output_type": "stream",
414 | "text": [
415 | "0nJKbX8NJKvekIoxvVQ77gTk9bt2ldBk\n"
416 | ]
417 | }
418 | ],
419 | "source": [
420 | "fpga_gpu_cpu_job = !qsub inference_model_job.sh -d . -l nodes=tank-870:i5-6500te:intel-hd-530:iei-mustang-f100-a10 -F \"HETERO:FPGA,GPU,CPU /data/models/intel/vehicle-license-plate-detection-barrier-0106/FP16/vehicle-license-plate-detection-barrier-0106 fpga_gpu_cpu_stats\" -N store_core \n",
421 | "print(fpga_gpu_cpu_job[0])"
422 | ]
423 | },
424 | {
425 | "cell_type": "markdown",
426 | "metadata": {
427 | "graffitiCellId": "id_8ym2smn"
428 | },
429 | "source": [
430 | "## Step 4: Getting the Live Stat Values\n",
431 | "\n",
432 | "By running the below command, we can see the live status of the commands."
433 | ]
434 | },
435 | {
436 | "cell_type": "markdown",
437 | "metadata": {
438 | "graffitiCellId": "id_clj7fxa"
439 | },
440 | "source": [
441 | ""
442 | ]
443 | },
444 | {
445 | "cell_type": "code",
446 | "execution_count": null,
447 | "metadata": {
448 | "graffitiCellId": "id_zig7qg5"
449 | },
450 | "outputs": [],
451 | "source": [
452 | "import liveQStat\n",
453 | "liveQStat.liveQStat()"
454 | ]
455 | },
456 | {
457 | "cell_type": "markdown",
458 | "metadata": {
459 | "graffitiCellId": "id_2vp5y4m"
460 | },
461 | "source": [
462 | "## Step 5a: Get the results for FPGA and CPU\n",
463 | "\n",
464 | "Running the cell below will get the output files from our job"
465 | ]
466 | },
467 | {
468 | "cell_type": "markdown",
469 | "metadata": {
470 | "graffitiCellId": "id_cygruth"
471 | },
472 | "source": [
473 | ""
474 | ]
475 | },
476 | {
477 | "cell_type": "code",
478 | "execution_count": 23,
479 | "metadata": {
480 | "graffitiCellId": "id_zpdshwo"
481 | },
482 | "outputs": [
483 | {
484 | "name": "stdout",
485 | "output_type": "stream",
486 | "text": [
487 | "getResults() is blocking until results of the job (id:S4JnyfRTRLgnuYAELRGDGPPyWcBeLu5K) are ready.\n",
488 | "Please wait................................................Success!\n",
489 | "output.tgz was downloaded in the same folder as this notebook.\n"
490 | ]
491 | }
492 | ],
493 | "source": [
494 | "import get_results\n",
495 | "\n",
496 | "get_results.getResults(fpga_cpu_job[0], get_stderr=True, filename=\"output.tgz\", blocking=True)"
497 | ]
498 | },
499 | {
500 | "cell_type": "code",
501 | "execution_count": 24,
502 | "metadata": {
503 | "graffitiCellId": "id_0quk13q"
504 | },
505 | "outputs": [],
506 | "source": [
507 | "!tar zxf output.tgz"
508 | ]
509 | },
510 | {
511 | "cell_type": "code",
512 | "execution_count": 25,
513 | "metadata": {
514 | "graffitiCellId": "id_l1gs5j5"
515 | },
516 | "outputs": [
517 | {
518 | "name": "stdout",
519 | "output_type": "stream",
520 | "text": [
521 | "INTELFPGAOCLSDKROOT is set to /opt/altera/aocl-pro-rte/aclrte-linux64. Using that.\r\n",
522 | "\r\n",
523 | "aoc was not found, but aocl was found. Assuming only RTE is installed.\r\n",
524 | "\r\n",
525 | "AOCL_BOARD_PACKAGE_ROOT is set to /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1. Using that.\r\n",
526 | "Adding /opt/altera/aocl-pro-rte/aclrte-linux64/bin to PATH\r\n",
527 | "Adding /opt/altera/aocl-pro-rte/aclrte-linux64/host/linux64/lib to LD_LIBRARY_PATH\r\n",
528 | "Adding /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1/linux64/lib to LD_LIBRARY_PATH\r\n",
529 | "[setupvars.sh] OpenVINO environment initialized\r\n",
530 | "aocl program: Running program from /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1/linux64/libexec\r\n",
531 | "Programming device: a10gx_2ddr : Intel Vision Accelerator Design with Intel Arria 10 FPGA (acla10_1150_sg10)\r\n",
532 | "Program succeed. \r\n",
533 | "Time taken to load model = 4.475625038146973 seconds\r\n",
534 | "Time Taken to run 100 Inference is = 0.8625667095184326 seconds\r\n",
535 | "None.txt\r\n",
536 | "stderr.log\r\n"
537 | ]
538 | }
539 | ],
540 | "source": [
541 | "!cat stdout.log"
542 | ]
543 | },
544 | {
545 | "cell_type": "markdown",
546 | "metadata": {
547 | "graffitiCellId": "id_gykmtow"
548 | },
549 | "source": [
550 | "## Step 5b: Get the result for CPU and GPU"
551 | ]
552 | },
553 | {
554 | "cell_type": "code",
555 | "execution_count": 26,
556 | "metadata": {
557 | "graffitiCellId": "id_8w79u8w"
558 | },
559 | "outputs": [
560 | {
561 | "name": "stdout",
562 | "output_type": "stream",
563 | "text": [
564 | "getResults() is blocking until results of the job (id:Eh6UyjrAVHGzoOtXzCduZQ2Xew2GnSJf) are ready.\n",
565 | "Please wait...Success!\n",
566 | "output.tgz was downloaded in the same folder as this notebook.\n"
567 | ]
568 | }
569 | ],
570 | "source": [
571 | "import get_results\n",
572 | "\n",
573 | "get_results.getResults(cpu_gpu_job[0], filename=\"output.tgz\", blocking=True)"
574 | ]
575 | },
576 | {
577 | "cell_type": "code",
578 | "execution_count": 27,
579 | "metadata": {
580 | "graffitiCellId": "id_kv4qcd6"
581 | },
582 | "outputs": [],
583 | "source": [
584 | "!tar zxf output.tgz"
585 | ]
586 | },
587 | {
588 | "cell_type": "code",
589 | "execution_count": 32,
590 | "metadata": {
591 | "graffitiCellId": "id_etgr4le"
592 | },
593 | "outputs": [
594 | {
595 | "name": "stdout",
596 | "output_type": "stream",
597 | "text": [
598 | "INTELFPGAOCLSDKROOT is set to /opt/altera/aocl-pro-rte/aclrte-linux64. Using that.\r\n",
599 | "\r\n",
600 | "aoc was not found, but aocl was found. Assuming only RTE is installed.\r\n",
601 | "\r\n",
602 | "AOCL_BOARD_PACKAGE_ROOT is set to /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1. Using that.\r\n",
603 | "Adding /opt/altera/aocl-pro-rte/aclrte-linux64/bin to PATH\r\n",
604 | "Adding /opt/altera/aocl-pro-rte/aclrte-linux64/host/linux64/lib to LD_LIBRARY_PATH\r\n",
605 | "Adding /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1/linux64/lib to LD_LIBRARY_PATH\r\n",
606 | "[setupvars.sh] OpenVINO environment initialized\r\n",
607 | "aocl program: Running program from /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1/linux64/libexec\r\n",
608 | "Programming device: a10gx_2ddr : Intel Vision Accelerator Design with Intel Arria 10 FPGA (acla10_1150_sg10)\r\n",
609 | "Program succeed. \r\n",
610 | "DetectionOutput_Reshape_priors_/Output_0/Data__const is CPU\r\n",
611 | "DetectionOutput_Reshape_conf_ is CPU\r\n",
612 | "SSD/concat_reshape_softmax/mbox_conf_final is CPU\r\n",
613 | "SSD/concat_reshape_softmax/Reshape is GPU\r\n",
614 | "SSD/concat_reshape_softmax/mbox_conf_logits is GPU\r\n",
615 | "SSD/ssd_head_1/Flatten_1/flatten/Reshape is GPU\r\n",
616 | "SSD/ssd_head_1/layer_18/output_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
617 | "DetectionOutput_Reshape_loc_ is CPU\r\n",
618 | "SSD/concat_reshape_softmax/mbox_loc_final is CPU\r\n",
619 | "SSD/ssd_head_1/Flatten/flatten/Reshape is GPU\r\n",
620 | "SSD/ssd_head_1/layer_18/output_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
621 | "SSD/ssd_head_2/Flatten_1/flatten/Reshape is GPU\r\n",
622 | "SSD/ssd_head_2/feature_map_1_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
623 | "SSD/ssd_head_2/Flatten/flatten/Reshape is GPU\r\n",
624 | "SSD/ssd_head_2/feature_map_1_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
625 | "SSD/ssd_head_3/Flatten_1/flatten/Reshape is GPU\r\n",
626 | "SSD/ssd_head_3/feature_map_2_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
627 | "SSD/ssd_head_3/Flatten/flatten/Reshape is GPU\r\n",
628 | "SSD/ssd_head_3/feature_map_2_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
629 | "SSD/ssd_head_4/Flatten_1/flatten/Reshape is GPU\r\n",
630 | "SSD/ssd_head_4/feature_map_3_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
631 | "SSD/ssd_head_4/Flatten/flatten/Reshape is GPU\r\n",
632 | "SSD/ssd_head_4/feature_map_3_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
633 | "SSD/ssd_head_5/Flatten_1/flatten/Reshape is GPU\r\n",
634 | "SSD/ssd_head_5/feature_map_4_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
635 | "SSD/ssd_head_5/Flatten/flatten/Reshape is GPU\r\n",
636 | "SSD/ssd_head_5/feature_map_4_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
637 | "SSD/ssd_head/Flatten_1/flatten/Reshape is GPU\r\n",
638 | "SSD/ssd_head/layer_14/output_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
639 | "SSD/ssd_head/Flatten/flatten/Reshape is GPU\r\n",
640 | "SSD/ssd_head/layer_14/output_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
641 | "Time taken to load model = 11.25682806968689 seconds\r\n",
642 | "Time Taken to run 100 Inference is = 1.122713565826416 seconds\r\n",
643 | "None.txt\r\n",
644 | "stderr.log\r\n"
645 | ]
646 | }
647 | ],
648 | "source": [
649 | "!cat stdout.log"
650 | ]
651 | },
652 | {
653 | "cell_type": "markdown",
654 | "metadata": {
655 | "graffitiCellId": "id_gykmtow"
656 | },
657 | "source": [
658 | "## Step 5c: Get the result for FPGA, GPU and CPU"
659 | ]
660 | },
661 | {
662 | "cell_type": "code",
663 | "execution_count": 29,
664 | "metadata": {
665 | "graffitiCellId": "id_8w79u8w"
666 | },
667 | "outputs": [
668 | {
669 | "name": "stdout",
670 | "output_type": "stream",
671 | "text": [
672 | "getResults() is blocking until results of the job (id:0nJKbX8NJKvekIoxvVQ77gTk9bt2ldBk) are ready.\n",
673 | "Please wait.....Success!\n",
674 | "output.tgz was downloaded in the same folder as this notebook.\n"
675 | ]
676 | }
677 | ],
678 | "source": [
679 | "import get_results\n",
680 | "\n",
681 | "get_results.getResults(fpga_gpu_cpu_job[0], filename=\"output.tgz\", blocking=True)"
682 | ]
683 | },
684 | {
685 | "cell_type": "code",
686 | "execution_count": 30,
687 | "metadata": {
688 | "graffitiCellId": "id_kv4qcd6"
689 | },
690 | "outputs": [],
691 | "source": [
692 | "!tar zxf output.tgz"
693 | ]
694 | },
695 | {
696 | "cell_type": "code",
697 | "execution_count": 31,
698 | "metadata": {
699 | "graffitiCellId": "id_etgr4le"
700 | },
701 | "outputs": [
702 | {
703 | "name": "stdout",
704 | "output_type": "stream",
705 | "text": [
706 | "INTELFPGAOCLSDKROOT is set to /opt/altera/aocl-pro-rte/aclrte-linux64. Using that.\r\n",
707 | "\r\n",
708 | "aoc was not found, but aocl was found. Assuming only RTE is installed.\r\n",
709 | "\r\n",
710 | "AOCL_BOARD_PACKAGE_ROOT is set to /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1. Using that.\r\n",
711 | "Adding /opt/altera/aocl-pro-rte/aclrte-linux64/bin to PATH\r\n",
712 | "Adding /opt/altera/aocl-pro-rte/aclrte-linux64/host/linux64/lib to LD_LIBRARY_PATH\r\n",
713 | "Adding /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1/linux64/lib to LD_LIBRARY_PATH\r\n",
714 | "[setupvars.sh] OpenVINO environment initialized\r\n",
715 | "aocl program: Running program from /opt/intel/openvino/bitstreams/a10_vision_design_sg1_bitstreams/BSP/a10_1150_sg1/linux64/libexec\r\n",
716 | "Programming device: a10gx_2ddr : Intel Vision Accelerator Design with Intel Arria 10 FPGA (acla10_1150_sg10)\r\n",
717 | "Program succeed. \r\n",
718 | "DetectionOutput_Reshape_priors_/Output_0/Data__const is CPU\r\n",
719 | "DetectionOutput_Reshape_conf_ is CPU\r\n",
720 | "SSD/concat_reshape_softmax/mbox_conf_final is CPU\r\n",
721 | "SSD/concat_reshape_softmax/Reshape is GPU\r\n",
722 | "SSD/concat_reshape_softmax/mbox_conf_logits is GPU\r\n",
723 | "SSD/ssd_head_1/Flatten_1/flatten/Reshape is GPU\r\n",
724 | "SSD/ssd_head_1/layer_18/output_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
725 | "DetectionOutput_Reshape_loc_ is CPU\r\n",
726 | "SSD/concat_reshape_softmax/mbox_loc_final is CPU\r\n",
727 | "SSD/ssd_head_1/Flatten/flatten/Reshape is GPU\r\n",
728 | "SSD/ssd_head_1/layer_18/output_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
729 | "SSD/ssd_head_2/Flatten_1/flatten/Reshape is GPU\r\n",
730 | "SSD/ssd_head_2/feature_map_1_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
731 | "SSD/ssd_head_2/Flatten/flatten/Reshape is GPU\r\n",
732 | "SSD/ssd_head_2/feature_map_1_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
733 | "SSD/ssd_head_3/Flatten_1/flatten/Reshape is GPU\r\n",
734 | "SSD/ssd_head_3/feature_map_2_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
735 | "SSD/ssd_head_3/Flatten/flatten/Reshape is GPU\r\n",
736 | "SSD/ssd_head_3/feature_map_2_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
737 | "SSD/ssd_head_4/Flatten_1/flatten/Reshape is GPU\r\n",
738 | "SSD/ssd_head_4/feature_map_3_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
739 | "SSD/ssd_head_4/Flatten/flatten/Reshape is GPU\r\n",
740 | "SSD/ssd_head_4/feature_map_3_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
741 | "SSD/ssd_head_5/Flatten_1/flatten/Reshape is GPU\r\n",
742 | "SSD/ssd_head_5/feature_map_4_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
743 | "SSD/ssd_head_5/Flatten/flatten/Reshape is GPU\r\n",
744 | "SSD/ssd_head_5/feature_map_4_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
745 | "SSD/ssd_head/Flatten_1/flatten/Reshape is GPU\r\n",
746 | "SSD/ssd_head/layer_14/output_mbox_conf/BiasAdd/Add/Transpose is GPU\r\n",
747 | "SSD/ssd_head/Flatten/flatten/Reshape is GPU\r\n",
748 | "SSD/ssd_head/layer_14/output_mbox_loc/BiasAdd/Add/Transpose is GPU\r\n",
749 | "Time taken to load model = 11.25682806968689 seconds\r\n",
750 | "Time Taken to run 100 Inference is = 1.122713565826416 seconds\r\n",
751 | "None.txt\r\n",
752 | "stderr.log\r\n"
753 | ]
754 | }
755 | ],
756 | "source": [
757 | "!cat stdout.log"
758 | ]
759 | },
760 | {
761 | "cell_type": "markdown",
762 | "metadata": {
763 | "graffitiCellId": "id_4rf323l"
764 | },
765 | "source": [
766 | "## Step 6: View the Outputs\n",
767 | "\n",
768 | "Can you plot the load time, inference time and the frames per second in the cell below?"
769 | ]
770 | },
771 | {
772 | "cell_type": "code",
773 | "execution_count": null,
774 | "metadata": {
775 | "graffitiCellId": "id_bkny5ta"
776 | },
777 | "outputs": [],
778 | "source": [
779 | "import matplotlib.pyplot as plt\n",
780 | "\n",
781 | "#File Paths to stats files\n",
782 | "paths=['gpu_stats.txt', 'cpu_stats.txt']\n",
783 | "\n",
784 | "# TODO: Plot the different stats"
785 | ]
786 | },
787 | {
788 | "cell_type": "markdown",
789 | "metadata": {
790 | "graffitiCellId": "id_m9kxw9k"
791 | },
792 | "source": [
793 | ""
794 | ]
795 | },
796 | {
797 | "cell_type": "code",
798 | "execution_count": 33,
799 | "metadata": {
800 | "graffitiCellId": "id_4h5tl2h"
801 | },
802 | "outputs": [
803 | {
804 | "ename": "ValueError",
805 | "evalue": "shape mismatch: objects cannot be broadcast to a single shape",
806 | "output_type": "error",
807 | "traceback": [
808 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
809 | "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
810 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 26\u001b[0m \u001b[0mpaths\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'fpga_cpu_stats.txt'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'cpu_gpu_stats.txt'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'fpga_gpu_cpu_stats.txt'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 27\u001b[0;31m \u001b[0mread_files\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpaths\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m'FPGA/CPU'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'CPU/GPU'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'FPGA/GPU/CPU'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
811 | "\u001b[0;32m\u001b[0m in \u001b[0;36mread_files\u001b[0;34m(paths, labels)\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0mfps\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfloat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreadline\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 22\u001b[0;31m \u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mload_time\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'Model Load Time'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'seconds'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 23\u001b[0m \u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minference_time\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'Inference Time'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'seconds'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfps\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'Frames per Second'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'Frames'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
812 | "\u001b[0;32m\u001b[0m in \u001b[0;36mplot\u001b[0;34m(labels, data, title, label)\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0max\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_ylabel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0max\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_title\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtitle\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0max\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbar\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mread_files\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpaths\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
813 | "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/matplotlib/__init__.py\u001b[0m in \u001b[0;36minner\u001b[0;34m(ax, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1708\u001b[0m warnings.warn(msg % (label_namer, func.__name__),\n\u001b[1;32m 1709\u001b[0m RuntimeWarning, stacklevel=2)\n\u001b[0;32m-> 1710\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfunc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0max\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1711\u001b[0m \u001b[0mpre_doc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minner\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__doc__\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1712\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpre_doc\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
814 | "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/matplotlib/axes/_axes.py\u001b[0m in \u001b[0;36mbar\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 2079\u001b[0m x, height, width, y, linewidth = np.broadcast_arrays(\n\u001b[1;32m 2080\u001b[0m \u001b[0;31m# Make args iterable too.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2081\u001b[0;31m np.atleast_1d(x), height, width, y, linewidth)\n\u001b[0m\u001b[1;32m 2082\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2083\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0morientation\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'vertical'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
815 | "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/numpy/lib/stride_tricks.py\u001b[0m in \u001b[0;36mbroadcast_arrays\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 248\u001b[0m \u001b[0margs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_m\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcopy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msubok\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msubok\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_m\u001b[0m \u001b[0;32min\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 250\u001b[0;31m \u001b[0mshape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_broadcast_shape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 251\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 252\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mall\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mshape\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0marray\u001b[0m \u001b[0;32min\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
816 | "\u001b[0;32m/opt/conda/lib/python3.6/site-packages/numpy/lib/stride_tricks.py\u001b[0m in \u001b[0;36m_broadcast_shape\u001b[0;34m(*args)\u001b[0m\n\u001b[1;32m 183\u001b[0m \u001b[0;31m# use the old-iterator because np.nditer does not handle size 0 arrays\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 184\u001b[0m \u001b[0;31m# consistently\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 185\u001b[0;31m \u001b[0mb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbroadcast\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 186\u001b[0m \u001b[0;31m# unfortunately, it cannot handle 32 or more arguments directly\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 187\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mpos\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m31\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
817 | "\u001b[0;31mValueError\u001b[0m: shape mismatch: objects cannot be broadcast to a single shape"
818 | ]
819 | }
820 | ],
821 | "source": [
822 | "import os\n",
823 | "import matplotlib.pyplot as plt\n",
824 | "\n",
825 | "# Render a bar chart comparing one metric across device configurations\n",
826 | "def plot(labels, data, title, label):\n",
827 | "    fig = plt.figure()\n",
828 | "    ax = fig.add_axes([0,0,1,1])\n",
829 | "    ax.set_ylabel(label)\n",
830 | "    ax.set_title(title)\n",
831 | "    ax.bar(labels, data)\n",
832 | "    \n",
833 | "# Read model load time, inference time, and FPS (one value per line)\n",
834 | "# from each stats file and plot each metric across configurations\n",
835 | "def read_files(paths, labels):\n",
836 | "    load_time=[]\n",
837 | "    inference_time=[]\n",
838 | "    fps=[]\n",
839 | "    # only keep labels whose stats file exists, so labels and data\n",
840 | "    # stay the same length and ax.bar can broadcast them together\n",
841 | "    found_labels=[]\n",
842 | "    \n",
843 | "    for path, label in zip(paths, labels):\n",
844 | "        if os.path.isfile(path):\n",
845 | "            with open(path, 'r') as f:\n",
846 | "                load_time.append(float(f.readline()))\n",
847 | "                inference_time.append(float(f.readline()))\n",
848 | "                fps.append(float(f.readline()))\n",
849 | "            found_labels.append(label)\n",
850 | "\n",
851 | "    plot(found_labels, load_time, 'Model Load Time', 'seconds')\n",
852 | "    plot(found_labels, inference_time, 'Inference Time', 'seconds')\n",
853 | "    plot(found_labels, fps, 'Frames per Second', 'Frames')\n",
854 | "\n",
855 | "paths=['fpga_cpu_stats.txt', 'cpu_gpu_stats.txt', 'fpga_gpu_cpu_stats.txt']\n",
856 | "read_files(paths, ['FPGA/CPU', 'CPU/GPU', 'FPGA/GPU/CPU'])"
849 | ]
850 | },
851 | {
852 | "cell_type": "code",
853 | "execution_count": null,
854 | "metadata": {
855 | "graffitiCellId": "id_0c20r8n"
856 | },
857 | "outputs": [],
858 | "source": []
859 | }
860 | ],
861 | "metadata": {
862 | "graffiti": {
863 | "firstAuthorId": "dca260a8-2142-11ea-b0f7-6f7abbbf2f85",
864 | "id": "id_610hfgn",
865 | "language": "EN"
866 | },
867 | "kernelspec": {
868 | "display_name": "Python 3",
869 | "language": "python",
870 | "name": "python3"
871 | },
872 | "language_info": {
873 | "codemirror_mode": {
874 | "name": "ipython",
875 | "version": 3
876 | },
877 | "file_extension": ".py",
878 | "mimetype": "text/x-python",
879 | "name": "python",
880 | "nbconvert_exporter": "python",
881 | "pygments_lexer": "ipython3",
882 | "version": "3.6.3"
883 | }
884 | },
885 | "nbformat": 4,
886 | "nbformat_minor": 2
887 | }
888 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Computer Pointer Controller
2 |
3 | In this project, you will use a [Gaze Detection Model](https://docs.openvinotoolkit.org/latest/_models_intel_gaze_estimation_adas_0002_description_gaze_estimation_adas_0002.html) to control the mouse pointer of your computer. You will be using the Gaze Estimation model to estimate the gaze of the user's eyes and change the mouse pointer position accordingly. This project will demonstrate your ability to run multiple models in the same machine and coordinate the flow of data between those models.
4 |
5 | You will be using the InferenceEngine API from Intel's OpenVino ToolKit to build the project. The gaze estimation model requires three inputs:
6 |
7 | * The head pose
8 | * The left eye image
9 | * The right eye image.
10 |
11 | 
12 |
13 | To get these inputs, you will have to use three other OpenVino models:
14 |
15 | * [Face Detection](https://docs.openvinotoolkit.org/latest/_models_intel_face_detection_adas_binary_0001_description_face_detection_adas_binary_0001.html)
16 | * [Head Pose Estimation](https://docs.openvinotoolkit.org/latest/_models_intel_head_pose_estimation_adas_0001_description_head_pose_estimation_adas_0001.html)
17 | * [Facial Landmarks Detection](https://docs.openvinotoolkit.org/latest/_models_intel_landmarks_regression_retail_0009_description_landmarks_regression_retail_0009.html)
18 |
19 | ### The Pipeline:
20 | You will have to coordinate the flow of data from the input, and then amongst the different models and finally to the mouse controller. The flow of data will look like this:
21 |
22 | 
23 |
24 | ## Project Set Up and Installation:
25 |
26 | Step1: Download below three softwares:
27 | 1. Microsoft Visual Studio* with C++ 2019, 2017, or 2015 with MSBuild
28 | 2. CMake 3.4 or higher 64-bit
29 | NOTE: If you want to use Microsoft Visual Studio 2019, you are required to install CMake 3.14.
30 | 3. Python 3.6.5 64-bit
31 |
32 | Step2. Download **[OpenVino Toolkit 2020.1](https://docs.openvinotoolkit.org/latest/index.html)** with all the prerequisites by following this [installation guide](https://docs.openvinotoolkit.org/2020.1/_docs_install_guides_installing_openvino_windows.html)
33 |
34 | Step3: Setup OpenVino Toolkit using below command in command prompt
35 | ```
36 | cd C:\Program Files (x86)\IntelSWTools\openvino\bin\
37 | setupvars.bat
38 | ```
39 |
40 | Step4: Configure the Model Optimizer using the below commands in the command prompt
41 | ```
42 | cd C:\Program Files (x86)\IntelSWTools\openvino\deployment_tools\model_optimizer\install_prerequisites
43 | install_prerequisites.bat
44 | ```
45 |
46 | Step5: Verify the installation
47 | ```
48 | cd C:\Program Files (x86)\IntelSWTools\openvino\deployment_tools\demo\
49 | demo_squeezenet_download_convert_run.bat
50 | ```
51 | Above command should give output like this image
52 | 
53 |
54 |
55 | ## Demo:
56 |
57 | Step1. Clone the Repository using `git clone https://github.com/bhadreshpsavani/Computer-Pointer-Controller.git`
58 |
59 | Step2. Create Virtual Environment using command `python -m venv base` in the command prompt, then activate environment using below command,
60 | ```
61 | cd base/Scripts/
62 | activate
63 | ```
64 |
65 | Step3. Install all the dependencies using `pip install -r requirements.txt`.
66 |
67 | Step4. Instantiate OpenVino Environment. For windows use below command
68 | ```
69 | cd C:\Program Files (x86)\IntelSWTools\openvino\bin\
70 | setupvars.bat
71 | ```
72 |
73 | Step5. Go back to the project directory `src` folder
74 | ```
75 | cd path_of_project_directory
76 | cd src
77 | ```
78 |
79 | Step6. Run below commands to execute the project
80 | ```
81 | python main.py -fd ../intel/face-detection-adas-binary-0001/FP32-INT1/face-detection-adas-binary-0001.xml \
82 | -lr ../intel/landmarks-regression-retail-0009/FP32-INT8/landmarks-regression-retail-0009.xml \
83 | -hp ../intel/head-pose-estimation-adas-0001/FP32-INT8/head-pose-estimation-adas-0001.xml \
84 | -ge ../intel/gaze-estimation-adas-0002/FP32-INT8/gaze-estimation-adas-0002.xml \
85 | -i ../bin/demo.mp4 -flags ff fl fh fg
86 | ```
87 | Command Line Argument Information:
88 | - fd : Specify path of xml file of face detection model
89 | - lr : Specify path of xml file of landmark regression model
90 | - hp : Specify path of xml file of Head Pose Estimation model
91 | - ge : Specify path of xml file of Gaze Estimation model
92 | - i : Specify path of input Video file or cam for Webcam
93 | - flags (Optional): if you want to see a preview video in a separate window, you need to specify one or more flags from ff, fl, fh, fg like -flags ff fl... (space separated if multiple values). ff for faceDetectionModel, fl for landmarkRegressionModel, fh for headPoseEstimationModel, fg for gazeEstimationModel
94 | - probs (Optional): if you want to specify confidence threshold for face detection, you can specify the value here in range(0, 1), default=0.6
95 | - d (Optional): Specify the device for inference; the device can be CPU, GPU, FPGA, or MYRIAD
96 | - o : Specify path of output folder where we will store results
97 |
98 | ## Documentation:
99 |
100 | ### Project Structure:
101 |
102 | 
103 |
104 | intel: This folder contains models in IR format downloaded from Openvino Model Zoo
105 |
106 | src: This folder contains model files, pipeline file(main.py) and utilities
107 | * `model.py` is the model class file which has common property of all the other model files. It is inherited by all the other model files
108 | This folder has 4 model class files, This class files has methods to load model and perform inference.
109 | * `face_detection_model.py`
110 | * `gaze_estimation_model.py`
111 | * `landmark_detection_model.py`
112 | * `head_pose_estimation_model.py`
113 | * `main.py` file used to run complete pipeline of project. It calls has object of all the other class files in the folder
114 | * `mouse_controller.py` is a utility to move the mouse cursor based on mouse coordinates received from the `gaze_estimation_model` class predict method.
115 | * `input_feeder.py` is a utility to load a local video file or webcam feed
116 | * `banchmark.ipynb`, `computer_controller_job.sh`, and `PrecisionComparision.ipynb` are for benchmarking result generation for different hardware and model comparison
117 |
118 | bin: this folder has `demo.mp4` file which can be used to test model
119 |
120 | ## Benchmarks
121 | I have checked `Inference Time`, `Model Loading Time`, and `Frames Per Second` for the `FP16`, `FP32`, and `FP32-INT8` precisions of all the models except the `Face Detection Model`. The `Face Detection Model` was only available in `FP32-INT1` precision.
122 | You can use below commands to get results for respective precisions,
123 |
124 | `FP16`:
125 | ```
126 | python main.py -fd ../intel/face-detection-adas-binary-0001/FP32-INT1/face-detection-adas-binary-0001.xml -lr ../intel/landmarks-regression-retail-0009/FP16/landmarks-regression-retail-0009.xml -hp ../intel/head-pose-estimation-adas-0001/FP16/head-pose-estimation-adas-0001.xml -ge ../intel/gaze-estimation-adas-0002/FP16/gaze-estimation-adas-0002.xml -d CPU -i ../bin/demo.mp4 -o results/FP16/ -flags ff fl fh fg
127 | ```
128 |
129 | `FP32`:
130 | ```
131 | python main.py -fd ../intel/face-detection-adas-binary-0001/FP32-INT1/face-detection-adas-binary-0001.xml -lr ../intel/landmarks-regression-retail-0009/FP32/landmarks-regression-retail-0009.xml -hp ../intel/head-pose-estimation-adas-0001/FP32/head-pose-estimation-adas-0001.xml -ge ../intel/gaze-estimation-adas-0002/FP32/gaze-estimation-adas-0002.xml -d CPU -i ../bin/demo.mp4 -o results/FP32/ -flags ff fl fh fg
132 | ```
133 |
134 | `FP32-INT8`:
135 | ```
136 | python main.py -fd ../intel/face-detection-adas-binary-0001/FP32-INT1/face-detection-adas-binary-0001.xml -lr ../intel/landmarks-regression-retail-0009/FP32-INT8/landmarks-regression-retail-0009.xml -hp ../intel/head-pose-estimation-adas-0001/FP32-INT8/head-pose-estimation-adas-0001.xml -ge ../intel/gaze-estimation-adas-0002/FP32-INT8/gaze-estimation-adas-0002.xml -d CPU -i ../bin/demo.mp4 -o results/FP32-INT8/ -flags ff fl fh fg
137 | ```
138 |
139 | ### Inference Time:
140 |
141 |
142 | ### Model Loading Time:
143 |
144 |
145 | ### Frames Per Second:
146 |
147 |
148 | **Synchronous Inference**
149 |
150 | ```
151 | precisions = ['FP16', 'FP32', 'FP32-INT8']
152 | Inference Time : [26.6, 26.4, 26.9]
153 | fps : [2.218045112781955, 2.234848484848485, 2.193308550185874]
154 | Model Load Time : [1.6771371364593506, 1.6517729759216309, 5.205628395080566]
155 | ```
156 |
157 | **Asynchronous Inference**
158 |
159 | ```
160 | precisions = ['FP16', 'FP32', 'FP32-INT8']
161 | Inference Time : [23.9, 24.7, 24.0]
162 | fps : [2.468619246861925, 2.388663967611336, 2.4583333333333335]
163 | Model Load Time : [0.7770581245422363, 0.7230548858642578, 2.766681432723999]
164 | ```
165 |
166 | ## Results:
167 | * From the above observations we can say that `FP16` has the lowest model loading time and `FP32-INT8` has the highest model loading time; the likely reason for the higher loading time is that the combination of precisions in `FP32-INT8` leads to a larger model weight size.
168 | * For `Inference Time` and `FPS`, `FP32` gives slightly better results. There is not much difference among these three precisions for these two parameters.
169 | * I have tested the models with both Asynchronous Inference and Synchronous Inference; Asynchronous Inference has better results, showing a slight improvement in `inference time` and `FPS`
170 |
171 | ### Edge Cases
172 | * Multiple People Scenario: If we encounter multiple people in the video frame, it will always use and give results for only one face, even though multiple people are detected.
173 | * No Head Detection: it will skip the frame and inform the user
174 |
175 | ### Area of Improvement:
176 | * lighting condition: We might use HSV based pre-processing steps to minimize error due to different lighting conditions
177 |
--------------------------------------------------------------------------------
/bin/demo.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/bin/demo.mp4
--------------------------------------------------------------------------------
/bin/output_video.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/bin/output_video.gif
--------------------------------------------------------------------------------
/imgs/cropped_image.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/cropped_image.jpg
--------------------------------------------------------------------------------
/imgs/fps.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/fps.png
--------------------------------------------------------------------------------
/imgs/fps_a.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/fps_a.png
--------------------------------------------------------------------------------
/imgs/image_classification_script_output_win.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/image_classification_script_output_win.png
--------------------------------------------------------------------------------
/imgs/inference_time.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/inference_time.png
--------------------------------------------------------------------------------
/imgs/inference_time_a.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/inference_time_a.png
--------------------------------------------------------------------------------
/imgs/left_eye.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/left_eye.jpg
--------------------------------------------------------------------------------
/imgs/model_loading_time.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/model_loading_time.png
--------------------------------------------------------------------------------
/imgs/model_loading_time_a.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/model_loading_time_a.png
--------------------------------------------------------------------------------
/imgs/pipeline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/pipeline.png
--------------------------------------------------------------------------------
/imgs/project_structure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/project_structure.png
--------------------------------------------------------------------------------
/imgs/right_eye.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/imgs/right_eye.jpg
--------------------------------------------------------------------------------
/intel/README.md:
--------------------------------------------------------------------------------
1 | # How to Download Model From OpenVino Zoo:
2 |
3 | I have already downloaded four models in this repository from the Intel Model Zoo. These models will work well with **OpenVino Toolkit 2020.1**
4 |
5 | If you want to download pretrained models on your own here are the commands,
6 |
7 | ### Step1. Initialize Openvino Toolkit
8 | ```
9 | cd C:\Program Files (x86)\IntelSWTools\openvino\bin\
10 | setupvars.bat
11 | ```
12 |
13 | ### Step2. Download Models:
14 | Note: in `--output_dir`, UserName indicates your computer User Name, You can also choose any path as `--output_dir`, Make sure to copy paste model in the Project Directory
15 |
16 | 1. gaze-estimation-adas-0002:
17 | ```
18 | python "C:\Program Files (x86)\IntelSWTools\openvino\deployment_tools\open_model_zoo\tools\downloader\downloader.py" --name gaze-estimation-adas-0002 --output_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\models --cache_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\cache
19 | ```
20 | 2. face-detection-adas-binary-0001:
21 | ```
22 | python "C:\Program Files (x86)\IntelSWTools\openvino\deployment_tools\open_model_zoo\tools\downloader\downloader.py" --name face-detection-adas-binary-0001 --output_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\models --cache_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\cache
23 | ```
24 | 3. head-pose-estimation-adas-0001:
25 | ```
26 | python "C:\Program Files (x86)\IntelSWTools\openvino\deployment_tools\open_model_zoo\tools\downloader\downloader.py" --name head-pose-estimation-adas-0001 --output_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\models --cache_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\cache
27 | ```
28 | 4. landmarks-regression-retail-0009:
29 | ```
30 | python "C:\Program Files (x86)\IntelSWTools\openvino\deployment_tools\open_model_zoo\tools\downloader\downloader.py" --name landmarks-regression-retail-0009 --output_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\models --cache_dir C:\Users\UserName\Documents\Intel\OpenVINO\openvino_models\cache
31 | ```
32 |
--------------------------------------------------------------------------------
/intel/face-detection-adas-binary-0001/FP32-INT1/face-detection-adas-binary-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/face-detection-adas-binary-0001/FP32-INT1/face-detection-adas-binary-0001.bin
--------------------------------------------------------------------------------
/intel/gaze-estimation-adas-0002/FP16/gaze-estimation-adas-0002.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/gaze-estimation-adas-0002/FP16/gaze-estimation-adas-0002.bin
--------------------------------------------------------------------------------
/intel/gaze-estimation-adas-0002/FP32-INT8/gaze-estimation-adas-0002.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/gaze-estimation-adas-0002/FP32-INT8/gaze-estimation-adas-0002.bin
--------------------------------------------------------------------------------
/intel/gaze-estimation-adas-0002/FP32/gaze-estimation-adas-0002.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/gaze-estimation-adas-0002/FP32/gaze-estimation-adas-0002.bin
--------------------------------------------------------------------------------
/intel/head-pose-estimation-adas-0001/FP16/head-pose-estimation-adas-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/head-pose-estimation-adas-0001/FP16/head-pose-estimation-adas-0001.bin
--------------------------------------------------------------------------------
/intel/head-pose-estimation-adas-0001/FP32-INT8/head-pose-estimation-adas-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/head-pose-estimation-adas-0001/FP32-INT8/head-pose-estimation-adas-0001.bin
--------------------------------------------------------------------------------
/intel/head-pose-estimation-adas-0001/FP32/head-pose-estimation-adas-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/head-pose-estimation-adas-0001/FP32/head-pose-estimation-adas-0001.bin
--------------------------------------------------------------------------------
/intel/landmarks-regression-retail-0009/FP16/landmarks-regression-retail-0009.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/landmarks-regression-retail-0009/FP16/landmarks-regression-retail-0009.bin
--------------------------------------------------------------------------------
/intel/landmarks-regression-retail-0009/FP16/landmarks-regression-retail-0009.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
14 |
15 |
16 |
17 |
25 |
26 |
27 |
28 |
29 | 1
30 | 3
31 | 48
32 | 48
33 |
34 |
35 | 1
36 | 3
37 | 1
38 | 1
39 |
40 |
41 |
49 |
50 |
51 |
52 |
60 |
61 |
62 |
63 |
64 | 1
65 | 3
66 | 48
67 | 48
68 |
69 |
70 | 1
71 | 3
72 | 1
73 | 1
74 |
75 |
76 |
84 |
85 |
86 |
87 |
95 |
96 |
97 |
98 |
99 |
100 | 1
101 | 3
102 | 48
103 | 48
104 |
105 |
106 | 16
107 | 3
108 | 3
109 | 3
110 |
111 |
112 |
120 |
121 |
122 |
123 |
131 |
132 |
133 |
134 |
135 | 1
136 | 16
137 | 48
138 | 48
139 |
140 |
141 | 1
142 | 16
143 | 1
144 | 1
145 |
146 |
147 |
155 |
156 |
157 |
158 |
163 |
164 |
165 |
166 |
167 | 1
168 | 16
169 | 48
170 | 48
171 |
172 |
173 | 1
174 |
175 |
176 |
184 |
185 |
186 |
187 |
188 |
189 | 1
190 | 16
191 | 48
192 | 48
193 |
194 |
195 |
203 |
204 |
205 |
206 |
214 |
215 |
216 |
217 |
218 | 1
219 | 16
220 | 24
221 | 24
222 |
223 |
224 | 1
225 | 16
226 | 1
227 | 1
228 |
229 |
230 |
238 |
239 |
240 |
241 |
249 |
250 |
251 |
252 |
253 | 1
254 | 16
255 | 24
256 | 24
257 |
258 |
259 | 1
260 | 16
261 | 1
262 | 1
263 |
264 |
265 |
273 |
274 |
275 |
276 |
284 |
285 |
286 |
287 |
288 |
289 | 1
290 | 16
291 | 24
292 | 24
293 |
294 |
295 | 32
296 | 16
297 | 3
298 | 3
299 |
300 |
301 |
309 |
310 |
311 |
312 |
320 |
321 |
322 |
323 |
324 | 1
325 | 32
326 | 24
327 | 24
328 |
329 |
330 | 1
331 | 32
332 | 1
333 | 1
334 |
335 |
336 |
344 |
345 |
346 |
347 |
352 |
353 |
354 |
355 |
356 | 1
357 | 32
358 | 24
359 | 24
360 |
361 |
362 | 1
363 |
364 |
365 |
373 |
374 |
375 |
376 |
377 |
378 | 1
379 | 32
380 | 24
381 | 24
382 |
383 |
384 |
392 |
393 |
394 |
395 |
403 |
404 |
405 |
406 |
407 | 1
408 | 32
409 | 12
410 | 12
411 |
412 |
413 | 1
414 | 32
415 | 1
416 | 1
417 |
418 |
419 |
427 |
428 |
429 |
430 |
438 |
439 |
440 |
441 |
442 | 1
443 | 32
444 | 12
445 | 12
446 |
447 |
448 | 1
449 | 32
450 | 1
451 | 1
452 |
453 |
454 |
462 |
463 |
464 |
465 |
473 |
474 |
475 |
476 |
477 |
478 | 1
479 | 32
480 | 12
481 | 12
482 |
483 |
484 | 64
485 | 32
486 | 3
487 | 3
488 |
489 |
490 |
498 |
499 |
500 |
501 |
509 |
510 |
511 |
512 |
513 | 1
514 | 64
515 | 12
516 | 12
517 |
518 |
519 | 1
520 | 64
521 | 1
522 | 1
523 |
524 |
525 |
533 |
534 |
535 |
536 |
541 |
542 |
543 |
544 |
545 | 1
546 | 64
547 | 12
548 | 12
549 |
550 |
551 | 1
552 |
553 |
554 |
562 |
563 |
564 |
565 |
566 |
567 | 1
568 | 64
569 | 12
570 | 12
571 |
572 |
573 |
581 |
582 |
583 |
584 |
592 |
593 |
594 |
595 |
596 | 1
597 | 64
598 | 6
599 | 6
600 |
601 |
602 | 1
603 | 64
604 | 1
605 | 1
606 |
607 |
608 |
616 |
617 |
618 |
619 |
627 |
628 |
629 |
630 |
631 | 1
632 | 64
633 | 6
634 | 6
635 |
636 |
637 | 1
638 | 64
639 | 1
640 | 1
641 |
642 |
643 |
651 |
652 |
653 |
654 |
662 |
663 |
664 |
665 |
666 |
667 | 1
668 | 64
669 | 6
670 | 6
671 |
672 |
673 | 64
674 | 64
675 | 3
676 | 3
677 |
678 |
679 |
687 |
688 |
689 |
690 |
698 |
699 |
700 |
701 |
702 | 1
703 | 64
704 | 6
705 | 6
706 |
707 |
708 | 1
709 | 64
710 | 1
711 | 1
712 |
713 |
714 |
722 |
723 |
724 |
725 |
730 |
731 |
732 |
733 |
734 | 1
735 | 64
736 | 6
737 | 6
738 |
739 |
740 | 1
741 |
742 |
743 |
751 |
752 |
753 |
754 |
762 |
763 |
764 |
765 |
766 | 1
767 | 64
768 | 6
769 | 6
770 |
771 |
772 | 1
773 | 64
774 | 1
775 | 1
776 |
777 |
778 |
786 |
787 |
788 |
789 |
797 |
798 |
799 |
800 |
801 | 1
802 | 64
803 | 6
804 | 6
805 |
806 |
807 | 1
808 | 64
809 | 1
810 | 1
811 |
812 |
813 |
821 |
822 |
823 |
824 |
832 |
833 |
834 |
835 |
836 |
837 | 1
838 | 64
839 | 6
840 | 6
841 |
842 |
843 | 128
844 | 64
845 | 3
846 | 3
847 |
848 |
849 |
857 |
858 |
859 |
860 |
868 |
869 |
870 |
871 |
872 | 1
873 | 128
874 | 6
875 | 6
876 |
877 |
878 | 1
879 | 128
880 | 1
881 | 1
882 |
883 |
884 |
892 |
893 |
894 |
895 |
900 |
901 |
902 |
903 |
904 | 1
905 | 128
906 | 6
907 | 6
908 |
909 |
910 | 1
911 |
912 |
913 |
921 |
922 |
923 |
924 |
932 |
933 |
934 |
935 |
936 | 1
937 | 128
938 | 6
939 | 6
940 |
941 |
942 | 1
943 | 128
944 | 1
945 | 1
946 |
947 |
948 |
956 |
957 |
958 |
959 |
967 |
968 |
969 |
970 |
971 | 1
972 | 128
973 | 6
974 | 6
975 |
976 |
977 | 1
978 | 128
979 | 1
980 | 1
981 |
982 |
983 |
991 |
992 |
993 |
994 |
1003 |
1004 |
1005 |
1006 |
1007 |
1008 | 1
1009 | 128
1010 | 6
1011 | 6
1012 |
1013 |
1014 | 128
1015 | 1
1016 | 1
1017 | 6
1018 | 6
1019 |
1020 |
1021 |
1029 |
1030 |
1031 |
1032 |
1040 |
1041 |
1042 |
1043 |
1044 | 1
1045 | 128
1046 | 1
1047 | 1
1048 |
1049 |
1050 | 1
1051 | 128
1052 | 1
1053 | 1
1054 |
1055 |
1056 |
1064 |
1065 |
1066 |
1067 |
1072 |
1073 |
1074 |
1075 |
1076 | 1
1077 | 128
1078 | 1
1079 | 1
1080 |
1081 |
1082 | 1
1083 |
1084 |
1085 |
1093 |
1094 |
1095 |
1096 |
1104 |
1105 |
1106 |
1107 |
1108 | 1
1109 | 128
1110 | 1
1111 | 1
1112 |
1113 |
1114 | 1
1115 | 128
1116 | 1
1117 | 1
1118 |
1119 |
1120 |
1128 |
1129 |
1130 |
1131 |
1139 |
1140 |
1141 |
1142 |
1143 | 1
1144 | 128
1145 | 1
1146 | 1
1147 |
1148 |
1149 | 1
1150 | 128
1151 | 1
1152 | 1
1153 |
1154 |
1155 |
1163 |
1164 |
1165 |
1166 |
1174 |
1175 |
1176 |
1177 |
1178 |
1179 | 1
1180 | 128
1181 | 1
1182 | 1
1183 |
1184 |
1185 | 256
1186 | 128
1187 | 1
1188 | 1
1189 |
1190 |
1191 |
1199 |
1200 |
1201 |
1202 |
1210 |
1211 |
1212 |
1213 |
1214 | 1
1215 | 256
1216 | 1
1217 | 1
1218 |
1219 |
1220 | 1
1221 | 256
1222 | 1
1223 | 1
1224 |
1225 |
1226 |
1234 |
1235 |
1236 |
1237 |
1242 |
1243 |
1244 |
1245 |
1246 | 1
1247 | 256
1248 | 1
1249 | 1
1250 |
1251 |
1252 | 1
1253 |
1254 |
1255 |
1263 |
1264 |
1265 |
1266 |
1274 |
1275 |
1276 |
1277 |
1278 | 1
1279 | 256
1280 | 1
1281 | 1
1282 |
1283 |
1284 | 1
1285 | 256
1286 | 1
1287 | 1
1288 |
1289 |
1290 |
1298 |
1299 |
1300 |
1301 |
1309 |
1310 |
1311 |
1312 |
1313 | 1
1314 | 256
1315 | 1
1316 | 1
1317 |
1318 |
1319 | 1
1320 | 256
1321 | 1
1322 | 1
1323 |
1324 |
1325 |
1333 |
1334 |
1335 |
1336 |
1344 |
1345 |
1346 |
1347 |
1348 |
1349 | 1
1350 | 256
1351 | 1
1352 | 1
1353 |
1354 |
1355 | 64
1356 | 256
1357 | 1
1358 | 1
1359 |
1360 |
1361 |
1369 |
1370 |
1371 |
1372 |
1380 |
1381 |
1382 |
1383 |
1384 | 1
1385 | 64
1386 | 1
1387 | 1
1388 |
1389 |
1390 | 1
1391 | 64
1392 | 1
1393 | 1
1394 |
1395 |
1396 |
1404 |
1405 |
1406 |
1407 |
1412 |
1413 |
1414 |
1415 |
1416 | 1
1417 | 64
1418 | 1
1419 | 1
1420 |
1421 |
1422 | 1
1423 |
1424 |
1425 |
1433 |
1434 |
1435 |
1436 |
1444 |
1445 |
1446 |
1447 |
1448 |
1449 | 1
1450 | 64
1451 | 1
1452 | 1
1453 |
1454 |
1455 | 10
1456 | 64
1457 | 1
1458 | 1
1459 |
1460 |
1461 |
1469 |
1470 |
1471 |
1472 |
1480 |
1481 |
1482 |
1483 |
1484 | 1
1485 | 10
1486 | 1
1487 | 1
1488 |
1489 |
1490 | 1
1491 | 10
1492 | 1
1493 | 1
1494 |
1495 |
1496 |
1504 |
1505 |
1506 |
1507 |
1508 | 1
1509 | 10
1510 | 1
1511 | 1
1512 |
1513 |
1514 |
1522 |
1523 |
1524 |
1525 |
1526 | 1
1527 | 10
1528 | 1
1529 | 1
1530 |
1531 |
1532 |
1533 |
1534 |
1535 |
1536 |
1537 |
1538 |
1539 |
1540 |
1541 |
1542 |
1543 |
1544 |
1545 |
1546 |
1547 |
1548 |
1549 |
1550 |
1551 |
1552 |
1553 |
1554 |
1555 |
1556 |
1557 |
1558 |
1559 |
1560 |
1561 |
1562 |
1563 |
1564 |
1565 |
1566 |
1567 |
1568 |
1569 |
1570 |
1571 |
1572 |
1573 |
1574 |
1575 |
1576 |
1577 |
1578 |
1579 |
1580 |
1581 |
1582 |
1583 |
1584 |
1585 |
1586 |
1587 |
1588 |
1589 |
1590 |
1591 |
1592 |
1593 |
1594 |
1595 |
1596 |
1597 |
1598 |
1599 |
1600 |
1601 |
1602 |
1603 |
1604 |
1605 |
1606 |
1607 |
1608 |
1609 |
1610 |
1611 |
1612 |
1613 |
1614 |
1615 |
1616 |
1617 |
1618 |
1619 |
1620 |
1621 |
1622 |
1623 |
1624 |
1625 |
1626 |
1627 |
1628 |
1629 |
1630 |
1631 |
1632 |
1633 |
1634 |
1635 |
1636 |
1637 |
1638 |
1639 |
1640 |
1641 |
1642 |
1643 |
1644 |
1645 |
1646 |
1647 |
1648 |
1649 |
1650 |
1651 |
1652 |
1653 |
1654 |
1655 |
1656 |
1657 |
1658 |
1659 |
1660 |
1661 |
1662 |
1663 |
1664 |
1665 |
1666 |
1667 |
1668 |
1669 |
1670 |
1671 |
--------------------------------------------------------------------------------
/intel/landmarks-regression-retail-0009/FP32-INT8/landmarks-regression-retail-0009.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/landmarks-regression-retail-0009/FP32-INT8/landmarks-regression-retail-0009.bin
--------------------------------------------------------------------------------
/intel/landmarks-regression-retail-0009/FP32/landmarks-regression-retail-0009.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/intel/landmarks-regression-retail-0009/FP32/landmarks-regression-retail-0009.bin
--------------------------------------------------------------------------------
/license.text:
--------------------------------------------------------------------------------
1 |
2 | Copyright 2020 Bhadresh Savani
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
5 |
6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
7 |
8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
9 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | absl-py==0.9.0
2 | astor==0.8.1
3 | backcall==0.2.0
4 | certifi==2020.6.20
5 | cffi==1.14.0
6 | chardet==3.0.4
7 | colorama==0.4.3
8 | conda==4.8.3
9 | conda-package-handling==1.7.0
10 | cryptography==2.9.2
11 | decorator==4.4.2
12 | defusedxml==0.6.0
13 | gast==0.2.2
14 | google-pasta==0.2.0
15 | graphviz==0.8.4
16 | grpcio==1.30.0
17 | h5py==2.10.0
18 | idna==2.6
19 | importlib-metadata==1.7.0
20 | ipython==7.10.2
21 | ipython-genutils==0.2.0
22 | jedi==0.17.1
23 | Keras-Applications==1.0.8
24 | Keras-Preprocessing==1.1.2
25 | Markdown==3.2.2
26 | menuinst==1.4.16
27 | mkl-fft==1.1.0
28 | mkl-random==1.1.1
29 | mkl-service==2.3.0
30 | MouseInfo==0.1.3
31 | mxnet==1.3.1
32 | networkx==2.3
33 | numpy==1.19.1
34 | olefile==0.46
35 | onnx==1.7.0
36 | opencv-python==4.3.0.36
37 | opt-einsum==3.3.0
38 | parso==0.7.0
39 | pickleshare==0.7.5
40 | Pillow==6.2.1
41 | prompt-toolkit==3.0.5
42 | protobuf==3.6.1
43 | PyAutoGUI==0.9.50
44 | pycosat==0.6.3
45 | pycparser==2.20
46 | PyGetWindow==0.0.8
47 | Pygments==2.6.1
48 | PyMsgBox==1.0.8
49 | pyOpenSSL==19.1.0
50 | pyperclip==1.8.0
51 | PyRect==0.1.4
52 | PyScreeze==0.1.26
53 | PySocks==1.7.1
54 | PyTweening==1.0.3
55 | pywin32==227
56 | PyYAML==5.3.1
57 | requests==2.20.0
58 | ruamel-yaml==0.15.87
59 | six==1.15.0
60 | tensorboard==1.15.0
61 | tensorflow==1.15.4
62 | tensorflow-estimator==1.15.1
63 | termcolor==1.1.0
64 | tqdm==4.46.0
65 | traitlets==4.3.3
66 | typing-extensions==3.7.4.2
67 | urllib3==1.24.2
68 | wcwidth==0.2.5
69 | Werkzeug==1.0.1
70 | win-inet-pton==1.1.0
71 | wincertstore==0.2
72 | wrapt==1.12.1
73 | zipp==3.1.0
74 |
--------------------------------------------------------------------------------
/src/.ipynb_checkpoints/PrecisionComparision-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Assess Performance:\n",
8 | "\n",
  9 |     "## Get Performance data from files"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "Inference Time : [24.0, 24.0, 26.9]\n",
22 | "fps : [2.4583333333333335, 2.4583333333333335, 2.193308550185874]\n",
23 | "Model Load Time : [0.8468494415283203, 0.7280545234680176, 5.170256853103638]\n"
24 | ]
25 | }
26 | ],
27 | "source": [
28 | "import matplotlib.pyplot as plt\n",
29 | "\n",
30 | "precision_list = ['FP16', 'FP32', 'FP32-INT8']\n",
31 | "inference_time = []\n",
32 | "model_load_time = []\n",
33 | "fps = []\n",
34 | "\n",
35 | "for precision in precision_list:\n",
36 | " with open('results/'+precision+'/stats.txt', 'r') as f:\n",
37 | " inference_time.append(float(f.readline().split('\\n')[0]))\n",
38 | " fps.append(float(f.readline().split('\\n')[0]))\n",
39 | " model_load_time.append(float(f.readline().split('\\n')[0]))\n",
40 | "\n",
41 | "print(\"Inference Time :\",inference_time)\n",
42 | "print(\"fps :\",fps)\n",
43 | "print(\"Model Load Time :\",model_load_time)"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 2,
49 | "metadata": {},
50 | "outputs": [
51 | {
52 | "data": {
53 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAEGCAYAAABiq/5QAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAWe0lEQVR4nO3deZhldX3n8fcHOwiyiNAtEkTbEKMSBxFbgpExGKMRRQiojDwuoD5BZnQIokZcEiDLE8Zxi9FxbANhkeCKiIpbGBRxAZtFVo0JtoryQGOMbAak+c4f5xTeFFXVp4o+93bVeb+e5z517u/cc8+37u3+3FO/e36/k6pCkjQcm026AEnSeBn8kjQwBr8kDYzBL0kDY/BL0sAsm3QBXSxfvrxWrlw56TIkaVG55JJLbq6qFdPbF0Xwr1y5kjVr1ky6DElaVJL8YKZ2u3okaWAMfkkaGINfkgbG4JekgTH4JWlgDH5JGhiDX5IGxuCXpIEx+CVpYBbFyF1Jm7aVx3520iUsWWtPfO5Gf06P+CVpYAx+SRoYg1+SBsbgl6SBMfglaWAMfkkaGINfkgbG4JekgTH4JWlgDH5JGpjegj/JLknOT3JtkquT/EnbfnySHye5vL09p68aJEn31edcPXcDr6uqS5NsA1yS5EvtundV1dt73LckaRa9BX9V3QDc0C7fmuRaYOe+9idJ6mYsffxJVgJPBC5qm16T5IokJyd5yCzbHJFkTZI169atG0eZkjQIvQd/kq2BTwBHV9UtwPuBXYE9aP4ieMdM21XV6qpaVVWrVqxY0XeZkjQYvQZ/kl+jCf0zquosgKq6sarWV9U9wAeBvfqsQZL0n/V5Vk+Ak4Brq+qdI+07jTzsIOCqvmqQJN1Xn2f1PBV4KXBlksvbtjcDhybZAyhgLfCqHmuQJE3T51k9FwKZYdW5fe1TkrRhjtyVpIEx+CVpYPrs498krDz2s5MuYclae+Jze3le37P+9PWeaXHxiF+SBsbgl6SBMfglaWAMfkkaGINfkgbG4JekgTH4JWlgDH5JGhiDX5IGxuCXpIEx+CVpYDYY/Em2SrJZu/xbSQ5or6wlSVqEuhzxXwBskWRn4Dzg5cApfRYlSepPl+BPVd0BHAz8XVUdBOzWb1mSpL50Cv4kTwFeDEzNl7vkp3OWpKWqS/AfDbwJ+GRVXZ3kN4Dz+y1LktSXDR65V9VXgK+M3L8OOKrPoiRJ/Zk1+JN8GqjZ1lfVAb1UJEnq1VxH/G9vfx4MPAz4UHv/UGBtjzVJkno0a/C3XTwk+cuqetrIqk8nuaD3yiRJvejy5e6K9gtdAJI8CljRX0mSpD51OS3ztcCXk1zX3l8JvKq3iiRJvepyVs/nkzwaeGzb9J2qurPfsiRJfek6EOtJNEf6y4AnJKGqTuutKklSbzYY/ElOB3YFLgfWt80FGPyStAh1OeJfBexWVbOe0y9JWjy6nNVzFc15/JKkJaDLEf9y4JokFwP3fqm7oZG7SXah6Q56GHAPsLqq/jbJ9sBHaL4zWAscUlU/W1D1kqR56xL8xy/wue8GXldVlybZBrgkyZeAw4HzqurEJMcCxwJvXOA+JEnztMGunnYE73eAbdrbtVOjejew3Q1VdWm7fCtwLbAzcCBwavuwU4E/WljpkqSF6HLpxUOAi4EXAocAFyV5wXx2kmQl8ETgImDHqroBmg8H4KGzbHNEkjVJ1qxbt24+u5MkzaFLV89bgCdX1U0ASVYA/wR8vMsOkmwNfAI4uqpuSdKpsKpaDawGWLVqlWcUSdJG0uWsns2mQr/1047b0V6U/RPAGVV1Vtt8Y5Kd2vU7ATfNtr0kaePrEuCfT/KFJIcnOZzm8ouf29BGaQ7tT6L5TuCdI6vOAQ5rlw8DPjW/kiVJ90eXuXrekORgYB8gNKdlfrLDcz8VeClwZZLL27Y3AycCH03ySuCHNN8dSJLGpMuUD
Y8Czp3qqkmyZZKVVbV2ru2q6kKaD4qZPGO+hUqSNo4uXT0foxmANWV92yZJWoS6BP+yqrpr6k67vHl/JUmS+tQl+NcluXd6hiQHAjf3V5IkqU9dzuM/EjgjyftopmO+HnhZr1VJknrT5ayefwX2bgdipZ1+QZK0SHWZsmHHJCcBH6uqW5Ps1p6KKUlahLr08Z8CfAH49fb+PwNH91WQJKlfXYJ/eVV9lPaUzqq6m19dglGStMh0Cf7bk+xA88UuSfYGft5rVZKk3nQ5q+cYmvl1dk3yNWAFMK9pmSVJm44uZ/VcmuT3gMfQTMHw3ar6Ze+VSZJ60eWsnhcCW1bV1TRXy/pIkj17r0yS1Isuffx/1p7GuQ/whzSXS3x/v2VJkvrSJfinzuB5LvD+qvoUztUjSYtWl+D/cZIP0Fxv99wkD+y4nSRpE9QlwA+hGcD17Kr6d2B74A29ViVJ6k2Xs3ruAM4auX8DcEOfRUmS+mOXjSQNjMEvSQNj8EvSwHQZwHVwku8l+XmSW5LcmuSWcRQnSdr4uszV8zbgeVV1bd/FSJL616Wr50ZDX5KWji5H/GuSfAQ4G7hzqrGqzpp9E0nSpqpL8G8L3AE8a6StGDm3X5K0eHQZwPXycRQiSRqPWYM/yZ9W1duS/B3t1bdGVdVRvVYmSerFXEf8U1/orhlHIZKk8Zg1+Kvq0+3PU8dXjiSpb72N3E1ycpKbklw10nZ8kh8nuby9Paev/UuSZtbnlA2nAM+eof1dVbVHezu3x/1LkmbQW/BX1QXAv/X1/JKkhekyV89vJTlvqssmye5J3no/9vmaJFe0XUEPmWO/RyRZk2TNunXr7sfuJEmjuhzxfxB4E/BLgKq6AnjRAvf3fmBXYA+ai7m8Y7YHVtXqqlpVVatWrFixwN1JkqbrEvwPqqqLp7XdvZCdVdWNVbW+qu6h+UDZayHPI0lauC7Bf3OSXWkHcSV5AQu89GKSnUbuHgRcNdtjJUn96DJXz6uB1cBjk/wY+D7wkg1tlORMYF9geZLrgeOAfZPsQfMhshZ41cLKliQtVJe5eq4D/iDJVsBmVXVrlyeuqkNnaD5pnvVJkjayDQZ/ku2AlwErgWVJAOfqkaTFqktXz7nAN4ErgXv6LUeS1Lcuwb9FVR3TeyWSpLHoclbP6Un+OMlOSbafuvVemSSpF12O+O8C/jfwFn41L38Bv9FXUZKk/nQJ/mOA36yqm/suRpLUvy5dPVfTXHNXkrQEdDniXw9cnuR84M6pRk/nlKTFqUvwn93eJElLQJeRu156UZKWkFmDP8lHq+qQJFfyq7N57lVVu/damSSpF3Md8b+r/bn/OAqRJI3HXMH/PmDPqvrBuIqRJPVvrtM5M7YqJEljM9cR/85J3jPbSk/nlKTFaa7g/wVwybgKkSSNx1zB/1NP5ZSkpWeuPv67xlaFJGlsZg3+qtp7nIVIksajyyRtkqQlxOCXpIHpFPxJ9kny8nZ5RZJH9VuWJKkvGwz+JMcBbwTe1Db9GvChPouSJPWnyxH/QcABwO0AVfUTYJs+i5Ik9adL8N9VVUU7Q2eSrfotSZLUpy7B/9EkHwC2S/LHwD8BH+y3LElSX7pciOXtSZ4J3AI8BvjzqvpS75VJknqxweBvz+D56lTYJ9kyycqqWtt3cZKkja9LV8/HgHtG7q9v2yRJi1CX4F9WVffO29Mub95fSZKkPnUJ/nVJDpi6k+RA4OYNbZTk5CQ3JblqpG37JF9K8r3250MWVrYkaaG6BP+RwJuT/DDJj2gGc72qw3anAM+e1nYscF5VPRo4r70vSRqjLmf1/Cuwd5KtgVTVrV2euKouSLJyWvOBwL7t8qnAl2k+SCRJY9LlrJ4HAs8HVgLLkuZSvFX1FwvY345VdUO7/Q1JHjrHfo8AjgB4xCMesYBdSZJm0qWr51M0R+p300zbMHXrVVWtrqpVVbVqxYoVfe9OkgZjg0f8wMOranpf/ULdmGSn9mh/J+CmjfS8k
qSOuhzxfz3Jf9lI+zsHOKxdPozmrwlJ0hh1OeLfBzg8yfeBO4EAVVW7z7VRkjNpvshdnuR64DjgRJq5f14J/BB44f2oXZK0AF2Cf7+FPHFVHTrLqmcs5PkkSRvHBrt6quoHwC7A77fLd3TZTpK0afIKXJI0MF6BS5IGxitwSdLAeAUuSRqYOc/qSTM/w0eAx+IVuCRpSZgz+KuqkpxdVU8CDHtJWgK6dPV8M8mTe69EkjQWXQZwPR04MslamjN7Oo3clSRtmnobuStJ2jQ5cleSBsaRu5I0MI7claSBceSuJA2MI3claWBmPasnyQOr6s6qenuSZ+LIXUlaEuY6nfMbwJ5JTq+ql+LIXUlaEuYK/s2THAb8bpKDp6+sqrP6K0uS1Je5gv9I4MXAdsDzpq0rwOCXpEVo1uCvqguBC5OsqaqTxliTJKlHG5yyoapOSvK7wMrRx1fVaT3WJUnqyQaDP8npwK7A5cD6trkAg1+SFqEuk7StAnZrB3FJkha5LgO4rgIe1nchkqTx6HLEvxy4JsnFwJ1TjVV1QG9VSZJ60yX4j++7CEnS+HQ5q+cr4yhEkjQec83VcyvtjJzTV9FcenHb3qqSJPVmrgFczrkvSUtQlz7+ja69cPutNOMC7q6qVZOoQ5KGaCLB33p6Vd08wf1L0iB50XRJGphJBX8BX0xySZIjZnpAkiOSrEmyZt26dWMuT5KWrkkF/1Orak9gP+DVSZ42/QFVtbqqVlXVqhUrVoy/QklaoiYS/FX1k/bnTcAngb0mUYckDdHYgz/JVkm2mVoGnkUzH5AkaQwmcVbPjsAnk0zt/x+r6vMTqEOSBmnswV9V1wFPGPd+JUkNT+eUpIEx+CVpYAx+SRoYg1+SBsbgl6SBMfglaWAMfkkaGINfkgbG4JekgTH4JWlgDH5JGhiDX5IGxuCXpIEx+CVpYAx+SRoYg1+SBsbgl6SBMfglaWAMfkkaGINfkgbG4JekgTH4JWlgDH5JGhiDX5IGxuCXpIEx+CVpYAx+SRoYg1+SBsbgl6SBMfglaWAmEvxJnp3ku0n+Jcmxk6hBkoZq7MGf5AHA+4D9gN2AQ5PsNu46JGmoJnHEvxfwL1V1XVXdBXwYOHACdUjSIC2bwD53Bn40cv964HemPyjJEcAR7d3bknx3DLVtCpYDN0+6iC7yvyZdwSZh0bxf4HvWGtJ79siZGicR/Jmhre7TULUaWN1/OZuWJGuqatWk61A3vl+Lj+/ZZLp6rgd2Gbn/cOAnE6hDkgZpEsH/LeDRSR6VZHPgRcA5E6hDkgZp7F09VXV3ktcAXwAeAJxcVVePu45N2OC6txY536/FZ/DvWaru070uSVrCHLkrSQNj8EvSwBj8PUuyPsnlI7eVSfZN8vMklyW5Nslx7WN3SHJ+ktuSvHfa82yeZHWSf07ynSTPn8xvtPTN8z3ba+Rx305yUNv+oCSfbd+rq5OcONnfavEY9+ufZG2S5e1yJXnHyLrXJzk+yVtG9jNa31FJHtH+v70syRVJntP3a3S/VZW3Hm/AbTO07Qt8pl3eCvge8KR2eR/gSOC907Y5AfirdnkzYPmkf7elepvne/YgYFnbvhNwE81JEw8Cnt62bw58Fdhv0r/bYriN+/UH1k79fwL+A/j+yP3XA8fPVR/Nl8X/vV3eDVg76ddwQzeP+Cesqm4HLgF2rarbq+pCmn98070C+Jt2m3uqatGMPFxqpr1nd1TV3e2qLWgHI7bt57fLdwGX0oxZ0f3U8+t/N02Qv3Y+JQHbtssPZhGMSzL4+7flyJ+Fn5y+MskOwN7ArKe0JtmuXfzLJJcm+ViSHXuqV/N8z5L8TpKrgSuBI0eCaOrx2wHPA87rv/QlYdKv//uAFyd5cMfHHw+8JMn1wLnA/+y43cRMYsqGoflFVe0xQ/t/TXIZcA9wYs09lmEZzdHK16rqmCTHAG8HXrrxyxXzfM+q6iLgt5M8Djg1yeeq6j8AkiwDzgTeU1XXjan+x
W6ir39V3ZLkNOAo4BcdNjkUOKWq3pHkKcDpSR5fVfd02d8kGPyT89Wq2r/jY38K3AFMHf18DHhlL1VpLnO+Z1V1bZLbgccDa9rm1cD3qurd4yhwibvfr3+aaeEvadedU1V/PsvTvZume+gfOtT1SuDZbQ3fSLIFzURwN3XYdiLs6lkEqvnW6NM0X3ABPAO4ZmIF6V7t1CPL2uVHAo+h+bKQJH9F0+d79MQKXOLm+/pX1fqq2qO9zRb6VNW/AR+l2wHWD2n+T9L+1bEFsG5Bv9CYeMS/iUmyluaLos2T/BHwrKq6BngjzZ+Q76b5R/XyyVWpEfsAxyb5JU0XxP+oqpuTPBx4C/Ad4NIk0Jyp9feTK3VJ6vP1fwfwmg6Pex3wwSSvpfmi9/D2YG2T5ZQNkjQwdvVI0sAY/JI0MAa/JA2MwS9JA2PwS9LAGPwam3bmw9NH7i9Lsi7JZ+b5PPfOpjjfx7TtV7YzOX4xycPms+9Z9nVkkpfNsf6AJMduhP18OckfTms7Osn/2cA2g76wuO7L4Nc43Q48PsmW7f1nAj+eQB1Pr6on0IzufPPoijTm9f+iqv5vVZ02x/pzqmpjTMt8Js01qke9qG2XOjP4NW6fA57bLh/KSGgl2T7J2e2c5t9MsnvbvkN7dH5Zkg8AGdnmJUkubif0+kA7JL+rC4DfTDPf+7XtkfOlwC5J3pDkW20tJ4zs72Vt27en/npJM1/769vlo5Jc0z7mw23b4Wmvr5DkkUnOa9efl+QRbfspSd6T5OtJrkvyghnq/Tiwf5IHttusBH4duDDJ+5OsSTP3/AkzbEuS20aWX5DklHZ5RZJPtL/vt5I8dR6voRYhg1/j9mHgRe18JrsDF42sOwG4rKp2pzkSnzqKPg64sKqeCJwDTIXl44D/Bjy1ndRrPfDiedSyP82MjtAM9T+t3cdjgEcDewF7AE9K8rQkv00zGvT3278Y/mSG5zwWeGL7Oxw5w/r3tvvZHTgDeM/Iup1oRqLuD9znL4Sq+ilwMe28MDRH+x9pR4m+papW0bymvzf1odnR3wLvqqonA88HHF28xDllg8aqqq5oj1QPpZnCdtQ+NMFDVf2/9kj/wcDTgIPb9s8m+Vn7+GfQXIzjW+2Q/C3pNjHW+UnWA1cAbwW2A35QVd9s1z+rvV3W3t+a5oPgCcDHp66F0M7nMt0VwBlJzgbOnmH9U6Z+F+B04G0j685uZ3S8JrNPuz3V3fOp9ucr2vZDkhxB8396J5oLglwxy3NM9wfAbu1rCLBtkm2q6taO22uRMfg1CefQTCu9L7DDSHtmeGxN+zkqwKlV9aZ57v/poxeySTNf++3TnvdvquoD/2lnyVGz1DHquTQfVAcAf9b+lTCX0ee7c1oNMzkbeGeSPYEtq+rSJI+iuVLUk6vqZ20XzhYb2Nfo+s2Ap1RVlymItQTY1aNJOBn4i6q6clr7BbRdNUn2BW6uqlumte8HPKR9/HnAC5I8tF23fTtD4/31BeAVSbZun3fndh/n0RxZ7zC1v9GN2i+Fd2mv/PSnNH9JbD3tub/Or76gfTFw4XwKq6rbgC/TvIZT349sS/PB9fP2L4X9Ztn8xiSPa+s8aKT9i4xMRpZkprnwtYR4xK+xq6rrafqVpzse+IckV9Bcf+Cwtv0E4MwklwJfoZkGl6q6JslbgS+2YfZL4NXAD+5nfV9svz/4Rtv9cRvwkqq6OslfA19pu4ouAw4f2fQBwIfa7qnQ9Jv/+0gXCjQX9zg5yRtY+CyrZwJn0X6AVNW301yg5GrgOuBrs2x3LPAZ4EfAVfzqQ+ko4H3t676M5oN2pu8ntEQ4O6ckDYxdPZI0MAa/JA2MwS9JA2PwS9LAGPySNDAGvyQNjMEvSQPz/wHgGYBwU61HjgAAAABJRU5ErkJggg==\n",
54 | "text/plain": [
55 | ""
56 | ]
57 | },
58 | "metadata": {
59 | "needs_background": "light"
60 | },
61 | "output_type": "display_data"
62 | }
63 | ],
64 | "source": [
65 | "plt.bar(precision_list, inference_time)\n",
66 | "plt.xlabel('Model Precision Value')\n",
67 | "plt.ylabel('Inference Time in seconds')\n",
68 | "plt.show()"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": 3,
74 | "metadata": {},
75 | "outputs": [
76 | {
77 | "data": {
78 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAWv0lEQVR4nO3debRlZX2n8ecrg2IQaahqIUxFDMsWXIBQILS0gdgmoERaJQpxaIfVBKMiTh0copjEpemltk0wQqVFRV3YKoaUAoIiKqgMRTGDKKExVKBDgZFZseDXf+x99Xi599YuqvY53Lufz1pn3b3fPf3uOVX3e/b07lQVkqThetykC5AkTZZBIEkDZxBI0sAZBJI0cAaBJA3cxpMuYF0tWrSolixZMukyJGleueyyy+6oqsUzTZt3QbBkyRJWrFgx6TIkaV5J8pPZpnloSJIGziCQpIHrLQiS7JDk/CTXJ7k2yZtnmOfAJHcluaJ9vbeveiRJM+vzHMEa4G1VtTLJk4DLknyjqq6bNt8FVXVoj3VIkubQ2x5BVd1WVSvb4XuA64Ht+tqeJOnRGcs5giRLgGcCF88wef8kVyY5O8lusyx/VJIVSVasXr26x0olaXh6D4IkmwOnA8dW1d3TJq8EdqqqPYC/Bc6YaR1VtayqllbV0sWLZ7wMVpL0KPUaBEk2oQmBz1fVV6ZPr6q7q+redvgsYJMki/qsSZL0m/q8aijAJ4Hrq+qjs8yzTTsfSfZt67mzr5okSY/U51VDzwZeCVyd5Iq27V3AjgBVdRJwOPD6JGuAB4Ajqscn5Sw57sy+Vj14N3/oBb2s18+sP319Zpp/eguCqroQyFrmORE4sa8aJElr553FkjRwBoEkDZxBIEkDZxBI0sAZBJI0cAaBJA2cQSBJA2cQSNLAGQSSNHAGgSQNnEEgSQNnEEjSwBkEkjRwBoEkDZxBIEkDZxBI0sAZBJI0cAaBJA2cQSBJA2cQSNLA9fbweknDtOS4MyddwoJ184de0Mt63SOQpIEzCCRp4AwCSRo4g0CSBs4gkKSBMwgkaeAMAkkaOINAkgbOIJCkgTMIJGngDAJJGjiDQJIGrrcgSLJDkvOTXJ/k2iRvnmGeJDkhyY1JrkqyV1/1SJJm1mfvo2uAt1XVyiRPAi5L8o2qum5knkOAXdrXs4BPtD8lSWPS2x5BVd1WVSvb4XuA64Htps12GHBqNS4CtkyybV81SZIeaSznCJIsAZ4JXDxt0nbALSPjq3hkWJDkqCQrkqxYvXp1X2VK0iD1HgRJNgdOB46tqrunT55hkXpEQ9WyqlpaVUsXL17cR5mSNFi9BkGSTWhC4PNV9ZUZZlkF7DAyvj1wa581SZJ+U59XDQX4JHB9VX10ltmWA69qrx7aD7irqm7rqyZJ0iP1edXQs4FXAlcnuaJtexewI0BVnQScBTwfuBG4H3hNj/VIkmbQWxBU1YXMfA5gdJ4C3tBXDZKktfPOYkkaOINAkgbOIJCkgTMIJGngDAJJGjiDQJIGziCQpIEzCCRp4AwCSRq4We8sTvJVZugJdEpVvbCXiiRJYzVXFxMfbn++GNgG+Fw7fiRwc481SZLGaNYgqKrvACT5q6p6zsikryb5bu+VSZLGoss5gsVJfmdqJMnOgE+HkaQFokvvo28Bvp3kpnZ8CfCnvVUkSRqrtQZBVX09yS7Af2ibflhVv+i3LEnSuHR9HsHeNHsCGwN7JKGqTu2tKknS2Kw1CJJ8FngqcAXwUNtcgEEgSQtAlz2CpcCu7dPEJEkLTJerhq6huY9AkrQAddkjWARcl+QS4Fcnib2zWJIWhi5BcHzfRUiSJqfL5aPfSfIUYJ+26ZKqur3fsiRJ47LWcwRJXgpcAvwx8FLg4iSH912YJGk8uhwaejewz9ReQJLFwDeBL/dZmCRpPLpcNfS4aYeC7uy4nCRpHuiyR/D1JOcAp7XjLwPO7
q8kSdI4dTlZ/I4kLwYOAAIsq6p/6L0ySdJYdOliYmfgrKr6Sju+WZIlVXVz38VJkvrX5Vj/l4CHR8YfatskSQtAlyDYuKoenBpphzftryRJ0jh1CYLVSX7VnUSSw4A7+itJkjROXa4aOhr4fJKP03Q/vQp4Va9VSZLGpstVQ/8E7JdkcyBVdU//ZUmSxqVLFxNPSfJJ4EtVdU+SXZO8rsNypyS5Pck1s0w/MMldSa5oX+99FPVLktZTl3MEnwbOAX67Hf8RcGzH5Q5eyzwXVNWe7esvO6xTkrSBdQmCRVX1RdpLSKtqDb9+ZOWsquq7wE/XrzxJUt+6BMF9SbamOVFMkv2AuzbQ9vdPcmWSs5PsNttMSY5KsiLJitWrV2+gTUuSoNtVQ28FlgNPTfI9YDGwIbqhXgnsVFX3Jnk+cAawy0wzVtUyYBnA0qVLfXayJG1AXa4aWpnk94Cn0fQ1dENV/XJ9N1xVd48Mn5Xk75IsqirvUZCkMZr10FCSfZJsA786L7A38AHgI0m2Wt8NJ9kmSdrhfdta7lzf9UqS1s1c5whOBh4ESPIc4EPAqTTnB5atbcVJTgN+ADwtyaokr0tydJKj21kOB65JciVwAnBEVXnYR5LGbK5DQxtV1dRVPy+j6X76dOD0JFesbcVVdeRapp8InNi5UklSL+baI9goyVRQPBf41si0LieZJUnzwFx/0E8DvpPkDuAB4AKAJL/Lhrt8VJI0YbMGQVV9IMl5wLbAuSPH7x8HvGkcxUmS+jfnIZ6qumiGth/1V44kady63FksSVrADAJJGrg5gyDJRkm+Oa5iJEnjN2cQVNVDwP1JnjymeiRJY9blfoCfA1cn+QZw31RjVR3TW1WSpLHpEgRnti9J0gLUpffRzyTZDNixqm4YQ02SpDHq8sziPwKuAL7eju+ZZHnfhUmSxqPL5aPHA/sCPwOoqiuAnXusSZI0Rl2CYE1VTe9byO6iJWmB6HKy+Jokf0LTG+kuwDHA9/stS5I0Ll32CN4E7Ab8gqZH0ruBY/ssSpI0Pl2uGrofeHeSv2lG657+y5IkjUuXq4b2SXI1cBXNjWVXJtm7/9IkSePQ5RzBJ4E/q6qpB9McAHwK2L3PwiRJ49HlHME9UyEAUFUXAh4ekqQFossewSVJTqY5UVw0D7L/dpK9AKpqZY/1SZJ61iUI9mx/vm9a+3+kCYbf36AVSZLGqstVQweNoxBJ0mT4hDJJGjiDQJIGziCQpIHrckPZHyd5Ujv8niRfmbpiSJI0/3XZI/iLqrqnvZHsD4HPAJ/otyxJ0rh0CYKH2p8vAD5RVf8IbNpfSZKkceoSBP/S3lD2UuCsJI/vuJwkaR7o8gf9pcA5wMFV9TNgK+AdvVYlSRqbtQZB2w317cABbdMa4Md9FiVJGp8uVw29D/hz4J1t0ybA5/osSpI0Pl0ODb0IeCFwH0BV3Qo8aW0LJTklye1JrpllepKckOTGJFd5SaokTUaXIHiwqor2gfVJfqvjuj8NHDzH9EOAXdrXUXhJqiRNRJcg+GJ71dCWSf4b8E3g79e2UFV9F/jpHLMcBpxajYva9W/bpWhJ0obTpffRDyd5Hs1D658GvLeqvrEBtr0dcMvI+Kq27bYNsG5JUkddnkdAVX0jycVT8yfZqqrm+rbfRWba1IwzJkfRHD5ixx13XM/NSpJGdblq6E+T/CvNw+tXAJe1P9fXKmCHkfHtgVtnmrGqllXV0qpaunjx4g2waUnSlC57BG8HdquqOzbwtpcDb0zyBeBZwF1V5WEhSRqzLkHwT8D967riJKcBBwKLkqyiedTlJgBVdRJwFvB84MZ2/a9Z121IktZflyB4J/D99hzBL6Yaq+qYuRaqqiPXMr2AN3QpUpLUny5BcDLwLeBq4OF+y5EkjVuXIFhTVW/tvRJJ0kR0uaHs/CRHJdk2yVZTr94rkySNRZc9gj9pf75zpK2A39nw5UiSxq3LncU7j6MQSdJkdLqzOMkzgF2BJ
0y1VdWpfRUlSRqftQZB+zyCA2mC4CyaXkMvBAwCSVoAupwsPhx4LvD/quo1wB7A43utSpI0Nl2C4IGqehhYk2QLmsdWeqJYkhaILucIViTZkuYZBJcB9wKX9FqVJGls5gyCJAE+WFU/A05K8nVgi6q6aizVSZJ6N+ehobY/oDNGxm82BCRpYelyjuCiJPv0XokkaSK6nCM4CDg6yc3AfTRPFquq2r3PwiRJ4zFrECTZsar+mea+AUnSAjXXHsEZwF5V9ZMkp1fVS8ZVlCRpfOY6RzD6cHnvG5CkBWquIKhZhiVJC8hch4b2SHI3zZ7BZu0w/Ppk8Ra9VydJ6t2sQVBVG42zEEnSZHS5j0CStIAZBJI0cAaBJA2cQSBJA2cQSNLAGQSSNHAGgSQNnEEgSQNnEEjSwBkEkjRwBoEkDZxBIEkDZxBI0sAZBJI0cL0GQZKDk9yQ5MYkx80w/cAkdyW5on29t896JEmPNNeDadZLko2AjwPPA1YBlyZZXlXXTZv1gqo6tK86JElz63OPYF/gxqq6qaoeBL4AHNbj9iRJj0KfQbAdcMvI+Kq2bbr9k1yZ5Owku820oiRHJVmRZMXq1av7qFWSBqvPIMgMbTVtfCWwU1XtAfwtcMZMK6qqZVW1tKqWLl68eAOXKUnD1mcQrAJ2GBnfHrh1dIaquruq7m2HzwI2SbKox5okSdP0GQSXArsk2TnJpsARwPLRGZJskyTt8L5tPXf2WJMkaZrerhqqqjVJ3gicA2wEnFJV1yY5up1+EnA48Poka4AHgCOqavrhI0lSj3oLAvjV4Z6zprWdNDJ8InBinzVIkubmncWSNHAGgSQNnEEgSQNnEEjSwBkEkjRwBoEkDZxBIEkDZxBI0sAZBJI0cAaBJA2cQSBJA2cQSNLAGQSSNHAGgSQNnEEgSQNnEEjSwBkEkjRwBoEkDZxBIEkDZxBI0sAZBJI0cAaBJA2cQSBJA2cQSNLAGQSSNHAGgSQNnEEgSQNnEEjSwBkEkjRwBoEkDZxBIEkDZxBI0sAZBJI0cL0GQZKDk9yQ5MYkx80wPUlOaKdflWSvPuuRJD1Sb0GQZCPg48AhwK7AkUl2nTbbIcAu7eso4BN91SNJmlmfewT7AjdW1U1V9SDwBeCwafMcBpxajYuALZNs22NNkqRpNu5x3dsBt4yMrwKe1WGe7YDbRmdKchTNHgPAvUlu2LClPmYtAu6YdBFd5G8mXcFjhp/Z/DJvPi9Y789sp9km9BkEmaGtHsU8VNUyYNmGKGo+SbKiqpZOug5152c2v/h5Nfo8NLQK2GFkfHvg1kcxjySpR30GwaXALkl2TrIpcASwfNo8y4FXtVcP7QfcVVW3TV+RJKk/vR0aqqo1Sd4InANsBJxSVdcmObqdfhJwFvB84EbgfuA1fdUzTw3ucNgC4Gc2v/h5Aal6xCF5SdKAeGexJA2cQSBJA2cQjFmSh5JcMfJakuTAJHcluTzJ9Une1867dZLzk9yb5MRp69k0ybIkP0rywyQvmcxvtLCt4+e178h8VyZ5Udv+xCRntp/TtUk+NNnfav4Y9/uf5OYki9rhSvKRkWlvT3J8knePbGe0vmOS7Nj+n7287Tbn+X2/RxtEVfka4wu4d4a2A4GvtcO/BfwY2LsdPgA4Gjhx2jLvB/66HX4csGjSv9tCfK3j5/VEYOO2fVvgdpoLMp4IHNS2bwpcABwy6d9tPrzG/f4DN0/9XwJ+DvzfkfG3A8fPVR/NyefXt8O7AjdP+j3s8nKP4DGmqu4DLgOeWlX3VdWFNP8gp3st8MF2mYerat7cHbmQTPu87q+qNe2kJ9DeHNm2n98OPwispLlnRuup5/d/Dc0f9resS0nAFu3wk5kn90UZBOO32ciu5D9Mn5hka2A/4NrZVpBky3bwr5KsTPKlJE/pqd6hW6fPK8mzklwLXA0cPfKHaWr+LYE/As7rv/QFYdLv/8eBlyd5csf5jwdekWQVzeXxb+q43ET12cWEZ
vZAVe05Q/t/SnI58DDwoaqaNQhoPrftge9V1VuTvBX4MPDKDV/u4K3T51VVFwO7JXk68JkkZ1fVzwGSbAycBpxQVTeNqf75bqLvf1XdneRU4BjggQ6LHAl8uqo+kmR/4LNJnlFVD3fZ3qQYBI8dF1TVoR3nvZPmBrypb0hfAl7XS1WazZyfV1Vdn+Q+4BnAirZ5GfDjqvrYOApc4Nb7/U/TVf5l7bTlVfXeWVb3MZrDSZ/qUNfrgIPbGn6Q5Ak0Hdvd3mHZifHQ0DxUzZmor9KcNAN4LnDdxAoSAGm6U9m4Hd4JeBrNyUeS/DXNMeNjJ1bgAreu739VPVRVe7av2UKAqvop8EW6fdn6Z5r/j7R7JU8AVj+qX2iM3CN4jEtyM83Jp02T/BfgD6rqOuDPaXY7P0bzD83uOSbvAOC4JL+kOWTxZ1V1R5LtgXcDPwRWJoHmKrD/PblSF6Q+3/+PAG/sMN/bgL9P8haaE8evbr+4PabZxYQkDZyHhiRp4AwCSRo4g0CSBs4gkKSBMwgkaeAMAk1E27PjZ0fGN06yOsnX1nE9v+otcl3naduvbnuqPDfJNuuy7Vm2dXSSV80x/YVJjtsA2/l2kj+c1nZskr9byzKDf1C7Hskg0KTcBzwjyWbt+POAf5lAHQdV1R40d5++a3RCGuv0f6SqTqqqU+eYvryqNkQ31KfRPAd81BFtu7RODAJN0tnAC9rhIxn5I5ZkqyRntH26X5Rk97Z96/bb++VJTgYysswrklzSdlB2ctuFQFffBX43TX/317ffrFcCOyR5R5JL21reP7K9V7VtV07t3aTpr/7t7fAxSa5r5/lC2/bqtM+WSLJTkvPa6ecl2bFt/3SSE5J8P8lNSQ6fod4vA4cmeXy7zBLgt4ELk3wiyYo0fe+/f4ZlSXLvyPDhST7dDi9Ocnr7+16a5Nnr8B5qnjIINElfAI5o+2PZHbh4ZNr7gcuraneab+pT37LfB1xYVc8ElgNTfzyfDrwMeHbbSdlDwMvXoZZDaXqshKZrglPbbTwN2AXYF9gT2DvJc5LsRnO36u+3exRvnmGdxwHPbH+Ho2eYfmK7nd2BzwMnjEzbluZO2UOBR+xBVNWdwCW0/drQ7A38n/Yu1ndX1VKa9/T3pkK0o/8F/M+q2gd4CeDdzwNgFxOamKq6qv0meyRNl72jDqD5Q0RVfavdE3gy8BzgxW37mUn+rZ3/uTQPJ7m07UJgM7p19HV+koeAq4D3AFsCP6mqi9rpf9C+Lm/HN6cJhj2AL089B6Ltj2a6q4DPJzkDOGOG6ftP/S7AZ4H/MTLtjLbHyusyexfjU4eH/rH9+dq2/aVJjqL5/70tzQNSrpplHdP9Z2DX9j0E2CLJk6rqno7Lax4yCDRpy2m60D4Q2HqkPTPMW9N+jgrwmap65zpu/6DRh/qk6a/+vmnr/WBVnfwbG0uOmaWOUS+gCa4XAn/R7kXMZXR9v5hWw0zOAD6aZC9gs6pamWRnmidp7VNV/9Ye8nnCWrY1Ov1xwP5V1aXLZS0QHhrSpJ0C/GVVXT2t/bu0h3aSHAjcUVV3T2s/BPh37fznAYcn+ffttK3aHijX1znAa5Ns3q53u3Yb59F88956anujC7UnmXdon4z132n2NDaftu7v8+sTvi8HLlyXwqrqXuDbNO/h1PmVLWiC7K52T+KQWRb/1yRPb+t80Uj7uYx0rpZkpmcBaIFxj0ATVVWraI5LT3c88KkkV9E8e+G/tu3vB05LshL4Dk23v1TVdUneA5zb/nH7JfAG4CfrWd+57fmHH7SHS+4FXlFV1yb5APCd9tDS5cCrRxbdCPhcezgrNMfdfzZyyAWah52ckuQdPPoeZE8DvkIbKFV1ZZoHtlwL3AR8b5bljgO+BtwCXMOvQ+oY4OPt+74xTfDOdH5DC4i9j0rSwHloSJIGziCQpIEzCCRp4AwCSRo4g0CSBs4gkKSBMwgkaeD+P+cCjbJYcQxrAAAAAElFTkSuQmCC\
n",
79 | "text/plain": [
80 | ""
81 | ]
82 | },
83 | "metadata": {
84 | "needs_background": "light"
85 | },
86 | "output_type": "display_data"
87 | }
88 | ],
89 | "source": [
90 | "plt.bar(precision_list, fps)\n",
91 | "plt.xlabel('Model Precision Value')\n",
92 | "plt.ylabel('Frames per Second')\n",
93 | "plt.show()"
94 | ]
95 | },
96 | {
97 | "cell_type": "code",
98 | "execution_count": 4,
99 | "metadata": {},
100 | "outputs": [
101 | {
102 | "data": {
103 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXgAAAEGCAYAAABvtY4XAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAToUlEQVR4nO3dfbQtdX3f8fdHrgREkQqHhIKXizGxKsWHXFEWNgoaqwFp0rCsRDSKq3fZJgFFY7FgI41pbBsTSyWtN4niA8UmUSgBibgISEgUuRcUuGAaF6ISbbg8RAGNPH37x8yBzeGec+c8zHn48X6ttdeZPbNnft+9972fM+c3M79JVSFJas/jVroASdI4DHhJapQBL0mNMuAlqVEGvCQ1at1KFzBpn332qQ0bNqx0GZK0ZmzduvW2qpra0bJVFfAbNmxgy5YtK12GJK0ZSb4x2zK7aCSpUQa8JDXKgJekRhnwktQoA16SGmXAS1KjDHhJapQBL0mNMuAlqVGr6kpWSavXhlMuXOkSmnXz+44aZbvuwUtSowx4SWrUqF00SW4G7gIeAO6vqo1jtidJethy9MEfUVW3LUM7kqQJdtFIUqPGDvgCLk6yNcmmkduSJE0Yu4vm8Kr6dpJ9gc8l+WpVXT75gj74NwGsX79+5HIk6bFj1D34qvp2//NW4Fzg0B28ZnNVbayqjVNTO7zrlCRpAUYL+CR7JHnS9DTwCuD6sdqTJD3SmF00Pwqcm2S6nf9VVX82YnuSpAmjBXxV3QQ8Z6ztS5Lm5mmSktQoA16SGmXAS1KjDHhJapQBL0mNMuAlqVEGvCQ1yoCXpEYZ8JLUKANekhplwEtSowx4SWqUAS9JjTLgJalRBrwkNcqAl6RGGfCS1CgDXpIaZcBLUqMMeElqlAEvSY0y4CWpUQa8JDXKgJekRhnwktQoA16SGmXAS1KjDHhJapQBL0mNMuAlqVEGvCQ1avSAT7JLkmuSXDB2W5Kkhy3HHvxJwI3L0I4kacKoAZ/kAOAo4A/GbEeS9Ghj78F/AHgn8ODI7UiSZhgt4JMcDdxaVVt38rpNSbYk2bJ9+/axypGkx5wx9+APB45JcjPwSeDIJJ+Y+aKq2lxVG6tq49TU1IjlSNJjy2gBX1XvqqoDqmoD8Frgz6vq+LHakyQ9kufBS1Kj1i1HI1V1GXDZcrQlSeq4By9JjTLgJalRBrwkNcqAl6RGGfCS1CgDXpIatdOAT/KTSS5Jcn3//JAkp41fmiRpMYbswf8+8C7gPoCqupbuylRJ0io2JOCfUFVfmjHv/jGKkSQtnSEBf1uSHwcKIMmxwHdGrUqStGhDhir4ZWAz8E+S/C3wdcBBwyRpldtpwFfVTcDLk+wBPK6q7hq/LEnSYu004JPsBbwB2ACsSwJAVZ04amWSpEUZ0kXzGeCLwHV46z1JWjOGBPxuVXXy6JVIkpbUkLNoPp7kXyfZL8lTph+jVyZJWpQhe/D3Av8VOJX+VMn+59PGKkqStHhDAv5k4OlVddvYxUiSls6QLpptwPfHLkSStLSG7ME/AHw5yaXAD6dnepqkJK1uQwL+vP4hSVpDhlzJ+tHlKESStLRmDfgkf1RVr0lyHQ+fPfOQqjpk1MokSYsy1x787/Y/j16OQiRJS2uugD8TeH5VfWO5ipEkLZ25TpPMslUhSVpyc+3B75/kjNkWepqkJK1ucwX8D4Cty1WIJGlpzRXwt3uKpCStXXP1wd+7bFVIkpbcrAFfVS9azkIkSUtryGBjC5JktyRfSvKVJNuSnD5WW5KkRxsyFs1C/RA4sqruTvJ44IokF1XVF0dsU5LUm2uogjnv2lRVd+xkeQF3908f3z8eNeSBJGkcc+3Bb6UL5ADrgTv76b2AbwIH7WzjSXbpt/N04MyqunKxBUuShpnrIOtBVfU04LPAq6tqn6ram25smk8P2XhVPVBVzwUOAA5NcvDM1yTZ
lGRLki3bt29f2LuQJD3KkIOsL6iqz0w/qaqLgJfMp5Gq+nvgMuCVO1i2uao2VtXGqamp+WxWkjSHIQF/W5LTkmxIcmCSU4Hbd7ZSkqkke/XTuwMvB766uHIlSUMNCfjjgCngXLo7O+3bz9uZ/YBLk1wLXAV8rqouWGihkqT5GXJHpzuAk+a74aq6FnjeQoqSJC3eTgM+yRTwTuDZwG7T86vqyBHrkiQt0pAumrPp+s4PAk4HbqbrcpEkrWJDAn7vqvpD4L6q+nxVnQA4To0krXJDhiq4r//5nSRHAd+mO69dkrSKDQn49yZ5MvB24L8DewJvG7UqSdKiDTmLZvrUxu8CR4xbjiRpqey0Dz7JAUnOTbI9yd8l+VQSu2gkaZUbcpD1I8D5dBcu7Q/8aT9PkrSKDQn4qar6SFXd3z/OoruyVZK0ig0di+b4JLv0j+MZMBaNJGllDQn4E4DXAP8P+A5wLPCmMYuSJC3ekLNovgkcMzkvyW8D7xirKEnS4i30ptuvWdIqJElLbqEBnyWtQpK05BZy0+1gwEvSqjf0ptsz3TtOOZKkpTJrwFfVQctZiCRpaS20D16StMoZ8JLUKANekhq1kLNogIduxi1JWqUWehZNAU8bpSJJ0pLwLBpJatSQG36kH03y3f3z9UkOHb80SdJiDDnI+nvAYcAv9s/vAs4crSJJ0pIYctPtF1bV85NcA1BVdybZdeS6JEmLNGQP/r4ku9AdWCXJFPDgqFVJkhZtSMCfAZwL7JvkN4ErgP80alWSpEUbcsOPs5NsBV5Gd8rkz1XVjaNXJklalKEXOt0KnDO5zAudJGl1G3qh03rgzn56L+CbgOfJS9IqNmsffFUdVFVPAz4LvLqq9qmqvYGjgU8vV4GSpIUZcpD1BVX1meknVXUR8JKdrZTkqUkuTXJjkm1JTlpMoZKk+RlyHvxtSU4DPkHXZXM8cPuA9e4H3l5VVyd5ErA1yeeq6oaFlytJGmrIHvxxwBTdqZLnAfv28+ZUVd+pqqv76buAG4H9F16qJGk+hpwmeQdwUpI9gQer6u75NpJkA/A84ModLNsEbAJYv379fDctSZrFkMHG/mk/TMF1wLYkW5McPLSBJE8EPgW8taq+N3N5VW2uqo1VtXFqamo+tUuS5jCki+ZDwMlVdWBVHQi8Hdg8ZONJHk8X7mdXlWfeSNIyGhLwe1TVpdNPquoyYI+drZQkwB8CN1bV7yy4QknSggwJ+JuSvDvJhv5xGvD1AesdDrweODLJl/vHzy6qWknSYENOkzwBOJ3u4qYAlwNv2tlKVXUFO77dnyRpGQw5i+ZO4MRlqEWStITmGmzs/LlWrKpjlr4cSdJSmWsP/jDgW3SjSF6J3S2StKbMFfA/BvwM3VWrvwhcCJxTVduWozBJ0uLMNZrkA1X1Z1X1S8CLgK8BlyX51WWrTpK0YHMeZE3yI8BRdHvxG+hu3+cFS5K0Bsx1kPWjwMHARcDpVXX9slUlSVq0ufbgXw/cA/wkcGJ3YSrQHWytqtpz5NokSYswa8BX1ZCrXCVJq5QhLkmNMuAlqVEGvCQ1yoCXpEYZ8JLUKANekhplwEtSowx4SWqUAS9JjTLgJalRBrwkNcqAl6RGGfCS1CgDXpIaZcBLUqMMeElqlAEvSY0y4CWpUQa8JDXKgJekRhnwktQoA16SGjVawCf5cJJbk1w/VhuSpNmNuQd/FvDKEbcvSZrDaAFfVZcDd4y1fUnS3Fa8Dz7JpiRbkmzZvn37SpcjSc1Y8YCvqs1VtbGqNk5NTa10OZLUjBUPeEnSOAx4SWrUmKdJngN8AXhGkluSvHmstiRJj7ZurA1X1XFjbVuStHOjBfxy23DKhStdQrNuft9RK12CpAWwD16SGmXAS1KjDHhJapQBL0mNauYgq9YWD4qPx4PimuYevCQ1yoCXpEYZ8JLUKANekhplwEtSowx4SWqUAS9JjTLgJalRBrwkNcqAl6RGGfCS1CgDXpIaZcBLUqMM
eElqlAEvSY0y4CWpUQa8JDXKgJekRhnwktQoA16SGmXAS1KjDHhJapQBL0mNMuAlqVEGvCQ1atSAT/LKJH+d5GtJThmzLUnSI40W8El2Ac4EXgU8CzguybPGak+S9Ehj7sEfCnytqm6qqnuBTwL/YsT2JEkT1o247f2Bb008vwV44cwXJdkEbOqf3p3kr0esabXYB7htpYsYKv95pStYFdbMd+b39ZDHynd24GwLxgz47GBePWpG1WZg84h1rDpJtlTVxpWuQ8P5na09fmfjdtHcAjx14vkBwLdHbE+SNGHMgL8K+IkkByXZFXgtcP6I7UmSJozWRVNV9yf5FeCzwC7Ah6tq21jtrTGPqS6pRvidrT2P+e8sVY/qFpckNcArWSWpUQa8JDXKgF8iSR5I8uWJx4YkL03y3STXJLkxya/3r907yaVJ7k7ywRnb2TXJ5iT/N8lXk/zCyryj9s3zOzt04nVfSfLz/fwnJLmw/662JXnfyr6rtWG5P/skNyfZp5+uJO+fWPaOJO9JcupEO5P1nZhkff9/9pok1yb52bE/oyVRVT6W4AHcvYN5LwUu6Kf3AP4G+Kl++sXAW4APzljndOC9/fTjgH1W+r21+pjnd/YEYF0/fz/gVrqTFJ4AHNHP3xX4C+BVK/3eVvtjuT974Obp/0vAPwBfn3j+DuA9c9VHd8D23/TTzwJuXunPcMjDPfhlUlX3AFuBH6+qe6rqCrp/aDOdAPxWv86DVbUmrsRr0Yzv7PtVdX+/aDf6i/b6+Zf20/cCV9Nd86FFGPmzv58usN82n5KAPfvpJ7NGrukx4JfO7hN/0p07c2GSvYEXAbOeKppkr37yN5JcneSPk/zoSPVqnt9Zkhcm2QZcB7xlInSmX78X8GrgkvFLX/NW+rM/E3hdkicPfP17gOOT3AJ8BvjVgeutqDGHKnis+UFVPXcH8/9ZkmuAB4H31dzXAqyj2wP5y6o6OcnJwG8Dr1/6csU8v7OquhJ4dpJnAh9NclFV/QNAknXAOcAZVXXTMtW/lq3oZ19V30vyMeBE4AcDVjkOOKuq3p/kMODjSQ6uqgeHtLdSDPjx/UVVHT3wtbcD3wem92j+GHjzKFVpLnN+Z1V1Y5J7gIOBLf3szcDfVNUHlqPAhi36s083VPnWftn5VfUfZtncB+i6dT4yoK43A6/sa/hCkt3oBjO7dcC6K8YumlWkuiM4f0p3sAngZcANK1aQHtIPubGunz4QeAbdgTuSvJeuX/atK1Zgw+b72VfVA1X13P4xW7hTVXcAf8Swnahv0v1/pP8rYjdg+4Le0DJyD36FJLmZ7qDNrkl+DnhFVd0A/Du6P/8+QPcP6E0rV6UmvBg4Jcl9dN0H/7aqbktyAHAq8FXg6iTQnRn1BytXanPG/OzfD/zKgNe9Hfj9JG+jO+D6xn6HbFVzqAJJapRdNJLUKANekhplwEtSowx4SWqUAS9JjTLgteT60fo+PvF8XZLtSS6Y53YeGgFwvq/p51/Xjz54cZIfm0/bs7T1liRvmGP5MUlOWYJ2Lkvyz2fMe2uS39vJOo/pG0zr0Qx4jeEe4OAku/fPfwb42xWo44iqeg7dFY//fnJBOvP6919V/7OqPjbH8vOraimGCz6H7h7Gk17bz5cGM+A1louAo/rp45gIpyRPSXJeP672F5Mc0s/fu9/bvibJh4BMrHN8ki/1g1N9qL8cfajLgaenG3P8xn5P+GrgqUl+LclVfS2nT7T3hn7eV6b/Gkk3Zvg7+ukTk9zQv+aT/bw3ph/fP8mBSS7pl1+SZH0//6wkZyT5qyQ3JTl2B/X+CXB0kh/p19kA/GPgiiT/I8mWdOOfn76DdUly98T0sUnO6qenknyqf79XJTl8Hp+h1iADXmP5JPDafsyOQ4ArJ5adDlxTVYfQ7VlP7xX/OnBFVT0POB+YDsVnAv8KOLwfoOoB4HXzqOVoulEIobvM/WN9G88AfgI4FHgu8FNJfjrJs+mukDyy/wvg
pB1s8xTgef17eMsOln+wb+cQ4GzgjIll+9FdnXk08Kg9/qq6HfgS/dgndHvv/7u/cvLUqtpI95m+ZPqX40D/DfjdqnoB8AuAV9s2zqEKNIqqurbf8zyObnjVSS+mCxiq6s/7PfcnAz8N/Mt+/oVJ7uxf/zK6Gz9c1V+OvjvDBnm6NMkDwLXAacBewDeq6ov98lf0j2v650+kC/znAH8yPRZ/P2bJTNcCZyc5DzhvB8sPm34vwMeB/zKx7Lx+FMIbMvtw0NPdNP+n/3lCP/81STbR/d/dj+7mE9fOso2ZXg48q/8MAfZM8qSqumvg+lpjDHiN6Xy64Y5fCuw9MT87eG3N+DkpwEer6l3zbP+IyRumpBsz/J4Z2/2tqvrQIxpLTpyljklH0f1COgZ4d7/XP5fJ7f1wRg07ch7wO0meD+xeVVcnOYju7kMvqKo7+66X3XbS1uTyxwGHVdWQ4XHVALtoNKYPA/+xqq6bMf9y+i6WJC8Fbquq782Y/yrgH/WvvwQ4Nsm+/bKn9KMKLtZngROSPLHf7v59G5fQ7SnvPd3e5Er9wdmn9ncTeifdXwZPnLHtv+LhA6WvA66YT2FVdTdwGd1nOH38Yk+6X1Df7ff8XzXL6n+X5Jl9nT8/Mf9iJgbWSrKj8djVEPfgNZqquoWu33em9wAfSXIt3fj3v9TPPx04J8nVwOfphmilqm5IchpwcR9a9wG/DHxjkfVd3Pfvf6HvtrgbOL6qtiX5TeDzfRfPNcAbJ1bdBfhE360Uun7tv5/o+oDuRhIfTvJrLHxU0HOAT9P/oqiqr6S7GcY24CbgL2dZ7xTgAuBbwPU8/MvnRODM/nNfR/cLdUfHD9QIR5OUpEbZRSNJjTLgJalRBrwkNcqAl6RGGfCS1CgDXpIaZcBLUqP+Px5GOTGBrHqzAAAAAElFTkSuQmCC\n",
104 | "text/plain": [
105 | ""
106 | ]
107 | },
108 | "metadata": {
109 | "needs_background": "light"
110 | },
111 | "output_type": "display_data"
112 | }
113 | ],
114 | "source": [
115 | "plt.bar(precision_list, model_load_time)\n",
116 | "plt.xlabel('Model Precision Value')\n",
117 | "plt.ylabel('Model Load Time')\n",
118 | "plt.show()"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": null,
124 | "metadata": {},
125 | "outputs": [],
126 | "source": []
127 | }
128 | ],
129 | "metadata": {
130 | "kernelspec": {
131 | "display_name": "Python 3",
132 | "language": "python",
133 | "name": "python3"
134 | },
135 | "language_info": {
136 | "codemirror_mode": {
137 | "name": "ipython",
138 | "version": 3
139 | },
140 | "file_extension": ".py",
141 | "mimetype": "text/x-python",
142 | "name": "python",
143 | "nbconvert_exporter": "python",
144 | "pygments_lexer": "ipython3",
145 | "version": "3.6.5"
146 | }
147 | },
148 | "nbformat": 4,
149 | "nbformat_minor": 2
150 | }
151 |
--------------------------------------------------------------------------------
/src/PrecisionComparision.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Assess Performance:\n",
8 | "\n",
 9 |     "## Get Performance data from files"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "Inference Time : [23.9, 24.7, 24.0]\n",
22 | "fps : [2.468619246861925, 2.388663967611336, 2.4583333333333335]\n",
23 | "Model Load Time : [0.7770581245422363, 0.7230548858642578, 2.766681432723999]\n"
24 | ]
25 | }
26 | ],
27 | "source": [
28 | "import matplotlib.pyplot as plt\n",
29 | "\n",
30 | "precision_list = ['FP16', 'FP32', 'FP32-INT8']\n",
31 | "inference_time = []\n",
32 | "model_load_time = []\n",
33 | "fps = []\n",
34 | "\n",
35 | "for precision in precision_list:\n",
36 | " with open('results/'+precision+'/stats.txt', 'r') as f:\n",
37 | " inference_time.append(float(f.readline().split('\\n')[0]))\n",
38 | " fps.append(float(f.readline().split('\\n')[0]))\n",
39 | " model_load_time.append(float(f.readline().split('\\n')[0]))\n",
40 | "\n",
41 | "print(\"Inference Time :\",inference_time)\n",
42 | "print(\"fps :\",fps)\n",
43 | "print(\"Model Load Time :\",model_load_time)"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 6,
49 | "metadata": {},
50 | "outputs": [
51 | {
52 | "data": {
53 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAEWCAYAAABhffzLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAcS0lEQVR4nO3debxd873/8debEPOQATHEcVNVaS8poVxpG1Wtea4rD0PS+oncVlWpiuFWXO2D2x/VXy/XFaUxVVFTqKLNNTTmhAiRKiUEKQlFhBri8/tjfTfLydnnrHNy1t45Z72fj8d+nLW+a/rstc/5nO/+rrW+X0UEZmZWHcs1OwAzM2ssJ34zs4px4jczqxgnfjOzinHiNzOrGCd+M7OKceK3HkdSSPpUs+NoBmV+Jenvkh5sdjzWMznxW2GS7kwJp2+zY+lNJE2S9OOCq48AdgY2jIhtSwzLejEnfitEUgvwRSCAvZoaTCdI6tPsGLrZxsCciFjU2Q174bmwLnLit6IOA+4HJgGj8wsk7SbpCUkLJb0o6Qep/HFJe+bWW0HSAknDJLWkJpvRkp5P5Sfn1l1e0kmS/pr2O13SRrnDflXSU+kbyHmSlLYbI+keSedIeg2YIGlNSZdKmi/pOUmnSFout/5USWelfT0raddcHOtLmizpNUlPSzoit+wTNXVJIyW9kJs/IZ2PhZKelLRTRye5vfMi6XDgl8D2kt6SdFoq30PSDEmvS7pX0ha5/c1JccwEFknqI2m7tN7rkh6VNDK3/p2STk/ncKGk2yUNyC0fkdt2rqQxqbxvOofPS3pZ0v9IWrmj92tNEhF++dXhC3ga+DawNfA+sG5u2Tzgi2l6bWCrNP1D4KrcensDj6XpFrJvDxcCKwNbAu8Cm6flxwOPAZsBSsv7p2UB3AysBQwG5gO7pGVjgA+A7wJ90r4vBW4EVk/H/QtweG7994EjgOWBfwNeApSW3wX8N7ASMCwda6e0bBLw49z7Gwm8kKY3A+YC6+fe75A65/aj/RQ4L2OAqblttwJeAb6Q4h8NzAH6puVzgBnARml/GwCvAruRVfx2TvMD0/p3An8FPp3WvxM4My0bDCwERgErAP2BYWnZz4HJQL90nm8Czmj2761fdf6emx2AX8v+i6xd+X1gQJr/M/D93PLngSOBNVptt35KFGuk+d8CP0zTtQS3YW79B4GD0vSTwN514glgRG7+amB8mh4DPJ9btnxKnENzZUcCd+bWfzq3bJW0//VSslwMrJ5bfgYwKU1/lLDT/Eg+TvyfSgn5q8AKHZzfj/ZT4LyM4ZOJ/3zg9Fb7exL4cpqeA3wrt+wE4LJW698GjE7TdwKn5JZ9G7g1TZ8IXN9G/AIWkfvHBmwPPNvs312/2n65qceKGA3cHhEL0vyv+WRzz/5kNcjnJN0laXuAiHgJuAfYX9JawK7AFa32/bfc9NvAaml6I7KaZz31toOspl0zAFgReC5X9hxZzXeJfUXE22lyNbJ/XK9FxMJ2tm1TRDwNHANMAF6R9BtJ63e0XVsxseT7y9sYOC41vbwu6XWyc5c/1txW63+j1fojgEEFjl3vMxlI9g9zem6ft6ZyWwb5Yo+1K7XTHggsL6mWEPoCa0naMiIejYiHgL0lrQAcRVYDr7XHXwL8H7Lftfsi4sWCh54LDAEe70LY+S5nF5B9W9kYeCKVDQaKxPES0E/S6rnkn992EVnCq1nvE0FE/Br4taQ1gAuA/wQO7cT7KGIu8JOI+Ek76+TPx1yyGv8R9Vbu4Fht3Um0AHgH+GwnPl9rItf4rSP7kDV3DCVr4x4GbA78CThM0oqSDpa0ZkS8D7yZ1q+5gawd+ntkbe1F/RI4XdKmymwhqX9ng4+IxWT/iH4iaXVJGwPHApcX2HYucC9whqSV0kXTw/n4W8sMYDdJ/SStR1bDB0DSZpK+ouzW13+QJcbFdL8LgXGSvpDO0
6qSdpe0ep31Lwf2lPR1ZRfQV0oXpTcscKwryC6qH5guEveXNCwiPkxxnCNpHQBJG0j6ere8Q+t2TvzWkdHAryLi+Yj4W+0FnAscnNY5FJgj6U1gHHBIbeOIeAe4FtgEuK4Tx/0ZWcK+neyfyUVkFxu74rtktfNngKlkTVUXF9x2FFm7+0vA9cCpEfGHtOwy4FGydvTbgaty2/UFziSrDf8NWAc4qYvx1xUR08guTJ8L/J3sIvyYdtafS3aR/SSyC9VzyS6kd5gLIuJ5sia944DXyP7xbZkWn5COfX/6Pfgj2QVuWwbV7lwwK42kHwGfjohDOlzZzErnNn4rlaR+ZM0j3d22bWZd5KYeK0162Gku8PuIuLvZ8ZhZxk09ZmYV4xq/mVnF9Ig2/gEDBkRLS0uzwzAz61GmT5++ICKWeJCuRyT+lpYWpk2b1uwwzMx6FEnPtVXuph4zs4px4jczqxgnfjOziikt8UvaSNIdkmZLmiXpe6l8QhqcYkZ67VZWDGZmtqQyL+5+ABwXEQ+nDqOmS6r1cXJORJxV4rHNzKyO0hJ/RMwjG5mJiFgoaTYF+jE3M7NyNaSNX9lA3Z8HHkhFR0maKeliSWvX2WaspGmSps2fP78RYZqZVULpiV/SamTd8h4TEW+SDRU3hKxf93nA2W1tFxETI2J4RAwfONAD+ZiZdZdSE38akela4IqIuA4gIl6OiMW5wRvaGtHHzMxKUlobvySRDZ4xOyJ+lisflNr/Afala0PrWS/WMv53zQ6h15pz5u7NDsGWAWXe1bMDWR/sj0makcpOAkZJGkY2Dugc4MgSYzAzs1bKvKtnKqA2Ft1S1jHNzKxjfnLXzKxinPjNzCrGid/MrGJ6RH/8S8N3iJTHd4hYjf/OylPG35lr/GZmFePEb2ZWMU78ZmYV48RvZlYxTvxmZhXjxG9mVjFO/GZmFePEb2ZWMU78ZmYV48RvZlYxTvxmZhXjxG9mVjFO/GZmFePEb2ZWMU78ZmYV48RvZlYxTvxmZhXjxG9mVjFO/GZmFePEb2ZWMU78ZmYV48RvZlYxTvxmZhXjxG9mVjFO/GZmFePEb2ZWMU78ZmYV02Hil7SqpOXS9Kcl7SVphfJDMzOzMhSp8d8NrCRpA2AK8E1gUplBmZlZeYokfkXE28B+wH9FxL7A0A43kjaSdIek2ZJmSfpeKu8n6Q+Snko/1166t2BmZp1RKPFL2h44GPhdKutTYLsPgOMiYnNgO+A7koYC44EpEbEp2TeI8Z0P28zMuqpI4j8GOBG4PiJmSfon4I6ONoqIeRHxcJpeCMwGNgD2Bi5Jq10C7NOVwM3MrGs6rLlHxF3AXbn5Z4CjO3MQSS3A54EHgHUjYl7a1zxJ69TZZiwwFmDw4MGdOZyZmbWjbuKXdBMQ9ZZHxF5FDiBpNeBa4JiIeFNSocAiYiIwEWD48OF14zAzs85pr8Z/Vvq5H7AecHmaHwXMKbLzdNvntcAVEXFdKn5Z0qBU2x8EvNLpqM3MrMvqJv7UxIOk0yPiS7lFN0m6u6MdK6vaXwTMjoif5RZNBkYDZ6afN3YlcDMz65oiF3cHpgu6AEjaBBhYYLsdgEOBr0iakV67kSX8nSU9Beyc5s3MrEGK3Jb5feBOSc+k+RbgyI42ioipQL0G/Z0KRWdmZt2uyF09t0raFPhMKvpzRLxbblhmZlaWIjV+gK3Javp9gC0lERGXlhaVmZmVpsPEL+kyYAgwA1icigNw4jcz64GK1PiHA0MjwvfSm5n1AkXu6nmc7D5+MzPrBYrU+AcAT0h6EPjoom7RJ3fNzGzZUiTxTyg7CDMza5xCnbRJWhfYJhU9GBHuZsHMrIcqMvTigcCDwDeAA4EHJB1QdmBmZlaOIk09JwPb1Gr5kgYCfwR+W2ZgZmZWjiJ39SzXqmnn1YLbmZnZMqhIjf9WSbcBV6b5fwV+X15IZmZWpiIXd4+XtB8wgqzTtYkRcX3pkZmZWSmKdNmwCXBLbSAVS
StLaomIOWUHZ2Zm3a9IW/01wIe5+cWpzMzMeqAiib9PRLxXm0nTK5YXkpmZlalI4p8v6aPuGSTtDSwoLyQzMytTkbt6xgFXSDqPrDvmF4DDSo3KzMxKU+Sunr8C20laDVBELCw/LDMzK0uRLhvWlXQRcE1ELJQ0VNLhDYjNzMxKUKSNfxJwG7B+mv8LcExZAZmZWbmKJP4BEXE16ZbOiPiAj4dgNDOzHqZI4l8kqT/ZhV0kbQe8UWpUZmZWmiJ39RwLTAaGSLoHGAi4W2Yzsx6qyF09D0v6MrAZWV89T0bE+6VHZmZmpShyV883gJUjYhawD3CVpK1Kj8zMzEpRpI3/39NtnCOArwOXAOeXG5aZmZWlSOKv3cGzO3B+RNyI++oxM+uxiiT+FyVdQDbe7i2S+hbczszMlkFFEviBZA9w7RIRrwP9gONLjcrMzEpT5K6et4HrcvPzgHllBmVmZuVxk42ZWcU48ZuZVUxpiV/SxZJekfR4rmyCpBclzUiv3co6vpmZta3IA1z7SXpK0huS3pS0UNKbBfY9CdiljfJzImJYet3S2YDNzGzpFOmr56fAnhExuzM7joi7JbV0JSgzMytPkaaelzub9DtwlKSZqSlo7XorSRoraZqkafPnz+/Gw5uZVVuRxD9N0lWSRqVmn/0k7dfF450PDAGGkd0Sena9FSNiYkQMj4jhAwcO7OLhzMystSJNPWsAbwNfy5UFuXv7i4qIl2vTki4Ebu7sPszMbOkUeYDrm911MEmD0gNgAPsCj7e3vpmZdb+6iV/SDyPip5L+izT6Vl5EHN3ejiVdCYwEBkh6ATgVGClpWNrfHODIroduZmZd0V6Nv3ZBd1pXdhwRo9oovqgr+zIzs+5TN/FHxE3p5yWNC8fMzMrmLhvMzCrGid/MrGKc+M3MKqZIXz2fljSl1tmapC0knVJ+aGZmVoYiNf4LgROB9wEiYiZwUJlBmZlZeYok/lUi4sFWZR+UEYyZmZWvSOJfIGkI6SEuSQfgoRfNzHqsIn31fAeYCHxG0ovAs8AhpUZlZmalKdJXzzPAVyWtCiwXEQvLD8vMzMrSYeKXtBZwGNAC9JEEdNxXj5mZLZuKNPXcAtwPPAZ8WG44ZmZWtiKJf6WIOLb0SMzMrCGK3NVzmaQjJA2S1K/2Kj0yMzMrRZEa/3vA/wVO5uN++QP4p7KCMjOz8hRJ/McCn4qIBWUHY2Zm5SvS1DOLbMxdMzPrBYrU+BcDMyTdAbxbK/TtnGZmPVORxH9DepmZWS9Q5MldD71oZtaL1E38kq6OiAMlPcbHd/N8JCK2KDUyMzMrRXs1/nPSzz0aEYiZmTVGe4n/PGCriHiuUcGYmVn52rudUw2LwszMGqa9Gv8Gkn5Rb6Fv5zQz65naS/zvANMbFYiZmTVGe4n/Vd/KaWbW+7TXxv9ew6IwM7OGqZv4I2K7RgZiZmaNUaSTNjMz60Wc+M3MKqZQ4pc0QtI30/RASZuUG5aZmZWlw8Qv6VTgBODEVLQCcHmZQZmZWXmK1Pj3BfYCFgFExEvA6h1tJOliSa9IejxX1k/SHyQ9lX6u3dXAzcysa4ok/vciIkg9dEpateC+JwG7tCobD0yJiE2BKWnezMwaqEjiv1rSBcBako4A/ghc2NFGEXE38Fqr4r2B2kNhlwD7dCJWMzPrBkUGYjlL0s7Am8BmwI8i4g9dPN66ETEv7XeepHXqrShpLDAWYPDgwV08nJmZtdZh4k938PypluwlrSypJSLmlBlYREwEJgIMHz58iYFgzMysa4o09VwDfJibX5zKuuJlSYMA0s9XurgfMzProiKJv09EfNRvT5pesYvHmwyMTtOjgRu7uB8zM+uiIol/vqS9ajOS9gYWdLSRpCuB+4DNJL0g6XDgTGBnSU8BO6d5MzNroA7b+IFxwBWSziUblWsucFhHG0XEqDqLdioenpmZdbcid/X8FdhO0mqAImJh+WGZmVlZitzV0xfYH2gB+kjZULwR8
R+lRmZmZqUo0tRzI/AG2TCM75YbjpmZla1I4t8wIlp3vWBmZj1Ukbt67pX0z6VHYmZmDVGkxj8CGCPpWbKmHgEREVuUGpmZmZWiSOLftfQozMysYTps6omI54CNgK+k6beLbGdmZssmj8BlZlYxpY3AZWZmy6YyR+AyM7NlUGkjcJmZ2bKp3bt6lPXPcBXwGbpnBC4zM2uydhN/RISkGyJia8DJ3sysFyjS1HO/pG1Kj8TMzBqiyANcOwLjJM0hu7PHT+6amfVgfnLXzKxi/OSumVnF+MldM7OK8ZO7ZmYV4yd3zcwqxk/umplVTN27eiT1jYh3I+IsSTvjJ3fNzHqF9m7nvA/YStJlEXEofnLXzKxXaC/xryhpNPAvkvZrvTAirisvLDMzK0t7iX8ccDCwFrBnq2UBOPGbmfVAdRN/REwFpkqaFhEXNTAmMzMrUYddNkTERZL+BWjJrx8Rl5YYl5mZlaTDxC/pMmAIMANYnIoDcOI3M+uBinTSNhwYmh7iMjOzHq7IA1yPA+uVHYiZmTVGkRr/AOAJSQ8C79YKI2Kv0qIyM7PSFEn8E8oOwszMGqfIXT13dfdB02heC8kuFn8QEcO7+xhmZta29vrqWUjqkbP1IrKhF9dYymPvGBELlnIfZmbWSe09wOU+983MeqFmDaEYwO2Spksa29YKksZKmiZp2vz58xscnplZ79WsxL9DRGxFNpD7dyR9qfUKETExIoZHxPCBAwc2PkIzs16qKYk/Dd9IRLwCXA9s24w4zMyqqOGJX9KqklavTQNfI3tIzMzMGqDIffzdbV3gekm14/86Im5tQhxmZpXU8MQfEc8AWzb6uGZmlmnWxV0zM2sSJ34zs4px4jczqxgnfjOzinHiNzOrGCd+M7OKceI3M6sYJ34zs4px4jczqxgnfjOzinHiNzOrGCd+M7OKceI3M6sYJ34zs4px4jczqxgnfjOzinHiNzOrGCd+M7OKceI3M6sYJ34zs4px4jczqxgnfjOzinHiNzOrGCd+M7OKceI3M6sYJ34zs4px4jczqxgnfjOzinHiNzOrGCd+M7OKceI3M6sYJ34zs4px4jczq5imJH5Ju0h6UtLTksY3IwYzs6pqeOKXtDxwHrArMBQYJWloo+MwM6uqZtT4twWejohnIuI94DfA3k2Iw8yskvo04ZgbAHNz8y8AX2i9kqSxwNg0+5akJxsQ27JgALCg2UEUof9sdgTLhB7zeYE/s6RKn9nGbRU2I/GrjbJYoiBiIjCx/HCWLZKmRcTwZsdhxfjz6nn8mTWnqecFYKPc/IbAS02Iw8yskpqR+B8CNpW0iaQVgYOAyU2Iw8yskhre1BMRH0g6CrgNWB64OCJmNTqOZVjlmrd6OH9ePU/lPzNFLNG8bmZmvZif3DUzqxgnfjOzinHiL5mkxZJm5F4tkkZKekPSI5JmSzo1rdtf0h2S3pJ0bqv9rChpoqS/SPqzpP2b8456v05+Ztvm1ntU0r6pfBVJv0uf1SxJZzb3XfUcjT7/kuZIGpCmQ9LZuWU/kDRB0sm54+TjO1rS4PR3+4ikmZJ2K/scLbWI8KvEF/BWG2UjgZvT9KrAU8DWaXoEMA44t9U2pwE/TtPLAQOa/d5666uTn9kqQJ9UPgh4heymiVWAHVP5isCfgF2b/d56wqvR5x+YU/t7Av4BPJub/wEwob34yC4W/1uaHgrMafY57OjlGn+TRcQiYDowJCIWRcRUsl++1r4FnJG2+TAiesyTh71Nq8/s7Yj4IC1aifQwYiq/I02/BzxM9syKLaWSz/8HZIn8+50JCVgjTa9JD3guyYm/fCvnvhZe33qhpP7AdkDdW1olrZUmT5f0sKRrJK1bUrzWyc9M0hckzQIeA8blElFt/bWAPYEp5YfeKzT7/J8HHCxpzYLrTwAOkfQCcAvw3YLbNU0zumyomnciYlgb5V+U9AjwIXBmtP8sQx+y2so9EXGspGOBs4BDuz9co5OfWUQ8AHxW0
ubAJZJ+HxH/AJDUB7gS+EVEPNOg+Hu6pp7/iHhT0qXA0cA7BTYZBUyKiLMlbQ9cJulzEfFhkeM1gxN/8/wpIvYouO6rwNtArfZzDXB4KVFZe9r9zCJitqRFwOeAaal4IvBURPy8EQH2ckt9/pV1Cz89LZscET+qs7ufkzUP/apAXIcDu6QY7pO0EllHcK8U2LYp3NTTA0R21egmsgtcADsBTzQtIPtI6nqkT5reGNiM7GIhkn5M1uZ7TNMC7OU6e/4jYnFEDEuvekmfiHgNuJpiFaznyf4mSd86VgLmd+kNNYhr/MsYSXPILhStKGkf4GsR8QRwAtlXyJ+T/VJ9s3lRWs4IYLyk98maIL4dEQskbQicDPwZeFgSZHdq/bJ5ofZKZZ7/s4GjCqx3HHChpO+TXegdkypryyx32WBmVjFu6jEzqxgnfjOzinHiNzOrGCd+M7OKceI3M6sYJ35rmNTz4WW5+T6S5ku6uZP7+ag3xc6uk8ofSz053i5pvc4cu86xxkk6rJ3le0ka3w3HuVPS11uVHSPpvzvYptIDi9uSnPitkRYBn5O0cprfGXixCXHsGBFbkj3deVJ+gTKd+ruIiP+JiEvbWT45IrqjW+YrycaozjsolZsV5sRvjfZ7YPc0PYpc0pLUT9INqU/z+yVtkcr7p9r5I5IuAJTb5hBJD6YOvS5Ij+QXdTfwKWX9vc9ONeeHgY0kHS/poRTLabnjHZbKHq19e1HWX/sP0vTRkp5I6/wmlY1RGl9B0saSpqTlUyQNTuWTJP1C0r2SnpF0QBvx/hbYQ1LftE0LsD4wVdL5kqYp63v+tDa2RdJbuekDJE1K0wMlXZve70OSdujEObQeyInfGu03wEGpP5MtgAdyy04DHomILchq4rVa9KnA1Ij4PDAZqCXLzYF/BXZInXotBg7uRCx7kPXoCNmj/pemY2wGbApsCwwDtpb0JUmfJXsa9CvpG8P32tjneODz6T2Ma2P5uek4WwBXAL/ILRtE9iTqHsAS3xAi4lXgQVK/MGS1/avSU6InR8RwsnP65do/zYL+H3BORGwD7A/46eJezl02WENFxMxUUx1F1oVt3giyxENE/G+q6a8JfAnYL5X/TtLf0/o7kQ3G8VB6JH9linWMdYekxcBM4BRgLeC5iLg/Lf9aej2S5lcj+0ewJfDb2lgIqT+X1mYCV0i6AbihjeXb194LcBnw09yyG1KPjk+ofrfbteaeG9PPb6XyAyWNJfubHkQ2IMjMOvto7avA0HQOAdaQtHpELCy4vfUwTvzWDJPJupUeCfTPlauNdaPVzzwBl0TEiZ08/o75gWyU9de+qNV+z4iICz5xMOnoOnHk7U72j2ov4N/Tt4T25Pf3bqsY2nID8DNJWwErR8TDkjYhGylqm4j4e2rCWamDY+WXLwdsHxFFuiC2XsBNPdYMFwP/ERGPtSq/m9RUI2kksCAi3mxVviuwdlp/CnCApHXSsn6ph8aldRvwLUmrpf1ukI4xhaxm3b92vPxG6aLwRmnkpx+SfZNYrdW+7+XjC7QHA1M7E1hEvAXcSXYOa9dH1iD7x/VG+qawa53NX5a0eYpz31z57eQ6I5PUVl/41ou4xm8NFxEvkLUrtzYB+JWkmWTjD4xO5acBV0p6GLiLrBtcIuIJSacAt6dk9j7wHeC5pYzv9nT94L7U/PEWcEhEzJL0E+Cu1FT0CDAmt+nywOWpeUpk7eav55pQIBvc42JJx9P1XlavBK4j/QOJiEeVDVAyC3gGuKfOduOBm4G5wON8/E/paOC8dN77kP2jbev6hPUS7p3TzKxi3NRjZlYxTvxmZhXjxG9mVjFO/GZmFePEb2ZWMU78ZmYV48RvZlYx/x+ck7CjKmfxpwAAAABJRU5ErkJggg==\n",
54 | "text/plain": [
55 | ""
56 | ]
57 | },
58 | "metadata": {
59 | "needs_background": "light"
60 | },
61 | "output_type": "display_data"
62 | }
63 | ],
64 | "source": [
65 | "plt.bar(precision_list, inference_time)\n",
66 | "plt.xlabel('Model Precision Value')\n",
67 | "plt.ylabel('Inference Time in seconds')\n",
68 | "plt.title('Asynchronous Inference')\n",
69 | "plt.show()"
70 | ]
71 | },
72 | {
73 | "cell_type": "code",
74 | "execution_count": 7,
75 | "metadata": {},
76 | "outputs": [
77 | {
78 | "data": {
79 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAcp0lEQVR4nO3debQdVZn+8e9DEiDKkM6gTBloTNOAi0QIUxs1iLRMklYQoUGI0saggsggYzM0ssDfcvrRoQlRIQQQHMAYNUzSBAjIkISQkCAaMZAYlAQkEwgkvP1H7QvFybn31k1uncO99XzWOutW1d5V9Z46yXlP7V21SxGBmZlV1ybNDsDMzJrLicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAisS5MUkt7X7DiaQZlrJf1N0iPNjse6LicC2yCSpqcvoM2aHUt3ImmSpG8UrD4SOBDYISL2LjEs6+acCKzDJA0BPgQEcHhTg+kAST2bHUMnGwwsiog1HV2xGx4L2whOBLYhjgceAiYBJ+QLJB0iaYGkVZL+LOmMtPwJSZ/I1eslabmk4ZKGpCaeEyQ9m5afl6vbQ9K5kv6YtjtL0sDcbj8m6Q/pDOVKSUrrjZH0gKTvSnoRuEjS1pImS1om6RlJ50vaJFd/hqRvpW39SdLBuTi2kzRV0ouSFkr6Qq7sbb/kJY2StCQ3f1Y6HqskPSXpgPYOclvHRdKJwA+A/SStlnRxWn6YpDmSXpL0oKTdc9tblOKYC6yR1FPSvqneS5IelzQqV3+6pEvSMVwl6U5J/XPlI3PrLpY0Ji3fLB3DZyX9VdIESb3be7/WRBHhl18degELgS8BewKvA+/NlT0HfChN/wOwR5r+OvDjXL3RwLw0PYTs7OL7QG9gGPAqsEsqPxOYB+wMKJX3S2UB/AroAwwClgEHpbIxwFrgZKBn2vZk4BfAlmm/vwdOzNV/HfgC0AM4CVgKKJXfC/wPsDkwPO3rgFQ2CfhG7v2NApak6Z2BxcB2ufe7UyvH9s3tFDguY4AZuXX3AJ4H9knxnwAsAjZL5YuAOcDAtL3tgReAQ8h+FB6Y5gek+tOBPwL/lOpPBy5PZYOAVcAxQC+gHzA8lX0PmAr0Tcf5l8Blzf5361cb/6ebHYBfXetF1i79OtA/zf8O+Fqu/Fngi8BWNettl744tkrzPwO+nqZbvvB2yNV/BDg6TT8FjG4lngBG5uZ/ApydpscAz+bKeqQv0l1zy74ITM/VX5gre1fa/jbpy3MdsGWu/DJgUpp+8ws8zY/irUTwvvQF/TGgVzvH983tFDguY3h7IrgKuKRme08BH0nTi4DP58rOAq6vqX8HcEKang6cnyv7EnB7mj4H+Hmd+AWsIZfogP2APzX7365frb/cNGQddQJwZ0QsT/M/4u3NQ0eQ/cJ8RtK9kvYDiIilwAPAEZL6AAcDN9Zs+y+56ZeBLdL0QLJfpq1pbT3Ifom36A9sCjyTW/YM2S/j9bYVES+nyS3IEtmLEbGqjXXrioiFwKnARcDzkm6WtF1769WLifXfX95g4PTUVPOSpJfIjl1+X4tr6n+6pv5IYNsC+27tMxlAlkBn5bZ5e1pu71DuMLLCUjvvUUAPSS1fEJsBfSQNi4jHI+JRYLSkXsBXyH6ht7TnXwf8B9m/u99GxJ8L7noxsBPwxAaEnR9edznZ2cxgYEFaNggoEsdSoK+kLXPJIL/uGrIvwBbbvC2IiB8BP5K0FXA18E3gsx14H0UsBi6NiEvbqJM/HovJzgi+0FrldvZV70ql5cArwG4d+HytyXxGYB3xb2TNI7uStZEPB3YB7geOl7SppGMlbR0RrwMrU/0WU8jasb9K1lZf1A+ASyQNVWZ3Sf06GnxErCNLTJdK2lLSYOA04IYC6y4GHgQuk7R56oQ9kbfOauYAh0jqK2kbsjMAACTtLOmjyi61/TvZF+U6Ot/3gXGS9
knH6d2SDpW0ZSv1bwA+IenjyjrkN0+d3DsU2NeNZJ30R6VO536ShkfEGymO70p6D4Ck7SV9vFPeoZXCicA64gTg2oh4NiL+0vICxgPHpjqfBRZJWgmMA45rWTkiXgFuAXYEbu3Afr9D9gV+J1ly+SFZ5+WGOJns1/vTwAyypq1rCq57DFm7/VLg58CFEXFXKrseeJysHf5O4Me59TYDLif7tfwX4D3AuRsYf6siYiZZR/d44G9knfpj2qi/mKzT/lyyju/FZB3z7X4vRMSzZE2ApwMvkiXCYan4rLTvh9K/g9+QdZjbO1TL1RBmDSHpAuCfIuK4diubWUO4j8AaRlJfsuaUzm4bN7ON4KYha4h089Vi4LaIuK/Z8ZjZW9w0ZGZWcT4jMDOruC7XR9C/f/8YMmRIs8MwM+tSZs2atTwi6t7Y1+USwZAhQ5g5c2azwzAz61IkPdNamZuGzMwqzonAzKzinAjMzCqutEQgaaCkeyQ9KWm+pK/WqTNK0or0II056a5TMzNroDI7i9cCp0fE7DTo1SxJd0XEgpp690fEYSXGYWZmbSjtjCAinouI2Wl6FfAkBcZuNzOzxmpIH4Gyh51/AHi4TvF+6Vmpt0narZX1x0qaKWnmsmXLSozUzKx6Sk8EkrYgG3r41IhYWVM8GxgcEcOA/yYbr349ETExIkZExIgBA/ygIzOzzlRqIkhPqboFuDEi1ht/PiJWRsTqND0N6CWpf5kxmZnZ25XWWSxJZA8QeTIivtNKnW2Av0ZESNqbLDG9UFZMQ87+dVmbrrxFlx/a7BDMbAOVedXQB8nGnZ8naU5adi7Zc16JiAnAkcBJktaSPb7v6PBwqGZdmn9wlaesH1ylJYKImAGonTrjyR6rZ2ZmTeI7i83MKs6JwMys4pwIzMwqrss9j8CqxR2P5fGVXtbCZwRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYVV1oikDRQ0j2SnpQ0X9JX69SRpCskLZQ0V9IeZcVjZmb19Sxx22uB0yNitqQtgVmS7oqIBbk6BwND02sf4Kr018zMGqS0M4KIeC4iZqfpVcCTwPY11UYDkyPzENBH0rZlxWRmZutrSB+BpCHAB4CHa4q2Bxbn5pewfrJA0lhJMyXNXLZsWVlhmplVUumJQNIWwC3AqRGxsra4ziqx3oKIiRExIiJGDBgwoIwwzcwqq9REIKkXWRK4MSJurVNlCTAwN78DsLTMmMzM7O3KvGpIwA+BJyPiO61Umwocn64e2hdYERHPlRWTmZmtr8yrhj4IfBaYJ2lOWnYuMAggIiYA04BDgIXAy8DnSozHzMzqKC0RRMQM6vcB5OsE8OWyYjAzs/b5zmIzs4pzIjAzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIzs4pzIjAzq7hWbyiT9EvqDADXIiIOLyUiMzNrqLbuLP5W+vspYBvghjR/DLCoxJjMzKyBWk0EEXEvgKRLIuLDuaJfSrqv9MjMzKwhivQRDJD0jy0zknYE/FAAM7Nuosigc18Dpkt6Os0PAb5YWkRmZtZQ7SaCiLhd0lDgn9Oi30XEq+WGZWZmjVJ0GOo9yc4EegLDJBERk0uLyszMGqbdRCDpemAnYA6wLi0OwInAzKwbKHJGMALYNT1ExszMupkiVw09QXYfgZmZdUNFzgj6AwskPQK82UnsO4vNzLqHIongorKDMDOz5ily+ei9kt4L7JUWPRIRz5cblpmZNUq7fQSSjgIeAT4NHAU8LOnIsgMzM7PGKNI0dB6wV8tZgKQBwG+An5UZmJmZNUaRq4Y2qWkKeqHgemZm1gUUO
SO4XdIdwE1p/jPAbeWFZGZmjVSks/hMSZ8CRgICJkbEz0uPzMzMGqLIEBM7AtMi4tY031vSkIhYVHZwZmZWviJt/T8F3sjNr0vLzMysGyiSCHpGxGstM2l60/JCMjOzRiqSCJZJenM4CUmjgeXlhWRmZo1U5KqhccCNkq4kG356CXB8qVGZmVnDFLlq6I/AvpK2ABQRq8oPy8zMGqXIEBPvlfRD4KcRsUrSrpJObEBsZmbWAEX6CCYBdwDbpfnfA6e2t5KkayQ9L+mJVspHSVohaU56XVA0aDMz6zxFEkH/iPgJ6RLSiFjLW4+sbMsk4KB26twfEcPT678KbNPMzDpZkUSwRlI/so5iJO0LrGhvpYi4D3hx48IzM7OyFblq6DRgKrCTpAeAAUBnDUO9n6THgaXAGRExv14lSWOBsQCDBg3qpF2bmRkUu2potqSPADuTjTX0VES83gn7ng0MjojVkg4BpgBDW4lhIjARYMSIEdEJ+zYzs6TVpiFJe0naBt7sF9gTuBT4tqS+G7vjiFgZEavT9DSgl6T+G7tdMzPrmLb6CK4GXgOQ9GHgcmAyWf/AxI3dsaRtJClN751ieWFjt2tmZh3TVtNQj4ho6ez9DNnw07cAt0ia096GJd0EjAL6S1oCXAj0AoiICWT9DCdJWgu8AhwdEW72MTNrsDYTgaSeqVnoAFJnbYH1AIiIY9opHw+MLxSlmZmVpq0v9JuAeyUtJ/vFfj+ApPdR4PJRMzPrGlpNBBFxqaS7gW2BO3PNNpsAJzciODMzK1+bTTwR8VCdZb8vLxwzM2u0IncWm5lZN+ZEYGZWcW0mAkk9JP2mUcGYmVnjtZkIImId8LKkrRsUj5mZNViRQef+DsyTdBewpmVhRJxSWlRmZtYwRRLBr9PLzMy6oSJ3CF8nqTcwKCKeakBMZmbWQEWeWfwJYA5we5ofLmlq2YGZmVljFLl89CJgb+AlgIiYA+xYYkxmZtZARRLB2oioHVvIo4SamXUTRTqLn5D072SjkQ4FTgEeLDcsMzNrlCJnBCcDuwGvko1IuhI4tcygzMyscYpcNfQycJ6kb2azsar8sMzMrFGKXDW0l6R5wFyyG8sel7Rn+aGZmVkjFOkj+CHwpYhoeTDNSOBaYPcyAzMzs8Yo0kewqiUJAETEDMDNQ2Zm3USRM4JHJF1N1lEcZA+yny5pD4CImF1ifGZmVrIiiWB4+nthzfJ/IUsMH+3UiMzMrKGKXDW0fyMCMTOz5vATyszMKs6JwMys4pwIzMwqrsgNZZ+WtGWaPl/SrS1XDJmZWddX5IzgPyNiVbqR7OPAdcBV5YZlZmaNUiQRrEt/DwWuiohfAJuWF5KZmTVSkUTw53RD2VHANEmbFVzPzMy6gCJf6EcBdwAHRcRLQF/gzFKjMjOzhmk3EaRhqJ8HRqZFa4E/lBmUmZk1TpGrhi4EzgLOSYt6ATeUGZSZmTVOkaahTwKHA2sAImIpsGWZQZmZWeMUSQSvRUSQHlgv6d1FNizpGknPS3qilXJJukLSQklzfW+CmVlzFEkEP0lXDfWR9AXgN8D3C6w3CTiojfKDgaHpNRbfm2Bm1hRFRh/9lqQDyR5avzNwQUTcVWC9+yQNaaPKaGByOtt4SFIfSdtGxHPFQjczs85Q5HkERMRdkh5uqS+pb0S8uJH73h5YnJtfkpatlwgkjSU7a2DQoEEbuVszM8srctXQFyX9lezh9TOBWenvxlKdZVGvYkRMjIgRETFiwIABnbBrMzNrUeSM4Axgt4hY3sn7XgIMzM3vACzt5H2YmVk7inQW/xF4uYR9TwWOT1cP7QuscP+AmVnjFTkjOAd4MPURvNqyMCJOaWslSTcBo4D+kpaQPfO4V1p3AjANOARYSJZoPrcB8ZuZ2UYqkgiuBv4XmAe8UXTDEXFMO+UBfLno9szMrBxFEsHaiDit9EjMzKwpivQR3CNprKRtJfVteZUemZmZNUSRM4J/T3/PyS0L4B87PxwzM
2u0IncW79iIQMzMrDkK3Vks6f3ArsDmLcsiYnJZQZmZWeO0mwjS8whGkSWCaWSDxc0AnAjMzLqBIp3FRwIHAH+JiM8Bw4DNSo3KzMwapkgieCUi3gDWStqK7LGV7ig2M+smivQRzJTUh+wZBLOA1cAjpUZlZmYN02YikCTgsoh4CZgg6XZgq4iY25DozMysdG02DaVhIKbk5hc5CZiZdS9F+ggekrRX6ZGYmVlTFOkj2B8YJ2kRsIbsgTIREbuXGZiZmTVGq4lA0qCIeJbsvgEzM+um2jojmALsERHPSLolIo5oVFBmZtY4bfUR5J8p7PsGzMy6qbYSQbQybWZm3UhbTUPDJK0kOzPonabhrc7irUqPzszMStdqIoiIHo0MxMzMmqPIfQRmZtaNORGYmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVnBOBmVnFORGYmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVnBOBmVnFORGYmVVcqYlA0kGSnpK0UNLZdcpHSVohaU56XVBmPGZmtr62HkyzUST1AK4EDgSWAI9KmhoRC2qq3h8Rh5UVh5mZta3MM4K9gYUR8XREvAbcDIwucX9mZrYBykwE2wOLc/NL0rJa+0l6XNJtknartyFJYyXNlDRz2bJlZcRqZlZZZSYC1VkWNfOzgcERMQz4b2BKvQ1FxMSIGBERIwYMGNDJYZqZVVuZiWAJMDA3vwOwNF8hIlZGxOo0PQ3oJal/iTGZmVmNMhPBo8BQSTtK2hQ4GpiaryBpG0lK03uneF4oMSYzM6tR2lVDEbFW0leAO4AewDURMV/SuFQ+ATgSOEnSWuAV4OiIqG0+MjOzEpWWCODN5p5pNcsm5KbHA+PLjMHMzNrmO4vNzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKq7URCDpIElPSVoo6ew65ZJ0RSqfK2mPMuMxM7P1lZYIJPUArgQOBnYFjpG0a021g4Gh6TUWuKqseMzMrL4yzwj2BhZGxNMR8RpwMzC6ps5oYHJkHgL6SNq2xJjMzKxGzxK3vT2wODe/BNinQJ3tgefylSSNJTtjAFgt6anODfUdqz+wvNlBFKFvNjuCdwx/Zl1Ll/m8YKM/s8GtFZSZCFRnWWxAHSJiIjCxM4LqSiTNjIgRzY7DivNn1rX488qU2TS0BBiYm98BWLoBdczMrERlJoJHgaGSdpS0KXA0MLWmzlTg+HT10L7Aioh4rnZDZmZWntKahiJiraSvAHcAPYBrImK+pHGpfAIwDTgEWAi8DHyurHi6qMo1h3UD/sy6Fn9egCLWa5I3M7MK8Z3FZmYV50RgZlZxTgQNJmmdpDm51xBJoyStkPSYpCclXZjq9pN0j6TVksbXbGdTSRMl/V7S7yQd0Zx31L118PPaO1fvcUmfTMvfJenX6XOaL+ny5r6rrqPRx1/SIkn903RI+nau7AxJF0k6L7effHynSBqU/s8+lobNOaTsY9QpIsKvBr6A1XWWjQJ+labfDfwB2DNNjwTGAeNr1rkY+Eaa3gTo3+z31h1fHfy83gX0TMu3BZ4nuyDjXcD+afmmwP3Awc1+b13h1ejjDyxq+b8E/B34U27+DOCituIj63w+KU3vCixq9jEs8vIZwTtMRKwBZgE7RcSaiJhB9g+y1ueBy9I6b0REl7k7sjup+bxejoi1qWhz0s2Rafk9afo1YDbZPTO2kUo+/mvJvti/1pGQgK3S9NZ0kfuinAgar3fuVPLntYWS+gH7AvNb24CkPmnyEkmzJf1U0ntLirfqOvR5SdpH0nxgH
jAu98XUUr8P8Ang7vJD7xaaffyvBI6VtHXB+hcBx0laQnZ5/MkF12uqMoeYsPpeiYjhdZZ/SNJjwBvA5RHRaiIg+9x2AB6IiNMknQZ8C/hs54dbeR36vCLiYWA3SbsA10m6LSL+DiCpJ3ATcEVEPN2g+Lu6ph7/iFgpaTJwCvBKgVWOASZFxLcl7QdcL+n9EfFGkf01ixPBO8f9EXFYwbovkN2A1/IL6afAiaVEZa1p8/OKiCclrQHeD8xMiycCf4iI7zUiwG5uo4+/sqHyZ6WyqRFxQSub+x5Zc9K1BeI6ETgoxfBbSZuTDWz3fIF1m8ZNQ11QZD1RvyTrNAM4AFjQtIAMAGXDqfRM04OBnck6H5H0DbI241ObFmA319HjHxHrImJ4erWWBIiIF4GfUOzH1rNk/x9JZyWbA8s26A01kM8I3uEkLSLrfNpU0r8B/xoRC4CzyE47v0f2D83DczTfSOBsSa+TNVl8KSKWS9oBOA/4HTBbEmRXgf2geaF2S2Ue/28DXylQ73Tg+5K+RtZxPCb9cHtH8xATZmYV56YhM7OKcyIwM6s4JwIzs4pzIjAzqzgnAjOzinMisKZIIzten5vvKWmZpF91cDtvjhbZ0Tpp+bw0UuWdkrbpyL5b2dc4Sce3UX64pLM7YT/TJX28Ztmpkv6nnXUq/6B2W58TgTXLGuD9knqn+QOBPzchjv0jYhjZ3afn5guU6dD/kYiYEBGT2yifGhGdMQz1TWTPAc87Oi036xAnAmum24BD0/Qx5L7EJPWVNCWN6f6QpN3T8n7p1/tjkq4GlFvnOEmPpAHKrk5DCBR1H/A+ZePdP5l+Wc8GBko6U9KjKZaLc/s7Pi17vOXsRtl49Wek6VMkLUh1bk7Lxig9W0LSYEl3p/K7JQ1KyydJukLSg5KelnRknXh/BhwmabO0zhBgO2CGpKskzVQ29v7FddZF0urc9JGSJqXpAZJuSe/3UUkf7MAxtC7KicCa6Wbg6DQey+7Aw7myi4HHImJ3sl/qLb+yLwRmRMQHgKlAy5fnLsBngA+mQcrWAcd2IJbDyEashGxogslpHzsDQ4G9geHAnpI+LGk3srtVP5rOKL5aZ5tnAx9I72FcnfLxaT+7AzcCV+TKtiW7U/YwYL0ziIh4AXiENK4N2dnAj9NdrOdFxAiyY/qRliRa0P8HvhsRewFHAL77uQI8xIQ1TUTMTb9kjyEbsjdvJNkXERHxv+lMYGvgw8Cn0vJfS/pbqn8A2cNJHk1DCPSm2EBf90haB8wFzgf6AM9ExEOp/F/T67E0vwVZYhgG/KzlORBpPJpac4EbJU0BptQp36/lvQDXA/8vVzYljVi5QK0PMd7SPPSL9PfzaflRksaS/f/eluwBKXNb2UatjwG7pmMIsJWkLSNiVcH1rQtyIrBmm0o2hPYooF9uuerUjZq/eQKui4hzOrj//fMP9VE2Xv2amu1eFhFXv21n0imtxJF3KFniOhz4z3QW0Zb89l6tiaGeKcB3JO0B9I6I2ZJ2JHuS1l4R8bfU5LN5O/vKl28C7BcRRYZctm7CTUPWbNcA/xUR82qW30dq2pE0ClgeEStrlh8M/EOqfzdwpKT3pLK+aQTKjXUH8HlJW6Ttbp/2cTfZL+9+LfvLr5Q6mQemJ2N9nexMY4uabT/IWx2+xwIzOhJYRKwGppMdw5b+la3IEtmKdCZxcCur/1XSLinOT+aW30lucDVJ9Z4FYN2MzwisqSJiCVm7dK2LgGslzSV79sIJafnFwE2SZgP3kg37S0QskHQ+cGf6cnsd+DLwzEbGd2fqf/htai5ZDRwXEfMlXQrcm5qWHgPG5FbtAdyQmrNE1u7+Uq7JBbKHnVwj6Uw2fATZm4BbSQklIh5X9sCW+cDTwAOtrHc28CtgMfAEbyWpU4Ar03HvSZZ46/VvWDfi0UfNzCrOTUNmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhX3f5fv94QLrJ13A
AAAAElFTkSuQmCC\n",
80 | "text/plain": [
81 | ""
82 | ]
83 | },
84 | "metadata": {
85 | "needs_background": "light"
86 | },
87 | "output_type": "display_data"
88 | }
89 | ],
90 | "source": [
91 | "plt.bar(precision_list, fps)\n",
92 | "plt.xlabel('Model Precision Value')\n",
93 | "plt.ylabel('Frames per Second')\n",
94 | "plt.title('Asynchronous Inference')\n",
95 | "plt.show()"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 8,
101 | "metadata": {},
102 | "outputs": [
103 | {
104 | "data": {
105 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAbq0lEQVR4nO3de7gcVZnv8e+PJNwvGZLNALmiIofLAcQI5BHHKDJyCeCMDAeGABGOGVAEBNQIKKBwYM7xNhgkRIVwE3QUYgaC4OEQICqXJEBCEhkzEJIISsI9ASWB9/xRa0PR6d279s6u7uxdv8/z1JPqWquq3q7e6bdrrapVigjMzKy6Nmp1AGZm1lpOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGC9mqSQ9L5Wx9EKylwj6UVJD7U6Huu9nAisWyTNTF9Am7Q6lr5E0lRJFxesfgBwEDA0IvYtMSzr45wIrMskjQQ+AgRwREuD6QJJ/VsdQw8bASyJiNVdXbEPHgtbD04E1h0nAA8AU4ET8wWSDpW0UNKrkv4o6Zy0/HFJh+fqDZC0UtLekkamJp4TJS1Ny8/L1e0n6VxJ/5W2O0fSsNxuPyHpD+kM5QpJSuuNl/QbSd+V9AJwoaRtJF0naYWkpyWdL2mjXP1Zkr6VtvWUpENycewoabqkFyQtlvTZXNm7fslLGiNpee71V9LxeFXSE5IO7OwgNzoukk4GfgSMlrRK0kVp+VhJj0p6SdJvJe2Z296SFMc8YLWk/pL2T/VekvSYpDG5+jMlfTMdw1cl3SVpcK78gNy6yySNT8s3ScdwqaQ/S5osabPO3q+1UER48tSlCVgMfA74ILAG+Ntc2bPAR9L83wD7pPkvAz/N1TsSmJ/mR5KdXfwQ2AzYC/grsGsq/xIwH9gFUCoflMoCuA0YCAwHVgAHp7LxwFrgC0D/tO3rgF8CW6X9/idwcq7+GuCzQD/gVOAZQKn8XuAHwKbA3mlfB6ayqcDFufc3Blie5ncBlgE75t7vezs4tm9vp8BxGQ/Myq27D/AcsF+K/0RgCbBJKl8CPAoMS9sbAjwPHEr2o/Cg9Lot1Z8J/Bfw/lR/JnBZKhsOvAocCwwABgF7p7LvAdOBbdNx/g/g0lb/3Xpq8H+61QF46l0TWbv0GmBwev174Iu58qXAvwBb16y3Y/ri2Dq9/jnw5TTf/oU3NFf/IeCYNP8EcGQH8QRwQO71z4CJaX48sDRX1i99ke6WW/YvwMxc/cW5ss3T9rdPX55vAlvlyi8Fpqb5t7/A0+sxvJMI3pe+oD8BDOjk+L69nQLHZTzvTgRXAt+s2d4TwEfT/BLgpFzZV4Dra+rfCZyY5mcC5+fKPgf8Ks1/Fbi1TvwCVpNLdMBo4KlW/+166nhy05B11YnAXRGxMr3+Ce9uHvo02S/MpyXdK2k0QEQ8A/wG+LSkgcAhwI012/5Tbv41YMs0P4zsl2lHOloPsl/i7QYDGwNP55Y9TfbLeJ1tRcRraXZLskT2QkS82mDduiJiMXAmcCHwnKSbJe3Y2Xr1YmLd95c3Ajg7NdW8JOklsmOX39eymvr/VFP/AGCHAvvu6DNpI0ugc3Lb/FVabhsodxhZYamd92ign6T2L4hNgIGS9oqIxyLiYeBISQOA08h+obe3518L/E+yv7vfRcQfC+56GfBe4PFuhJ0fXncl2dnMCGBhWjYcKBLHM8C2krbKJYP8uqvJvgDbbf+uICJ+AvxE0tbAVcC/Asd34X0UsQy4JCIuaVAnfzyWkZ0RfLajyp3sq96VSiuB14Hdu/D5Wov5jMC64lNkzSO7kbWR7w3sCtwPnCBpY0nHSdomItYAr6T67aaRtWOfQdZWX9SPgG9K2lmZPSUN6mrwEfEmWWK6RNJWkkYAZwE3FFh3GfBb4FJJm6ZO2JN556zmUeBQSdtK2p7sDAAASbtI+riyS23/QvZF+SY9
74fAKZL2S8dpC0mHSdqqg/o3AIdL+qSyDvlNUyf30AL7upGsk/7o1Ok8SNLeEfFWiuO7krYDkDRE0id75B1aKZwIrCtOBK6JiKUR8af2CZgEHJfqHA8skfQKcAowrn3liHgd+AWwE3BLF/b7HbIv8LvIksuPyTovu+MLZL/enwRmkTVtXV1w3WPJ2u2fAW4FLoiIX6ey64HHyNrh7wJ+mltvE+Aysl/LfwK2A87tZvwdiojZZB3dk4AXyTr1xzeov4ys0/5cso7vZWQd851+L0TEUrImwLOBF8gS4V6p+Ctp3w+kv4P/S9Zhbhuo9qshzJpC0teB90fEuE4rm1lTuI/AmkbStmTNKT3dNm5m68FNQ9YU6earZcAdEXFfq+Mxs3e4acjMrOJ8RmBmVnG9ro9g8ODBMXLkyFaHYWbWq8yZM2dlRNS9sa/XJYKRI0cye/bsVodhZtarSHq6ozI3DZmZVZwTgZlZxTkRmJlVnBOBmVnFORGYmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVXK+7s9jMNmwjJ97e6hD6rCWXHVbKdn1GYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhVXWiKQNEzSPZIWSVog6Yw6dcZIelnSo2n6elnxmJlZfWU+oWwtcHZEzJW0FTBH0q8jYmFNvfsjYmyJcZiZWQOlnRFExLMRMTfNvwosAoaUtT8zM+uepvQRSBoJfAB4sE7xaEmPSbpD0u4drD9B0mxJs1esWFFipGZm1VN6IpC0JfAL4MyIeKWmeC4wIiL2Ar4PTKu3jYiYEhGjImJUW1tbuQGbmVVMqYlA0gCyJHBjRNxSWx4Rr0TEqjQ/AxggaXCZMZmZ2buVedWQgB8DiyLiOx3U2T7VQ9K+KZ7ny4rJzMzWVeZVQx8GjgfmS3o0LTsXGA4QEZOBo4BTJa0FXgeOiYgoMSYzM6tRWiKIiFmAOqkzCZhUVgxmZtY531lsZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFVdaIpA0TNI9khZJWiDpjDp1JOlySYslzZO0T1nxmJlZff1L3PZa4OyImCtpK2COpF9HxMJcnUOAndO0H3Bl+tfMzJqktDOCiHg2Iuam+VeBRcCQmmpHAtdF5gFgoKQdyorJzMzW1ZQ+AkkjgQ8AD9YUDQGW5V4vZ91kgaQJkmZLmr1ixYqywjQzq6ROE4Gk90u6W9Lj6fWeks4vugNJWwK/AM6MiFdqi+usEussiJgSEaMiYlRbW1vRXZuZWQFFzgh+CHwVWAMQEfOAY4psXNIAsiRwY0TcUqfKcmBY7vVQ4Jki2zYzs55RJBFsHhEP1Sxb29lKkgT8GFgUEd/poNp04IR09dD+wMsR8WyBmMzMrIcUuWpopaT3kppsJB0FFPmy/jBwPDBf0qNp2bnAcICImAzMAA4FFgOvAZ/pUvRmZrbeiiSCzwNTgP8m6Y/AU8C4zlaKiFnU7wPI14m0fTMza5FOE0FEPAl8QtIWwEbpUlAzM+sjOk0EkgYCJwAjgf5Z0z9ExOmlRmZmZk1RpGloBvAAMB94q9xwzMys2Yokgk0j4qzSIzEzs5Yocvno9ZI+K2kHSdu2T6VHZmZmTVHkjOAN4P8A5/HOXb8BvKesoMzMrHmKJIKzgPdFxMqygzEzs+Yr0jS0gOxmLzMz64OKnBG8CTwq6R7gr+0LffmomVnfUCQRTEuTmZn1QUXuLL62GYGYmVlrdJgIJP0sIo6WNJ/6zwjYs9TI
zMysKRqdEXw3/Tu2GYGYmVlrNEoEVwD7RMTTzQrGzMyar9Hlow2HkDYzs76h0RnBEEmXd1Toy0fNzPqGRongdWBOswIxM7PWaJQInvelo2ZmfV+jPoI3mhaFmZm1TIeJICL2b2YgZmbWGkUGnTMzsz7MicDMrOIaDTHR8ClkEfFCz4djZmbN1uiqoTlkYwwJGA68mOYHAkuBnUqPzszMSteos3iniHgPcCdweEQMjohBZGMP3dKsAM3MrFxF+gg+FBEz2l9ExB3AR8sLyczMmqnIg2lWSjofuIGsqWgc8HypUZmZWdMUOSM4FmgDbiV7Utl2aZmZmfUBRZ5Q9gJwRlc3LOlqsv6E5yJijzrlY4BfAk+lRbdExDe6uh8zM1s/nSYCSW3Al4HdgU3bl0fExztZdSowCbiuQZ37I8IPvjEza6EiTUM3Ar8nu1z0ImAJ8HBnK0XEfYDvNTAz28AVSQSDIuLHwJqIuDciTgJ6ahyi0ZIek3SHpN17aJtmZtYFRa4aWpP+fVbSYcAzwNAe2PdcYERErJJ0KFlH9M71KkqaAEwAGD58eA/s2szM2hU5I7hY0jbA2cA5wI+AL67vjiPilYhYleZnAAMkDe6g7pSIGBURo9ra2tZ312ZmllPkqqHb0uzLwMd6aseStgf+HBEhaV+ypOT7E8zMmqzIVUNDge8DBwBvAbOAMyJieSfr3QSMAQZLWg5cAAwAiIjJwFHAqZLWkj0W85iIiO6/FTMz644ifQTXAD8B/im9HpeWHdRopYhoeNNZREwiu7zUzMxaqEgfQVtEXBMRa9M0lexOYzMz6wOKJIKVksZJ6pcmjzVkZtaHFEkEJwFHA38CniVr2/9MmUGZmVnzFLlqaClwRH6ZpG+RXUpqZma9XHefWXx0j0ZhZmYt091EoB6NwszMWqY7D68XTgRmZn1G0YfX13qjnHDMzKzZOkwEEbFTMwMxM7PW6G4fgZmZ9RFOBGZmFedEYGZWcd25agh4+6H2ZmbWy3X3qqEA3lNKRGZm1lS+asjMrOI67SNQZpykr6XXw9MTxczMrA8o0ln8A2A08M/p9avAFaVFZGZmTVXkCWX7RcQ+kh4BiIgXJW1cclxmZtYkRc4I1kjqR9ZBjKQ2smcXm5lZH1AkEVwO3ApsJ+kSsofX/69SozIzs6Yp8mCaGyXNAQ4ku5T0UxGxqPTIzMysKYreUPYccFO+zDeUmZn1DUVvKBsOvJjmBwJLAd9nYGbWB3TYRxARO0XEe4A7gcMjYnBEDALGArc0K0AzMytXkc7iD0XEjPYXEXEH8NHyQjIzs2Yqch/BSknnAzeQNRWNA54vNSozM2uaImcExwJtZJeQTgO2S8vMzKwPKHL56AvAGZK2Bt6KiFXlh2VmZs1SZNC5/56Gl5gPLJA0R9Ie5YdmZmbNUKRp6CrgrIgYEREjgLOBKZ2tJOlqSc9JeryDckm6XNJiSfMk7dO10M3MrCcUSQRbRMQ97S8iYiawRYH1pgIHNyg/BNg5TROAKwts08zMeliRRPCkpK9JGpmm84GnOlspIu4DGt19fCRwXWQeAAZK2qFY2GZm1lOKJIKTyK4auoXsyqE24DM9sO8hwLLc6+Vp2TokTZA0W9LsFStW9MCuzcysXZGrhl4ETi9h3x09C7leDFNI/RKjRo2qW8fMzLqn0aBz0xutGBFHrOe+lwPDcq+HAs+s5zbNzKyLGp0RjCZrurkJeJD6v+DXx3TgNEk3A/sBL0fEsz28DzMz60SjRLA9cBDZXcT/DNwO3BQRC4psWNJNwBhgsKTlwAXAAICImAzMAA4FFgOv0TP9DmZm1kUdJoKIeBP4FfArSZuQJYSZkr4REd/vbMMR0XAYiogI4PNdjNfMzHpYw87ilAAOI0sCI8keW+khqM3M+pBGncXXAnsAdwAXRUTdO4TNzKx3a3RGcDywGng/cLr0dl+xyFp2ti45NjMza4JGfQRFbjYzM7Nezl/2ZmYV50RgZlZxTgRm
ZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV1+nzCPqSkRNvb3UIfdaSyw5rdQhm1k0+IzAzqzgnAjOzinMiMDOruEr1EVjv436d8rhfx9r5jMDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCqu1EQg6WBJT0haLGlinfIxkl6W9Giavl5mPGZmtq7SRh+V1A+4AjgIWA48LGl6RCysqXp/RIwtKw4zM2uszDOCfYHFEfFkRLwB3AwcWeL+zMysG8pMBEOAZbnXy9OyWqMlPSbpDkm719uQpAmSZkuavWLFijJiNTOrrDITgeosi5rXc4EREbEX8H1gWr0NRcSUiBgVEaPa2tp6OEwzs2orMxEsB4blXg8FnslXiIhXImJVmp8BDJA0uMSYzMysRpmJ4GFgZ0k7SdoYOAaYnq8gaXtJSvP7pnieLzEmMzOrUdpVQxGxVtJpwJ1AP+DqiFgg6ZRUPhk4CjhV0lrgdeCYiKhtPjIzsxKV+vD61Nwzo2bZ5Nz8JGBSmTGYmVljvrPYzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4pwIzMwqzonAzKzinAjMzCrOicDMrOKcCMzMKs6JwMys4kpNBJIOlvSEpMWSJtYpl6TLU/k8SfuUGY+Zma2rtEQgqR9wBXAIsBtwrKTdaqodAuycpgnAlWXFY2Zm9ZV5RrAvsDginoyIN4CbgSNr6hwJXBeZB4CBknYoMSYzM6vRv8RtDwGW5V4vB/YrUGcI8Gy+kqQJZGcMAKskPdGzoW6wBgMrWx1EEfrXVkewwfBn1rv0ms8L1vszG9FRQZmJQHWWRTfqEBFTgCk9EVRvIml2RIxqdRxWnD+z3sWfV6bMpqHlwLDc66HAM92oY2ZmJSozETwM7CxpJ0kbA8cA02vqTAdOSFcP7Q+8HBHP1m7IzMzKU1rTUESslXQacCfQD7g6IhZIOiWVTwZmAIcCi4HXgM+UFU8vVbnmsD7An1nv4s8LUMQ6TfJmZlYhvrPYzKzinAjMzCrOiaDJJL0p6dHcNFLSGEkvS3pE0iJJF6S6gyTdI2mVpEk129lY0hRJ/ynp95I+3Zp31Ld18fPaN1fvMUn/kJZvLun29DktkHRZa99V79Hs4y9piaTBaT4kfTtXdo6kCyWdl9tPPr7TJQ1P/2cfScPmHFr2MeoREeGpiROwqs6yMcBtaX4L4A/AB9P8AcApwKSadS4CLk7zGwGDW/3e+uLUxc9rc6B/Wr4D8BzZBRmbAx9LyzcG7gcOafV76w1Ts48/sKT9/xLwF+Cp3OtzgAsbxUfW+Xxqmt8NWNLqY1hk8hnBBiYiVgNzgPdGxOqImEX2B1nrJODStM5bEdFr7o7sS2o+r9ciYm0q2pR0c2Rafk+afwOYS3bPjK2nko//WrIv9i92JSRg6zS/Db3kvigngubbLHcqeWttoaRBwP7Ago42IGlgmv2mpLmS/l3S35YUb9V16fOStJ+kBcB84JTcF1N7/YHA4cDd5YfeJ7T6+F8BHCdpm4L1LwTGSVpOdnn8Fwqu11JlDjFh9b0eEXvXWf4RSY8AbwGXRUSHiYDscxsK/CYizpJ0FvAt4PieD7fyuvR5RcSDwO6SdgWulXRHRPwFQFJ/4Cbg8oh4sknx93YtPf4R8Yqk64DTgdcLrHIsMDUivi1pNHC9pD0i4q0i+2sVJ4INx/0RMbZg3efJbsBr/4X078DJpURlHWn4eUXEIkmrgT2A2WnxFOAPEfG9ZgTYx6338Vc2VP6cVDY9Ir7ewea+R9acdE2BuE4GDk4x/E7SpmQD2z1XYN2WcdNQLxRZT9R/kHWa
ARwILGxZQAaAsuFU+qf5EcAuZJ2PSLqYrM34zJYF2Md19fhHxJsRsXeaOkoCRMQLwM8o9mNrKdn/R9JZyabAim69oSbyGcEGTtISss6njSV9Cvj7iFgIfIXstPN7ZH9oHp6j9Q4AJkpaQ9Zk8bmIWClpKHAe8HtgriTIrgL7UetC7ZPKPP7fBk4rUO9s4IeSvkjWcTw+/XDboHmICTOzinPTkJlZxTkRmJlVnBOBmVnFORGYmVWcE4GZWcU5EVhLpJEdr8+97i9phaTburidt0eL7GqdtHx+GqnyLknbd2XfHezrFEknNCg/QtLEHtjPTEmfrFl2pqQfdLJO5R/UbutyIrBWWQ3sIWmz9Pog4I8tiONjEbEX2d2n5+YLlOnS/5GImBwR1zUonx4RPTEM9U1kzwHPOyYtN+sSJwJrpTuAw9L8seS+xCRtK2laGtP9AUl7puWD0q/3RyRdBSi3zjhJD6UByq5KQwgUdR/wPmXj3S9Kv6znAsMkfUnSwymWi3L7OyEte6z97EbZePXnpPnTJS1MdW5Oy8YrPVtC0ghJd6fyuyUNT8unSrpc0m8lPSnpqDrx/hwYK2mTtM5IYEdglqQrJc1WNvb+RXXWRdKq3PxRkqam+TZJv0jv92FJH+7CMbReyonAWulm4Jg0HsuewIO5souARyJiT7Jf6u2/si8AZkXEB4DpQPuX567A/wA+nAYpexM4rguxjCUbsRKyoQmuS/vYBdgZ2BfYG/igpL+TtDvZ3aofT2cUZ9TZ5kTgA+k9nFKnfFLaz57AjcDlubIdyO6UHQuscwYREc8DD5HGtSE7G/hpuov1vIgYRXZMP9qeRAv6N+C7EfEh4NOA736uAA8xYS0TEfPSL9ljyYbszTuA7IuIiPh/6UxgG+DvgH9My2+X9GKqfyDZw0keTkMIbEaxgb7ukfQmMA84HxgIPB0RD6Tyv0/TI+n1lmSJYS/g5+3PgUjj0dSaB9woaRowrU756Pb3AlwP/O9c2bQ0YuVCdTzEeHvz0C/Tvyel5UdLmkD2/3sHsgekzOtgG7U+AeyWjiHA1pK2iohXC65vvZATgbXadLIhtMcAg3LLVadu1PybJ+DaiPhqF/f/sfxDfZSNV7+6ZruXRsRV79qZdHoHceQdRpa4jgC+ls4iGslv7681MdQzDfiOpH2AzSJirqSdyJ6k9aGIeDE1+Wzayb7y5RsBoyOiyJDL1ke4acha7WrgGxExv2b5faSmHUljgJUR8UrN8kOAv0n17waOkrRdKts2jUC5vu4ETpK0ZdrukLSPu8l+eQ9q319+pdTJPCw9GevLZGcaW9Zs+7e80+F7HDCrK4FFxCpgJtkxbO9f2Zoskb2cziQO6WD1P0vaNcX5D7nld5EbXE1SvWcBWB/jMwJrqYhYTtYuXetC4BpJ88ievXBiWn4RcJOkucC9ZMP+EhELJZ0P3JW+3NYAnweeXs/47kr9D79LzSWrgHERsUDSJcC9qWnpEWB8btV+wA2pOUtk7e4v5ZpcIHvYydWSvkT3R5C9CbiFlFAi4jFlD2xZADwJ/KaD9SYCtwHLgMd5J0mdDlyRjnt/ssRbr3/D+hCPPmpmVnFuGjIzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6s4JwIzs4pzIjAzq7j/D8uemFdS2OEcAAAAAElFTkSuQmCC\n",
106 | "text/plain": [
107 | ""
108 | ]
109 | },
110 | "metadata": {
111 | "needs_background": "light"
112 | },
113 | "output_type": "display_data"
114 | }
115 | ],
116 | "source": [
117 | "plt.bar(precision_list, model_load_time)\n",
118 | "plt.xlabel('Model Precision Value')\n",
119 | "plt.ylabel('Model Load Time')\n",
120 | "plt.title('Asynchronous Inference')\n",
121 | "plt.show()"
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "metadata": {},
128 | "outputs": [],
129 | "source": []
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": null,
134 | "metadata": {},
135 | "outputs": [],
136 | "source": []
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": null,
141 | "metadata": {},
142 | "outputs": [],
143 | "source": []
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "metadata": {},
149 | "outputs": [],
150 | "source": []
151 | }
152 | ],
153 | "metadata": {
154 | "kernelspec": {
155 | "display_name": "Python 3",
156 | "language": "python",
157 | "name": "python3"
158 | },
159 | "language_info": {
160 | "codemirror_mode": {
161 | "name": "ipython",
162 | "version": 3
163 | },
164 | "file_extension": ".py",
165 | "mimetype": "text/x-python",
166 | "name": "python",
167 | "nbconvert_exporter": "python",
168 | "pygments_lexer": "ipython3",
169 | "version": "3.6.5"
170 | }
171 | },
172 | "nbformat": 4,
173 | "nbformat_minor": 2
174 | }
175 |
--------------------------------------------------------------------------------
/src/__pycache__/face_detection_model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/face_detection_model.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/face_detection_model.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/face_detection_model.cpython-37.pyc
--------------------------------------------------------------------------------
/src/__pycache__/gaze_estimation_model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/gaze_estimation_model.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/gaze_estimation_model.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/gaze_estimation_model.cpython-37.pyc
--------------------------------------------------------------------------------
/src/__pycache__/head_pose_estimation_model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/head_pose_estimation_model.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/head_pose_estimation_model.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/head_pose_estimation_model.cpython-37.pyc
--------------------------------------------------------------------------------
/src/__pycache__/input_feeder.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/input_feeder.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/input_feeder.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/input_feeder.cpython-37.pyc
--------------------------------------------------------------------------------
/src/__pycache__/landmark_detection_model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/landmark_detection_model.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/landmark_detection_model.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/landmark_detection_model.cpython-37.pyc
--------------------------------------------------------------------------------
/src/__pycache__/mouse_controller.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/mouse_controller.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/mouse_controller.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/__pycache__/mouse_controller.cpython-37.pyc
--------------------------------------------------------------------------------
/src/computer_controller_job.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# DevCloud job script: runs the computer-pointer-controller pipeline and
# packages the results. All stdout/stderr goes to the /output log files.
exec 1>/output/stdout.log 2>/output/stderr.log

# Positional arguments supplied by the job submitter.
FACEDETECTIONMODEL=$1
LANDMARKDETECTIONMODEL=$2
HEADPOSEESTIMATIONMODEL=$3
GAZEESTIMATIONMODEL=$4
DEVICE=$5
OUTPUT=$6
VIDEO=$7

# Bug fix: use the named OUTPUT variable (was the raw positional $6) and
# quote it so paths containing spaces do not word-split.
mkdir -p "$OUTPUT"

if echo "$DEVICE" | grep -q "FPGA"; then # if device passed in is FPGA, load bitstream to program FPGA
    # Environment variables and compilation for edge compute nodes with FPGAs
    export AOCL_BOARD_PACKAGE_ROOT=/opt/intel/openvino/bitstreams/a10_vision_design_sg2_bitstreams/BSP/a10_1150_sg2

    source /opt/altera/aocl-pro-rte/aclrte-linux64/init_opencl.sh
    aocl program acl0 /opt/intel/openvino/bitstreams/a10_vision_design_sg2_bitstreams/2020-2_PL2_FP16_MobileNet_Clamp.aocx

    export CL_CONTEXT_COMPILER_MODE_INTELFPGA=3
fi

# Quote every argument so model/video paths with spaces survive expansion.
python3 main.py -fd "${FACEDETECTIONMODEL}" \
                -lr "${LANDMARKDETECTIONMODEL}" \
                -hp "${HEADPOSEESTIMATIONMODEL}" \
                -ge "${GAZEESTIMATIONMODEL}" \
                -d "${DEVICE}" \
                -o "${OUTPUT}" \
                -i "${VIDEO}"

cd /output

tar zcvf output.tgz *
--------------------------------------------------------------------------------
/src/face_detection_model.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from model import Model
3 |
4 |
class FaceDetectionModel(Model):
    """
    Wrapper around the OpenVINO face-detection network.

    Runs asynchronous inference on a frame and returns the scaled bounding
    boxes of the detected faces plus a crop of the last accepted face.
    """

    def __init__(self, model_path, device='CPU', extensions=None, threshold=0.6):
        """
        Initialise the Face Detection Model object.

        :param model_path: path to the model's .xml file (.bin is derived from it)
        :param device: inference device name, e.g. 'CPU'
        :param extensions: optional device extensions (kept for interface parity)
        :param threshold: minimum confidence for a detection to be kept
        """
        Model.__init__(self, model_path, device, extensions, threshold)
        self.model_name = 'Face Detection Model'
        self.input_name = next(iter(self.model.inputs))
        self.input_shape = self.model.inputs[self.input_name].shape
        self.output_name = next(iter(self.model.outputs))
        self.output_shape = self.model.outputs[self.output_name].shape

    def predict(self, image, request_id=0):
        """
        Run inference on a single frame.

        :param image: input frame (numpy array, HxWxC)
        :param request_id: inference request slot to use
        :return: (detections, cropped_image); ([], image) when inference fails
        """
        # Bug fix: initialise the results before the try block so an error
        # during inference cannot leave them unbound at the return statement
        # (the original raised UnboundLocalError on any inference failure).
        detections, cropped_image = [], image
        try:
            p_image = self.preprocess_img(image)
            self.network.start_async(request_id, inputs={self.input_name: p_image})
            if self.wait() == 0:
                outputs = self.network.requests[0].outputs[self.output_name]
                detections, cropped_image = self.preprocess_output(outputs, image)
        except Exception as e:
            self.logger.error("Error While Prediction in Face Detection Model" + str(e))
        return detections, cropped_image

    def preprocess_output(self, coords, image):
        """
        Scale raw detections to pixel coordinates and crop the face.

        There may be multiple detections for a single image; every detection
        with label 1 and confidence >= self.threshold is kept, and the crop
        of the last accepted one is returned.

        :param coords: raw detection blob (squeezed to rows of
                       [image_id, label, confidence, xmin, ymin, xmax, ymax])
        :param image: original frame used for scaling and cropping
        :return: (detections, cropped_image)
        """
        width, height = int(image.shape[1]), int(image.shape[0])
        detections = []
        cropped_image = image
        coords = np.squeeze(coords)
        try:
            for coord in coords:
                image_id, label, threshold, xmin, ymin, xmax, ymax = coord
                # image_id == -1 marks the end of valid detections.
                if image_id == -1:
                    break
                if label == 1 and threshold >= self.threshold:
                    xmin = int(xmin * width)
                    ymin = int(ymin * height)
                    xmax = int(xmax * width)
                    ymax = int(ymax * height)
                    detections.append([xmin, ymin, xmax, ymax])
                    cropped_image = image[ymin:ymax, xmin:xmax]
        except Exception as e:
            self.logger.error("Error While drawing bounding boxes on image in Face Detection Model" + str(e))
        return detections, cropped_image
61 |
--------------------------------------------------------------------------------
/src/gaze_estimation_model.py:
--------------------------------------------------------------------------------
1 | import math
2 | from model import Model
3 |
4 |
class GazeEstimationModel(Model):
    """
    Wrapper around the OpenVINO gaze-estimation network.

    Takes crops of both eyes plus the head-pose angles and produces a gaze
    direction vector and an (x, y) displacement for the mouse pointer.
    """

    def __init__(self, model_path, device='CPU', extensions=None, threshold=0.6):
        """
        Initialise the Gaze Estimation Model object.

        :param model_path: path to the model's .xml file
        :param device: inference device name, e.g. 'CPU'
        :param extensions: optional device extensions (kept for interface parity)
        :param threshold: confidence threshold (unused by this model, kept for parity)
        """
        Model.__init__(self, model_path, device, extensions, threshold)
        # Bug fix: this was 'Face Detection Model', so error logs from this
        # class pointed at the wrong model.
        self.model_name = 'Gaze Estimation Model'
        self.input_name = [i for i in self.model.inputs.keys()]
        # The eye-image inputs share the shape of input 1; use it for
        # preprocessing (input 0 is the head-pose angles vector).
        self.input_shape = self.model.inputs[self.input_name[1]].shape
        self.output_name = [o for o in self.model.outputs.keys()]

    def predict(self, left_eye_image, right_eye_image, hpe_cords, request_id=0):
        """
        Run inference on one pair of eye crops.

        :param left_eye_image: crop of the left eye (numpy array)
        :param right_eye_image: crop of the right eye (numpy array)
        :param hpe_cords: [yaw, pitch, roll] head-pose angles in degrees
        :param request_id: inference request slot to use
        :return: (mouse_cord, gaze_vector); ((0, 0), []) when inference fails
        """
        # Bug fix: initialise the results before the try block so an error
        # during inference cannot leave them unbound at the return statement.
        mouse_cord, gaze_vector = (0, 0), []
        try:
            left_eye_image = self.preprocess_img(left_eye_image)
            right_eye_image = self.preprocess_img(right_eye_image)
            self.network.start_async(request_id, inputs={'left_eye_image': left_eye_image,
                                                         'right_eye_image': right_eye_image,
                                                         'head_pose_angles': hpe_cords})
            if self.wait() == 0:
                outputs = self.network.requests[0].outputs
                mouse_cord, gaze_vector = self.preprocess_output(outputs, hpe_cords)
        except Exception as e:
            self.logger.error("Error While Prediction in Gaze Estimation Model" + str(e))
        return mouse_cord, gaze_vector

    def preprocess_output(self, outputs, hpe_cords):
        """
        Convert the raw model output into mouse coordinates.

        Model output is a dictionary like
        {'gaze_vector': array([[ 0.51141196, 0.12343533, -0.80407059]], dtype=float32)}
        containing Cartesian coordinates of the gaze direction vector.
        The x/y components are rotated by the roll angle (hpe_cords[2], in
        degrees) so head tilt does not skew the pointer movement.

        :param outputs: raw output dictionary from the network
        :param hpe_cords: [yaw, pitch, roll] head-pose angles in degrees
        :return: (mouse_cord, gaze_vector)
        """
        mouse_cord = (0, 0)
        gaze_vector = []
        try:
            # Bug fix: the output lookup is now inside the try block, so a
            # missing/odd output no longer escapes as an unhandled exception.
            gaze_vector = outputs[self.output_name[0]][0]
            angle_r_fc = hpe_cords[2]
            sin_r = math.sin(angle_r_fc * math.pi / 180.0)
            cos_r = math.cos(angle_r_fc * math.pi / 180.0)
            x = gaze_vector[0] * cos_r + gaze_vector[1] * sin_r
            y = -gaze_vector[0] * sin_r + gaze_vector[1] * cos_r
            mouse_cord = (x, y)
        except Exception as e:
            self.logger.error("Error While preprocessing output in Gaze Estimation Model" + str(e))
        return mouse_cord, gaze_vector
60 |
--------------------------------------------------------------------------------
/src/head_pose_estimation_model.py:
--------------------------------------------------------------------------------
1 | from model import Model
2 |
3 |
class HeadPoseEstimationModel(Model):
    """
    Wrapper around the OpenVINO head-pose-estimation network.

    Produces the yaw, pitch and roll angles (in degrees) of the head
    detected in a face crop.
    """

    def __init__(self, model_path, device='CPU', extensions=None, threshold=0.6):
        """
        Initialise the Head Pose Estimation Model object.

        :param model_path: path to the model's .xml file
        :param device: inference device name, e.g. 'CPU'
        :param extensions: optional device extensions (kept for interface parity)
        :param threshold: confidence threshold (unused by this model, kept for parity)
        """
        Model.__init__(self, model_path, device, extensions, threshold)
        self.model_name = 'Head Pose Estimation Model'
        self.input_name = next(iter(self.model.inputs))
        self.input_shape = self.model.inputs[self.input_name].shape
        self.output_name = next(iter(self.model.outputs))
        self.output_shape = self.model.outputs[self.output_name].shape

    def predict(self, image, request_id=0):
        """
        Run inference on a face crop.

        :param image: face crop (numpy array, HxWxC)
        :param request_id: inference request slot to use
        :return: [yaw, pitch, roll] in degrees; empty list when inference fails
        """
        # Bug fix: initialise the result before the try block so an error
        # during inference cannot leave it unbound at the return statement.
        f_output = []
        try:
            p_image = self.preprocess_img(image)
            self.network.start_async(request_id, inputs={self.input_name: p_image})
            if self.wait() == 0:
                outputs = self.network.requests[0].outputs
                f_output = self.preprocess_output(outputs)
        except Exception as e:
            self.logger.error("Error While prediction in Head Pose Estimation Model" + str(e))
        return f_output

    def preprocess_output(self, outputs):
        """
        Model output is a dictionary having below three arguments:
            "angle_y_fc", shape: [1, 1] - Estimated yaw (in degrees).
            "angle_p_fc", shape: [1, 1] - Estimated pitch (in degrees).
            "angle_r_fc", shape: [1, 1] - Estimated roll (in degrees).

        :param outputs: raw output dictionary from the network
        :return: [yaw, pitch, roll]; partial/empty list if a key is missing
        """
        final_output = []
        try:
            final_output.append(outputs['angle_y_fc'][0][0])
            final_output.append(outputs['angle_p_fc'][0][0])
            final_output.append(outputs['angle_r_fc'][0][0])
        except Exception as e:
            self.logger.error("Error While preprocessing output in Head Pose Estimation Model" + str(e))
        return final_output
50 |
--------------------------------------------------------------------------------
/src/input_feeder.py:
--------------------------------------------------------------------------------
1 | '''
2 | This class can be used to feed input from an image, webcam, or video to your model.
3 | Sample usage:
4 | feed=InputFeeder(input_type='video', input_file='video.mp4')
5 | feed.load_data()
6 | for batch in feed.next_batch():
7 | do_something(batch)
8 | feed.close()
9 | '''
10 | import cv2
11 | from numpy import ndarray
12 |
class InputFeeder:
    """
    Feeds input from an image, webcam, or video file to the models.

    Sample usage:
        feed = InputFeeder(input_type='video', input_file='video.mp4')
        feed.load_data()
        for ret, batch in feed.next_batch():
            do_something(batch)
        feed.close()
    """

    def __init__(self, input_type, input_file=None):
        '''
        input_type: str, The type of input. Can be 'video' for video file, 'image' for image file,
                    or 'cam' to use webcam feed.
        input_file: str, The file that contains the input image or video file. Leave empty for cam input_type.
        '''
        self.input_type = input_type
        if input_type == 'video' or input_type == 'image':
            self.input_file = input_file

    def load_data(self):
        # For 'image' input self.cap holds the decoded numpy array;
        # otherwise it is a cv2.VideoCapture.
        if self.input_type == 'video':
            self.cap = cv2.VideoCapture(self.input_file)
        elif self.input_type == 'cam':
            self.cap = cv2.VideoCapture(0)
        else:
            self.cap = cv2.imread(self.input_file)

    def next_batch(self):
        '''
        Yields (ret, frame) tuples.

        For video/cam input, every 10th frame is yielded (the intermediate
        frames are read and discarded). For 'image' input the same image is
        yielded repeatedly.
        '''
        while True:
            if self.input_type == 'image':
                # Bug fix: a numpy array has no read() method, so the
                # original crashed for image input; yield the image itself
                # (ret is False when cv2.imread failed and returned None).
                yield self.cap is not None, self.cap
            else:
                for _ in range(10):
                    ret, frame = self.cap.read()
                yield ret, frame

    def close(self):
        '''
        Closes the VideoCapture (no-op for image input).
        '''
        if not self.input_type == 'image':
            self.cap.release()

    def get_fps(self):
        '''
        Return the stream's FPS as an int (valid for video/cam input only).
        '''
        return int(self.cap.get(cv2.CAP_PROP_FPS))
55 |
--------------------------------------------------------------------------------
/src/landmark_detection_model.py:
--------------------------------------------------------------------------------
1 | from model import Model
2 |
3 |
class LandmarkDetectionModel(Model):
    """
    Wrapper around the OpenVINO facial-landmark-regression network.

    Locates the two eye centres in a face crop and returns a small crop
    around each eye along with the crop coordinates.
    """

    def __init__(self, model_path, device='CPU', extensions=None, threshold=0.6):
        """
        Initialise the Landmark Detection Model object.

        :param model_path: path to the model's .xml file
        :param device: inference device name, e.g. 'CPU'
        :param extensions: optional device extensions (kept for interface parity)
        :param threshold: confidence threshold (unused by this model, kept for parity)
        """
        Model.__init__(self, model_path, device, extensions, threshold)
        self.model_name = 'Landmark Detection Model'
        self.input_name = next(iter(self.model.inputs))
        self.input_shape = self.model.inputs[self.input_name].shape
        self.output_name = next(iter(self.model.outputs))

    def predict(self, image, request_id=0):
        """
        Run async inference on a face crop.

        :param image: face crop (numpy array, HxWxC)
        :param request_id: inference request slot to use
        :return: (left_eye_image, right_eye_image, eye_cords); three empty
                 lists when inference fails
        """
        left_eye_image, right_eye_image, eye_cords = [], [], []
        try:
            prepared = self.preprocess_img(image)
            self.network.start_async(request_id, inputs={self.input_name: prepared})
            if self.wait() == 0:
                raw = self.network.requests[0].outputs[self.output_name]
                left_eye_image, right_eye_image, eye_cords = self.preprocess_output(raw, image)
        except Exception as e:
            self.logger.error("Error While making prediction in Landmark Detection Model" + str(e))
        return left_eye_image, right_eye_image, eye_cords

    def preprocess_output(self, outputs, image):
        """
        The net outputs a blob with the shape: [1, 10], containing a
        row-vector of 10 floating point values for five landmark coordinates
        in the form (x0, y0, x1, y1, ..., x5, y5), all normalized to [0, 1].
        Only the first two landmarks (the eyes) are used; a 20x20 pixel box
        is cut around each eye centre.

        :param outputs: raw landmark blob from the network
        :param image: face crop used for scaling and cropping
        :return: (left_eye_image, right_eye_image, eye_cords)
        """
        h = image.shape[0]
        w = image.shape[1]
        left_eye_image, right_eye_image, eye_cords = [], [], []
        try:
            points = outputs[0]

            # Eye centres scaled back to pixel coordinates of the face crop.
            lx = int(points[0][0][0] * w)
            ly = int(points[1][0][0] * h)
            rx = int(points[2][0][0] * w)
            ry = int(points[3][0][0] * h)

            boxes = [[lx - 10, ly - 10, lx + 10, ly + 10],
                     [rx - 10, ry - 10, rx + 10, ry + 10]]

            left_eye_image = image[boxes[0][1]:boxes[0][3], boxes[0][0]:boxes[0][2]]
            right_eye_image = image[boxes[1][1]:boxes[1][3], boxes[1][0]:boxes[1][2]]
            eye_cords = boxes
        except Exception as e:
            self.logger.error("Error While drawing bounding boxes on image in Landmark Detection Model" + str(e))
        return left_eye_image, right_eye_image, eye_cords
--------------------------------------------------------------------------------
/src/main.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import os
3 | import logging
4 | import time
5 | import numpy as np
6 | from input_feeder import InputFeeder
7 | from mouse_controller import MouseController
8 | from face_detection_model import FaceDetectionModel
9 | from landmark_detection_model import LandmarkDetectionModel
10 | from head_pose_estimation_model import HeadPoseEstimationModel
11 | from gaze_estimation_model import GazeEstimationModel
12 | from argparse import ArgumentParser
13 |
14 |
def build_argparser():
    """
    Parse command-line arguments.

    :return: configured ArgumentParser object
    """
    parser = ArgumentParser()
    parser.add_argument("-fd", "--faceDetectionModel", type=str, required=True,
                        help="Specify path of xml file of face detection model")

    parser.add_argument("-lr", "--landmarkRegressionModel", type=str, required=True,
                        help="Specify path of xml file of landmark regression model")

    parser.add_argument("-hp", "--headPoseEstimationModel", type=str, required=True,
                        help="Specify path of xml file of Head Pose Estimation model")

    parser.add_argument("-ge", "--gazeEstimationModel", type=str, required=True,
                        help="Specify path of xml file of Gaze Estimation model")

    parser.add_argument("-i", "--input", type=str, required=True,
                        help="Specify path of input Video file or cam for webcam")

    # Bug fix: the concatenated help strings had no separating spaces, which
    # rendered as "values)ff for ..." in --help output.
    parser.add_argument("-flags", "--previewFlags", required=False, nargs='+',
                        default=[],
                        help="Specify flag from ff, fl, fh, fg like -flags ff fl (Space separated if multiple values); "
                             "ff for faceDetectionModel, fl for landmarkRegressionModel, "
                             "fh for headPoseEstimationModel, fg for gazeEstimationModel")

    parser.add_argument("-prob", "--prob_threshold", required=False, type=float,
                        default=0.6,
                        help="Specify probability threshold for face detection model")

    # Bug fix: help text said "FPGU, MYRID" — corrected to the real device
    # names FPGA and MYRIAD.
    parser.add_argument("-d", "--device", required=False, type=str, default='CPU',
                        help="Specify Device for inference; "
                             "It can be CPU, GPU, FPGA, MYRIAD")
    parser.add_argument("-o", '--output_path', default='/results/', type=str,
                        help="Directory where stats.txt is written")
    return parser
51 |
52 |
def draw_preview(
        frame, preview_flags, cropped_image, left_eye_image, right_eye_image,
        face_cords, eye_cords, pose_output, gaze_vector):
    """
    Render the requested debug overlays and return the preview frame.

    :param frame: full original frame (modified in place by ff/fh/fg overlays)
    :param preview_flags: subset of ['ff', 'fl', 'fh', 'fg'] selecting overlays
    :param cropped_image: face crop from the face-detection model
                          (modified in place by the fl overlay)
    :param left_eye_image: crop of the left eye
    :param right_eye_image: crop of the right eye
    :param face_cords: face bounding boxes [[xmin, ymin, xmax, ymax], ...]
    :param eye_cords: two eye boxes from the landmark model
    :param pose_output: [yaw, pitch, roll] in degrees
    :param gaze_vector: (x, y, z) gaze direction vector
    :return: preview frame (the face crop when multiple flags are active,
             otherwise a copy of the full frame)
    """
    preview_frame = frame.copy()

    if 'ff' in preview_flags:
        # When other overlays are also requested, show the face crop so the
        # crop-space drawings (fl/fg) are visible in the preview.
        if len(preview_flags) != 1:
            preview_frame = cropped_image
        cv2.rectangle(frame, (face_cords[0][0], face_cords[0][1]), (face_cords[0][2], face_cords[0][3]),
                      (0, 0, 0), 3)

    if 'fl' in preview_flags:
        # Eye boxes are drawn on the face crop, widened by 10px on each side.
        cv2.rectangle(cropped_image, (eye_cords[0][0]-10, eye_cords[0][1]-10), (eye_cords[0][2]+10, eye_cords[0][3]+10),
                      (255, 0, 0), 2)
        cv2.rectangle(cropped_image, (eye_cords[1][0]-10, eye_cords[1][1]-10), (eye_cords[1][2]+10, eye_cords[1][3]+10),
                      (255, 0, 0), 2)

    if 'fh' in preview_flags:
        cv2.putText(
            frame,
            "Pose Angles: yaw= {:.2f} , pitch= {:.2f} , roll= {:.2f}".format(
                pose_output[0], pose_output[1], pose_output[2]),
            (20, 40),
            cv2.FONT_HERSHEY_COMPLEX,
            1, (0, 0, 0), 2)

    if 'fg' in preview_flags:

        cv2.putText(
            frame,
            "Gaze Cords: x= {:.2f} , y= {:.2f} , z= {:.2f}".format(
                gaze_vector[0], gaze_vector[1], gaze_vector[2]),
            (20, 80),
            cv2.FONT_HERSHEY_COMPLEX,
            1, (0, 0, 0), 2)

        # Draw an X across each eye crop scaled by the gaze vector, then
        # paste the crops back into the preview.
        # NOTE(review): eye_cords come from the landmark model and appear to
        # be relative to the face crop; indexing preview_frame with them is
        # only correct when preview_frame IS the face crop (ff + another
        # flag). Confirm behavior when 'fg' is used without 'ff'.
        x, y, w = int(gaze_vector[0] * 12), int(gaze_vector[1] * 12), 160
        le = cv2.line(left_eye_image.copy(), (x - w, y - w), (x + w, y + w), (255, 0, 255), 2)
        cv2.line(le, (x - w, y + w), (x + w, y - w), (255, 0, 255), 2)
        re = cv2.line(right_eye_image.copy(), (x - w, y - w), (x + w, y + w), (255, 0, 255), 2)
        cv2.line(re, (x - w, y + w), (x + w, y - w), (255, 0, 255), 2)
        preview_frame[eye_cords[0][1]:eye_cords[0][3], eye_cords[0][0]:eye_cords[0][2]] = le
        preview_frame[eye_cords[1][1]:eye_cords[1][3], eye_cords[1][0]:eye_cords[1][2]] = re

    return preview_frame
98 |
99 |
def main():
    """
    Entry point: wires together the four models and the input feeder, runs
    the inference pipeline frame by frame, optionally shows a preview, and
    moves the mouse pointer according to the estimated gaze direction.
    Writes inference/load-time stats to <output_path>/stats.txt.
    """
    args = build_argparser().parse_args()
    logger = logging.getLogger('main')

    # Set to True to skip mouse movement while measuring performance.
    is_benchmarking = False
    # initialize variables with the input arguments for easy access
    model_path_dict = {
        'FaceDetectionModel': args.faceDetectionModel,
        'LandmarkRegressionModel': args.landmarkRegressionModel,
        'HeadPoseEstimationModel': args.headPoseEstimationModel,
        'GazeEstimationModel': args.gazeEstimationModel
    }
    preview_flags = args.previewFlags
    input_filename = args.input
    device_name = args.device
    prob_threshold = args.prob_threshold
    output_path = args.output_path

    if input_filename.lower() == 'cam':
        feeder = InputFeeder(input_type='cam')
    else:
        if not os.path.isfile(input_filename):
            logger.error("Unable to find specified video file")
            exit(1)
        feeder = InputFeeder(input_type='video', input_file=input_filename)

    for model_path in list(model_path_dict.values()):
        if not os.path.isfile(model_path):
            logger.error("Unable to find specified model file" + str(model_path))
            exit(1)

    # instantiate models
    face_detection_model = FaceDetectionModel(model_path_dict['FaceDetectionModel'], device_name, threshold=prob_threshold)
    landmark_detection_model = LandmarkDetectionModel(model_path_dict['LandmarkRegressionModel'], device_name, threshold=prob_threshold)
    head_pose_estimation_model = HeadPoseEstimationModel(model_path_dict['HeadPoseEstimationModel'], device_name, threshold=prob_threshold)
    gaze_estimation_model = GazeEstimationModel(model_path_dict['GazeEstimationModel'], device_name, threshold=prob_threshold)

    if not is_benchmarking:
        mouse_controller = MouseController('medium', 'fast')

    # load models and measure the total load time
    start_model_load_time = time.time()
    face_detection_model.load_model()
    landmark_detection_model.load_model()
    head_pose_estimation_model.load_model()
    gaze_estimation_model.load_model()
    total_model_load_time = time.time() - start_model_load_time

    feeder.load_data()

    # The feeder yields every 10th frame, hence fps/10 for the output video.
    out_video = cv2.VideoWriter(os.path.join('output_video.mp4'), cv2.VideoWriter_fourcc(*'avc1'), int(feeder.get_fps() / 10),
                                (1920, 1080), True)

    frame_count = 0
    start_inference_time = time.time()
    for ret, frame in feeder.next_batch():

        if not ret:
            break

        frame_count += 1

        key = cv2.waitKey(60)

        try:
            face_cords, cropped_image = face_detection_model.predict(frame)

            if type(cropped_image) == int:
                logger.warning("Unable to detect the face")
                if key == 27:
                    break
                continue

            left_eye_image, right_eye_image, eye_cords = landmark_detection_model.predict(cropped_image)
            pose_output = head_pose_estimation_model.predict(cropped_image)
            mouse_cord, gaze_vector = gaze_estimation_model.predict(left_eye_image, right_eye_image, pose_output)

        except Exception as e:
            # Bug fix: message previously read "Could predict using model".
            logger.warning("Could not predict using model" + str(e) + " for frame " + str(frame_count))
            continue

        image = cv2.resize(frame, (500, 500))

        if not len(preview_flags) == 0:
            preview_frame = draw_preview(
                frame, preview_flags, cropped_image, left_eye_image, right_eye_image,
                face_cords, eye_cords, pose_output, gaze_vector)
            image = np.hstack((cv2.resize(frame, (500, 500)), cv2.resize(preview_frame, (500, 500))))

        cv2.imshow('preview', image)
        out_video.write(frame)

        # Move the pointer only every 5th processed frame for smoother motion.
        if frame_count % 5 == 0 and not is_benchmarking:
            mouse_controller.move(mouse_cord[0], mouse_cord[1])

        # ESC quits.
        if key == 27:
            break

    total_time = time.time() - start_inference_time
    total_inference_time = round(total_time, 1)
    # Bug fix: guard against ZeroDivisionError when the run is so short that
    # the rounded inference time is 0.0 (e.g. no frames processed).
    fps = frame_count / total_inference_time if total_inference_time > 0 else 0

    # Bug fix: os.mkdir failed when intermediate directories were missing or
    # the directory already existed; makedirs with exist_ok handles both.
    try:
        os.makedirs(output_path, exist_ok=True)
    except OSError as error:
        logger.error(error)

    # Bug fix: use os.path.join instead of raw concatenation so a missing
    # trailing slash in --output_path does not corrupt the file name.
    with open(os.path.join(output_path, 'stats.txt'), 'w') as f:
        f.write(str(total_inference_time) + '\n')
        f.write(str(fps) + '\n')
        f.write(str(total_model_load_time) + '\n')

    logger.info('Model load time: ' + str(total_model_load_time))
    logger.info('Inference time: ' + str(total_inference_time))
    logger.info('FPS: ' + str(fps))

    logger.info('Video stream ended')
    cv2.destroyAllWindows()
    feeder.close()
219 |
220 |
221 | if __name__ == '__main__':
222 | main()
223 |
--------------------------------------------------------------------------------
/src/model.py:
--------------------------------------------------------------------------------
1 | from openvino.inference_engine import IECore, IENetwork
2 | import cv2
3 | import logging
4 |
5 |
class Model:
    """
    Base class shared by the four OpenVINO model wrappers.

    Reads the network definition on construction; load_model() compiles it
    onto the target device. Subclasses override predict() and
    preprocess_output() and set the input/output name and shape attributes.
    """

    def __init__(self, model_path, device='CPU', extensions=None, threshold=0.6):
        """
        :param model_path: path to the model's .xml file; the .bin weights
                           path is derived by swapping the extension
        :param device: inference device name, e.g. 'CPU'
        :param extensions: optional device extensions (currently unused here)
        :param threshold: confidence threshold used by subclasses
        :raises ValueError: when the network files cannot be read
        """
        self.model_structure = model_path
        self.model_weights = model_path.replace('.xml', '.bin')
        self.device_name = device
        self.threshold = threshold
        self.logger = logging.getLogger('fd')
        self.model_name = 'Basic Model'  # overridden by each subclass
        # Populated by subclasses from the parsed network.
        self.input_name = None
        self.input_shape = None
        self.output_name = None
        self.output_shape = None
        self.network = None  # set by load_model()
        try:
            self.core = IECore()
            self.model = IENetwork(self.model_structure, self.model_weights)
        except Exception as e:
            self.logger.error("Error While Initilizing" + str(self.model_name) + str(e))
            raise ValueError("Could not Initialise the network. Have you enterred the correct model path?")

    def load_model(self):
        """
        Compile the parsed network onto the target device via IECore.

        Populates self.network with an executable network holding a single
        inference request; failures are logged rather than raised, leaving
        self.network as None.
        """
        try:
            self.network = self.core.load_network(network=self.model, device_name=self.device_name, num_requests=1)
        except Exception as e:
            self.logger.error("Error While Loading"+str(self.model_name)+str(e))

    def predict(self):
        # Overridden by each subclass with its model-specific signature.
        pass

    def preprocess_output(self):
        # Overridden by each subclass with its model-specific signature.
        pass

    def preprocess_img(self, image):
        """
        Input: image
        Description: basic preprocessing steps
            1. Resize image to the model's expected input width/height
            2. Transpose to channels-first layout (HWC -> CHW)
            3. Reshape to add the batch dimension
        Return: preprocessed image; on failure the error is logged and the
        image is returned in whatever state it reached.
        """
        try:
            image = cv2.resize(image, (self.input_shape[3], self.input_shape[2]))
            image = image.transpose((2, 0, 1))
            image = image.reshape(1, *image.shape)
        except Exception as e:
            self.logger.error("Error While preprocessing Image in " + str(self.model_name) + str(e))
        return image

    def wait(self):
        '''
        Block until the async inference request finishes and return its
        status code (0 on success).
        '''
        status = self.network.requests[0].wait(-1)
        return status
--------------------------------------------------------------------------------
/src/mouse_controller.py:
--------------------------------------------------------------------------------
1 | '''
2 | This is a sample class that you can use to control the mouse pointer.
3 | It uses the pyautogui library. You can set the precision for mouse movement
4 | (how much the mouse moves) and the speed (how fast it moves) by changing
5 | precision_dict and speed_dict.
6 | Calling the move function with the x and y output of the gaze estimation model
7 | will move the pointer.
8 | This class is provided to help get you started; you can choose whether you want to use it or create your own from scratch.
9 | '''
10 | import pyautogui
11 |
12 |
class MouseController:
    """
    Moves the mouse pointer (via pyautogui) by a relative amount derived
    from the gaze estimation output. Precision scales how far the pointer
    moves; speed sets the duration of each move.
    """

    # Scale factor applied to the gaze vector (pixels per unit).
    _PRECISION = {'high': 100, 'low': 1000, 'medium': 500}
    # Duration (seconds) of each relative move.
    _SPEED = {'fast': 1, 'slow': 10, 'medium': 5}

    def __init__(self, precision, speed):
        """
        :param precision: one of 'high', 'medium', 'low'.
        :param speed: one of 'fast', 'medium', 'slow'.
        :raises ValueError: on an unrecognised precision or speed keyword
            (previously surfaced as a bare KeyError with no guidance).
        """
        if precision not in self._PRECISION:
            raise ValueError(
                "precision must be one of %s, got %r" % (sorted(self._PRECISION), precision))
        if speed not in self._SPEED:
            raise ValueError(
                "speed must be one of %s, got %r" % (sorted(self._SPEED), speed))
        # Disable pyautogui's corner failsafe so moves near screen edges
        # do not abort the program.
        pyautogui.FAILSAFE = False
        self.precision = self._PRECISION[precision]
        self.speed = self._SPEED[speed]

    def move(self, x, y):
        """Move the pointer relative to its current position.

        y is negated because screen coordinates grow downward while the
        gaze vector's y grows upward.
        """
        pyautogui.moveRel(x * self.precision, -1 * y * self.precision, duration=self.speed)
23 |
--------------------------------------------------------------------------------
/src/output_video.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bhadreshpsavani/Computer-Pointer-Controller/645d62377f7b70bb38ffb8cd5cb04e83ba48acd0/src/output_video.mp4
--------------------------------------------------------------------------------
/src/results/FP16/stats.txt:
--------------------------------------------------------------------------------
1 | 23.9
2 | 2.468619246861925
3 | 0.7770581245422363
4 |
--------------------------------------------------------------------------------
/src/results/FP32-INT8/stats.txt:
--------------------------------------------------------------------------------
1 | 24.0
2 | 2.4583333333333335
3 | 2.766681432723999
4 |
--------------------------------------------------------------------------------
/src/results/FP32/stats.txt:
--------------------------------------------------------------------------------
1 | 24.7
2 | 2.388663967611336
3 | 0.7230548858642578
4 |
--------------------------------------------------------------------------------