├── OpenDevNotebook-CPU-modified.ipynb
├── OpenDevNotebook-CPU.ipynb
├── README.md
├── demo.ipynb
├── demo_files
├── __pycache__
│ ├── handle_models.cpython-36.pyc
│ ├── inference.cpython-36.pyc
│ └── preprocess_inputs.cpython-36.pyc
├── app.py
├── handle_models.py
├── images
│ ├── blue-car.jpg
│ ├── sign.jpg
│ └── sitting-on-car.jpg
├── inference.py
├── models
│ └── ModelReadme.md.docx
├── outputs
│ └── outputReadme.md.docx
├── preprocess_inputs.py
└── test.py
├── lesson2
├── Lesson2Notebook-Clear.ipynb
├── Lesson2Notebook.ipynb
└── home
│ └── workspace
│ ├── __pycache__
│ ├── handle_models.cpython-36.pyc
│ ├── inference.cpython-36.pyc
│ └── preprocess_inputs.cpython-36.pyc
│ ├── app.py
│ ├── handle_models.py
│ ├── images
│ ├── blue-car.jpg
│ ├── sign.jpg
│ └── sitting-on-car.jpg
│ ├── inference.py
│ ├── models
│ └── intel
│ │ ├── human-pose-estimation-0001
│ │ ├── FP16
│ │ │ ├── human-pose-estimation-0001.bin
│ │ │ └── human-pose-estimation-0001.xml
│ │ ├── FP32
│ │ │ ├── human-pose-estimation-0001.bin
│ │ │ └── human-pose-estimation-0001.xml
│ │ └── INT8
│ │ │ ├── human-pose-estimation-0001.bin
│ │ │ └── human-pose-estimation-0001.xml
│ │ ├── semantic-segmentation-adas-0001
│ │ ├── FP16
│ │ │ ├── semantic-segmentation-adas-0001.bin
│ │ │ └── semantic-segmentation-adas-0001.xml
│ │ └── FP32
│ │ │ ├── semantic-segmentation-adas-0001.bin
│ │ │ └── semantic-segmentation-adas-0001.xml
│ │ ├── text-detection-0004
│ │ └── FP16
│ │ │ ├── text-detection-0004.bin
│ │ │ └── text-detection-0004.xml
│ │ └── vehicle-attributes-recognition-barrier-0039
│ │ └── INT8
│ │ ├── vehicle-attributes-recognition-barrier-0039.bin
│ │ └── vehicle-attributes-recognition-barrier-0039.xml
│ ├── outputs
│ └── CAR_META-output.png
│ ├── preprocess_inputs.py
│ └── test.py
├── lesson3
├── Lesson3Notebook_TF.ipynb
└── home
│ └── workspace
│ └── tensorflow
│ └── Readme.md
└── openvino_initialization_script.py
/OpenDevNotebook-CPU-modified.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "OpenVINO-ColabNotebook.ipynb",
7 | "provenance": [],
8 | "collapsed_sections": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | },
14 | "accelerator": "GPU"
15 | },
16 | "cells": [
17 | {
18 | "cell_type": "code",
19 | "metadata": {
20 | "id": "r3GojvB1rfxG",
21 | "colab_type": "code",
22 | "colab": {}
23 | },
24 | "source": [
25 | "#Defining the Important Paths\n",
26 | "\n",
27 | "file_name = \"l_openvino_toolkit_p_2020.1.023.tgz\" #change the filename if version does not match\n",
28 | "dir_name = file_name[:-4]\n",
29 | "install_dir = \"/opt/intel/openvino/\"\n",
30 | "deployment_tools = install_dir+\"deployment_tools/\"\n",
31 | "model_optimizer = install_dir+\"deployment_tools/model_optimizer/\"\n",
32 | "model_zoo = deployment_tools+\"open_model_zoo/\""
33 | ],
34 | "execution_count": 0,
35 | "outputs": []
36 | },
37 | {
38 | "cell_type": "code",
39 | "metadata": {
40 | "id": "rh4rY0AOQkfI",
41 | "colab_type": "code",
42 | "colab": {}
43 | },
44 | "source": [
45 | "!wget \"https://storage.googleapis.com/open_vino_public/l_openvino_toolkit_p_2020.1.023.tgz\""
46 | ],
47 | "execution_count": 0,
48 | "outputs": []
49 | },
50 | {
51 | "cell_type": "code",
52 | "metadata": {
53 | "id": "cJ9jN-B7wk-7",
54 | "colab_type": "code",
55 | "colab": {}
56 | },
57 | "source": [
58 | "!tar -xzf l_openvino_toolkit_p_2020.1.023.tgz\n",
59 | "!sudo -E $dir_name/install_openvino_dependencies.sh\n",
60 | "!sed -i 's/decline/accept/g' $dir_name/silent.cfg\n",
61 | "!sed -i 's/#INTEL_SW_IMPROVEMENT/INTEL_SW_IMPROVEMENT/g' $dir_name/silent.cfg\n",
62 | "!sudo ./$dir_name/install.sh --silent $dir_name/silent.cfg\n",
63 | "!sudo -E $install_dir/install_dependencies/install_openvino_dependencies.sh\n",
64 | "!source $install_dir/bin/setupvars.sh"
65 | ],
66 | "execution_count": 0,
67 | "outputs": []
68 | },
69 | {
70 | "cell_type": "code",
71 | "metadata": {
72 | "id": "Eys3tefjlMjc",
73 | "colab_type": "code",
74 | "colab": {}
75 | },
76 | "source": [
77 | "frameworks = ['tf','mxnet','onnx','kaldi','all']\n",
78 | "choices = dict(zip(range(1,6),frameworks))\n",
79 | "print(\"\"\"Please enter the Choice of framework you want to work with:\n",
80 | "\\n(Note: You should only install for the ones you would be using.\n",
81 | "Incase of needing to install for more than one but not all, rerun this cell and \n",
82 | "install the pre-requisites one by one.)\n",
83 | "\"\"\")\n",
84 | "for x in choices:\n",
85 | " print(x,choices[x])\n",
 86 | "choice = int(input(\"Please enter your choice: \"))\n",
87 | "print(\"Choice is\",choice,\":\",choices[choice])\n",
 88 | "if choice in choices and choice != 5:\n",
89 | " pre_install = model_optimizer + \"install_prerequisites/install_prerequisites.sh \"+choices[choice]\n",
90 | " !sudo $pre_install\n",
91 | "elif choice == 5:\n",
92 | " # for x in choices:\n",
93 | " # pre_install = model_optimizer + \"install_prerequisites/install_prerequisites.sh \"+choices[x]\n",
94 | " # !sudo $pre_install\n",
95 | " !sudo $model_optimizer/install_prerequisites/install_prerequisites.sh\n",
96 | "else:\n",
97 | " print(\"Wrong Choice! Please rerun this cell and enter the correct choice!\")"
98 | ],
99 | "execution_count": 0,
100 | "outputs": []
101 | },
102 | {
103 | "cell_type": "code",
104 | "metadata": {
105 | "id": "BB11uWn6ldNp",
106 | "colab_type": "code",
107 | "colab": {}
108 | },
109 | "source": [
110 | "!sudo $deployment_tools/demo/demo_squeezenet_download_convert_run.sh"
111 | ],
112 | "execution_count": 0,
113 | "outputs": []
114 | }
115 | ]
116 | }
--------------------------------------------------------------------------------
/OpenDevNotebook-CPU.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "OpenVINO-ColabNotebook.ipynb",
7 | "provenance": [],
8 | "collapsed_sections": []
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | }
14 | },
15 | "cells": [
16 | {
17 | "cell_type": "code",
18 | "metadata": {
19 | "id": "RGzxQmczbLQ5",
20 | "colab_type": "code",
21 | "colab": {}
22 | },
23 | "source": [
24 | "from google.colab import drive\n",
25 | "drive.mount('/content/drive')\n"
26 | ],
27 | "execution_count": 0,
28 | "outputs": []
29 | },
30 | {
31 | "cell_type": "code",
32 | "metadata": {
33 | "id": "SGboDSyXb-uM",
34 | "colab_type": "code",
35 | "colab": {}
36 | },
37 | "source": [
38 | "cd/content/drive/'My Drive'/openvino"
39 | ],
40 | "execution_count": 0,
41 | "outputs": []
42 | },
43 | {
44 | "cell_type": "code",
45 | "metadata": {
46 | "id": "NwstmCj6cBJp",
47 | "colab_type": "code",
48 | "colab": {}
49 | },
50 | "source": [
51 | "ls"
52 | ],
53 | "execution_count": 0,
54 | "outputs": []
55 | },
56 | {
57 | "cell_type": "code",
58 | "metadata": {
59 | "id": "p4ysLTWJ_IJt",
60 | "colab_type": "code",
61 | "colab": {}
62 | },
63 | "source": [
64 | "!tar -xvzf l_openvino_toolkit_p_2019.3.376.tgz"
65 | ],
66 | "execution_count": 0,
67 | "outputs": []
68 | },
69 | {
70 | "cell_type": "code",
71 | "metadata": {
72 | "id": "sdC6r3RwWR9D",
73 | "colab_type": "code",
74 | "colab": {}
75 | },
76 | "source": [
77 | "cd l_openvino_toolkit_p_2019.3.376"
78 | ],
79 | "execution_count": 0,
80 | "outputs": []
81 | },
82 | {
83 | "cell_type": "code",
84 | "metadata": {
85 | "id": "aNML7VEsjKGa",
86 | "colab_type": "code",
87 | "colab": {}
88 | },
89 | "source": [
90 | "cd /opt/intel/openvino/install_dependencies"
91 | ],
92 | "execution_count": 0,
93 | "outputs": []
94 | },
95 | {
96 | "cell_type": "code",
97 | "metadata": {
98 | "id": "_nr7s6nlXjAQ",
99 | "colab_type": "code",
100 | "colab": {}
101 | },
102 | "source": [
103 | "!sudo -E ./install_openvino_dependencies.sh\n"
104 | ],
105 | "execution_count": 0,
106 | "outputs": []
107 | },
108 | {
109 | "cell_type": "code",
110 | "metadata": {
111 | "id": "nwvSYhq7Xd9Q",
112 | "colab_type": "code",
113 | "colab": {}
114 | },
115 | "source": [
116 | "!sudo ./install.sh"
117 | ],
118 | "execution_count": 0,
119 | "outputs": []
120 | },
121 | {
122 | "cell_type": "code",
123 | "metadata": {
124 | "id": "zBB1gSb_X5lO",
125 | "colab_type": "code",
126 | "colab": {}
127 | },
128 | "source": [
129 | "!source /opt/intel/openvino/bin/setupvars.sh"
130 | ],
131 | "execution_count": 0,
132 | "outputs": []
133 | },
134 | {
135 | "cell_type": "code",
136 | "metadata": {
137 | "id": "VbRHDO9okoWk",
138 | "colab_type": "code",
139 | "colab": {}
140 | },
141 | "source": [
142 | "cd /opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites"
143 | ],
144 | "execution_count": 0,
145 | "outputs": []
146 | },
147 | {
148 | "cell_type": "code",
149 | "metadata": {
150 | "id": "7R0gf21AkuYR",
151 | "colab_type": "code",
152 | "colab": {}
153 | },
154 | "source": [
155 | "!sudo ./install_prerequisites.sh"
156 | ],
157 | "execution_count": 0,
158 | "outputs": []
159 | },
160 | {
161 | "cell_type": "code",
162 | "metadata": {
163 | "id": "osBTX5gokxXb",
164 | "colab_type": "code",
165 | "colab": {}
166 | },
167 | "source": [
168 | "cd /opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites"
169 | ],
170 | "execution_count": 0,
171 | "outputs": []
172 | },
173 | {
174 | "cell_type": "code",
175 | "metadata": {
176 | "id": "Eys3tefjlMjc",
177 | "colab_type": "code",
178 | "colab": {}
179 | },
180 | "source": [
181 | "!sudo ./install_prerequisites_caffe.sh"
182 | ],
183 | "execution_count": 0,
184 | "outputs": []
185 | },
186 | {
187 | "cell_type": "code",
188 | "metadata": {
189 | "id": "Gq8_f-gclPoK",
190 | "colab_type": "code",
191 | "colab": {}
192 | },
193 | "source": [
194 | "!sudo ./install_prerequisites_tf.sh"
195 | ],
196 | "execution_count": 0,
197 | "outputs": []
198 | },
199 | {
200 | "cell_type": "code",
201 | "metadata": {
202 | "id": "fz3w-7aDlUg8",
203 | "colab_type": "code",
204 | "colab": {}
205 | },
206 | "source": [
207 | "!sudo ./install_prerequisites_mxnet.sh"
208 | ],
209 | "execution_count": 0,
210 | "outputs": []
211 | },
212 | {
213 | "cell_type": "code",
214 | "metadata": {
215 | "id": "AhMM20iwlW_Z",
216 | "colab_type": "code",
217 | "colab": {}
218 | },
219 | "source": [
220 | "!sudo ./install_prerequisites_onnx.sh"
221 | ],
222 | "execution_count": 0,
223 | "outputs": []
224 | },
225 | {
226 | "cell_type": "code",
227 | "metadata": {
228 | "id": "EHCfY__LlZT4",
229 | "colab_type": "code",
230 | "colab": {}
231 | },
232 | "source": [
233 | "!sudo ./install_prerequisites_kaldi.sh"
234 | ],
235 | "execution_count": 0,
236 | "outputs": []
237 | },
238 | {
239 | "cell_type": "code",
240 | "metadata": {
241 | "id": "BB11uWn6ldNp",
242 | "colab_type": "code",
243 | "colab": {}
244 | },
245 | "source": [
246 | "cd /opt/intel/openvino/deployment_tools/demo"
247 | ],
248 | "execution_count": 0,
249 | "outputs": []
250 | },
251 | {
252 | "cell_type": "code",
253 | "metadata": {
254 | "id": "8CF8kREclwEv",
255 | "colab_type": "code",
256 | "colab": {}
257 | },
258 | "source": [
259 | "!sudo ./demo_squeezenet_download_convert_run.sh"
260 | ],
261 | "execution_count": 0,
262 | "outputs": []
263 | },
264 | {
265 | "cell_type": "code",
266 | "metadata": {
267 | "id": "IE-ONwrtmDK9",
268 | "colab_type": "code",
269 | "colab": {}
270 | },
271 | "source": [
272 | "!sudo ./demo_security_barrier_camera.sh"
273 | ],
274 | "execution_count": 0,
275 | "outputs": []
276 | },
277 | {
278 | "cell_type": "code",
279 | "metadata": {
280 | "id": "WdCY2RhjnX8q",
281 | "colab_type": "code",
282 | "colab": {}
283 | },
284 | "source": [
285 | ""
286 | ],
287 | "execution_count": 0,
288 | "outputs": []
289 | }
290 | ]
291 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenDevLibrary
 2 | Open Source OpenVINO Edge development and deployment on Google Colab using OpenNotebooks
3 |
4 | ## USAGE STEPS:
5 |
6 | ## **For Python Script Install**:
7 |
8 | ### Step 1:
9 | Open your Colab Notebook.
10 |
11 | ### Step 2:
12 | Execute in one cell _(Including the exclamation)_:
13 |
14 | !wget "https://storage.googleapis.com/open_vino_public/openvino_initialization_script.py"
15 |
16 | ### Step 3:
17 | Move to the next cell and execute _(Including the exclamation)_:
18 |
19 | !python openvino_initialization_script.py
20 |
21 | **Note**: You need to enter your choice of framework as the number marked against the framework in the due course of installation.
22 | Et Voila! You have Intel OpenVINO installed. Happy Inferencing!
23 |
24 |
25 | ## **For Notebook**:
26 |
27 | ### Step 1:
28 | Download OpenVINO Toolkit's ***l_openvino_toolkit_p_version.tgz*** file locally.
29 | Download link: https://software.intel.com/en-us/openvino-toolkit/choose-download/free-download-linux
30 | Alternatively, you could use the **wget** method to download the toolkit directly into the workspace and even upload it to your drive using **shutil**.
31 |
32 | ### Step 2:
33 | Create a folder in your Google Drive named openvino and upload the downloaded ***l_openvino_toolkit_p_version.tgz*** file to that folder.
34 |
35 | ### Step 3:
36 | Add the "Open in Colab" Extension to your Browser.
37 | Clone or Fork the repo and open the ***OpenDevNotebook-CPU.ipynb*** in colab. Make sure to change the *file_name* variable to suite your ***l_openvino_toolkit_p_version.tgz*** file version (copy and paste the name of the downloaded file including the .tgx extension).
38 |
39 | ### Step 4:
40 | Run all the cells till initialization of the environment to successfully install and initialize OpenVINO in the OpenDevNotebook Runtime Environment.
41 |
42 | ### Step 5:
43 | Click on the ***STAR BUTTON*** above and do Play with the OpenNotebook as you like!
44 |
45 | ## FUTURE WORK:
46 |
47 | Currently trying to render the output within the cell, if you can help, PLEASE COME FORWARD AND CONTRIBUTE!!!!
48 |
49 | ## CONTACTS:
50 |
51 | ***NAME: MUHAMMAD ALI***
52 |
53 | ***SLACK USERNAME: Muhammad.Ali***
54 |
55 | ***EMAIL1: alikasbati@outlook.com***
56 |
57 | ***EMAIL2: malirashid1994@gmail.com***
58 |
59 | ***Linkedin: https://www.linkedin.com/in/alihussainia/***
60 |
61 | ## REFERENCES:
62 | Intel's Official Installation Guide for OpenVINO on linux: https://docs.openvinotoolkit.org/latest/_docs_install_guides_installing_openvino_linux.html#install-openvino
63 |
64 |
--------------------------------------------------------------------------------
/demo_files/__pycache__/handle_models.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/__pycache__/handle_models.cpython-36.pyc
--------------------------------------------------------------------------------
/demo_files/__pycache__/inference.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/__pycache__/inference.cpython-36.pyc
--------------------------------------------------------------------------------
/demo_files/__pycache__/preprocess_inputs.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/__pycache__/preprocess_inputs.cpython-36.pyc
--------------------------------------------------------------------------------
/demo_files/app.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import cv2
3 | import numpy as np
4 |
5 | from handle_models import handle_output, preprocessing
6 | from inference import Network
7 |
8 |
# Label lists for the vehicle-attributes model outputs; the argmax index
# returned by handle_car selects an entry from each list (see
# create_output_image's CAR_META branch).
CAR_COLORS = ["white", "gray", "yellow", "red", "green", "blue", "black"]
CAR_TYPES = ["car", "bus", "truck", "van"]
11 |
12 |
def get_args():
    '''
    Gets the arguments from the command line.

    Required: -i (input image), -m (model XML), -t (model type).
    Optional: -c (CPU extension path, default None), -d (device, default "CPU").
    '''
    parser = argparse.ArgumentParser("Basic Edge App with Inference Engine")

    # Help text for every flag, keyed by flag letter.
    descriptions = {
        "c": "CPU extension file location, if applicable",
        "d": "Device, if not CPU (GPU, FPGA, MYRIAD)",
        "i": "The location of the input image",
        "m": "The location of the model XML file",
        "t": "The type of model: POSE, TEXT or CAR_META",
    }

    # Replace argparse's default groups with explicit required/optional ones.
    parser._action_groups.pop()
    required = parser.add_argument_group('required arguments')
    optional = parser.add_argument_group('optional arguments')

    # Register the flags: required first, then optionals with defaults.
    for flag in ("i", "m", "t"):
        required.add_argument("-" + flag, help=descriptions[flag], required=True)
    optional.add_argument("-c", help=descriptions["c"], default=None)
    optional.add_argument("-d", help=descriptions["d"], default="CPU")

    return parser.parse_args()
41 |
42 |
def get_mask(processed_output):
    '''
    Given a processed single-channel output map, return a 3-channel mask
    (zeros in the first and last channels, the map in the middle one) that
    can be added to the original image to highlight detections in green.
    '''
    # Empty first/last channels; detections go into the green channel.
    zeros = np.zeros(processed_output.shape)
    return np.dstack((zeros, processed_output, zeros))
54 |
55 |
def create_output_image(model_type, image, output):
    '''
    Using the model type, input image, and processed output,
    creates an output image showing the result of inference.
    '''
    if model_type == "POSE":
        # Drop the final map, which is not used for the heatmaps.
        output = output[:-1]
        # Threshold each heatmap at 0.5 confidence -> binary 255/0 mask.
        for idx in range(len(output)):
            output[idx] = np.where(output[idx] > 0.5, 255, 0)
        # Collapse all keypoint channels into one map and overlay in green.
        combined = np.sum(output, axis=0)
        return image + get_mask(combined)
    if model_type == "TEXT":
        # Threshold the text-class channel at 0.5 confidence -> 255/0.
        text_map = np.where(output[1] > 0.5, 255, 0)
        # Overlay the green text mask on the original image.
        return image + get_mask(text_map)
    if model_type == "CAR_META":
        # Map the argmax indices to their label strings.
        color = CAR_COLORS[output[0]]
        car_type = CAR_TYPES[output[1]]
        # Scale the annotation size with the image height (minimum 1).
        scaler = max(int(image.shape[0] / 1000), 1)
        # Write the color and type onto the image.
        return cv2.putText(image,
            "Color: {}, Type: {}".format(color, car_type),
            (50 * scaler, 100 * scaler), cv2.FONT_HERSHEY_SIMPLEX,
            2 * scaler, (255, 255, 255), 3 * scaler)
    print("Unknown model type, unable to create output image.")
    return image
97 |
98 |
def perform_inference(args):
    '''
    Performs inference on an input image, given a model.

    Loads the IR model named by args.m onto device args.d, preprocesses the
    image at args.i to the model's input size, runs a synchronous inference
    request, post-processes the raw output according to model type args.t,
    and writes an annotated image to outputs/<type>-output.png.
    '''
    # Create a Network for using the Inference Engine
    inference_network = Network()
    # Load the model in the network, and obtain its input shape (NCHW)
    n, c, h, w = inference_network.load_model(args.m, args.d, args.c)

    # Read the input image
    image = cv2.imread(args.i)

    # Preprocess the input image to the model's expected layout
    preprocessed_image = preprocessing(image, h, w)

    # Perform synchronous inference on the image
    inference_network.sync_inference(preprocessed_image)

    # Obtain the output of the inference request
    output = inference_network.extract_output()

    # Look up the post-processing function for this model type and apply it
    process_func = handle_output(args.t)
    processed_output = process_func(output, image.shape)

    # Create an output image based on network result. Fall back to the
    # unmodified input image if annotation fails, so the imwrite below
    # always has a defined image (the original code raised a NameError on
    # `output_image` after a failure here).
    try:
        output_image = create_output_image(args.t, image, processed_output)
        print("Success")
    except Exception:
        print("failure")
        output_image = image
    # Save down the resulting image
    cv2.imwrite("outputs/{}-output.png".format(args.t), output_image)
135 |
136 |
def main():
    '''Entry point: parse command-line arguments and run one inference.'''
    perform_inference(get_args())


if __name__ == "__main__":
    main()
144 |
--------------------------------------------------------------------------------
/demo_files/handle_models.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
4 |
def handle_pose(output, input_shape):
    '''
    Handles the output of the Pose Estimation model.
    Returns ONLY the keypoint heatmaps, and not the Part Affinity Fields.
    '''
    # The 'Mconv7_stage2_L2' blob holds the keypoint heatmaps.
    heatmaps = output['Mconv7_stage2_L2']
    # One full-resolution map per keypoint channel.
    out_heatmap = np.zeros([heatmaps.shape[1],
                            input_shape[0],
                            input_shape[1]])
    # cv2.resize takes (width, height), hence the reversed H/W slice.
    target_size = input_shape[0:2][::-1]
    # Resize each heatmap back to the size of the input image.
    for idx, heatmap in enumerate(heatmaps[0]):
        out_heatmap[idx] = cv2.resize(heatmap, target_size)
    return out_heatmap
21 |
22 |
def handle_text(output, input_shape):
    '''
    Handles the output of the Text Detection model.
    Returns ONLY the text/no text classification of each pixel,
    and not the linkage between pixels and their neighbors.
    '''
    # First blob: per-pixel text/no-text classification maps.
    text_classes = output['model/segm_logits/add']
    # cv2.resize takes (width, height), hence the reversed H/W slice.
    target_size = input_shape[0:2][::-1]
    out_text = np.empty([text_classes.shape[1],
                         input_shape[0],
                         input_shape[1]])
    # Resize each class map back to the size of the input image.
    for idx, class_map in enumerate(text_classes[0]):
        out_text[idx] = cv2.resize(class_map, target_size)
    return out_text
39 |
40 |
def handle_car(output, input_shape):
    '''
    Handles the output of the Car Metadata model.
    Returns two integers: the argmax of each softmax output.
    The first is for color, and the second for type.

    (input_shape is unused here but kept for signature parity with the
    other handle_* functions dispatched by handle_output.)
    '''
    # Flatten each softmax blob to a 1-D vector of class probabilities.
    color = output['color'].flatten()
    car_type = output['type'].flatten()

    # Argmax of the "color" output gives the predicted color class index.
    color_class = np.argmax(color)
    # Argmax of the "type" output gives the predicted type class index.
    # (The original repeated the color argmax a second time here — a
    # harmless but dead duplicate statement, now removed.)
    type_class = np.argmax(car_type)

    return color_class, type_class
58 |
59 |
def handle_output(model_type):
    '''
    Returns the related function to handle an output,
    based on the model_type being used.

    Unknown model types yield None.
    '''
    if model_type == "POSE":
        return handle_pose
    if model_type == "TEXT":
        return handle_text
    if model_type == "CAR_META":
        return handle_car
    return None
73 |
74 |
'''
The below function is carried over from the previous exercise.
You just need to call it appropriately in `app.py` to preprocess
the input image.
'''
def preprocessing(input_image, height, width):
    '''
    Given an input image, height and width:
    - Resize to width and height
    - Transpose the final "channel" dimension to be first
    - Reshape the image to add a "batch" of 1 at the start
    '''
    # Work on a copy so the caller's image is left untouched.
    frame = np.copy(input_image)
    # cv2.resize takes its size argument in (width, height) order.
    frame = cv2.resize(frame, (width, height))
    # HWC -> CHW, then prepend a batch dimension of 1.
    frame = frame.transpose((2, 0, 1))
    return frame.reshape(1, 3, height, width)
--------------------------------------------------------------------------------
/demo_files/images/blue-car.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/images/blue-car.jpg
--------------------------------------------------------------------------------
/demo_files/images/sign.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/images/sign.jpg
--------------------------------------------------------------------------------
/demo_files/images/sitting-on-car.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/images/sitting-on-car.jpg
--------------------------------------------------------------------------------
/demo_files/inference.py:
--------------------------------------------------------------------------------
1 | '''
2 | Contains code for working with the Inference Engine.
3 | You'll learn how to implement this code and more in
4 | the related lesson on the topic.
5 | '''
6 |
7 | import os
8 | import sys
9 | import logging as log
10 | from openvino.inference_engine import IENetwork, IECore
11 |
class Network:
    '''
    Load and store information for working with the Inference Engine,
    and any loaded models.

    Typical usage:
        net = Network()
        n, c, h, w = net.load_model(xml_path)
        net.sync_inference(preprocessed_image)
        outputs = net.extract_output()
    '''

    def __init__(self):
        # IECore plugin instance (created in load_model).
        self.plugin = None
        # Name of the network's input layer (set in load_model).
        self.input_blob = None
        # ExecutableNetwork returned by IECore.load_network.
        self.exec_network = None


    def load_model(self, model, device="CPU", cpu_extension=None):
        '''
        Load the model given IR files.
        Defaults to CPU as device for use in the workspace.
        Synchronous requests made within.
        '''
        # The .bin weights file is assumed to sit next to the .xml file.
        model_xml = model
        model_bin = os.path.splitext(model_xml)[0] + ".bin"

        # Initialize the plugin
        self.plugin = IECore()

        # Add a CPU extension, if applicable
        if cpu_extension and "CPU" in device:
            self.plugin.add_extension(cpu_extension, device)

        # Read the IR as a IENetwork
        network = IENetwork(model=model_xml, weights=model_bin)

        # Load the IENetwork into the plugin
        self.exec_network = self.plugin.load_network(network, device)

        # Get the input layer (only the first input name is kept, so a
        # single-input network is assumed)
        self.input_blob = next(iter(network.inputs))

        # Return the input shape (to determine preprocessing)
        return network.inputs[self.input_blob].shape


    def sync_inference(self, image):
        '''
        Makes a synchronous inference request, given an input image.

        Blocks until inference completes; results are read afterwards
        via extract_output().
        '''
        self.exec_network.infer({self.input_blob: image})
        return


    def extract_output(self):
        '''
        Returns the results for the output layers of the network,
        read from request 0 (the request used by the synchronous
        `infer` call above).
        '''
        return self.exec_network.requests[0].outputs
66 |
--------------------------------------------------------------------------------
/demo_files/models/ModelReadme.md.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/models/ModelReadme.md.docx
--------------------------------------------------------------------------------
/demo_files/outputs/outputReadme.md.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/demo_files/outputs/outputReadme.md.docx
--------------------------------------------------------------------------------
/demo_files/preprocess_inputs.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
# Shared preprocessing pipeline used by the model-specific helpers below.
def preprocessing(input_image, height, width):
    '''
    Given an input image, height and width:
    - Resize to height and width
    - Transpose the final "channel" dimension to be first
    - Reshape the image to add a "batch" of 1 at the start
    '''
    # cv2.resize takes its size argument in (width, height) order.
    resized = cv2.resize(input_image, (width, height))
    # HWC -> CHW
    channels_first = resized.transpose((2, 0, 1))
    # Prepend a batch dimension: (1, 3, H, W)
    return channels_first.reshape(1, 3, height, width)
17 |
def pose_estimation(input_image):
    '''
    Given some input image, preprocess the image so that
    it can be used with the related pose estimation model
    (256x456 HxW input).

    Delegates to `preprocessing` for the shared resize/transpose/reshape
    pipeline, which performs exactly the steps the previous inline code
    did (resize to (456, 256), HWC->CHW, add batch dim). This keeps the
    function consistent with text_detection and car_meta below.
    '''
    # Copy so the caller's array is never modified.
    preprocessed_image = np.copy(input_image)
    preprocessed_image = preprocessing(preprocessed_image, 256, 456)
    return preprocessed_image
35 |
36 |
def text_detection(input_image):
    '''
    Given some input image, preprocess the image so that
    it can be used with the related text detection model
    (768x1280 HxW input).
    '''
    # Copy so the caller's array is never modified, then run the
    # shared resize -> CHW -> batch pipeline.
    frame = np.copy(input_image)
    return preprocessing(frame, 768, 1280)
49 |
50 |
def car_meta(input_image):
    '''
    Given some input image, preprocess the image so that
    it can be used with the related car metadata model
    (72x72 input).
    '''
    # Copy so the caller's array is never modified, then run the
    # shared resize -> CHW -> batch pipeline.
    frame = np.copy(input_image)
    return preprocessing(frame, 72, 72)
63 |
--------------------------------------------------------------------------------
/demo_files/test.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
4 | from preprocess_inputs import pose_estimation, text_detection, car_meta
5 |
# Test images, loaded once at import time. Note: cv2.imread returns None
# for a missing path, so this script must run from the directory that
# contains images/.
POSE_IMAGE = cv2.imread("images/sitting-on-car.jpg")
TEXT_IMAGE = cv2.imread("images/sign.jpg")
CAR_IMAGE = cv2.imread("images/blue-car.jpg")

# Display names for each test; also the keys of solution_funcs.
test_names = ["Pose Estimation", "Text Detection", "Car Meta"]

# Hold solution functions (populated by set_solution_functions).
# NOTE(review): `global` at module level is a no-op; kept as-is.
global solution_funcs
16 |
def test_pose():
    '''Compare the student's pose-estimation preprocessing to the solution.'''
    return test(pose_estimation, test_names[0], POSE_IMAGE)
20 |
21 |
def test_text():
    '''Compare the student's text-detection preprocessing to the solution.'''
    return test(text_detection, test_names[1], TEXT_IMAGE)
25 |
26 |
def test_car():
    '''Compare the student's car-metadata preprocessing to the solution.'''
    return test(car_meta, test_names[2], CAR_IMAGE)
30 |
31 |
def test(test_func, test_name, test_image):
    '''
    Run `test_func` on `test_image` and compare the result against the
    known-good solution registered for `test_name` in solution_funcs.

    Returns True when the student's output matches the solution, False
    otherwise — including when the student's code raises. (Previously
    the exception path returned None, which made main()'s
    `test_pose() + test_text() + test_car()` tally raise a TypeError.)
    '''
    # Try the student's code first; report rather than crash if it raises.
    try:
        student_processed = test_func(test_image)
    except Exception:
        print_exception(test_name)
        return False
    # Run the solution code and compare to student example
    solution = solution_funcs[test_name](test_image)
    comparison = np.array_equal(student_processed, solution)
    print_test_result(test_name, comparison)

    return comparison
45 |
46 |
def print_exception(test_name):
    '''Explain that the student's preprocessing function raised an error.'''
    messages = (
        "Failed to run test on {}.".format(test_name),
        "The code should be valid Python and return the preprocessed image.",
    )
    for message in messages:
        print(message)
50 |
51 |
def print_test_result(test_name, result):
    '''Report pass/fail for a single named test.'''
    if not result:
        print("Failed {} test, did not obtain expected preprocessed image."
              .format(test_name))
        return
    print("Passed {} test.".format(test_name))
58 |
59 |
def feedback(tests_passed):
    '''Print a summary line and a closing message for the test run.'''
    print("You passed {} of 3 tests.".format(int(tests_passed)))
    if tests_passed == 3:
        print("Congratulations!")
        return
    print("See above for additional feedback.")
66 |
67 |
def set_solution_functions():
    """Map each test name to its reference preprocessing function."""
    global solution_funcs
    references = (pose_solution, text_solution, car_solution)
    solution_funcs = dict(zip(test_names, references))
73 |
74 |
def preprocessing(input_image, height, width):
    """Resize to (width, height), move channels first, add a batch dim of 1."""
    resized = cv2.resize(input_image, (width, height))
    # HWC -> CHW
    transposed = resized.transpose((2, 0, 1))
    return transposed.reshape(1, 3, height, width)
81 |
82 |
def pose_solution(input_image):
    """Reference preprocessing for the pose model (256x456 input)."""
    return preprocessing(input_image, 256, 456)
85 |
86 |
def text_solution(input_image):
    """Reference preprocessing for the text model (768x1280 input)."""
    return preprocessing(input_image, 768, 1280)
89 |
90 |
def car_solution(input_image):
    """Reference preprocessing for the car metadata model (72x72 input)."""
    return preprocessing(input_image, 72, 72)
93 |
94 |
def main():
    """Run all three preprocessing tests and print a summary."""
    set_solution_functions()
    results = [test_pose(), test_text(), test_car()]
    feedback(sum(results))


if __name__ == "__main__":
    main()
103 |
--------------------------------------------------------------------------------
/lesson2/Lesson2Notebook-Clear.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Lesson2Notebook.ipynb",
7 | "provenance": [],
8 | "collapsed_sections": [
9 | "BLC26D2ZeGeU",
10 | "R5MJdRk5clB_",
11 | "fRr1cc_0eiEl"
12 | ]
13 | },
14 | "kernelspec": {
15 | "name": "python3",
16 | "display_name": "Python 3"
17 | },
18 | "accelerator": "GPU"
19 | },
20 | "cells": [
21 | {
22 | "cell_type": "code",
23 | "metadata": {
24 | "id": "RGzxQmczbLQ5",
25 | "colab_type": "code",
26 | "colab": {}
27 | },
28 | "source": [
29 | "from google.colab import drive\n",
30 | "drive.mount('/content/drive')\n"
31 | ],
32 | "execution_count": 0,
33 | "outputs": []
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {
38 | "id": "BLC26D2ZeGeU",
39 | "colab_type": "text"
40 | },
41 | "source": [
42 | "# Imports"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "metadata": {
48 | "id": "_5iuIytuOlQC",
49 | "colab_type": "code",
50 | "colab": {}
51 | },
52 | "source": [
53 | "!apt update\n",
54 | "!apt install pciutils apt-file libcairo2-dev libpango1.0-dev libgtk2.0-dev\n",
55 | "!pip install imgaug==0.2.6\n",
56 | "!/usr/bin/lspci\n",
57 | "!cat /proc/cpuinfo\n",
58 | "!cat /proc/meminfo\n",
59 | "!nvidia-smi"
60 | ],
61 | "execution_count": 0,
62 | "outputs": []
63 | },
64 | {
65 | "cell_type": "code",
66 | "metadata": {
67 | "id": "D-3nucLcN0i3",
68 | "colab_type": "code",
69 | "colab": {}
70 | },
71 | "source": [
72 | "openvino_path = \"/content/drive/My Drive/openvino/l_openvino_toolkit_p_2019.3.376.tgz\" "
73 | ],
74 | "execution_count": 0,
75 | "outputs": []
76 | },
77 | {
78 | "cell_type": "code",
79 | "metadata": {
80 | "id": "p4ysLTWJ_IJt",
81 | "colab_type": "code",
82 | "colab": {}
83 | },
84 | "source": [
85 | "!tar -xvzf \"{openvino_path}\" &&\\\n",
86 | " cd l_openvino_toolkit_p* && \\\n",
87 | " ./install_openvino_dependencies.sh &&\\\n",
88 | " sed -i 's/decline/accept/g' silent.cfg && \\\n",
89 | " ./install.sh --silent silent.cfg && \\\n",
90 | " !source /opt/intel/openvino/bin/setupvars.sh &&\\\n",
91 | "!/opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites/install_prerequisites.sh \n",
92 | "!/opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites/install_prerequisites_caffe.sh \n",
93 | "!/opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites/install_prerequisites_onnx.sh \n",
94 | "!/opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites/install_prerequisites_kaldi.sh \n",
95 | "!/opt/intel/openvino/deployment_tools/model_optimizer/install_prerequisites/install_prerequisites_mxnet.sh \n",
96 | "\n",
97 | "!/opt/intel/openvino/deployment_tools/demo/demo_squeezenet_download_convert_run.sh"
98 | ],
99 | "execution_count": 0,
100 | "outputs": []
101 | },
102 | {
103 | "cell_type": "markdown",
104 | "metadata": {
105 | "id": "37h6Rxhoavak",
106 | "colab_type": "text"
107 | },
108 | "source": [
109 | "# Choosing Models and Downloading them\n",
110 | "\n",
111 | "- Running downloader.py without `--name` and `--precisions` will download all available pre-trained models. \\\n",
112 | "\n",
113 | "- The directory in which the models will be downloaded is `/home/workspace/intel/` \\\n",
114 | "\n",
        "- Use the `-o` argument to specify the output directory\n",
        "\n",
119 | "[Human Pose Estimation](https://docs.openvinotoolkit.org/latest/_models_intel_human_pose_estimation_0001_description_human_pose_estimation_0001.html)\n",
120 | "\n",
121 | "[Text Detection](http://docs.openvinotoolkit.org/latest/_models_intel_text_detection_0004_description_text_detection_0004.html)\n",
122 | "\n",
123 | "[Determining Car Type & Color](https://docs.openvinotoolkit.org/latest/_models_intel_vehicle_attributes_recognition_barrier_0039_description_vehicle_attributes_recognition_barrier_0039.html)"
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "metadata": {
129 | "id": "WdCY2RhjnX8q",
130 | "colab_type": "code",
131 | "colab": {}
132 | },
133 | "source": [
134 | "# Moving into the downloader folder \n",
135 | "cd /opt/intel/openvino/deployment_tools/open_model_zoo/tools/downloader"
136 | ],
137 | "execution_count": 0,
138 | "outputs": []
139 | },
140 | {
141 | "cell_type": "code",
142 | "metadata": {
143 | "id": "DZdUA8jeQ88P",
144 | "colab_type": "code",
145 | "colab": {}
146 | },
147 | "source": [
148 | "# downloading the Human Pose Estimation Model\n",
149 | "!sudo ./downloader.py --name human-pose-estimation-0001 -o /content/drive/'My Drive'/openvino/lesson2/home/workspace/models/"
150 | ],
151 | "execution_count": 0,
152 | "outputs": []
153 | },
154 | {
155 | "cell_type": "code",
156 | "metadata": {
157 | "id": "MxnDqjpMYPqu",
158 | "colab_type": "code",
159 | "colab": {}
160 | },
161 | "source": [
162 | "# downloading the Text Detection Model\n",
163 | "!sudo ./downloader.py --name text-detection-0004 --precisions FP16 -o /content/drive/'My Drive'/openvino/lesson2/home/workspace/models/"
164 | ],
165 | "execution_count": 0,
166 | "outputs": []
167 | },
168 | {
169 | "cell_type": "code",
170 | "metadata": {
171 | "id": "Ixn-HO7NYR88",
172 | "colab_type": "code",
173 | "colab": {}
174 | },
175 | "source": [
176 | "# downloading the Vehicle Attributes Model\n",
177 | "!sudo ./downloader.py --name vehicle-attributes-recognition-barrier-0039 --precisions INT8 -o /content/drive/'My Drive'/openvino/lesson2/home/workspace/models/"
178 | ],
179 | "execution_count": 0,
180 | "outputs": []
181 | },
182 | {
183 | "cell_type": "code",
184 | "metadata": {
185 | "id": "Gzp2sWQwYK5p",
186 | "colab_type": "code",
187 | "colab": {}
188 | },
189 | "source": [
190 | "# downloading the Semantic Segmentation Model\n",
191 | "!sudo ./downloader.py --name semantic-segmentation-adas-0001 -o /content/drive/'My Drive'/openvino/lesson2/home/workspace/models/"
192 | ],
193 | "execution_count": 0,
194 | "outputs": []
195 | },
196 | {
197 | "cell_type": "code",
198 | "metadata": {
199 | "id": "g1adN95XPQzv",
200 | "colab_type": "code",
201 | "colab": {}
202 | },
203 | "source": [
204 | "cd /content/drive/'My Drive'/openvino/lesson2/home/workspace/ "
205 | ],
206 | "execution_count": 0,
207 | "outputs": []
208 | },
209 | {
210 | "cell_type": "code",
211 | "metadata": {
212 | "id": "r1bftE52LdjK",
213 | "colab_type": "code",
214 | "colab": {}
215 | },
216 | "source": [
217 | "!source /opt/intel/openvino/bin/setupvars.sh && python test.py"
218 | ],
219 | "execution_count": 0,
220 | "outputs": []
221 | },
222 | {
223 | "cell_type": "code",
224 | "metadata": {
225 | "id": "EkhBhlcaZDyU",
226 | "colab_type": "code",
227 | "colab": {}
228 | },
229 | "source": [
230 | "!source /opt/intel/openvino/bin/setupvars.sh && python app.py -i \"images/blue-car.jpg\" -t \"CAR_META\" -m \"/content/drive/My Drive/openvino/lesson2/home/workspace/models/intel/vehicle-attributes-recognition-barrier-0039/INT8/vehicle-attributes-recognition-barrier-0039.xml\" -c \"/opt/intel/openvino/deployment_tools/inference_engine/lib/intel64/libcpu_extension_sse4.so\""
231 | ],
232 | "execution_count": 0,
233 | "outputs": []
234 | },
235 | {
236 | "cell_type": "markdown",
237 | "metadata": {
238 | "id": "fRr1cc_0eiEl",
239 | "colab_type": "text"
240 | },
241 | "source": [
242 | "# Output Code"
243 | ]
244 | },
245 | {
246 | "cell_type": "code",
247 | "metadata": {
248 | "id": "ABE_ti74dUe4",
249 | "colab_type": "code",
250 | "colab": {}
251 | },
252 | "source": [
253 | "cd /content/drive/'My Drive'/openvino/lesson2/home/workspace/outputs/"
254 | ],
255 | "execution_count": 0,
256 | "outputs": []
257 | },
258 | {
259 | "cell_type": "code",
260 | "metadata": {
261 | "id": "RdJ1RNyLSwKW",
262 | "colab_type": "code",
263 | "colab": {}
264 | },
265 | "source": [
266 | "#!curl -o car.png\n",
267 | "from google.colab.patches import cv2_imshow\n",
268 | "import cv2\n",
269 | "\n",
270 | "img = cv2.imread(\"CAR_META-output.png\", cv2.IMREAD_UNCHANGED)\n",
271 | "cv2_imshow(img)"
272 | ],
273 | "execution_count": 0,
274 | "outputs": []
275 | },
276 | {
277 | "cell_type": "code",
278 | "metadata": {
279 | "id": "LuQ9D3qATS-W",
280 | "colab_type": "code",
281 | "colab": {}
282 | },
283 | "source": [
284 | ""
285 | ],
286 | "execution_count": 0,
287 | "outputs": []
288 | }
289 | ]
290 | }
--------------------------------------------------------------------------------
/lesson2/home/workspace/__pycache__/handle_models.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/__pycache__/handle_models.cpython-36.pyc
--------------------------------------------------------------------------------
/lesson2/home/workspace/__pycache__/inference.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/__pycache__/inference.cpython-36.pyc
--------------------------------------------------------------------------------
/lesson2/home/workspace/__pycache__/preprocess_inputs.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/__pycache__/preprocess_inputs.cpython-36.pyc
--------------------------------------------------------------------------------
/lesson2/home/workspace/app.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import cv2
3 | import numpy as np
4 |
5 | from handle_models import handle_output, preprocessing
6 | from inference import Network
7 |
8 |
# Label lists indexed by the argmax indices that handle_car() returns
# (first value -> CAR_COLORS, second -> CAR_TYPES).
CAR_COLORS = ["white", "gray", "yellow", "red", "green", "blue", "black"]
CAR_TYPES = ["car", "bus", "truck", "van"]
11 |
12 |
def get_args():
    '''
    Gets the arguments from the command line.
    '''
    parser = argparse.ArgumentParser("Basic Edge App with Inference Engine")

    # -- Help text shown for each flag
    descriptions = {
        "c": "CPU extension file location, if applicable",
        "d": "Device, if not CPU (GPU, FPGA, MYRIAD)",
        "i": "The location of the input image",
        "m": "The location of the model XML file",
        "t": "The type of model: POSE, TEXT or CAR_META",
    }

    # -- Replace argparse's default groups with explicit ones
    parser._action_groups.pop()
    required = parser.add_argument_group('required arguments')
    optional = parser.add_argument_group('optional arguments')

    # -- Create the arguments
    required.add_argument("-i", help=descriptions["i"], required=True)
    required.add_argument("-m", help=descriptions["m"], required=True)
    required.add_argument("-t", help=descriptions["t"], required=True)
    optional.add_argument("-c", help=descriptions["c"], default=None)
    optional.add_argument("-d", help=descriptions["d"], default="CPU")

    return parser.parse_args()
41 |
42 |
def get_mask(processed_output):
    '''
    Given an input image size and processed output for a semantic mask,
    returns a masks able to be combined with the original image.
    '''
    # Zero arrays for the blue and red channels; detections go in green.
    blank = np.zeros(processed_output.shape)
    # Depth-stack (B, G, R) so detected pixels render as green.
    return np.dstack((blank, processed_output, blank))
54 |
55 |
def create_output_image(model_type, image, output):
    '''
    Using the model type, input image, and processed output,
    creates an output image showing the result of inference.
    '''
    if model_type == "POSE":
        # Drop the final map, which is not one of the keypoint heatmaps
        heatmaps = output[:-1]
        # Threshold each heatmap at 0.5 confidence: detected -> 255, else 0
        for idx in range(len(heatmaps)):
            heatmaps[idx] = np.where(heatmaps[idx] > 0.5, 255, 0)
        # Collapse the per-keypoint maps into one 2-D detection map
        combined = np.sum(heatmaps, axis=0)
        # Overlay a green mask where poses were detected
        return image + get_mask(combined)
    elif model_type == "TEXT":
        # Threshold the text-class map at 0.5 confidence: text -> 255
        text_map = np.where(output[1] > 0.5, 255, 0)
        # Overlay a green mask where text was detected
        return image + get_mask(text_map)
    elif model_type == "CAR_META":
        # Look up the human-readable color and car type labels
        color = CAR_COLORS[output[0]]
        car_type = CAR_TYPES[output[1]]
        # Scale the annotation by image height so it stays readable
        scaler = max(int(image.shape[0] / 1000), 1)
        # Write the color and type onto the image
        return cv2.putText(image,
                           "Color: {}, Type: {}".format(color, car_type),
                           (50 * scaler, 100 * scaler),
                           cv2.FONT_HERSHEY_SIMPLEX,
                           2 * scaler, (255, 255, 255), 3 * scaler)
    else:
        print("Unknown model type, unable to create output image.")
        return image
97 |
98 |
def perform_inference(args):
    '''
    Performs inference on an input image, given a model.

    args: parsed command-line arguments (model XML path, device, input
    image path, model type, optional CPU extension).
    '''
    # Create a Network for using the Inference Engine
    inference_network = Network()
    # Load the model in the network, and obtain its input shape
    n, c, h, w = inference_network.load_model(args.m, args.d, args.c)

    # Read the input image
    image = cv2.imread(args.i)

    # Preprocess the input image to the model's expected NCHW shape
    preprocessed_image = preprocessing(image, h, w)

    # Perform synchronous inference on the image
    inference_network.sync_inference(preprocessed_image)

    # Obtain the output of the inference request
    output = inference_network.extract_output()

    # Pick the handler matching the model type (POSE, TEXT or CAR_META)
    # and post-process the raw network output with it.
    process_func = handle_output(args.t)
    processed_output = process_func(output, image.shape)

    # Create an output image based on network
    try:
        output_image = create_output_image(args.t, image, processed_output)
        print("Success")
    except Exception:
        # Fall back to the unannotated input so the write below cannot hit
        # an unbound output_image (the old bare `except:` left it undefined
        # on failure and also swallowed KeyboardInterrupt/SystemExit).
        output_image = image
        print("failure")
    # Save down the resulting image
    cv2.imwrite("outputs/{}-output.png".format(args.t), output_image)
135 |
136 |
def main():
    """Entry point: parse the CLI arguments and run inference once."""
    perform_inference(get_args())


if __name__ == "__main__":
    main()
144 |
--------------------------------------------------------------------------------
/lesson2/home/workspace/handle_models.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
4 |
def handle_pose(output, input_shape):
    '''
    Handles the output of the Pose Estimation model.
    Returns ONLY the keypoint heatmaps, and not the Part Affinity Fields.
    '''
    # The keypoint heatmaps are in the 'Mconv7_stage2_L2' output blob.
    heatmaps = output['Mconv7_stage2_L2']
    # cv2.resize takes (width, height) -- the reverse of (rows, cols).
    target_size = input_shape[0:2][::-1]
    # One resized 2-D map per keypoint channel, at the input image size.
    out_heatmap = np.zeros([heatmaps.shape[1],
                            input_shape[0],
                            input_shape[1]])
    for idx, channel in enumerate(heatmaps[0]):
        out_heatmap[idx] = cv2.resize(channel, target_size)
    return out_heatmap
21 |
22 |
def handle_text(output, input_shape):
    '''
    Handles the output of the Text Detection model.
    Returns ONLY the text/no text classification of each pixel,
    and not the linkage between pixels and their neighbors.
    '''
    # First blob: the per-pixel text/no-text classification maps.
    text_classes = output['model/segm_logits/add']
    # cv2.resize takes (width, height) -- the reverse of (rows, cols).
    target_size = input_shape[0:2][::-1]
    out_text = np.empty([text_classes.shape[1],
                         input_shape[0],
                         input_shape[1]])
    # Resize each class map back up to the original image size.
    for idx, class_map in enumerate(text_classes[0]):
        out_text[idx] = cv2.resize(class_map, target_size)
    return out_text
39 |
40 |
def handle_car(output, input_shape):
    '''
    Handles the output of the Car Metadata model.
    Returns two integers: the argmax of each softmax output.
    The first is for color, and the second for type.
    '''
    # Flatten each softmax blob to a 1-D probability vector
    color = output['color'].flatten()
    car_type = output['type'].flatten()

    # Argmax of the "color" softmax -> index into the color label list
    color_class = np.argmax(color)
    # Argmax of the "type" softmax -> index into the type label list
    # (a duplicated recomputation of color_class was removed here)
    type_class = np.argmax(car_type)

    return color_class, type_class
58 |
59 |
def handle_output(model_type):
    '''
    Returns the related function to handle an output,
    based on the model_type being used.
    '''
    # Guard-clause dispatch; returns None for any unrecognized type.
    if model_type == "POSE":
        return handle_pose
    if model_type == "TEXT":
        return handle_text
    if model_type == "CAR_META":
        return handle_car
    return None
73 |
74 |
'''
The below function is carried over from the previous exercise.
You just need to call it appropriately in `app.py` to preprocess
the input image.
'''
def preprocessing(input_image, height, width):
    '''
    Given an input image, height and width:
    - Resize to width and height
    - Transpose the final "channel" dimension to be first
    - Reshape the image to add a "batch" of 1 at the start
    '''
    # Work on a copy so the caller's array is never modified in place.
    resized = cv2.resize(np.copy(input_image), (width, height))
    # HWC -> CHW
    transposed = resized.transpose((2, 0, 1))
    # Add the leading batch dimension expected by the network.
    return transposed.reshape(1, 3, height, width)
--------------------------------------------------------------------------------
/lesson2/home/workspace/images/blue-car.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/images/blue-car.jpg
--------------------------------------------------------------------------------
/lesson2/home/workspace/images/sign.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/images/sign.jpg
--------------------------------------------------------------------------------
/lesson2/home/workspace/images/sitting-on-car.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/images/sitting-on-car.jpg
--------------------------------------------------------------------------------
/lesson2/home/workspace/inference.py:
--------------------------------------------------------------------------------
1 | '''
2 | Contains code for working with the Inference Engine.
3 | You'll learn how to implement this code and more in
4 | the related lesson on the topic.
5 | '''
6 |
7 | import os
8 | import sys
9 | import logging as log
10 | from openvino.inference_engine import IENetwork, IECore
11 |
class Network:
    '''
    Load and store information for working with the Inference Engine,
    and any loaded models.
    '''

    def __init__(self):
        # IECore plugin handle, set in load_model()
        self.plugin = None
        # Name of the model's input layer, set in load_model()
        self.input_blob = None
        # Loaded ExecutableNetwork used for inference requests
        self.exec_network = None


    def load_model(self, model, device="CPU", cpu_extension=None):
        '''
        Load the model given IR files.
        Defaults to CPU as device for use in the workspace.
        Synchronous requests made within.

        model: path to the model's .xml IR file; the .bin weights file is
        expected alongside it with the same basename.
        Returns the input layer's shape (callers unpack it as n, c, h, w
        to drive preprocessing).
        '''
        model_xml = model
        model_bin = os.path.splitext(model_xml)[0] + ".bin"

        # Initialize the plugin
        self.plugin = IECore()

        # Add a CPU extension, if applicable
        if cpu_extension and "CPU" in device:
            self.plugin.add_extension(cpu_extension, device)

        # Read the IR as a IENetwork
        network = IENetwork(model=model_xml, weights=model_bin)

        # Load the IENetwork into the plugin
        self.exec_network = self.plugin.load_network(network, device)

        # Get the input layer (only the first input name is kept, so this
        # assumes a single-input model)
        self.input_blob = next(iter(network.inputs))

        # Return the input shape (to determine preprocessing)
        return network.inputs[self.input_blob].shape


    def sync_inference(self, image):
        '''
        Makes a synchronous inference request, given an input image.
        The image must already match the input layer's shape.
        '''
        self.exec_network.infer({self.input_blob: image})
        return


    def extract_output(self):
        '''
        Returns a list of the results for the output layer of the network.
        '''
        # Request 0 is the one used by the synchronous infer() call above;
        # `outputs` maps output-layer names to result arrays.
        return self.exec_network.requests[0].outputs
66 |
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/human-pose-estimation-0001/FP16/human-pose-estimation-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/human-pose-estimation-0001/FP16/human-pose-estimation-0001.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/human-pose-estimation-0001/FP16/human-pose-estimation-0001.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
13 |
14 |
15 |
16 |
17 | 1
18 | 3
19 | 256
20 | 456
21 |
22 |
23 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 | 1
41 | 3
42 | 256
43 | 456
44 |
45 |
46 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 | 1
63 | 32
64 | 128
65 | 228
66 |
67 |
68 |
76 |
77 |
78 |
79 |
80 |
81 | 1
82 | 32
83 | 128
84 | 228
85 |
86 |
87 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 | 1
104 | 32
105 | 128
106 | 228
107 |
108 |
109 |
117 |
118 |
119 |
120 |
121 |
122 | 1
123 | 32
124 | 128
125 | 228
126 |
127 |
128 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 | 1
145 | 64
146 | 128
147 | 228
148 |
149 |
150 |
158 |
159 |
160 |
161 |
162 |
163 | 1
164 | 64
165 | 128
166 | 228
167 |
168 |
169 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 | 1
186 | 64
187 | 64
188 | 114
189 |
190 |
191 |
199 |
200 |
201 |
202 |
203 |
204 | 1
205 | 64
206 | 64
207 | 114
208 |
209 |
210 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 | 1
227 | 128
228 | 64
229 | 114
230 |
231 |
232 |
240 |
241 |
242 |
243 |
244 |
245 | 1
246 | 128
247 | 64
248 | 114
249 |
250 |
251 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 | 1
268 | 128
269 | 64
270 | 114
271 |
272 |
273 |
281 |
282 |
283 |
284 |
285 |
286 | 1
287 | 128
288 | 64
289 | 114
290 |
291 |
292 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 | 1
309 | 128
310 | 64
311 | 114
312 |
313 |
314 |
322 |
323 |
324 |
325 |
326 |
327 | 1
328 | 128
329 | 64
330 | 114
331 |
332 |
333 |
341 |
342 |
343 |
344 |
345 |
346 |
347 |
348 |
349 | 1
350 | 128
351 | 32
352 | 57
353 |
354 |
355 |
363 |
364 |
365 |
366 |
367 |
368 | 1
369 | 128
370 | 32
371 | 57
372 |
373 |
374 |
382 |
383 |
384 |
385 |
386 |
387 |
388 |
389 |
390 | 1
391 | 256
392 | 32
393 | 57
394 |
395 |
396 |
404 |
405 |
406 |
407 |
408 |
409 | 1
410 | 256
411 | 32
412 | 57
413 |
414 |
415 |
423 |
424 |
425 |
426 |
427 |
428 |
429 |
430 |
431 | 1
432 | 256
433 | 32
434 | 57
435 |
436 |
437 |
445 |
446 |
447 |
448 |
449 |
450 | 1
451 | 256
452 | 32
453 | 57
454 |
455 |
456 |
464 |
465 |
466 |
467 |
468 |
469 |
470 |
471 |
472 | 1
473 | 256
474 | 32
475 | 57
476 |
477 |
478 |
486 |
487 |
488 |
489 |
490 |
491 | 1
492 | 256
493 | 32
494 | 57
495 |
496 |
497 |
505 |
506 |
507 |
508 |
509 |
510 |
511 |
512 |
513 | 1
514 | 256
515 | 32
516 | 57
517 |
518 |
519 |
527 |
528 |
529 |
530 |
531 |
532 | 1
533 | 256
534 | 32
535 | 57
536 |
537 |
538 |
546 |
547 |
548 |
549 |
550 |
551 |
552 |
553 |
554 | 1
555 | 512
556 | 32
557 | 57
558 |
559 |
560 |
568 |
569 |
570 |
571 |
572 |
573 | 1
574 | 512
575 | 32
576 | 57
577 |
578 |
579 |
587 |
588 |
589 |
590 |
591 |
592 |
593 |
594 |
595 | 1
596 | 512
597 | 32
598 | 57
599 |
600 |
601 |
609 |
610 |
611 |
612 |
613 |
614 | 1
615 | 512
616 | 32
617 | 57
618 |
619 |
620 |
628 |
629 |
630 |
631 |
632 |
633 |
634 |
635 |
636 | 1
637 | 512
638 | 32
639 | 57
640 |
641 |
642 |
650 |
651 |
652 |
653 |
654 |
655 | 1
656 | 512
657 | 32
658 | 57
659 |
660 |
661 |
669 |
670 |
671 |
672 |
673 |
674 |
675 |
676 |
677 | 1
678 | 512
679 | 32
680 | 57
681 |
682 |
683 |
691 |
692 |
693 |
694 |
695 |
696 | 1
697 | 512
698 | 32
699 | 57
700 |
701 |
702 |
710 |
711 |
712 |
713 |
714 |
715 |
716 |
717 |
718 | 1
719 | 512
720 | 32
721 | 57
722 |
723 |
724 |
732 |
733 |
734 |
735 |
736 |
737 | 1
738 | 512
739 | 32
740 | 57
741 |
742 |
743 |
751 |
752 |
753 |
754 |
755 |
756 |
757 |
758 |
759 | 1
760 | 512
761 | 32
762 | 57
763 |
764 |
765 |
773 |
774 |
775 |
776 |
777 |
778 | 1
779 | 512
780 | 32
781 | 57
782 |
783 |
784 |
792 |
793 |
794 |
795 |
796 |
797 |
798 |
799 |
800 | 1
801 | 512
802 | 32
803 | 57
804 |
805 |
806 |
814 |
815 |
816 |
817 |
818 |
819 | 1
820 | 512
821 | 32
822 | 57
823 |
824 |
825 |
833 |
834 |
835 |
836 |
837 |
838 |
839 |
840 |
841 | 1
842 | 512
843 | 32
844 | 57
845 |
846 |
847 |
855 |
856 |
857 |
858 |
859 |
860 | 1
861 | 512
862 | 32
863 | 57
864 |
865 |
866 |
874 |
875 |
876 |
877 |
878 |
879 |
880 |
881 |
882 | 1
883 | 512
884 | 32
885 | 57
886 |
887 |
888 |
896 |
897 |
898 |
899 |
900 |
901 | 1
902 | 512
903 | 32
904 | 57
905 |
906 |
907 |
915 |
916 |
917 |
918 |
919 |
920 |
921 |
922 |
923 | 1
924 | 512
925 | 32
926 | 57
927 |
928 |
929 |
937 |
938 |
939 |
940 |
941 |
942 | 1
943 | 512
944 | 32
945 | 57
946 |
947 |
948 |
956 |
957 |
958 |
959 |
960 |
961 |
962 |
963 |
964 | 1
965 | 512
966 | 32
967 | 57
968 |
969 |
970 |
978 |
979 |
980 |
981 |
982 |
983 | 1
984 | 512
985 | 32
986 | 57
987 |
988 |
989 |
997 |
998 |
999 |
1000 |
1001 |
1002 |
1003 |
1004 |
1005 |
1006 | 1
1007 | 128
1008 | 32
1009 | 57
1010 |
1011 |
1012 |
1020 |
1021 |
1022 |
1023 |
1024 |
1025 | 1
1026 | 128
1027 | 32
1028 | 57
1029 |
1030 |
1031 |
1039 |
1040 |
1041 |
1042 |
1043 |
1044 |
1045 |
1046 |
1047 |
1048 | 1
1049 | 128
1050 | 32
1051 | 57
1052 |
1053 |
1054 |
1062 |
1063 |
1064 |
1065 |
1066 |
1067 | 1
1068 | 128
1069 | 32
1070 | 57
1071 |
1072 |
1073 |
1081 |
1082 |
1083 |
1084 |
1085 |
1086 |
1087 |
1088 |
1089 | 1
1090 | 128
1091 | 32
1092 | 57
1093 |
1094 |
1095 |
1103 |
1104 |
1105 |
1106 |
1107 |
1108 | 1
1109 | 128
1110 | 32
1111 | 57
1112 |
1113 |
1114 |
1122 |
1123 |
1124 |
1125 |
1126 |
1127 |
1128 |
1129 |
1130 |
1131 | 1
1132 | 128
1133 | 32
1134 | 57
1135 |
1136 |
1137 |
1145 |
1146 |
1147 |
1148 |
1149 |
1150 | 1
1151 | 128
1152 | 32
1153 | 57
1154 |
1155 |
1156 |
1164 |
1165 |
1166 |
1167 |
1168 |
1169 |
1170 |
1171 |
1172 | 1
1173 | 128
1174 | 32
1175 | 57
1176 |
1177 |
1178 |
1186 |
1187 |
1188 |
1189 |
1190 |
1191 | 1
1192 | 128
1193 | 32
1194 | 57
1195 |
1196 |
1197 |
1205 |
1206 |
1207 |
1208 |
1209 |
1210 |
1211 |
1212 |
1213 |
1214 | 1
1215 | 128
1216 | 32
1217 | 57
1218 |
1219 |
1220 |
1228 |
1229 |
1230 |
1231 |
1232 |
1233 | 1
1234 | 128
1235 | 32
1236 | 57
1237 |
1238 |
1239 |
1247 |
1248 |
1249 |
1250 |
1251 |
1252 |
1253 |
1254 |
1255 | 1
1256 | 128
1257 | 32
1258 | 57
1259 |
1260 |
1261 |
1269 |
1270 |
1271 |
1272 |
1273 |
1274 | 1
1275 | 128
1276 | 32
1277 | 57
1278 |
1279 |
1280 |
1288 |
1289 |
1290 |
1291 |
1292 |
1293 |
1294 |
1295 |
1296 |
1297 | 1
1298 | 128
1299 | 32
1300 | 57
1301 |
1302 |
1303 |
1311 |
1312 |
1313 |
1314 |
1315 |
1316 | 1
1317 | 128
1318 | 32
1319 | 57
1320 |
1321 |
1322 | 1
1323 | 128
1324 | 32
1325 | 57
1326 |
1327 |
1328 |
1336 |
1337 |
1338 |
1339 |
1340 |
1341 | 1
1342 | 128
1343 | 32
1344 | 57
1345 |
1346 |
1347 |
1355 |
1356 |
1357 |
1358 |
1359 |
1360 |
1361 |
1362 |
1363 | 1
1364 | 128
1365 | 32
1366 | 57
1367 |
1368 |
1369 |
1377 |
1378 |
1379 |
1380 |
1381 |
1382 | 1
1383 | 128
1384 | 32
1385 | 57
1386 |
1387 |
1388 |
1396 |
1397 |
1398 |
1399 |
1400 |
1401 |
1402 |
1403 |
1404 | 1
1405 | 128
1406 | 32
1407 | 57
1408 |
1409 |
1410 |
1418 |
1419 |
1420 |
1421 |
1422 |
1423 | 1
1424 | 128
1425 | 32
1426 | 57
1427 |
1428 |
1429 |
1437 |
1438 |
1439 |
1440 |
1441 |
1442 |
1443 |
1444 |
1445 | 1
1446 | 128
1447 | 32
1448 | 57
1449 |
1450 |
1451 |
1459 |
1460 |
1461 |
1462 |
1463 |
1464 | 1
1465 | 128
1466 | 32
1467 | 57
1468 |
1469 |
1470 |
1478 |
1479 |
1480 |
1481 |
1482 |
1483 |
1484 |
1485 |
1486 | 1
1487 | 128
1488 | 32
1489 | 57
1490 |
1491 |
1492 |
1500 |
1501 |
1502 |
1503 |
1504 |
1505 | 1
1506 | 128
1507 | 32
1508 | 57
1509 |
1510 |
1511 |
1519 |
1520 |
1521 |
1522 |
1523 |
1524 |
1525 |
1526 |
1527 | 1
1528 | 512
1529 | 32
1530 | 57
1531 |
1532 |
1533 |
1541 |
1542 |
1543 |
1544 |
1545 |
1546 | 1
1547 | 512
1548 | 32
1549 | 57
1550 |
1551 |
1552 |
1560 |
1561 |
1562 |
1563 |
1564 |
1565 |
1566 |
1567 |
1568 |
1569 | 1
1570 | 128
1571 | 32
1572 | 57
1573 |
1574 |
1575 |
1583 |
1584 |
1585 |
1586 |
1587 |
1588 |
1589 |
1590 |
1591 | 1
1592 | 512
1593 | 32
1594 | 57
1595 |
1596 |
1597 |
1605 |
1606 |
1607 |
1608 |
1609 |
1610 | 1
1611 | 512
1612 | 32
1613 | 57
1614 |
1615 |
1616 |
1624 |
1625 |
1626 |
1627 |
1628 |
1629 |
1630 |
1631 |
1632 |
1633 | 1
1634 | 38
1635 | 32
1636 | 57
1637 |
1638 |
1639 | 1
1640 | 19
1641 | 32
1642 | 57
1643 |
1644 |
1645 | 1
1646 | 128
1647 | 32
1648 | 57
1649 |
1650 |
1651 |
1659 |
1660 |
1661 |
1662 |
1663 |
1664 | 1
1665 | 185
1666 | 32
1667 | 57
1668 |
1669 |
1670 |
1678 |
1679 |
1680 |
1681 |
1682 |
1683 |
1684 |
1685 |
1686 | 1
1687 | 128
1688 | 32
1689 | 57
1690 |
1691 |
1692 |
1700 |
1701 |
1702 |
1703 |
1704 |
1705 | 1
1706 | 128
1707 | 32
1708 | 57
1709 |
1710 |
1711 |
1719 |
1720 |
1721 |
1722 |
1723 |
1724 |
1725 |
1726 |
1727 | 1
1728 | 128
1729 | 32
1730 | 57
1731 |
1732 |
1733 |
1741 |
1742 |
1743 |
1744 |
1745 |
1746 | 1
1747 | 128
1748 | 32
1749 | 57
1750 |
1751 |
1752 |
1760 |
1761 |
1762 |
1763 |
1764 |
1765 |
1766 |
1767 |
1768 | 1
1769 | 128
1770 | 32
1771 | 57
1772 |
1773 |
1774 |
1782 |
1783 |
1784 |
1785 |
1786 |
1787 | 1
1788 | 128
1789 | 32
1790 | 57
1791 |
1792 |
1793 | 1
1794 | 128
1795 | 32
1796 | 57
1797 |
1798 |
1799 |
1807 |
1808 |
1809 |
1810 |
1811 |
1812 | 1
1813 | 128
1814 | 32
1815 | 57
1816 |
1817 |
1818 |
1826 |
1827 |
1828 |
1829 |
1830 |
1831 |
1832 |
1833 |
1834 | 1
1835 | 128
1836 | 32
1837 | 57
1838 |
1839 |
1840 |
1848 |
1849 |
1850 |
1851 |
1852 |
1853 | 1
1854 | 128
1855 | 32
1856 | 57
1857 |
1858 |
1859 |
1867 |
1868 |
1869 |
1870 |
1871 |
1872 |
1873 |
1874 |
1875 | 1
1876 | 128
1877 | 32
1878 | 57
1879 |
1880 |
1881 |
1889 |
1890 |
1891 |
1892 |
1893 |
1894 | 1
1895 | 128
1896 | 32
1897 | 57
1898 |
1899 |
1900 |
1908 |
1909 |
1910 |
1911 |
1912 |
1913 |
1914 |
1915 |
1916 | 1
1917 | 128
1918 | 32
1919 | 57
1920 |
1921 |
1922 |
1930 |
1931 |
1932 |
1933 |
1934 |
1935 | 1
1936 | 128
1937 | 32
1938 | 57
1939 |
1940 |
1941 | 1
1942 | 128
1943 | 32
1944 | 57
1945 |
1946 |
1947 |
1955 |
1956 |
1957 |
1958 |
1959 |
1960 | 1
1961 | 128
1962 | 32
1963 | 57
1964 |
1965 |
1966 |
1974 |
1975 |
1976 |
1977 |
1978 |
1979 |
1980 |
1981 |
1982 | 1
1983 | 128
1984 | 32
1985 | 57
1986 |
1987 |
1988 |
1996 |
1997 |
1998 |
1999 |
2000 |
2001 | 1
2002 | 128
2003 | 32
2004 | 57
2005 |
2006 |
2007 |
2015 |
2016 |
2017 |
2018 |
2019 |
2020 |
2021 |
2022 |
2023 | 1
2024 | 128
2025 | 32
2026 | 57
2027 |
2028 |
2029 |
2037 |
2038 |
2039 |
2040 |
2041 |
2042 | 1
2043 | 128
2044 | 32
2045 | 57
2046 |
2047 |
2048 |
2056 |
2057 |
2058 |
2059 |
2060 |
2061 |
2062 |
2063 |
2064 | 1
2065 | 128
2066 | 32
2067 | 57
2068 |
2069 |
2070 |
2078 |
2079 |
2080 |
2081 |
2082 |
2083 | 1
2084 | 128
2085 | 32
2086 | 57
2087 |
2088 |
2089 | 1
2090 | 128
2091 | 32
2092 | 57
2093 |
2094 |
2095 |
2103 |
2104 |
2105 |
2106 |
2107 |
2108 | 1
2109 | 128
2110 | 32
2111 | 57
2112 |
2113 |
2114 |
2122 |
2123 |
2124 |
2125 |
2126 |
2127 |
2128 |
2129 |
2130 | 1
2131 | 128
2132 | 32
2133 | 57
2134 |
2135 |
2136 |
2144 |
2145 |
2146 |
2147 |
2148 |
2149 | 1
2150 | 128
2151 | 32
2152 | 57
2153 |
2154 |
2155 |
2163 |
2164 |
2165 |
2166 |
2167 |
2168 |
2169 |
2170 |
2171 | 1
2172 | 128
2173 | 32
2174 | 57
2175 |
2176 |
2177 |
2185 |
2186 |
2187 |
2188 |
2189 |
2190 | 1
2191 | 128
2192 | 32
2193 | 57
2194 |
2195 |
2196 |
2204 |
2205 |
2206 |
2207 |
2208 |
2209 |
2210 |
2211 |
2212 | 1
2213 | 128
2214 | 32
2215 | 57
2216 |
2217 |
2218 |
2226 |
2227 |
2228 |
2229 |
2230 |
2231 | 1
2232 | 128
2233 | 32
2234 | 57
2235 |
2236 |
2237 | 1
2238 | 128
2239 | 32
2240 | 57
2241 |
2242 |
2243 |
2251 |
2252 |
2253 |
2254 |
2255 |
2256 | 1
2257 | 128
2258 | 32
2259 | 57
2260 |
2261 |
2262 |
2270 |
2271 |
2272 |
2273 |
2274 |
2275 |
2276 |
2277 |
2278 | 1
2279 | 128
2280 | 32
2281 | 57
2282 |
2283 |
2284 |
2292 |
2293 |
2294 |
2295 |
2296 |
2297 | 1
2298 | 128
2299 | 32
2300 | 57
2301 |
2302 |
2303 |
2311 |
2312 |
2313 |
2314 |
2315 |
2316 |
2317 |
2318 |
2319 | 1
2320 | 128
2321 | 32
2322 | 57
2323 |
2324 |
2325 |
2333 |
2334 |
2335 |
2336 |
2337 |
2338 | 1
2339 | 128
2340 | 32
2341 | 57
2342 |
2343 |
2344 |
2352 |
2353 |
2354 |
2355 |
2356 |
2357 |
2358 |
2359 |
2360 | 1
2361 | 128
2362 | 32
2363 | 57
2364 |
2365 |
2366 |
2374 |
2375 |
2376 |
2377 |
2378 |
2379 | 1
2380 | 128
2381 | 32
2382 | 57
2383 |
2384 |
2385 | 1
2386 | 128
2387 | 32
2388 | 57
2389 |
2390 |
2391 |
2399 |
2400 |
2401 |
2402 |
2403 |
2404 | 1
2405 | 128
2406 | 32
2407 | 57
2408 |
2409 |
2410 |
2418 |
2419 |
2420 |
2421 |
2422 |
2423 |
2424 |
2425 |
2426 | 1
2427 | 128
2428 | 32
2429 | 57
2430 |
2431 |
2432 |
2440 |
2441 |
2442 |
2443 |
2444 |
2445 | 1
2446 | 128
2447 | 32
2448 | 57
2449 |
2450 |
2451 |
2459 |
2460 |
2461 |
2462 |
2463 |
2464 |
2465 |
2466 |
2467 |
2468 | 1
2469 | 128
2470 | 32
2471 | 57
2472 |
2473 |
2474 |
2482 |
2483 |
2484 |
2485 |
2486 |
2487 |
2488 |
2489 |
2490 | 1
2491 | 128
2492 | 32
2493 | 57
2494 |
2495 |
2496 |
2504 |
2505 |
2506 |
2507 |
2508 |
2509 | 1
2510 | 128
2511 | 32
2512 | 57
2513 |
2514 |
2515 |
2523 |
2524 |
2525 |
2526 |
2527 |
2528 |
2529 |
2530 |
2531 |
2532 |
2533 |
2534 |
2535 |
2536 |
2537 |
2538 |
2539 |
2540 |
2541 |
2542 |
2543 |
2544 |
2545 |
2546 |
2547 |
2548 |
2549 |
2550 |
2551 |
2552 |
2553 |
2554 |
2555 |
2556 |
2557 |
2558 |
2559 |
2560 |
2561 |
2562 |
2563 |
2564 |
2565 |
2566 |
2567 |
2568 |
2569 |
2570 |
2571 |
2572 |
2573 |
2574 |
2575 |
2576 |
2577 |
2578 |
2579 |
2580 |
2581 |
2582 |
2583 |
2584 |
2585 |
2586 |
2587 |
2588 |
2589 |
2590 |
2591 |
2592 |
2593 |
2594 |
2595 |
2596 |
2597 |
2598 |
2599 |
2600 |
2601 |
2602 |
2603 |
2604 |
2605 |
2606 |
2607 |
2608 |
2609 |
2610 |
2611 |
2612 |
2613 |
2614 |
2615 |
2616 |
2617 |
2618 |
2619 |
2620 |
2621 |
2622 |
2623 |
2624 |
2625 |
2626 |
2627 |
2628 |
2629 |
2630 |
2631 |
2632 |
2633 |
2634 |
2635 |
2636 |
2637 |
2638 |
2639 |
2640 |
2641 |
2642 |
2643 |
2644 |
2645 |
2646 |
2647 |
2648 |
2649 |
2650 |
2651 |
2652 |
2653 |
2654 |
2655 |
2656 |
2657 |
2658 |
2659 |
2660 |
2661 |
2662 |
2663 |
2664 |
2665 |
2666 |
2667 |
2668 |
2669 |
2670 |
2671 |
2672 |
2673 |
2674 |
2675 |
2676 |
2677 |
2678 |
2679 |
2680 |
2681 |
2682 |
2683 |
2684 |
2685 |
2686 |
2687 |
2688 |
2689 |
2690 |
2691 |
2692 |
2693 |
2694 |
2695 |
2696 |
2697 |
2698 |
2699 |
2700 |
2701 |
2702 |
2703 |
2704 |
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/human-pose-estimation-0001/FP32/human-pose-estimation-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/human-pose-estimation-0001/FP32/human-pose-estimation-0001.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/human-pose-estimation-0001/INT8/human-pose-estimation-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/human-pose-estimation-0001/INT8/human-pose-estimation-0001.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/semantic-segmentation-adas-0001/FP16/semantic-segmentation-adas-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/semantic-segmentation-adas-0001/FP16/semantic-segmentation-adas-0001.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/semantic-segmentation-adas-0001/FP32/semantic-segmentation-adas-0001.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/semantic-segmentation-adas-0001/FP32/semantic-segmentation-adas-0001.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/text-detection-0004/FP16/text-detection-0004.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/text-detection-0004/FP16/text-detection-0004.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/vehicle-attributes-recognition-barrier-0039/INT8/vehicle-attributes-recognition-barrier-0039.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/models/intel/vehicle-attributes-recognition-barrier-0039/INT8/vehicle-attributes-recognition-barrier-0039.bin
--------------------------------------------------------------------------------
/lesson2/home/workspace/models/intel/vehicle-attributes-recognition-barrier-0039/INT8/vehicle-attributes-recognition-barrier-0039.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
13 |
14 |
15 |
16 |
17 |
18 | 1
19 | 3
20 | 72
21 | 72
22 |
23 |
24 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 | 1
42 | 3
43 | 72
44 | 72
45 |
46 |
47 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 | 1
64 | 64
65 | 36
66 | 36
67 |
68 |
69 |
77 |
78 |
79 |
80 |
81 |
82 | 1
83 | 64
84 | 36
85 | 36
86 |
87 |
88 |
96 |
97 |
98 |
99 |
100 |
101 | 1
102 | 64
103 | 18
104 | 18
105 |
106 |
107 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 | 1
124 | 64
125 | 18
126 | 18
127 |
128 |
129 |
137 |
138 |
139 |
140 |
141 |
142 | 1
143 | 64
144 | 18
145 | 18
146 |
147 |
148 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 | 1
165 | 64
166 | 18
167 | 18
168 |
169 |
170 | 1
171 | 64
172 | 18
173 | 18
174 |
175 |
176 |
184 |
185 |
186 |
187 |
188 |
189 | 1
190 | 64
191 | 18
192 | 18
193 |
194 |
195 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 | 1
212 | 64
213 | 18
214 | 18
215 |
216 |
217 |
225 |
226 |
227 |
228 |
229 |
230 | 1
231 | 64
232 | 18
233 | 18
234 |
235 |
236 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 | 1
253 | 64
254 | 18
255 | 18
256 |
257 |
258 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 | 1
275 | 128
276 | 9
277 | 9
278 |
279 |
280 |
288 |
289 |
290 |
291 |
292 |
293 | 1
294 | 128
295 | 9
296 | 9
297 |
298 |
299 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 | 1
316 | 128
317 | 9
318 | 9
319 |
320 |
321 | 1
322 | 128
323 | 9
324 | 9
325 |
326 |
327 |
335 |
336 |
337 |
338 |
339 |
340 | 1
341 | 128
342 | 9
343 | 9
344 |
345 |
346 |
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 | 1
363 | 128
364 | 9
365 | 9
366 |
367 |
368 |
376 |
377 |
378 |
379 |
380 |
381 | 1
382 | 128
383 | 9
384 | 9
385 |
386 |
387 |
395 |
396 |
397 |
398 |
399 |
400 |
401 |
402 |
403 | 1
404 | 128
405 | 9
406 | 9
407 |
408 |
409 |
417 |
418 |
419 |
420 |
421 |
422 |
423 |
424 |
425 | 1
426 | 128
427 | 5
428 | 5
429 |
430 |
431 |
439 |
440 |
441 |
442 |
443 |
444 | 1
445 | 128
446 | 5
447 | 5
448 |
449 |
450 |
458 |
459 |
460 |
461 |
462 |
463 |
464 |
465 |
466 | 1
467 | 128
468 | 5
469 | 5
470 |
471 |
472 | 1
473 | 128
474 | 5
475 | 5
476 |
477 |
478 |
486 |
487 |
488 |
489 |
490 |
491 | 1
492 | 128
493 | 5
494 | 5
495 |
496 |
497 |
505 |
506 |
507 |
508 |
509 |
510 |
511 |
512 |
513 | 1
514 | 4
515 | 5
516 | 5
517 |
518 |
519 |
527 |
528 |
529 |
530 |
531 |
532 | 1
533 | 4
534 | 5
535 | 5
536 |
537 |
538 |
546 |
547 |
548 |
549 |
550 |
551 | 1
552 | 4
553 | 1
554 | 1
555 |
556 |
557 |
565 |
566 |
567 |
568 |
569 |
570 | 1
571 | 128
572 | 5
573 | 5
574 |
575 |
576 |
584 |
585 |
586 |
587 |
588 |
589 |
590 |
591 |
592 | 1
593 | 7
594 | 5
595 | 5
596 |
597 |
598 |
606 |
607 |
608 |
609 |
610 |
611 | 1
612 | 7
613 | 5
614 | 5
615 |
616 |
617 |
625 |
626 |
627 |
628 |
629 |
630 | 1
631 | 7
632 | 1
633 | 1
634 |
635 |
636 |
644 |
645 |
646 |
647 |
648 |
649 |
650 |
651 |
652 |
653 |
654 |
655 |
656 |
657 |
658 |
659 |
660 |
661 |
662 |
663 |
664 |
665 |
666 |
667 |
668 |
669 |
670 |
671 |
672 |
673 |
674 |
675 |
676 |
677 |
678 |
679 |
680 |
681 |
682 |
683 | Convolution1
684 | -0.000005, -0.468171, -0.558317, -0.585554, -0.007512, -1.013561, -0.416088, -0.599785, -0.232968, -0.000033, -1.095625, -0.000000, -0.000012, -0.017073, -0.862631, -0.000010, -0.542955, -0.529014, -0.624248, -0.833468, -0.859177, -1.056603, -0.562168, -0.815792, -0.000373, -0.370141, -1.321340, -0.000039, -0.067131, -0.771320, -1.366814, -0.326049, -0.908061, -0.000363, -0.561713, -0.216276, -0.637627, -0.587495, -0.000000, -0.272132, -0.596152, -0.720286, -0.510997, -0.861147, -0.497307, -0.988673, -0.432323, -0.383492, -1.143091, -0.749996, -0.448895, -0.000000, -0.000000, -0.632388, -0.770533, -0.324277, -0.673190, -0.000000, -0.578882, -0.311767, -0.238961, -0.169257, -0.727515, -1.374047
685 | -0.000002, 0.497820, 0.832506, 0.346552, -0.000189, 0.203427, 0.714980, 0.501595, 0.467396, -0.000026, 1.200957, -0.000000, -0.000006, -0.002029, 0.898271, -0.000005, 0.766077, 0.533865, 0.872860, 0.826616, 0.752538, 1.163071, 0.659017, 0.915900, -0.000156, 0.508371, 0.465510, -0.000006, 0.415144, 0.486053, 0.630878, 0.672043, 0.693676, -0.000055, 0.765972, 0.267683, 0.656781, 0.560202, -0.000000, 0.572641, 0.644302, 0.777188, 0.545727, 0.547142, 0.516240, 1.077425, 0.504107, 0.453935, 0.264638, 0.502588, 0.426357, -0.000000, -0.000000, 0.738134, 0.467730, 0.445617, 0.793476, -0.000000, 0.659369, 0.358823, 0.305420, 0.878953, 0.503664, 0.529017
686 |
687 |
688 | Convolution2
689 | -1.098502, -1.652673, -1.292799, -0.695292, -0.771664, -0.704992, -1.173826, -1.254422, -0.853285, -1.627373, -3.257172, -2.543629, -2.684237, -3.094170, -3.640336, -1.167094, -2.777050, -1.166496, -1.218535, -2.481499, -0.461570, -1.602698, -0.952713, -2.757544, -3.438644, -1.187139, -1.791467, -2.665778, -3.466931, -1.336845, -1.921759, -0.806761, -0.710611, -3.418076, -1.694722, -1.274505, -2.132475, -2.448902, -2.044989, -1.861555, -3.604242, -1.408006, -2.197599, -2.168938, -1.872699, -0.531415, -2.961770, -3.565571, -0.596931, -1.640053, -1.817287, -1.284543, -1.882612, -1.407553, -1.422778, -2.005674, -2.631362, -2.576587, -2.504231, -1.498285, -1.126524, -0.709283, -3.886657, -1.442050
690 | 1.149957, 1.799291, 1.168950, 0.523522, 1.063294, 1.542535, 1.123924, 0.657397, 0.895590, 0.998299, 1.200789, 1.382979, 1.868789, 1.241208, 1.036975, 1.490843, 1.424365, 0.645173, 1.358467, 1.134480, 0.711281, 1.521438, 1.241640, 1.085441, 0.800033, 1.426338, 1.078513, 1.577697, 1.401667, 1.336875, 1.395761, 1.016825, 0.990026, 0.970445, 0.767162, 1.027512, 1.287511, 1.436469, 1.009385, 1.295796, 1.123551, 1.040126, 1.322944, 1.631079, 1.473608, 1.312555, 0.995855, 0.958212, 0.210745, 1.826255, 1.314905, 1.115719, 1.441668, 0.800977, 1.166564, 1.930096, 1.393550, 1.436534, 1.496468, 1.576848, 1.158102, 0.676285, 1.170441, 0.785163
691 |
692 |
693 | Convolution3
694 | -0.017028, -1.487755, -1.673704, -1.655671, -1.524418, -1.203917, -0.703411, -1.504434, -1.494970, -1.276353, -2.029468, -1.685464, -1.072019, -1.407206, -2.019669, -1.762276, -1.983794, -1.267916, -2.427867, -1.692326, -1.258807, -2.007200, -1.597326, -1.716579, -1.542513, -1.846280, -0.995210, -1.470429, -0.737458, -1.539257, -1.350720, -1.060818, -1.766542, -0.878640, -1.315096, -0.960051, -2.159239, -1.105813, -1.222792, -1.168237, -1.471085, -1.496082, -1.521773, -0.357035, -1.904147, -1.841341, -1.003195, -1.105668, -1.165461, -1.330070, -1.094196, -1.276935, -1.054696, -1.498539, -1.753322, -1.198443, -1.223009, -1.247669, -2.315083, -0.706449, -0.739838, -0.683405, -2.504110, -0.846214
695 | 1.774242, 0.554845, 0.604507, 0.564758, 0.281967, 0.767644, 0.878304, 0.138305, 0.764049, 0.954617, 0.727941, 1.227051, 0.536636, 1.091346, 0.412984, 0.672788, 0.622063, 0.322679, 0.998433, 0.701887, 0.981225, 0.148793, 0.600544, 0.317761, 0.785881, 0.448717, 0.607089, 1.082354, 0.605373, 0.484771, 0.443209, 1.097619, 0.717130, 1.137769, 0.273552, 0.408541, 0.710548, 0.785086, 0.641267, 0.596856, 0.782577, 0.629360, 1.073299, 1.059476, 0.602181, 0.435138, 0.667813, 0.469326, 1.021919, 0.567641, 0.479659, 0.645271, 0.974916, 0.137630, 0.693514, 0.566281, 0.920627, 0.461210, 0.906541, 0.563406, 0.595676, 0.557083, 0.844296, 0.629574
696 |
697 |
698 | Convolution4
699 | -1.163678, -0.988461, -0.878521, -0.960230, -1.013843, -1.095626, -0.850668, -0.973326, -1.216645, -1.027506, -0.576591, -1.088623, -1.447615, -1.055645, -1.049181, -1.069357, -1.191690, -0.987179, -0.893215, -0.791418, -1.027495, -1.046975, -1.067589, -1.022343, -0.585755, -1.020022, -0.856144, -1.022344, -1.325944, -1.053216, -0.854086, -1.106521, -1.180389, -1.130143, -1.068603, -1.377978, -1.171535, -1.157455, -1.105008, -1.244415, -1.103494, -0.927235, -0.590122, -1.051891, -1.067084, -0.836372, -1.089619, -1.036393, -1.269433, -0.923484, -1.022650, -0.973861, -1.109921, -1.262019, -1.242923, -1.050880, -0.876912, -1.256205, -1.126109, -1.088141, -1.018697, -1.339949, -0.935818, -1.052249, -0.866647, -1.229956, -1.061465, -0.902857, -1.409805, -1.562131, -0.948189, -0.870440, -1.479956, -1.119729, -1.010025, -1.240378, -1.115050, -1.059754, -1.278100, -1.088905, -1.032416, -1.376128, -0.602091, -0.949607, -1.066123, -1.526357, -0.123372, -1.149673, -1.092868, -1.129006, -0.963926, -1.255021, -1.188450, -1.434923, -1.246647, -1.094667, -0.997965, -0.891948, -0.988816, -0.887082, -1.202354, -0.834385, -0.895983, -1.207621, -0.980702, -1.183622, -1.062238, -1.058265, -1.086310, -1.225348, -1.053063, -1.064059, -1.099063, -1.008461, -1.034848, -1.260657, -0.748438, -1.571788, -1.218363, -1.192959, -1.269116, -0.890249, -1.024153, -1.157374, -1.372261, -1.045582, -1.236042, -0.910938
700 | 0.730971, 0.871024, 0.847801, 0.659128, 0.819444, 0.660748, 0.643506, 0.667783, 0.931378, 0.659541, 0.648733, 0.867783, 0.843434, 0.702816, 0.703523, 0.527665, 0.707799, 0.759652, 0.634136, 0.721965, 0.650413, 0.600414, 0.794543, 0.736906, 0.472175, 0.642349, 0.475547, 0.667468, 0.578054, 0.683662, 0.725845, 0.640342, 0.666344, 0.954672, 0.936463, 0.968798, 0.931694, 0.789229, 0.784703, 0.698049, 0.764269, 0.574454, 0.569822, 0.918085, 0.649320, 0.475810, 0.629136, 0.645257, 0.997615, 1.041137, 0.992302, 0.820052, 0.572717, 0.941744, 0.729752, 0.858700, 0.481110, 1.074935, 0.696718, 0.673904, 0.738104, 0.700115, 0.869723, 0.657767, 0.557694, 0.774682, 0.847884, 0.699341, 0.746636, 0.713455, 0.587029, 0.531899, 0.839265, 0.664002, 0.581665, 0.734524, 0.754002, 0.631387, 0.782777, 0.795419, 0.597503, 0.654458, 0.576333, 0.468061, 0.639559, 0.681668, 0.145221, 0.999873, 0.673812, 0.614949, 0.609713, 0.910629, 0.724051, 0.686831, 0.669095, 0.667599, 0.631204, 0.732421, 0.700751, 0.429144, 0.881050, 0.451574, 0.619027, 0.905507, 0.572302, 0.754130, 0.667575, 0.728752, 0.809867, 0.746696, 0.686627, 0.892574, 0.529282, 0.425024, 0.554133, 0.722652, 0.737739, 0.784869, 0.787256, 0.698479, 0.580135, 0.485946, 0.692269, 0.716703, 0.665321, 0.675131, 0.777950, 0.389443
701 |
702 |
703 | Convolution5
704 | -0.439761, -0.563615, -0.595067, -0.468977, -0.471060, -0.566677, -0.497854, -0.394944, -0.519025, -0.423309, -0.494160, -0.422519, -0.418033, -0.370335, -0.620912, -0.507665, -0.585451, -0.463025, -0.332019, -0.408215, -0.503401, -0.405785, -0.356937, -0.274942, -0.610285, -0.592163, -0.480395, -0.568840, -0.342714, -0.363488, -0.442843, -0.321784, -0.448547, -0.407210, -0.352030, -0.310995, -0.486805, -0.437887, -0.456474, -0.471811, -0.654817, -0.411785, -0.445923, -0.630226, -0.468735, -0.608612, -0.447653, -0.397887, -0.471624, -0.386688, -0.526358, -0.389370, -0.394507, -0.449907, -0.412947, -0.389285, -0.456108, -0.535485, -0.523970, -0.466155, -0.384311, -0.477213, -0.394876, -0.464526, -0.580205, -0.456914, -0.419795, -0.397807, -0.433372, -0.370319, -0.455501, -0.392595, -0.435555, -0.506634, -0.521296, -0.680159, -0.461938, -0.477689, -0.452639, -0.401417, -0.443373, -0.414191, -0.323949, -0.496836, -0.472260, -0.459692, -0.411238, -0.499101, -0.474719, -0.484166, -0.418564, -0.455608, -0.498723, -0.576475, -0.616918, -0.427991, -0.316929, -0.451494, -0.458911, -0.441579, -0.534112, -0.575197, -0.351557, -0.475050, -0.492516, -0.622784, -0.437984, -0.440345, -0.558046, -0.389604, -0.456166, -0.389740, -0.441956, -0.446035, -0.514954, -0.151255, -0.517488, -0.311711, -0.305469, -0.409209, -0.422983, -0.353067, -0.443805, -0.641238, -0.453691, -0.400075, -0.598118, -0.403178
705 | 0.200856, 0.221410, 0.263018, 0.147891, 0.183042, 0.554279, 0.231096, 0.367310, 0.218640, 0.418890, 0.249892, 0.479055, 0.171260, 0.227639, 0.235426, 0.333333, 0.245548, 0.554974, 0.327125, 0.296519, 0.273345, 0.321082, 0.242694, 0.274486, 0.101133, 0.221745, 0.322810, 0.118420, 0.283426, 0.287556, 0.226070, 0.232536, 0.363482, 0.197913, 0.485893, 0.224047, 0.380445, 0.246375, 0.256387, 0.215602, 0.086823, 0.248473, 0.151244, 0.277584, 0.219664, 0.347100, 0.195772, 0.433420, 0.185288, 0.261143, 0.274081, 0.311593, 0.350109, 0.205241, 0.249934, 0.307223, 0.312147, 0.174891, 0.188533, 0.135045, 0.331899, 0.200836, 0.252846, 0.268004, 0.244516, 0.273791, 0.247370, 0.235458, 0.195359, 0.156559, 0.257846, 0.236382, 0.216910, 0.214244, 0.183899, 0.130513, 0.230812, 0.197156, 0.177835, 0.171626, 0.205292, 0.270667, 0.435642, 0.234463, 0.237115, 0.260316, 0.285942, 0.310525, 0.257137, 0.300631, 0.289125, 0.208787, 0.254494, 0.096603, 0.270467, 0.261460, 0.323681, 0.195777, 0.198999, 0.326082, 0.250530, 0.176400, 0.226271, 0.169688, 0.223830, 0.092745, 0.205652, 0.223771, 0.289484, 0.282144, 0.193421, 0.230882, 0.140434, 0.373008, 0.221023, 0.367763, 0.315092, 0.351097, 0.477192, 0.410058, 0.245878, 0.377333, 0.273307, 0.286632, 0.393346, 0.293250, 0.159241, 0.260430
706 |
707 |
708 | Convolution6
709 | -0.089482, -0.095289, -0.137826, -0.082785, -0.118646, -0.087762, -0.088236, -0.120866, -0.129330, -0.115775, -0.103110, -0.062163, -0.079410, -0.088940, -0.167914, -0.057917, -0.088635, -0.058831, -0.053083, -0.077133, -0.117731, -0.104811, -0.084660, -0.227422, -0.212300, -0.066927, -0.061501, -0.099632, -0.192017, -0.080907, -0.122938, -0.101491, -0.123015, -0.045389, -0.086528, -0.040804, -0.101535, -0.101838, -0.051891, -0.159844, -0.069340, -0.062727, -0.067563, -0.084584, -0.120350, -0.119272, -0.078163, -0.097650, -0.091893, -0.055983, -0.194983, -0.098828, -0.073183, -0.139064, -0.058506, -0.108071, -0.087544, -0.068032, -0.093350, -0.092769, -0.048312, -0.095119, -0.084137, -0.081320, -0.228878, -0.060920, -0.149742, -0.072877, -0.073828, -0.085126, -0.068067, -0.083705, -0.078533, -0.083134, -0.057239, -0.067750, -0.076270, -0.066851, -0.053960, -0.118079, -0.061820, -0.062638, -0.046306, -0.072079, -0.107817, -0.097724, -0.097555, -0.111627, -0.067221, -0.041476, -0.071379, -0.050021, -0.086146, -0.120597, -0.167625, -0.079938, -0.106877, -0.128042, -0.053231, -0.108939, -0.047269, -0.098221, -0.054004, -0.051497, -0.110267, -0.093833, -0.148626, -0.116559, -0.072274, -0.114716, -0.153868, -0.050409, -0.070366, -0.077674, -0.164571, -0.162034, -0.155111, -0.048567, -0.079536, -0.093881, -0.083785, -0.058592, -0.123918, -0.136604, -0.049754, -0.157326, -0.212170, -0.078485
710 | 0.074599, 0.070884, 0.060195, 0.042119, 0.028859, 0.077403, 0.038547, 0.074949, 0.032293, 0.049927, 0.049978, 0.054383, 0.038449, 0.029097, 0.028898, 0.099965, 0.058001, 0.044163, 0.075166, 0.053025, 0.073868, 0.048787, 0.045243, 0.121419, 0.096524, 0.059339, 0.042569, 0.057380, 0.127808, 0.047031, 0.059215, 0.045075, 0.027293, 0.045424, 0.087070, 0.112739, 0.042819, 0.127568, 0.055047, 0.177498, 0.075438, 0.083345, 0.031440, 0.030249, 0.061329, 0.053656, 0.069371, 0.064197, 0.075532, 0.093839, 0.059917, 0.071789, 0.038073, 0.105234, 0.084930, 0.039033, 0.041521, 0.038836, 0.081817, 0.050374, 0.065868, 0.060850, 0.134864, 0.048442, 0.118099, 0.036293, 0.032170, 0.051263, 0.041693, 0.050679, 0.030128, 0.052085, 0.064390, 0.050644, 0.064175, 0.055633, 0.037737, 0.040208, 0.079542, 0.054652, 0.041334, 0.042469, 0.058818, 0.057390, 0.067963, 0.047420, 0.064472, 0.082044, 0.052592, 0.051642, 0.047827, 0.091729, 0.023830, 0.055574, 0.046957, 0.034968, 0.079328, 0.071675, 0.142725, 0.037768, 0.045237, 0.214830, 0.056219, 0.064242, 0.206308, 0.023412, 0.053059, 0.106104, 0.109903, 0.036629, 0.033530, 0.056776, 0.083207, 0.081408, 0.034262, 0.083720, 0.029517, 0.119666, 0.077622, 0.090700, 0.101907, 0.084785, 0.106063, 0.098886, 0.067882, 0.062060, 0.017478, 0.096507
711 |
712 |
713 | Convolution7_
714 | -2.215609, -1.912781, -2.451430, -1.582963, -1.515460, -2.113768, -2.226237, -1.848880, -1.886669, -1.274036, -1.890919, -4.721658, -1.826431, -1.521296, -2.025265, -3.201293, -1.793901, -1.524532, -2.334015, -1.841306, -2.762923, -1.808418, -1.264114, -2.557080, -1.749650, -1.425438, -2.032513, -1.895699, -2.032521, -1.509109, -2.752192, -1.750388, -3.678217, -2.336217, -1.537560, -2.485640, -3.341643, -2.358865, -2.322185, -2.015939, -2.172734, -1.942976, -2.525274, -1.794242, -1.576328, -2.379991, -1.883147, -1.783739, -1.722680, -1.934142, -2.541844, -2.123822, -3.025217, -1.852017, -3.790054, -1.961537, -1.671417, -1.975165, -2.093167, -1.476847, -1.290135, -3.290838, -4.737681, -1.939017, -3.881720, -2.066238, -1.715949, -1.309314, -1.333133, -1.528140, -1.729257, -2.501204, -1.795763, -1.506547, -1.846480, -2.134313, -2.124719, -1.829100, -2.617516, -1.883698, -1.974440, -1.867660, -1.784665, -2.544510, -1.761370, -1.851449, -2.125002, -2.296850, -1.715088, -2.717076, -3.000052, -1.948582, -1.350606, -1.905949, -2.287585, -2.077981, -1.357321, -1.441318, -2.872938, -1.458346, -2.517584, -1.898482, -2.377775, -1.425892, -2.426566, -1.909781, -2.028990, -1.844113, -1.964305, -1.903566, -2.207472, -2.101010, -1.619392, -1.369358, -2.284668, -2.095020, -2.198459, -1.824583, -1.793834, -2.144227, -2.433780, -1.724818, -1.410595, -1.998477, -1.774688, -2.849646, -2.509239, -1.520611
715 | 2.219637, 3.100589, 2.187948, 2.102848, 3.639997, 2.638006, 1.934749, 1.903428, 2.239963, 3.908577, 2.235557, 2.757043, 2.092916, 2.955276, 2.558744, 2.309228, 2.075054, 2.564772, 3.022814, 2.204055, 2.862220, 2.318145, 1.891746, 2.473439, 1.957746, 2.694272, 3.782289, 2.615289, 9.435650, 1.856210, 3.453146, 3.004709, 1.840837, 2.171967, 5.553186, 2.259798, 2.905384, 3.471883, 2.353508, 2.719613, 2.104886, 2.050409, 2.418790, 2.119813, 2.340060, 3.732153, 3.199126, 2.957704, 1.927270, 1.838898, 2.400283, 2.285613, 2.628476, 2.113009, 2.989508, 2.709946, 2.960316, 2.619833, 5.116774, 2.114981, 2.500221, 2.719128, 2.706924, 2.467298, 2.543561, 1.863301, 3.057125, 2.593158, 3.723311, 2.045046, 1.975818, 2.061540, 2.141597, 2.628402, 2.577204, 4.250273, 2.638589, 2.393846, 1.913180, 2.566841, 5.853989, 2.127892, 3.302089, 2.492812, 2.178815, 3.195024, 5.283424, 3.714353, 2.563003, 2.509213, 2.085130, 2.761163, 1.411935, 2.915468, 1.803977, 3.676708, 3.703302, 3.257535, 2.703842, 3.896432, 1.904963, 2.208297, 7.098861, 3.254768, 2.380996, 15.311474, 2.521549, 2.651836, 1.974031, 2.904106, 3.142952, 2.510575, 2.167778, 1.773637, 2.047803, 3.162931, 2.223680, 2.681034, 15.820799, 1.540074, 2.771089, 2.315362, 2.224250, 2.310153, 3.788263, 2.181431, 2.756085, 2.397641
716 |
717 |
718 | Convolution8_
719 | -1.872459, -5.352844, -2.649566, -2.087601, -3.926633, -2.926332, -4.724067, -3.956396, -2.163938, -2.171557, -1.815497, -3.727360, -5.837323, -8.681602, -1.875955, -0.274282, -3.129096, -5.792335, -1.047669, -2.274732, -3.495958, -7.472712, -2.138810, -5.228797, -7.629498, -6.281832, -1.957520, -2.338044, -4.366765, -1.117212, -3.135387, -10.761464, -3.302176, -3.646890, -2.636179, -2.567659, -2.177609, -3.011981, -4.514054, -4.048757, -1.548823, -5.782872, -1.647608, -4.231062, -7.039589, -2.523952, -3.323762, -2.400477, -2.893712, -1.915588, -1.043866, -2.484202, -2.822738, -3.732413, -3.211005, -2.276246, -5.610425, -4.879899, -4.587411, -1.157857, -5.089610, -5.863313, -1.397808, -1.735279, -1.073073, -2.454280, -4.051306, -1.784232, -2.183633, -1.469260, -3.743354, -4.941779, -4.690630, -4.118478, -4.523169, -3.554013, -1.118680, -5.615449, -2.736537, -3.768539, -7.642808, -3.207220, -4.681343, -6.024813, -1.525043, -3.754062, -2.566616, -3.590148, -0.694618, -3.771402, -3.839064, -0.058365, -2.007512, -4.460944, -0.611200, -2.451908, -2.858715, -1.560654, -5.710912, -2.256960, -4.385028, -6.050653, -3.611690, 0.499684, -3.778012, -6.750606, -3.572942, -2.770468, -3.070397, -1.009839, -1.558368, -4.802804, -2.048741, -0.405085, -4.601075, -4.514427, -6.188592, -9.915135, -6.424173, -8.371862, -4.011086, -2.993605, -6.663713, -4.509265, -5.480327, -5.738597, -4.150312, -4.839559
720 | 3.429435, 3.700269, 3.674218, 2.689023, 2.447877, 3.796455, 4.823751, 4.814300, 4.284663, 7.869397, 5.337945, 2.524132, 1.269280, 0.405836, 5.519212, 8.243701, 10.594466, 0.766087, 4.758397, 3.170860, 1.142504, 1.810520, 5.103805, 9.551643, 3.240988, 0.846419, 2.740798, 1.177772, 2.432126, 3.726336, 2.208188, 0.610985, 8.961679, 7.027319, 1.757295, 10.510656, 2.717644, 2.737868, 3.499144, 2.370403, 3.354144, 0.343958, 4.437463, 1.832197, 0.934699, 5.503766, 4.801977, 2.966651, 3.820568, 4.969003, 4.320631, 2.722138, 3.239754, 6.412441, 8.854413, 3.743961, 2.181079, 2.928961, 1.996874, 2.044940, 2.566653, 0.743482, 5.468718, 5.573136, 4.664586, 3.257355, 5.359721, 5.203478, 4.163172, 5.555080, 0.470708, 2.768162, 4.626716, 1.025795, 3.708481, 3.686304, 4.888070, 2.040133, 4.084060, 4.628593, 1.709242, 3.382280, 2.977187, 2.135484, 6.309508, 5.380356, 2.372864, 2.128798, 5.887513, 2.067376, 5.359750, 8.565346, 8.163108, 3.961072, 5.819194, 3.387053, 2.953495, 2.304596, 1.920968, 4.582163, 4.163924, -0.067669, 4.745985, 8.886450, 0.633174, 5.105834, 6.652740, 3.307892, 3.545277, 3.874053, 7.186567, 4.194041, 3.392613, 4.381091, 1.843383, 1.529642, 2.794033, 3.850415, 1.816620, -0.295237, 3.744004, 5.420012, 3.036198, 2.320942, 1.854587, 0.646627, 1.521050, 2.705915
721 |
722 |
723 | Convolution9_
724 | -0.141384, -0.140614, -0.122125, -0.132391, -0.161326, -0.217106, -0.144781, -0.139152, -0.067251, -0.131850, -0.105117, -0.200349, -0.165607, -0.158392, -0.129925, -0.117243, -0.170651, -0.139268, -0.142944, -0.268188, -0.170374, -0.185460, -0.194706, -0.125657, -0.179409, -0.139717, -0.137770, -0.144900, -0.161827, -0.116752, -0.095180, -0.131101, -0.164971, -0.164557, -0.132092, -0.117414, -0.110332, -0.125184, -0.171304, -0.132943, -0.172852, -0.173234, -0.170059, -0.245928, -0.168562, -0.154429, -0.177196, -0.087597, -0.099284, -0.146703, -0.118962, -0.167918, -0.128791, -0.199360, -0.124410, -0.209607, -0.192710, -0.120686, -0.163151, -0.140004, -0.183266, -0.182779, -0.147366, -0.201588, -0.130788, -0.118730, -0.102759, -0.157234, -0.121187, -0.167453, -0.272018, -0.188587, -0.183508, -0.184845, -0.111430, -0.171266, -0.169223, -0.131493, -0.119391, -0.151135, -0.219977, -0.142727, -0.106848, -0.155600, -0.109978, -0.128091, -0.106047, -0.170440, -0.136912, -0.217854, -0.130795, -0.147434, -0.171887, -0.146335, -0.119519, -0.138614, -0.088763, -0.160719, -0.210362, -0.113426, -0.106081, -0.172056, -0.155751, -0.087255, -0.181359, -0.100823, -0.163023, -0.187006, -0.168611, -0.104720, -0.199164, -0.176602, -0.142898, -0.176948, -0.114266, -0.156973, -0.155024, -0.143181, -0.104440, -0.162102, -0.097031, -0.237458, -0.225713, -0.202350, -0.108361, -0.170474, -0.172204, -0.213260
725 | 0.192927, 0.163904, 0.216464, 0.141252, 0.175652, 0.116612, 0.129657, 0.141891, 0.227062, 0.127624, 0.233416, 0.108427, 0.125047, 0.152245, 0.169610, 0.223241, 0.125547, 0.213838, 0.153609, 0.185048, 0.154109, 0.105797, 0.191681, 0.193927, 0.116197, 0.130909, 0.183078, 0.127747, 0.135951, 0.205147, 0.181528, 0.134937, 0.116522, 0.113566, 0.183639, 0.150600, 0.191808, 0.199205, 0.125108, 0.164799, 0.145179, 0.117929, 0.143131, 0.135270, 0.149052, 0.153994, 0.142980, 0.168093, 0.161392, 0.200118, 0.155097, 0.179147, 0.129252, 0.208162, 0.170779, 0.107756, 0.142978, 0.184834, 0.131037, 0.178645, 0.126263, 0.121349, 0.161910, 0.096884, 0.162830, 0.141599, 0.238014, 0.192181, 0.195258, 0.164491, 0.123304, 0.115548, 0.223965, 0.127568, 0.140396, 0.179048, 0.207421, 0.131426, 0.173920, 0.162376, 0.127117, 0.132670, 0.163431, 0.172278, 0.176411, 0.141682, 0.178698, 0.120406, 0.144354, 0.117430, 0.127157, 0.147001, 0.102106, 0.123333, 0.153365, 0.261058, 0.178792, 0.127813, 0.124217, 0.137566, 0.148609, 0.154865, 0.119260, 0.183627, 0.112286, 0.174934, 0.139095, 0.183969, 0.101796, 0.192192, 0.219783, 0.114391, 0.167880, 0.204897, 0.146977, 0.113564, 0.103683, 0.159543, 0.221478, 0.129725, 0.210680, 0.127778, 0.124826, 0.187943, 0.224850, 0.109750, 0.178134, 0.108745
726 |
727 |
728 | Eltwise1
729 | -0.017028, -1.275471, -1.673704, -1.644008, -1.524418, -1.169149, -0.667587, -1.450444, -1.312477, -1.276353, -2.007165, -1.685464, -1.072019, -1.407206, -1.967463, -1.762276, -1.746051, -1.039968, -2.420270, -1.412734, -1.050735, -1.829014, -1.326287, -1.684462, -1.542513, -1.846280, -0.961611, -1.470429, -0.487193, -1.466300, -1.324069, -0.721018, -1.714208, -0.878640, -1.138389, -0.880113, -2.146951, -0.932845, -1.222792, -0.986625, -1.421620, -1.431515, -1.254966, -0.354831, -1.866524, -1.756024, -0.882827, -1.068052, -1.116835, -1.281034, -0.814001, -1.276935, -1.054696, -1.375416, -1.687976, -1.127508, -1.126485, -1.247669, -1.867485, -0.683524, -0.670696, -0.285824, -2.405033, -0.845408
730 | 1.774242, 0.698047, 0.974963, 0.740626, 0.281967, 0.767644, 1.501642, 0.225284, 0.919347, 0.954617, 1.497935, 1.227051, 0.536636, 1.091346, 0.975359, 0.672788, 0.753025, 0.439833, 1.523668, 0.924011, 1.438146, 0.578670, 0.996933, 0.929003, 0.785881, 0.616365, 0.795084, 1.082354, 0.876318, 0.585555, 0.510384, 1.313209, 1.094867, 1.137769, 0.815225, 0.526590, 0.941590, 0.838114, 0.641267, 0.874459, 1.125475, 1.213152, 1.166444, 1.389872, 0.874906, 0.886343, 1.046168, 0.772610, 1.021919, 0.825788, 0.604562, 0.645271, 0.974916, 0.480049, 0.932897, 0.857911, 1.255972, 0.461210, 1.214344, 0.727669, 0.737296, 1.145959, 1.082185, 0.969820
731 |
732 |
733 | Eltwise2
734 | -0.443957, -0.605963, -0.640653, -0.484198, -0.516158, -0.565088, -0.526986, -0.397427, -0.578756, -0.467145, -0.534085, -0.445885, -0.434598, -0.392231, -0.662496, -0.528484, -0.610147, -0.478382, -0.324498, -0.443648, -0.546696, -0.445921, -0.387625, -0.338345, -0.671660, -0.594909, -0.489233, -0.595987, -0.441781, -0.408518, -0.501019, -0.367906, -0.477532, -0.397874, -0.378585, -0.303186, -0.526454, -0.471465, -0.453039, -0.516069, -0.667871, -0.414087, -0.481706, -0.676395, -0.488429, -0.601673, -0.474313, -0.442131, -0.481536, -0.402817, -0.637061, -0.417550, -0.424868, -0.487512, -0.420913, -0.439849, -0.482821, -0.550174, -0.539193, -0.491423, -0.404817, -0.518623, -0.449295, -0.466241, -0.667402, -0.452453, -0.492846, -0.416107, -0.469978, -0.381496, -0.476543, -0.424077, -0.446180, -0.544722, -0.544912, -0.688754, -0.503126, -0.500714, -0.461411, -0.447614, -0.461401, -0.426766, -0.328567, -0.516746, -0.546269, -0.484139, -0.437605, -0.544050, -0.493748, -0.490350, -0.447023, -0.457235, -0.538754, -0.614329, -0.654333, -0.462788, -0.362047, -0.467872, -0.447921, -0.465190, -0.542563, -0.598265, -0.359482, -0.490150, -0.503703, -0.651471, -0.508065, -0.464660, -0.540431, -0.454038, -0.532324, -0.382675, -0.460383, -0.461374, -0.558725, -0.224947, -0.584646, -0.306992, -0.325384, -0.421760, -0.452760, -0.344762, -0.501827, -0.670179, -0.449420, -0.461779, -0.699367, -0.407958
735 | 0.216124, 0.216809, 0.257756, 0.130288, 0.168633, 0.562783, 0.243085, 0.380344, 0.187281, 0.407381, 0.255168, 0.466470, 0.171003, 0.219795, 0.189784, 0.336453, 0.249088, 0.563875, 0.351783, 0.301482, 0.303854, 0.330153, 0.251091, 0.312283, 0.100607, 0.231709, 0.337776, 0.117986, 0.317421, 0.300370, 0.238111, 0.212082, 0.339205, 0.196792, 0.474934, 0.261985, 0.366562, 0.236819, 0.264952, 0.295391, 0.115739, 0.274067, 0.140288, 0.273334, 0.231359, 0.351759, 0.198811, 0.476399, 0.175464, 0.295884, 0.250068, 0.341163, 0.308196, 0.234609, 0.283127, 0.320333, 0.301801, 0.168577, 0.198250, 0.144369, 0.353553, 0.199844, 0.281569, 0.270016, 0.299234, 0.274959, 0.244501, 0.239890, 0.204819, 0.166917, 0.257018, 0.243121, 0.222048, 0.215736, 0.209125, 0.128587, 0.229038, 0.198041, 0.225734, 0.164579, 0.206435, 0.275895, 0.464783, 0.240427, 0.249053, 0.240741, 0.288452, 0.350527, 0.257361, 0.307328, 0.281326, 0.236198, 0.245442, 0.088852, 0.280140, 0.243370, 0.349838, 0.215139, 0.261059, 0.316499, 0.253725, 0.274825, 0.230850, 0.193335, 0.332480, 0.070612, 0.203926, 0.247235, 0.333103, 0.266353, 0.169649, 0.234640, 0.159518, 0.369362, 0.187098, 0.398965, 0.291111, 0.404315, 0.484026, 0.423820, 0.288817, 0.418290, 0.304228, 0.297110, 0.423540, 0.295543, 0.137911, 0.267568
736 |
737 |
738 | Eltwise3
739 | -1.907564, -5.324944, -2.584927, -2.099427, -3.964873, -2.915219, -4.793260, -4.019541, -2.111626, -2.189340, -1.878041, -3.779873, -5.913455, -8.635453, -1.777671, -0.249340, -3.174783, -5.730870, -1.057399, -2.413578, -3.571058, -7.475122, -2.161009, -5.242243, -7.732908, -6.323933, -1.967382, -2.367035, -4.365279, -1.120882, -3.158793, -10.761180, -3.277239, -3.687467, -2.575033, -2.521878, -2.208740, -2.908697, -4.598978, -4.000963, -1.576979, -5.775121, -1.666942, -4.288087, -7.104886, -2.582561, -3.239947, -2.325077, -2.856584, -1.892248, -1.007207, -2.465359, -2.802383, -3.694602, -3.217989, -2.290163, -5.631609, -4.896439, -4.578271, -1.201638, -5.008301, -5.968531, -1.372641, -1.754668, -1.041370, -2.459767, -4.052456, -1.723538, -2.198922, -1.522968, -3.812165, -5.053628, -4.645249, -4.165840, -4.548175, -3.575165, -1.057271, -5.691209, -2.772001, -3.857198, -7.733587, -3.281178, -4.692113, -6.025166, -1.488777, -3.808657, -2.431131, -3.666995, -0.698720, -3.759022, -3.885885, -0.138630, -2.120354, -4.504014, -0.582519, -2.460079, -2.826749, -1.558318, -5.746382, -2.260880, -4.367985, -6.081302, -3.615337, 0.560889, -3.803660, -6.604887, -3.555866, -2.746310, -3.087168, -0.995096, -1.559035, -4.772481, -2.048341, -0.354973, -4.579305, -4.628964, -6.222413, -9.909677, -6.407003, -8.354704, -3.899036, -3.008315, -6.868675, -4.546631, -5.493344, -5.788953, -4.201906, -4.866153
740 | 3.475531, 3.710648, 3.740433, 2.678443, 2.441540, 3.730961, 4.856013, 4.941118, 4.399026, 7.861291, 5.376379, 2.476548, 1.289025, 0.413592, 5.549860, 8.234210, 10.524568, 0.797860, 4.828226, 3.126103, 1.170680, 1.810385, 5.022070, 9.579741, 3.280487, 0.896627, 2.805602, 1.177811, 2.351528, 3.773983, 2.201317, 0.627348, 8.836780, 6.943242, 1.724609, 10.594956, 2.693786, 2.697961, 3.498795, 2.433382, 3.300122, 0.394773, 4.438357, 1.837798, 0.928520, 5.625850, 4.742742, 3.027977, 3.786014, 4.952445, 4.362332, 2.760020, 3.263377, 6.393613, 8.829145, 3.688315, 2.159716, 2.944713, 2.050731, 2.039855, 2.557002, 0.752501, 5.479669, 5.453331, 4.686422, 3.263134, 5.481597, 5.227953, 4.185263, 5.574124, 0.404131, 2.795418, 4.595315, 1.047041, 3.696763, 3.687738, 4.864021, 2.099275, 4.132128, 4.615304, 1.700763, 3.389483, 2.974463, 2.108804, 6.285203, 5.404897, 2.416899, 2.100536, 5.948894, 2.077207, 5.420952, 8.598346, 8.144469, 3.952628, 5.821994, 3.480676, 3.041466, 2.317952, 1.887334, 4.600194, 4.177206, -0.144220, 4.703924, 8.934053, 0.667851, 5.115231, 6.644749, 3.330505, 3.528387, 3.884891, 7.141387, 4.197771, 3.400951, 4.464654, 1.838341, 1.537781, 2.767060, 3.862851, 1.824424, -0.258206, 3.858929, 5.456958, 2.957689, 2.301570, 1.980565, 0.586679, 1.545702, 2.637382
741 |
742 |
743 | Mul_/Fused_Mul_/FusedScaleShift_
744 | -1.486210, -1.100639, -1.039412
745 | 2.017180, 1.668351, 1.906080
746 |
747 |
748 | Mul_938/Fused_Mul_/FusedScaleShift_
749 | -1.000993, -0.966894, -1.147337, -0.816544, -0.961222, -1.249921, -0.841000, -0.966014, -1.106117, -0.883642, -1.391058, -0.533957, -0.678366, -0.974387, -1.177449, -1.253030, -1.118649, -1.179478, -0.797963, -1.072985, -1.216630, -0.805759, -0.765967, -1.233649, -1.009566, -0.690107, -1.004941, -0.885806, -0.538931, -0.597131, -0.899253, -0.576842, -0.843238, -0.838470, -0.895853, -0.257352, -0.883414, -1.097017, -0.751683, -0.786381, -1.048041, -0.911387, -0.770595, -1.288075, -1.123150, -1.181912, -0.716003, -0.405089, -0.757471, -0.696932, -0.757922, -1.044208, -1.038663, -0.596706, -0.576188, -0.731928, -0.737536, -0.558026, -1.257804, -0.795904, -0.996625, -0.785111, -0.885730, -0.829521, -0.931226, -1.091477, -1.015910, -0.878806, -0.760218, -0.896189, -0.909996, -0.774546, -0.975111, -1.144643, -0.995571, -1.014837, -0.848428, -1.131618, -0.920184, -0.867002, -0.952315, -1.044424, -0.686059, -0.748839, -0.785185, -0.739018, -0.675778, -1.048845, -0.578215, -0.899976, -0.931860, -0.874153, -1.020537, -0.991013, -0.879668, -0.574732, -0.536523, -0.899982, -0.962860, -0.913977, -1.127974, -0.959420, -0.726260, -0.852640, -1.298168, -1.277054, -0.589856, -0.713010, -0.985328, -0.631840, -0.948492, -0.956102, -0.642173, -0.889169, -0.752575, -0.562585, -1.032635, -0.689947, -1.080127, -0.777054, -0.913765, -0.744137, -1.028314, -1.142983, -0.710338, -1.003463, -1.150572, -0.792743
750 | 0.483944, 0.406185, 0.632219, 0.414809, 0.486445, 0.861361, 0.400378, 0.618637, 0.524014, 0.543397, 0.591688, 0.452402, 0.358524, 0.386446, 0.498011, 0.655070, 0.505053, 1.321713, 0.481918, 0.606015, 0.738089, 0.520293, 0.459486, 0.638352, 0.331212, 0.530514, 0.680300, 0.367460, 0.418880, 0.393814, 0.505746, 0.289565, 0.614410, 0.385885, 0.705580, 0.280099, 0.628635, 0.574024, 0.512155, 0.603020, 0.461235, 0.553133, 0.254880, 0.685553, 0.487954, 0.621140, 0.442192, 0.517791, 0.399861, 0.392243, 0.453780, 0.681385, 0.698659, 0.474270, 0.446528, 0.462838, 0.409811, 0.446920, 0.424049, 0.397508, 0.591208, 0.379567, 0.560726, 0.509800, 0.432462, 0.664432, 0.629337, 0.546434, 0.283690, 0.469795, 0.596327, 0.384368, 0.392363, 0.552963, 0.416645, 0.472754, 0.443526, 0.389120, 0.523686, 0.417113, 0.541141, 0.571517, 0.605550, 0.355152, 0.402135, 0.538882, 0.356453, 0.550924, 0.423157, 0.510988, 0.593968, 0.409847, 0.523218, 0.426740, 0.463910, 0.284844, 0.414432, 0.340745, 0.493517, 0.551570, 0.480329, 0.555724, 0.364474, 0.466598, 0.948566, 0.378891, 0.391581, 0.499351, 0.514681, 0.399723, 0.380286, 0.523027, 0.396151, 0.664291, 0.339964, 0.352593, 0.678574, 0.456457, 0.970837, 0.543184, 0.510499, 0.707630, 0.411521, 0.729949, 0.545635, 0.840874, 0.400712, 0.372579
751 |
752 |
753 | Mul_941/Fused_Mul_/FusedScaleShift_
754 | -0.504788, -1.100831, -0.656862, -1.567505, -0.873975, -1.047771, -0.266430, -0.647008, -1.417575, -1.136612, -1.231986, -0.886641, -1.483869, -1.001915, -0.917917, -1.520697, -0.865010, -0.846773, -0.983751, -1.391221, -0.680382, -0.851530, -1.517818, -1.059633, -1.363468, -2.029816, -1.292212, -1.237723, -0.890540, -1.303790, -0.496715, -1.116190, -1.454922, -1.223954, -1.050618, -0.793075, -1.655688, -0.434717, -0.428279, -1.135996, -1.224839, -1.254589, -0.723436, -0.646005, -1.020061, -1.575614, -1.103309, -1.029912, -1.055838, -0.816475, -1.124512, -1.074899, -1.411968, -0.770837, -1.366073, -0.804070, -1.345881, -1.019030, -1.051710, -1.015250, -0.720437, -0.724421, -1.360435, -0.955714
755 | 0.897479, 0.643705, 0.506541, 0.909026, 0.598676, 0.760437, 0.550016, 0.524786, 0.899500, 0.956593, 0.919019, 0.866192, 0.803402, 0.910110, 0.652711, 0.847069, 0.671315, 0.538715, 0.661554, 0.753926, 0.868698, 0.618573, 1.258414, 0.662642, 1.104118, 0.795911, 0.845381, 1.062159, 0.687212, 0.667873, 0.630433, 0.874851, 0.965359, 1.080826, 0.704604, 0.478592, 0.798701, 0.554845, 0.588955, 0.843883, 0.991465, 1.163610, 0.724929, 0.871114, 0.641436, 0.944970, 0.761778, 1.002170, 0.883072, 0.724287, 0.790562, 0.762222, 0.886732, 0.559242, 0.772461, 0.523369, 1.047992, 0.564693, 0.796835, 0.835792, 0.463608, 0.690179, 0.860580, 0.849676
756 |
757 |
758 | Pooling1
759 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, 0.031911, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, 0.124424, -0.000000, -0.000000, 0.080710, -0.000000, -0.000000, 0.034276, -0.000000, -0.000000, -0.000000, -0.000000, 0.062388, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, 0.200579, -0.000000, -0.000000
760 | -0.000000, 0.497820, 0.832506, 0.346552, -0.000000, 0.203427, 0.714980, 0.501595, 0.467396, -0.000000, 1.200957, -0.000000, -0.000000, -0.000000, 0.898271, -0.000000, 0.766077, 0.533865, 0.872860, 0.826616, 0.752538, 1.163071, 0.659017, 0.915900, -0.000000, 0.508371, 0.465510, -0.000000, 0.415144, 0.486053, 0.630878, 0.672043, 0.693676, -0.000000, 0.765972, 0.267683, 0.656781, 0.560202, -0.000000, 0.572641, 0.644302, 0.777188, 0.545727, 0.547142, 0.516240, 1.077425, 0.504107, 0.453935, 0.264638, 0.502588, 0.426357, -0.000000, -0.000000, 0.738134, 0.467730, 0.445617, 0.793476, -0.000000, 0.659369, 0.358823, 0.305420, 0.878953, 0.503664, 0.529017
761 |
762 |
763 | ReLU1
764 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
765 | -0.000000, 0.497820, 0.832506, 0.346552, -0.000000, 0.203427, 0.714980, 0.501595, 0.467396, -0.000000, 1.200957, -0.000000, -0.000000, -0.000000, 0.898271, -0.000000, 0.766077, 0.533865, 0.872860, 0.826616, 0.752538, 1.163071, 0.659017, 0.915900, -0.000000, 0.508371, 0.465510, -0.000000, 0.415144, 0.486053, 0.630878, 0.672043, 0.693676, -0.000000, 0.765972, 0.267683, 0.656781, 0.560202, -0.000000, 0.572641, 0.644302, 0.777188, 0.545727, 0.547142, 0.516240, 1.077425, 0.504107, 0.453935, 0.264638, 0.502588, 0.426357, -0.000000, -0.000000, 0.738134, 0.467730, 0.445617, 0.793476, -0.000000, 0.659369, 0.358823, 0.305420, 0.878953, 0.503664, 0.529017
766 |
767 |
768 | ReLU2
769 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
770 | 1.149957, 1.799291, 1.168950, 0.523522, 1.063294, 1.542535, 1.123924, 0.657397, 0.895590, 0.998299, 1.200789, 1.382979, 1.868789, 1.241208, 1.036975, 1.490843, 1.424365, 0.645173, 1.358467, 1.134480, 0.711281, 1.521438, 1.241640, 1.085441, 0.800033, 1.426338, 1.078513, 1.577697, 1.401667, 1.336875, 1.395761, 1.016825, 0.990026, 0.970445, 0.767162, 1.027512, 1.287511, 1.436469, 1.009385, 1.295796, 1.123551, 1.040126, 1.322944, 1.631079, 1.473608, 1.312555, 0.995855, 0.958212, 0.210745, 1.826255, 1.314905, 1.115719, 1.441668, 0.800977, 1.166564, 1.930096, 1.393550, 1.436534, 1.496468, 1.576848, 1.158102, 0.676285, 1.170441, 0.785163
771 |
772 |
773 | ReLU3
774 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
775 | 0.897479, 0.643705, 0.506541, 0.909026, 0.598676, 0.760437, 0.550016, 0.524786, 0.899500, 0.956593, 0.919019, 0.866192, 0.803402, 0.910110, 0.652711, 0.847069, 0.671315, 0.538715, 0.661554, 0.753926, 0.868698, 0.618573, 1.258414, 0.662642, 1.104118, 0.795911, 0.845381, 1.062159, 0.687212, 0.667873, 0.630433, 0.874851, 0.965359, 1.080826, 0.704604, 0.478592, 0.798701, 0.554845, 0.588955, 0.843883, 0.991465, 1.163610, 0.724929, 0.871114, 0.641436, 0.944970, 0.761778, 1.002170, 0.883072, 0.724287, 0.790562, 0.762222, 0.886732, 0.559242, 0.772461, 0.523369, 1.047992, 0.564693, 0.796835, 0.835792, 0.463608, 0.690179, 0.860580, 0.849676
776 |
777 |
778 | ReLU4
779 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
780 | 0.730971, 0.871024, 0.847801, 0.659128, 0.819444, 0.660748, 0.643506, 0.667783, 0.931378, 0.659541, 0.648733, 0.867783, 0.843434, 0.702816, 0.703523, 0.527665, 0.707799, 0.759652, 0.634136, 0.721965, 0.650413, 0.600414, 0.794543, 0.736906, 0.472175, 0.642349, 0.475547, 0.667468, 0.578054, 0.683662, 0.725845, 0.640342, 0.666344, 0.954672, 0.936463, 0.968798, 0.931694, 0.789229, 0.784703, 0.698049, 0.764269, 0.574454, 0.569822, 0.918085, 0.649320, 0.475810, 0.629136, 0.645257, 0.997615, 1.041137, 0.992302, 0.820052, 0.572717, 0.941744, 0.729752, 0.858700, 0.481110, 1.074935, 0.696718, 0.673904, 0.738104, 0.700115, 0.869723, 0.657767, 0.557694, 0.774682, 0.847884, 0.699341, 0.746636, 0.713455, 0.587029, 0.531899, 0.839265, 0.664002, 0.581665, 0.734524, 0.754002, 0.631387, 0.782777, 0.795419, 0.597503, 0.654458, 0.576333, 0.468061, 0.639559, 0.681668, 0.145221, 0.999873, 0.673812, 0.614949, 0.609713, 0.910629, 0.724051, 0.686831, 0.669095, 0.667599, 0.631204, 0.732421, 0.700751, 0.429144, 0.881050, 0.451574, 0.619027, 0.905507, 0.572302, 0.754130, 0.667575, 0.728752, 0.809867, 0.746696, 0.686627, 0.892574, 0.529282, 0.425024, 0.554133, 0.722652, 0.737739, 0.784869, 0.787256, 0.698479, 0.580135, 0.485946, 0.692269, 0.716703, 0.665321, 0.675131, 0.777950, 0.389443
781 |
782 |
783 | ReLU5
784 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
785 | 0.483944, 0.406185, 0.632219, 0.414809, 0.486445, 0.861361, 0.400378, 0.618637, 0.524014, 0.543397, 0.591688, 0.452402, 0.358524, 0.386446, 0.498011, 0.655070, 0.505053, 1.321713, 0.481918, 0.606015, 0.738089, 0.520293, 0.459486, 0.638352, 0.331212, 0.530514, 0.680300, 0.367460, 0.418880, 0.393814, 0.505746, 0.289565, 0.614410, 0.385885, 0.705580, 0.280099, 0.628635, 0.574024, 0.512155, 0.603020, 0.461235, 0.553133, 0.254880, 0.685553, 0.487954, 0.621140, 0.442192, 0.517791, 0.399861, 0.392243, 0.453780, 0.681385, 0.698659, 0.474270, 0.446528, 0.462838, 0.409811, 0.446920, 0.424049, 0.397508, 0.591208, 0.379567, 0.560726, 0.509800, 0.432462, 0.664432, 0.629337, 0.546434, 0.283690, 0.469795, 0.596327, 0.384368, 0.392363, 0.552963, 0.416645, 0.472754, 0.443526, 0.389120, 0.523686, 0.417113, 0.541141, 0.571517, 0.605550, 0.355152, 0.402135, 0.538882, 0.356453, 0.550924, 0.423157, 0.510988, 0.593968, 0.409847, 0.523218, 0.426740, 0.463910, 0.284844, 0.414432, 0.340745, 0.493517, 0.551570, 0.480329, 0.555724, 0.364474, 0.466598, 0.948566, 0.378891, 0.391581, 0.499351, 0.514681, 0.399723, 0.380286, 0.523027, 0.396151, 0.664291, 0.339964, 0.352593, 0.678574, 0.456457, 0.970837, 0.543184, 0.510499, 0.707630, 0.411521, 0.729949, 0.545635, 0.840874, 0.400712, 0.372579
786 |
787 |
788 | ReLU6_
789 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
790 | 2.219637, 3.100589, 2.187948, 2.102848, 3.639997, 2.638006, 1.934749, 1.903428, 2.239963, 3.908577, 2.235557, 2.757043, 2.092916, 2.955276, 2.558744, 2.309228, 2.075054, 2.564772, 3.022814, 2.204055, 2.862220, 2.318145, 1.891746, 2.473439, 1.957746, 2.694272, 3.782289, 2.615289, 9.435650, 1.856210, 3.453146, 3.004709, 1.840837, 2.171967, 5.553186, 2.259798, 2.905384, 3.471883, 2.353508, 2.719613, 2.104886, 2.050409, 2.418790, 2.119813, 2.340060, 3.732153, 3.199126, 2.957704, 1.927270, 1.838898, 2.400283, 2.285613, 2.628476, 2.113009, 2.989508, 2.709946, 2.960316, 2.619833, 5.116774, 2.114981, 2.500221, 2.719128, 2.706924, 2.467298, 2.543561, 1.863301, 3.057125, 2.593158, 3.723311, 2.045046, 1.975818, 2.061540, 2.141597, 2.628402, 2.577204, 4.250273, 2.638589, 2.393846, 1.913180, 2.566841, 5.853989, 2.127892, 3.302089, 2.492812, 2.178815, 3.195024, 5.283424, 3.714353, 2.563003, 2.509213, 2.085130, 2.761163, 1.411935, 2.915468, 1.803977, 3.676708, 3.703302, 3.257535, 2.703842, 3.896432, 1.904963, 2.208297, 7.098861, 3.254768, 2.380996, 15.311474, 2.521549, 2.651836, 1.974031, 2.904106, 3.142952, 2.510575, 2.167778, 1.773637, 2.047803, 3.162931, 2.223680, 2.681034, 15.820799, 1.540074, 2.771089, 2.315362, 2.224250, 2.310153, 3.788263, 2.181431, 2.756085, 2.397641
791 |
792 |
793 | color
794 | 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000
795 | 1.000000, 0.988427, 0.002327, 0.999999, 0.066999, 0.999951, 1.000000
796 |
797 |
798 | conv_color
799 | -13.506009, -10.808620, -14.647364, -2.789529, -12.041644, -14.881329, -11.464386
800 | 40.903057, 28.954319, 15.449120, 37.083988, 14.119350, 32.167919, 52.195187
801 |
802 |
803 | conv_type
804 | -6.242589, -15.857176, -14.670082, -11.543294
805 | 57.850468, 18.851368, 27.143709, 23.807281
806 |
807 |
808 | input
809 | 0.000000, 0.000000, 0.000000
810 | 255.000000, 255.000000, 255.000000
811 |
812 |
813 | pool_color
814 | 0.065525, 0.026355, 0.000000, 0.067317, 0.000000, 0.000000, 0.000000
815 | 20.750341, 14.051600, 6.296879, 18.551592, 5.807144, 14.179525, 22.446182
816 |
817 |
818 | pool_type
819 | 0.611729, 0.000000, 0.111630, 0.136686
820 | 26.981985, 7.690254, 14.782531, 11.727703
821 |
822 |
823 | relu_conv_color
824 | -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000, -0.000000
825 | 40.903057, 28.954319, 15.449120, 37.083988, 14.119350, 32.167919, 52.195187
826 |
827 |
828 | relu_conv_type
829 | -0.000000, -0.000000, -0.000000, -0.000000
830 | 57.850468, 18.851368, 27.143709, 23.807281
831 |
832 |
833 | type
834 | 0.000000, 0.000000, 0.000000, 0.000000
835 | 1.000000, 0.236938, 0.999966, 0.999141
836 |
837 |
838 |
839 |
--------------------------------------------------------------------------------
/lesson2/home/workspace/outputs/CAR_META-output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abhik-99/OpenDevLibrary/f78fb17e0d9225bdc193e5000461c1c9d46efa0e/lesson2/home/workspace/outputs/CAR_META-output.png
--------------------------------------------------------------------------------
/lesson2/home/workspace/preprocess_inputs.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
4 | # My Code
def preprocessing(input_image, height, width):
    '''
    Given an input image, height and width:
    - Resize to height and width
    - Transpose the final "channel" dimension to be first
    - Reshape the image to add a "batch" of 1 at the start

    Expects an HxWxC array (as returned by cv2.imread); returns an array of
    shape (1, C, height, width) suitable as OpenVINO network input.
    '''
    image = cv2.resize(input_image, (width, height))
    image = image.transpose((2, 0, 1))
    # Prepend the batch axis. Using the array's actual shape rather than a
    # hard-coded channel count of 3 so non-BGR inputs (e.g. RGBA) also work;
    # behavior is identical for the usual 3-channel case.
    image = image.reshape(1, *image.shape)

    return image
17 |
def pose_estimation(input_image):
    '''
    Given some input image, preprocess the image so that
    it can be used with the related pose estimation model
    (human-pose-estimation-0001), which expects a 1x3x256x456 input.
    '''
    preprocessed_image = np.copy(input_image)

    # Delegate to the shared helper (resize -> channels-first -> batch axis)
    # for consistency with text_detection and car_meta; previously this
    # duplicated the same steps inline.
    return preprocessing(preprocessed_image, 256, 456)
35 |
36 |
def text_detection(input_image):
    '''
    Preprocess input_image for the text detection model
    (text-detection-0004), which expects a 1x3x768x1280 input.
    Delegates to the shared preprocessing helper.
    '''
    image_copy = np.copy(input_image)
    return preprocessing(image_copy, 768, 1280)
49 |
50 |
def car_meta(input_image):
    '''
    Preprocess input_image for the car metadata model
    (vehicle-attributes-recognition-barrier-0039), which expects a
    1x3x72x72 input. Delegates to the shared preprocessing helper.
    '''
    image_copy = np.copy(input_image)
    return preprocessing(image_copy, 72, 72)
63 |
--------------------------------------------------------------------------------
/lesson2/home/workspace/test.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import numpy as np
3 |
4 | from preprocess_inputs import pose_estimation, text_detection, car_meta
5 |
# Image locations
# Sample inputs, one per preprocessing test. cv2.imread returns the pixels
# in BGR channel order, or None if the file is missing.
POSE_IMAGE = cv2.imread("images/sitting-on-car.jpg")
TEXT_IMAGE = cv2.imread("images/sign.jpg")
CAR_IMAGE = cv2.imread("images/blue-car.jpg")

# Test names
# These strings are the lookup keys into solution_funcs.
test_names = ["Pose Estimation", "Text Detection", "Car Meta"]

# Hold solution functions
# NOTE(review): "global" at module level is a no-op; solution_funcs only
# comes into existence when set_solution_functions() is called.
global solution_funcs
16 |
def test_pose():
    """Check pose_estimation output against the reference and return the result."""
    return test(pose_estimation, test_names[0], POSE_IMAGE)
20 |
21 |
def test_text():
    """Check text_detection output against the reference and return the result."""
    return test(text_detection, test_names[1], TEXT_IMAGE)
25 |
26 |
def test_car():
    """Check car_meta output against the reference and return the result."""
    return test(car_meta, test_names[2], CAR_IMAGE)
30 |
31 |
def test(test_func, test_name, test_image):
    """
    Run the student's test_func on test_image and compare the output to
    the reference solution registered for test_name.

    Returns True when the outputs match, False otherwise — including when
    the student's code raises — so callers can safely sum the results.
    """
    # Try the student's code first
    try:
        student_processed = test_func(test_image)
    except Exception:  # narrow from bare except: so Ctrl-C etc. still propagate
        print_exception(test_name)
        # Return False (not None) so main()'s sum of the three results
        # cannot raise a TypeError when a student function fails.
        return False
    # Run the solution code and compare to student example
    solution = solution_funcs[test_name](test_image)
    comparison = np.array_equal(student_processed, solution)
    print_test_result(test_name, comparison)

    return comparison
45 |
46 |
def print_exception(test_name):
    """Report that the named test could not be run because the code raised."""
    message = (
        "Failed to run test on {}.\n"
        "The code should be valid Python and return the preprocessed image."
    ).format(test_name)
    print(message)
50 |
51 |
def print_test_result(test_name, result):
    """Print pass/fail feedback for a single named test."""
    if not result:
        print("Failed {} test, did not obtain expected preprocessed image."
              .format(test_name))
    else:
        print("Passed {} test.".format(test_name))
58 |
59 |
def feedback(tests_passed):
    """Summarize how many of the three preprocessing tests passed."""
    print("You passed {} of 3 tests.".format(int(tests_passed)))
    closing = "Congratulations!" if tests_passed == 3 else "See above for additional feedback."
    print(closing)
66 |
67 |
def set_solution_functions():
    """Populate the module-level solution_funcs mapping (test name -> solver)."""
    global solution_funcs
    solution_funcs = dict(
        zip(test_names, (pose_solution, text_solution, car_solution))
    )
73 |
74 |
def preprocessing(input_image, height, width):
    """Resize to (height, width), move channels first, and add a batch axis of 1."""
    resized = cv2.resize(input_image, (width, height))
    channels_first = resized.transpose((2, 0, 1))
    return channels_first.reshape(1, 3, height, width)
81 |
82 |
def pose_solution(input_image):
    """Reference preprocessing for the pose estimation model (1x3x256x456)."""
    target_height, target_width = 256, 456
    return preprocessing(input_image, target_height, target_width)
85 |
86 |
def text_solution(input_image):
    """Reference preprocessing for the text detection model (1x3x768x1280)."""
    target_height, target_width = 768, 1280
    return preprocessing(input_image, target_height, target_width)
89 |
90 |
def car_solution(input_image):
    """Reference preprocessing for the car metadata model (1x3x72x72)."""
    target_size = 72
    return preprocessing(input_image, target_size, target_size)
93 |
94 |
def main():
    """Run all three preprocessing tests and print summary feedback."""
    set_solution_functions()
    # bool() maps a None returned by a test whose student code raised to
    # False, so summing the three results can never raise a TypeError.
    counter = sum(bool(result) for result in
                  (test_pose(), test_text(), test_car()))
    feedback(counter)
99 |
100 |
# Run the full test suite only when executed as a script, not on import.
if __name__ == "__main__":
    main()
103 |
--------------------------------------------------------------------------------
/lesson3/home/workspace/tensorflow/Readme.md:
--------------------------------------------------------------------------------
1 | This directory contains the files related to converting a frozen TensorFlow graph into IR using the Model Optimizer.
2 |
--------------------------------------------------------------------------------
/openvino_initialization_script.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """OpenVINO-ColabNotebook.ipynb
3 | Automatically generated by Colaboratory.
4 | Original file is located at
5 | https://colab.research.google.com/github/abhik-99/OpenDevLibrary/blob/master/OpenDevNotebook-CPU-modified.ipynb
6 | """
7 | from subprocess import call
8 |
9 | #Defining the Important Paths
10 |
11 | file_name = "l_openvino_toolkit_p_2020.1.023.tgz" #change the filename if version does not match
12 | dir_name = file_name[:-4]
13 | install_dir = "/opt/intel/openvino/"
14 | deployment_tools = install_dir+"deployment_tools/"
15 | model_optimizer = install_dir+"deployment_tools/model_optimizer/"
16 | model_zoo = deployment_tools+"open_model_zoo/"
17 |
18 | call('wget "https://storage.googleapis.com/open_vino_public/l_openvino_toolkit_p_2020.1.023.tgz"', shell=True)
19 |
20 | call('tar -xzf l_openvino_toolkit_p_2020.1.023.tgz', shell=True)
21 | call('sudo -E %s/install_openvino_dependencies.sh'%(dir_name), shell=True)
22 | call("sed -i 's/decline/accept/g' %s/silent.cfg && sed -i 's/#INTEL_SW_IMPROVEMENT/INTEL_SW_IMPROVEMENT/g' %s/silent.cfg"%(dir_name,dir_name), shell=True)
23 | print("Installed OpenVINO Dependencies. Installing OpenVINO...")
24 | call("sudo %s/install.sh --silent %s/silent.cfg"%(dir_name,dir_name), shell=True)
25 | call("sudo -E %s/install_dependencies/install_openvino_dependencies.sh"%(install_dir), shell=True)
26 | call("source %s/bin/setupvars.sh"%(install_dir), shell=True)
27 | print("ENV Variables Set!")
28 | # frameworks = ['tf','mxnet','onnx','kaldi','all']
29 | # choices = dict(zip(range(1,6),frameworks))
30 |
31 | # print("""Please enter the Choice of framework you want to work with:
32 | # \n(Note: You should only install for the ones you would be using.
33 | # Incase of needing to install for more than one but not all, rerun this cell and
34 | # install the pre-requisites one by one.)
35 | # """)
36 |
37 | # for x in choices:
38 | # print(x,choices[x])
39 |
40 | # choice = input("Please enter your choice (Default Option - 5): ")
41 | # if len(choice) == 0:
42 | # choice = 5
43 | # elif choice in '1 2 3 4'.split():
44 | # choice = int(choice)
45 | # if choice>5:
46 | # print("You have entered an invalid choice! Please rerun the script.")
47 |
48 | # print("Choice is",choice,":",choices[choice])
49 | # if choice != 5:
50 | # pre_install = model_optimizer + "install_prerequisites/install_prerequisites.sh "+choices[choice]
51 | # call("sudo %s"%(pre_install), shell=True)
52 | # elif choice == 5:
53 | # # for x in choices:
54 | # # pre_install = model_optimizer + "install_prerequisites/install_prerequisites.sh "+choices[x]
55 | # !sudo $pre_install
56 | call("sudo %s/install_prerequisites/install_prerequisites.sh"%(model_optimizer), shell=True)
57 | # else:
58 | # print("Wrong Choice! Please rerun this cell and enter the correct choice!")
59 |
60 | call("sudo %s/demo/demo_squeezenet_download_convert_run.sh"%(deployment_tools), shell=True)
61 |
62 | print("\n\nIntel OpenVINO Installation Fisnished!")
63 | print("Please remember the following paths :-")
64 | print("Install Dir -", install_dir)
65 | print("Model Optimizer -", model_optimizer)
66 | print("Deployment Tools -", deployment_tools)
67 | print("Model Zoo -", model_zoo)
68 |
69 | print("It is recommended that you save these paths to variables in the Python runtime/Jupyter Notebooks")
70 |
--------------------------------------------------------------------------------