├── .gitignore
├── Display_Images.xml
├── LICENSE
├── LICENSE_peopleblocker
├── MXRECORD.json
├── MovidiusIngest.xml
├── MovidiusServerAll.xml
├── README.md
├── Store_Images.xml
├── all.py
├── analyze.py
├── buildconfig.sh
├── buildpage.sh
├── classify_image.py
├── config.yml
├── displayimages.xml
├── gluon2.avsc
├── gluon2.py
├── gluoncv.avsc
├── gluoncv_image_processed_20180615202056_8eb9c885-2cf7-4591-8d4a-f0962b2e9cb1.jpg
├── gluoncvexample.json
├── gluoncvflow1.png
├── httpserver.xml
├── images.html
├── index.sh
├── minifimovidius.xml
├── mxclassify.sh
├── mxrecordschemawithopencv.png
├── nifi_gluoncv_sd.py
├── nifigluon2.py
├── pb.py
├── run.sh
├── run2.sh
├── run3.sh
├── rungluon2.sh
└── testcv.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/LICENSE_peopleblocker:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Max Woolf
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 | ---
24 |
25 | Mask R-CNN
26 |
27 | The MIT License (MIT)
28 |
29 | Copyright (c) 2017 Matterport, Inc.
30 |
--------------------------------------------------------------------------------
/MXRECORD.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "record",
3 | "name": "MXRECORD",
4 | "fields": [
5 | {
6 | "name": "filename",
7 | "type": "string",
8 | "doc": "Type inferred from '\"tx1_image_uhh_20180328184728.jpg\"'"
9 | },
10 | {
11 | "name": "top1pct",
12 | "type": "string",
13 | "doc": "Type inferred from '\"30.0\"'"
14 | },
15 | {
16 | "name": "top5",
17 | "type": "string",
18 | "doc": "Type inferred from '\"n03126707 crane\"'"
19 | },
20 | {
21 | "name": "top4",
22 | "type": "string",
23 | "doc": "Type inferred from '\"n03977966 police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria\"'"
24 | },
25 | {
26 | "name": "top3",
27 | "type": "string",
28 | "doc": "Type inferred from '\"n03417042 garbage truck, dustcart\"'"
29 | },
30 | {
31 | "name": "top2",
32 | "type": "string",
33 | "doc": "Type inferred from '\"n02930766 cab, hack, taxi, taxicab\"'"
34 | },
35 | {
36 | "name": "top1",
37 | "type": "string",
38 | "doc": "Type inferred from '\"n02977058 cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM\"'"
39 | },
40 | {
41 | "name": "y",
42 | "type": "string",
43 | "doc": "Type inferred from '\"0\"'"
44 | },
45 | {
46 | "name": "host",
47 | "type": "string",
48 | "doc": "Type inferred from '\"tegra-ubuntu\"'"
49 | },
50 | {
51 | "name": "h",
52 | "type": "string",
53 | "doc": "Type inferred from '\"0\"'"
54 | },
55 | {
56 | "name": "top2pct",
57 | "type": "string",
58 | "doc": "Type inferred from '\"4.8\"'"
59 | },
60 | {
61 | "name": "cputemp",
62 | "type": "double",
63 | "doc": "Type inferred from '30.0'"
64 | },
65 | {
66 | "name": "endtime",
67 | "type": "string",
68 | "doc": "Type inferred from '\"2018-03-28 18:47:36\"'"
69 | },
70 | {
71 | "name": "ipaddress",
72 | "type": "string",
73 | "doc": "Type inferred from '\"192.168.1.190\"'"
74 | },
75 | {
76 | "name": "imagefilename",
77 | "type": "string",
78 | "doc": "Type inferred from '\"tx1_image_uhh_20180328184728.jpg\"'"
79 | },
80 | {
81 | "name": "top3pct",
82 | "type": "string",
83 | "doc": "Type inferred from '\"3.7\"'"
84 | },
85 | {
86 | "name": "uuid",
87 | "type": "string",
88 | "doc": "Type inferred from '\"mxnet_uuid_dcx_20180328184727\"'"
89 | },
90 | {
91 | "name": "facedetect",
92 | "type": "string",
93 | "doc": "Type inferred from '\"[GIE] layer network time - 28.215418 ms|detectnet-console: finished processing network (1522262852823)|0 bounding boxes detected|detectnet-console: writing 640x480 image to '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/fcout-tx1_image_uhh_20180328184728.jpg'|detectnet-console: successfully wrote 640x480 image to '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/fcout-tx1_image_uhh_20180328184728.jpg'||shutting down...|\"'"
94 | },
95 | {
96 | "name": "diskfree",
97 | "type": "string",
98 | "doc": "Type inferred from '\"2931.8 MB\"'"
99 | },
100 | {
101 | "name": "cvfilename",
102 | "type": "string",
103 | "doc": "Type inferred from '\"\"'"
104 | },
105 | {
106 | "name": "ts",
107 | "type": "string",
108 | "doc": "Type inferred from '\"2018-03-28 18:47:26\"'"
109 | },
110 | {
111 | "name": "top4pct",
112 | "type": "string",
113 | "doc": "Type inferred from '\"3.5\"'"
114 | },
115 | {
116 | "name": "gputempf",
117 | "type": "string",
118 | "doc": "Type inferred from '\"73.0\"'"
119 | },
120 | {
121 | "name": "gputemp",
122 | "type": "string",
123 | "doc": "Type inferred from '\"22.5\"'"
124 | },
125 | {
126 | "name": "top5pct",
127 | "type": "string",
128 | "doc": "Type inferred from '\"2.5\"'"
129 | },
130 | {
131 | "name": "w",
132 | "type": "string",
133 | "doc": "Type inferred from '\"0\"'"
134 | },
135 | {
136 | "name": "memory",
137 | "type": "double",
138 | "doc": "Type inferred from '29.7'"
139 | },
140 | {
141 | "name": "imagenet",
142 | "type": "string",
143 | "doc": "Type inferred from '\"class 0481 - 0.039276 (cassette)|class 0482 - 0.025558 (cassette player)|class 0492 - 0.028412 (chest)|class 0519 - 0.016373 (crate)|class 0530 - 0.047028 (digital clock)|class 0531 - 0.021957 (digital watch)|class 0579 - 0.018341 (grand piano, grand)|class 0598 - 0.034119 (home theater, home theatre)|class 0613 - 0.015030 (joystick)|class 0620 - 0.012413 (laptop, laptop computer)|class 0632 - 0.021866 (loudspeaker, speaker, speaker unit, loudspeaker system, speaker system)|class 0662 - 0.010910 (modem)|class 0710 - 0.012657 (pencil sharpener)|class 0713 - 0.010574 (photocopier)|class 0732 - 0.043488 (Polaroid camera, Polaroid Land camera)|class 0742 - 0.038239 (printer)|class 0745 - 0.063049 (projector)|class 0754 - 0.014793 (radio, wireless)|class 0761 - 0.013901 (remote control, remote)|class 0848 - 0.031204 (tape player)|class 0851 - 0.045227 (television, television system)|class 0882 - 0.017990 (vacuum, vacuum cleaner)|imagenet-console: '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/tx1_image_uhh_20180328184728.jpg' -> 6.30493% class #745 (projector)|loaded image fontmapA.png (256 x 512) 2097152 bytes|[cuda] cudaAllocMapped 2097152 bytes, CPU 0x101500000 GPU 0x101500000|[cuda] cudaAllocMapped 8192 bytes, CPU 0x100e22000 GPU 0x100e22000|imagenet-console: attempting to save output image to '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/cfout-tx1_image_uhh_20180328184728.jpg'|imagenet-console: completed saving '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/cfout-tx1_image_uhh_20180328184728.jpg'||shutting down...|\"'"
144 | },
145 | {
146 | "name": "x",
147 | "type": "string",
148 | "doc": "Type inferred from '\"0\"'"
149 | },
150 | {
151 | "name": "cvface",
152 | "type": "string",
153 | "doc": "Type inferred from '\"\"'"
154 | },
155 | {
156 | "name": "runtime",
157 | "type": "string",
158 | "doc": "Type inferred from '\"9.0\"'"
159 | },
160 | {
161 | "name": "cputempf",
162 | "type": "string",
163 | "doc": "Type inferred from '\"78.0\"'"
164 | }
165 | ]
166 | }
167 |
--------------------------------------------------------------------------------
/MovidiusIngest.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | dfd41f8e-015f-1000-f542-c7e0bc412206
5 | MovidiusIngest
6 |
7 |
8 | 536ac54b-f3e5-3c73-0000-000000000000
9 | d94c58fd-a3f4-3d32-0000-000000000000
10 | 1 GB
11 | 10000
12 |
13 | 05d3395a-cb38-3c2b-0000-000000000000
14 | 773eac68-98d1-3125-0000-000000000000
15 | INPUT_PORT
16 |
17 | 0 sec
18 | 1
19 |
20 |
21 | d94c58fd-a3f4-3d32-0000-000000000000
22 | e0fd3cb1-5a4a-3b8f-0000-000000000000
23 | INPUT_PORT
24 |
25 | 0
26 |
27 |
28 | 9a96d4a8-a40f-3e24-0000-000000000000
29 | d94c58fd-a3f4-3d32-0000-000000000000
30 |
31 | nifi-hwx-schema-registry-nar
32 | org.apache.nifi
33 | 1.5.0-SNAPSHOT
34 |
35 |
36 |
37 |
38 | url
39 |
40 | url
41 |
42 |
43 |
44 | cache-size
45 |
46 | cache-size
47 |
48 |
49 |
50 | cache-expiration
51 |
52 | cache-expiration
53 |
54 |
55 |
56 | HortonworksSchemaRegistry
57 | false
58 |
59 |
60 | url
61 | http://172.26.247.222:7788/api/v1
62 |
63 |
64 | cache-size
65 | 1000
66 |
67 |
68 | cache-expiration
69 | 1 hour
70 |
71 |
72 | ENABLED
73 | org.apache.nifi.schemaregistry.hortonworks.HortonworksSchemaRegistry
74 |
75 |
76 | 1aa4a949-0b37-35cb-0000-000000000000
77 | d94c58fd-a3f4-3d32-0000-000000000000
78 |
79 | nifi-record-serialization-services-nar
80 | org.apache.nifi
81 | 1.5.0-SNAPSHOT
82 |
83 |
84 |
85 |
86 | schema-access-strategy
87 |
88 | schema-access-strategy
89 |
90 |
91 |
92 | schema-registry
93 |
94 | org.apache.nifi.schemaregistry.services.SchemaRegistry
95 | schema-registry
96 |
97 |
98 |
99 | schema-name
100 |
101 | schema-name
102 |
103 |
104 |
105 | schema-text
106 |
107 | schema-text
108 |
109 |
110 |
111 | Date Format
112 |
113 | Date Format
114 |
115 |
116 |
117 | Time Format
118 |
119 | Time Format
120 |
121 |
122 |
123 | Timestamp Format
124 |
125 | Timestamp Format
126 |
127 |
128 |
129 | JsonTreeReader
130 | false
131 |
132 |
133 | schema-access-strategy
134 | schema-name
135 |
136 |
137 | schema-registry
138 | 9a96d4a8-a40f-3e24-0000-000000000000
139 |
140 |
141 | schema-name
142 | ${schema.name}
143 |
144 |
145 | schema-text
146 | ${avro.schema}
147 |
148 |
149 | Date Format
150 |
151 |
152 | Time Format
153 |
154 |
155 | Timestamp Format
156 |
157 |
158 | ENABLED
159 | org.apache.nifi.json.JsonTreeReader
160 |
161 |
162 | 70cea84f-0933-3100-0000-000000000000
163 | d94c58fd-a3f4-3d32-0000-000000000000
164 |
165 | nifi-record-serialization-services-nar
166 | org.apache.nifi
167 | 1.5.0-SNAPSHOT
168 |
169 |
170 |
171 |
172 | Schema Write Strategy
173 |
174 | Schema Write Strategy
175 |
176 |
177 |
178 | schema-access-strategy
179 |
180 | schema-access-strategy
181 |
182 |
183 |
184 | schema-registry
185 |
186 | org.apache.nifi.schemaregistry.services.SchemaRegistry
187 | schema-registry
188 |
189 |
190 |
191 | schema-name
192 |
193 | schema-name
194 |
195 |
196 |
197 | schema-text
198 |
199 | schema-text
200 |
201 |
202 |
203 | compression-format
204 |
205 | compression-format
206 |
207 |
208 |
209 | AvroRecordSetWriter Embed for Save
210 | false
211 |
212 |
213 | Schema Write Strategy
214 | avro-embedded
215 |
216 |
217 | schema-access-strategy
218 | schema-name
219 |
220 |
221 | schema-registry
222 | 9a96d4a8-a40f-3e24-0000-000000000000
223 |
224 |
225 | schema-name
226 | ${schema.name}
227 |
228 |
229 | schema-text
230 | ${avro.schema}
231 |
232 |
233 | compression-format
234 | NONE
235 |
236 |
237 | ENABLED
238 | org.apache.nifi.avro.AvroRecordSetWriter
239 |
240 |
241 | e0fd3cb1-5a4a-3b8f-0000-000000000000
242 | d94c58fd-a3f4-3d32-0000-000000000000
243 |
244 | 59.657995877187545
245 | 0.0
246 |
247 | 1
248 | Movidius Input
249 | RUNNING
250 | false
251 | INPUT_PORT
252 |
253 |
254 | 05d3395a-cb38-3c2b-0000-000000000000
255 | d94c58fd-a3f4-3d32-0000-000000000000
256 |
257 | 0.0
258 | 144.59495328357434
259 |
260 |
261 |
262 |
263 | 8d5b7265-4bc5-3841-0000-000000000000
264 | 05d3395a-cb38-3c2b-0000-000000000000
265 | 1 GB
266 | 10000
267 |
268 | 05d3395a-cb38-3c2b-0000-000000000000
269 | e6b5f4c1-deab-3ba8-0000-000000000000
270 | PROCESSOR
271 |
272 | 0 sec
273 | 1
274 |
275 | notImage
276 |
277 | 05d3395a-cb38-3c2b-0000-000000000000
278 | 4df2fc33-1b7a-32c7-0000-000000000000
279 | PROCESSOR
280 |
281 | 0
282 |
283 |
284 | 9bf7c58f-6bda-3ae5-0000-000000000000
285 | 05d3395a-cb38-3c2b-0000-000000000000
286 | 0 MB
287 | 0
288 |
289 | 05d3395a-cb38-3c2b-0000-000000000000
290 | 4e1c12c1-0d8d-38ae-0000-000000000000
291 | PROCESSOR
292 |
293 | 0 sec
294 | 1
295 |
296 | success
297 |
298 | 05d3395a-cb38-3c2b-0000-000000000000
299 | 11dbe2c0-c507-35fc-0000-000000000000
300 | PROCESSOR
301 |
302 | 0
303 |
304 |
305 | a9aa9a65-efd3-36e1-0000-000000000000
306 | 05d3395a-cb38-3c2b-0000-000000000000
307 | 1 GB
308 | 10000
309 |
310 | 05d3395a-cb38-3c2b-0000-000000000000
311 | a98c5595-aaba-37e7-0000-000000000000
312 | PROCESSOR
313 |
314 | 0 sec
315 | 1
316 |
317 | success
318 |
319 | 05d3395a-cb38-3c2b-0000-000000000000
320 | e6b5f4c1-deab-3ba8-0000-000000000000
321 | PROCESSOR
322 |
323 | 0
324 |
325 |
326 | decb9ef3-c007-3a80-0000-000000000000
327 | 05d3395a-cb38-3c2b-0000-000000000000
328 | 1 GB
329 | 10000
330 |
331 | 05d3395a-cb38-3c2b-0000-000000000000
332 | 912ea51b-18d0-3d91-0000-000000000000
333 | PROCESSOR
334 |
335 | 0 sec
336 | 1
337 |
338 | success
339 |
340 | 05d3395a-cb38-3c2b-0000-000000000000
341 | a98c5595-aaba-37e7-0000-000000000000
342 | PROCESSOR
343 |
344 | 0
345 |
346 |
347 | efef9f57-1e72-32df-0000-000000000000
348 | 05d3395a-cb38-3c2b-0000-000000000000
349 | 1 GB
350 | 10000
351 |
352 | 05d3395a-cb38-3c2b-0000-000000000000
353 | b31f774a-ec3f-3216-0000-000000000000
354 | PROCESSOR
355 |
356 | 0 sec
357 | 1
358 |
359 | success
360 |
361 | 05d3395a-cb38-3c2b-0000-000000000000
362 | 4e1c12c1-0d8d-38ae-0000-000000000000
363 | PROCESSOR
364 |
365 | 0
366 |
367 |
368 | fd63abf2-9830-3466-0000-000000000000
369 | 05d3395a-cb38-3c2b-0000-000000000000
370 | 1 GB
371 | 10000
372 |
373 | 05d3395a-cb38-3c2b-0000-000000000000
374 | 4df2fc33-1b7a-32c7-0000-000000000000
375 | PROCESSOR
376 |
377 | 0 sec
378 | 1
379 |
380 |
381 | 05d3395a-cb38-3c2b-0000-000000000000
382 | 773eac68-98d1-3125-0000-000000000000
383 | INPUT_PORT
384 |
385 | 0
386 |
387 |
388 | 2e0d1257-6a37-33b6-0000-000000000000
389 | 05d3395a-cb38-3c2b-0000-000000000000
390 | 1 GB
391 | 10000
392 |
393 | 05d3395a-cb38-3c2b-0000-000000000000
394 | 11dbe2c0-c507-35fc-0000-000000000000
395 | PROCESSOR
396 |
397 | 0 sec
398 | 1
399 |
400 | merged
401 |
402 | 05d3395a-cb38-3c2b-0000-000000000000
403 | 2b13178d-09f3-385a-0000-000000000000
404 | PROCESSOR
405 |
406 | 0
407 |
408 |
409 | 32caf7dd-a9a0-3449-0000-000000000000
410 | 05d3395a-cb38-3c2b-0000-000000000000
411 | 1 GB
412 | 10000
413 |
414 | 05d3395a-cb38-3c2b-0000-000000000000
415 | cd4fb737-c687-3096-0000-000000000000
416 | PROCESSOR
417 |
418 | 0 sec
419 | 1
420 |
421 | isImage
422 |
423 | 05d3395a-cb38-3c2b-0000-000000000000
424 | 4df2fc33-1b7a-32c7-0000-000000000000
425 | PROCESSOR
426 |
427 | 0
428 |
429 |
430 | 58ab61d5-9200-32b2-0000-000000000000
431 | 05d3395a-cb38-3c2b-0000-000000000000
432 | 1 GB
433 | 10000
434 |
435 | 05d3395a-cb38-3c2b-0000-000000000000
436 | 2b13178d-09f3-385a-0000-000000000000
437 | PROCESSOR
438 |
439 | 0 sec
440 | 1
441 |
442 | success
443 |
444 | 05d3395a-cb38-3c2b-0000-000000000000
445 | 912ea51b-18d0-3d91-0000-000000000000
446 | PROCESSOR
447 |
448 | 0
449 |
450 |
451 | 773eac68-98d1-3125-0000-000000000000
452 | 05d3395a-cb38-3c2b-0000-000000000000
453 |
454 | 700.6135537778479
455 | 62.19028026739761
456 |
457 | 1
458 | Movidius Input
459 | RUNNING
460 | INPUT_PORT
461 |
462 |
463 | 912ea51b-18d0-3d91-0000-000000000000
464 | 05d3395a-cb38-3c2b-0000-000000000000
465 |
466 | 606.7933633481604
467 | 1057.6917565552394
468 |
469 |
470 | nifi-standard-nar
471 | org.apache.nifi
472 | 1.5.0-SNAPSHOT
473 |
474 |
475 | WARN
476 |
477 | 1
478 |
479 |
480 | record-reader
481 |
482 | org.apache.nifi.serialization.RecordReaderFactory
483 | record-reader
484 |
485 |
486 |
487 | record-writer
488 |
489 | org.apache.nifi.serialization.RecordSetWriterFactory
490 | record-writer
491 |
492 |
493 |
494 | ALL
495 | false
496 | 30 sec
497 |
498 |
499 | record-reader
500 | 1aa4a949-0b37-35cb-0000-000000000000
501 |
502 |
503 | record-writer
504 | 70cea84f-0933-3100-0000-000000000000
505 |
506 |
507 | 0
508 | 0 sec
509 | TIMER_DRIVEN
510 | 1 sec
511 |
512 | ConvertRecord
513 |
514 | true
515 | failure
516 |
517 |
518 | false
519 | success
520 |
521 | STOPPED
522 |
523 | org.apache.nifi.processors.standard.ConvertRecord
524 |
525 |
526 | a98c5595-aaba-37e7-0000-000000000000
527 | 05d3395a-cb38-3c2b-0000-000000000000
528 |
529 | 608.0000283872229
530 | 823.1115563599269
531 |
532 |
533 | nifi-update-attribute-nar
534 | org.apache.nifi
535 | 1.5.0-SNAPSHOT
536 |
537 |
538 | WARN
539 |
540 | 1
541 |
542 |
543 | Delete Attributes Expression
544 |
545 | Delete Attributes Expression
546 |
547 |
548 |
549 | Store State
550 |
551 | Store State
552 |
553 |
554 |
555 | Stateful Variables Initial Value
556 |
557 | Stateful Variables Initial Value
558 |
559 |
560 |
561 | content-type
562 |
563 | content-type
564 |
565 |
566 |
567 | mime.type
568 |
569 | mime.type
570 |
571 |
572 |
573 | schema
574 |
575 | schema
576 |
577 |
578 |
579 | schema.name
580 |
581 | schema.name
582 |
583 |
584 |
585 | ALL
586 | false
587 | 30 sec
588 |
589 |
590 | Delete Attributes Expression
591 |
592 |
593 | Store State
594 | Do not store state
595 |
596 |
597 | Stateful Variables Initial Value
598 |
599 |
600 | content-type
601 | text/json
602 |
603 |
604 | mime.type
605 | text/json
606 |
607 |
608 | schema
609 | movidiussense
610 |
611 |
612 | schema.name
613 | movidiussense
614 |
615 |
616 | 0
617 | 0 sec
618 | TIMER_DRIVEN
619 | 1 sec
620 |
621 | Set Schema
622 |
623 | false
624 | success
625 |
626 | RUNNING
627 |
628 | org.apache.nifi.processors.attributes.UpdateAttribute
629 |
630 |
631 | b31f774a-ec3f-3216-0000-000000000000
632 | 05d3395a-cb38-3c2b-0000-000000000000
633 |
634 | 601.6200235044104
635 | 1667.876565150285
636 |
637 |
638 | nifi-hadoop-nar
639 | org.apache.nifi
640 | 1.5.0-SNAPSHOT
641 |
642 |
643 | WARN
644 |
645 | 1
646 |
647 |
648 | Hadoop Configuration Resources
649 |
650 | Hadoop Configuration Resources
651 |
652 |
653 |
654 | Kerberos Principal
655 |
656 | Kerberos Principal
657 |
658 |
659 |
660 | Kerberos Keytab
661 |
662 | Kerberos Keytab
663 |
664 |
665 |
666 | Kerberos Relogin Period
667 |
668 | Kerberos Relogin Period
669 |
670 |
671 |
672 | Additional Classpath Resources
673 |
674 | Additional Classpath Resources
675 |
676 |
677 |
678 | Directory
679 |
680 | Directory
681 |
682 |
683 |
684 | Conflict Resolution Strategy
685 |
686 | Conflict Resolution Strategy
687 |
688 |
689 |
690 | Block Size
691 |
692 | Block Size
693 |
694 |
695 |
696 | IO Buffer Size
697 |
698 | IO Buffer Size
699 |
700 |
701 |
702 | Replication
703 |
704 | Replication
705 |
706 |
707 |
708 | Permissions umask
709 |
710 | Permissions umask
711 |
712 |
713 |
714 | Remote Owner
715 |
716 | Remote Owner
717 |
718 |
719 |
720 | Remote Group
721 |
722 | Remote Group
723 |
724 |
725 |
726 | Compression codec
727 |
728 | Compression codec
729 |
730 |
731 |
732 | ALL
733 | false
734 | 30 sec
735 |
736 |
737 | Hadoop Configuration Resources
738 | /etc/hadoop/conf/core-site.xml
739 |
740 |
741 | Kerberos Principal
742 |
743 |
744 | Kerberos Keytab
745 |
746 |
747 | Kerberos Relogin Period
748 | 4 hours
749 |
750 |
751 | Additional Classpath Resources
752 |
753 |
754 | Directory
755 | /movidiussense
756 |
757 |
758 | Conflict Resolution Strategy
759 | replace
760 |
761 |
762 | Block Size
763 |
764 |
765 | IO Buffer Size
766 |
767 |
768 | Replication
769 |
770 |
771 | Permissions umask
772 |
773 |
774 | Remote Owner
775 |
776 |
777 | Remote Group
778 |
779 |
780 | Compression codec
781 | NONE
782 |
783 |
784 | 0
785 | 0 sec
786 | TIMER_DRIVEN
787 | 1 sec
788 |
789 | Save To HDFS
790 |
791 | true
792 | failure
793 |
794 |
795 | true
796 | success
797 |
798 | STOPPED
799 |
800 | org.apache.nifi.processors.hadoop.PutHDFS
801 |
802 |
803 | cd4fb737-c687-3096-0000-000000000000
804 | 05d3395a-cb38-3c2b-0000-000000000000
805 |
806 | 1179.7990079787978
807 | 550.2416183681777
808 |
809 |
810 | nifi-standard-nar
811 | org.apache.nifi
812 | 1.5.0-SNAPSHOT
813 |
814 |
815 | WARN
816 |
817 | 1
818 |
819 |
820 | Directory
821 |
822 | Directory
823 |
824 |
825 |
826 | Conflict Resolution Strategy
827 |
828 | Conflict Resolution Strategy
829 |
830 |
831 |
832 | Create Missing Directories
833 |
834 | Create Missing Directories
835 |
836 |
837 |
838 | Maximum File Count
839 |
840 | Maximum File Count
841 |
842 |
843 |
844 | Last Modified Time
845 |
846 | Last Modified Time
847 |
848 |
849 |
850 | Permissions
851 |
852 | Permissions
853 |
854 |
855 |
856 | Owner
857 |
858 | Owner
859 |
860 |
861 |
862 | Group
863 |
864 | Group
865 |
866 |
867 |
868 | ALL
869 | false
870 | 30 sec
871 |
872 |
873 | Directory
874 |
875 |
876 | Conflict Resolution Strategy
877 | fail
878 |
879 |
880 | Create Missing Directories
881 | true
882 |
883 |
884 | Maximum File Count
885 |
886 |
887 | Last Modified Time
888 |
889 |
890 | Permissions
891 |
892 |
893 | Owner
894 |
895 |
896 | Group
897 |
898 |
899 | 0
900 | 0 sec
901 | TIMER_DRIVEN
902 | 1 sec
903 |
904 | PutFile
905 |
906 | false
907 | failure
908 |
909 |
910 | false
911 | success
912 |
913 | STOPPED
914 |
915 | org.apache.nifi.processors.standard.PutFile
916 |
917 |
918 | e6b5f4c1-deab-3ba8-0000-000000000000
919 | 05d3395a-cb38-3c2b-0000-000000000000
920 |
921 | 610.2067544614416
922 | 580.7724145142238
923 |
924 |
925 | nifi-update-attribute-nar
926 | org.apache.nifi
927 | 1.5.0-SNAPSHOT
928 |
929 |
930 | WARN
931 |
932 | 1
933 |
934 |
935 | Delete Attributes Expression
936 |
937 | Delete Attributes Expression
938 |
939 |
940 |
941 | Store State
942 |
943 | Store State
944 |
945 |
946 |
947 | Stateful Variables Initial Value
948 |
949 | Stateful Variables Initial Value
950 |
951 |
952 |
953 | source
954 |
955 | source
956 |
957 |
958 |
959 | ALL
960 | false
961 | 30 sec
962 |
963 |
964 | Delete Attributes Expression
965 |
966 |
967 | Store State
968 | Do not store state
969 |
970 |
971 | Stateful Variables Initial Value
972 |
973 |
974 | source
975 | remote
976 |
977 |
978 | 0
979 | 0 sec
980 | TIMER_DRIVEN
981 | 1 sec
982 |
983 | UpdateAttribute
984 |
985 | false
986 | success
987 |
988 | STOPPED
989 |
990 | org.apache.nifi.processors.attributes.UpdateAttribute
991 |
992 |
993 | 11dbe2c0-c507-35fc-0000-000000000000
994 | 05d3395a-cb38-3c2b-0000-000000000000
995 |
996 | 602.0000283872229
997 | 1330.9913942563408
998 |
999 |
1000 | nifi-hive-nar
1001 | org.apache.nifi
1002 | 1.5.0-SNAPSHOT
1003 |
1004 |
1005 | WARN
1006 |
1007 | 1
1008 |
1009 |
1010 | orc-config-resources
1011 |
1012 | orc-config-resources
1013 |
1014 |
1015 |
1016 | orc-stripe-size
1017 |
1018 | orc-stripe-size
1019 |
1020 |
1021 |
1022 | orc-buffer-size
1023 |
1024 | orc-buffer-size
1025 |
1026 |
1027 |
1028 | orc-compression-type
1029 |
1030 | orc-compression-type
1031 |
1032 |
1033 |
1034 | orc-hive-table-name
1035 |
1036 | orc-hive-table-name
1037 |
1038 |
1039 |
1040 | ALL
1041 | false
1042 | 30 sec
1043 |
1044 |
1045 | orc-config-resources
1046 | /etc/hive/conf/hive-site.xml
1047 |
1048 |
1049 | orc-stripe-size
1050 | 64 MB
1051 |
1052 |
1053 | orc-buffer-size
1054 | 10 KB
1055 |
1056 |
1057 | orc-compression-type
1058 | NONE
1059 |
1060 |
1061 | orc-hive-table-name
1062 | movidiussense
1063 |
1064 |
1065 | 0
1066 | 0 sec
1067 | TIMER_DRIVEN
1068 | 1 sec
1069 |
1070 | ConvertAvroToORC
1071 |
1072 | true
1073 | failure
1074 |
1075 |
1076 | false
1077 | success
1078 |
1079 | RUNNING
1080 |
1081 | org.apache.nifi.processors.hive.ConvertAvroToORC
1082 |
1083 |
1084 | 2b13178d-09f3-385a-0000-000000000000
1085 | 05d3395a-cb38-3c2b-0000-000000000000
1086 |
1087 | 1178.2424737468514
1088 | 1169.7352065640575
1089 |
1090 |
1091 | nifi-standard-nar
1092 | org.apache.nifi
1093 | 1.5.0-SNAPSHOT
1094 |
1095 |
1096 | WARN
1097 |
1098 | 1
1099 |
1100 |
1101 | Merge Strategy
1102 |
1103 | Merge Strategy
1104 |
1105 |
1106 |
1107 | Merge Format
1108 |
1109 | Merge Format
1110 |
1111 |
1112 |
1113 | Attribute Strategy
1114 |
1115 | Attribute Strategy
1116 |
1117 |
1118 |
1119 | Correlation Attribute Name
1120 |
1121 | Correlation Attribute Name
1122 |
1123 |
1124 |
1125 | mergecontent-metadata-strategy
1126 |
1127 | mergecontent-metadata-strategy
1128 |
1129 |
1130 |
1131 | Minimum Number of Entries
1132 |
1133 | Minimum Number of Entries
1134 |
1135 |
1136 |
1137 | Maximum Number of Entries
1138 |
1139 | Maximum Number of Entries
1140 |
1141 |
1142 |
1143 | Minimum Group Size
1144 |
1145 | Minimum Group Size
1146 |
1147 |
1148 |
1149 | Maximum Group Size
1150 |
1151 | Maximum Group Size
1152 |
1153 |
1154 |
1155 | Max Bin Age
1156 |
1157 | Max Bin Age
1158 |
1159 |
1160 |
1161 | Maximum number of Bins
1162 |
1163 | Maximum number of Bins
1164 |
1165 |
1166 |
1167 | Delimiter Strategy
1168 |
1169 | Delimiter Strategy
1170 |
1171 |
1172 |
1173 | Header File
1174 |
1175 | Header File
1176 |
1177 |
1178 |
1179 | Footer File
1180 |
1181 | Footer File
1182 |
1183 |
1184 |
1185 | Demarcator File
1186 |
1187 | Demarcator File
1188 |
1189 |
1190 |
1191 | Compression Level
1192 |
1193 | Compression Level
1194 |
1195 |
1196 |
1197 | Keep Path
1198 |
1199 | Keep Path
1200 |
1201 |
1202 |
1203 | Tar Modified Time
1204 |
1205 | Tar Modified Time
1206 |
1207 |
1208 |
1209 | ALL
1210 | false
1211 | 30 sec
1212 |
1213 |
1214 | Merge Strategy
1215 | Bin-Packing Algorithm
1216 |
1217 |
1218 | Merge Format
1219 | Avro
1220 |
1221 |
1222 | Attribute Strategy
1223 | Keep Only Common Attributes
1224 |
1225 |
1226 | Correlation Attribute Name
1227 |
1228 |
1229 | mergecontent-metadata-strategy
1230 | Do Not Merge Uncommon Metadata
1231 |
1232 |
1233 | Minimum Number of Entries
1234 | 4
1235 |
1236 |
1237 | Maximum Number of Entries
1238 | 1000
1239 |
1240 |
1241 | Minimum Group Size
1242 | 0 B
1243 |
1244 |
1245 | Maximum Group Size
1246 |
1247 |
1248 | Max Bin Age
1249 |
1250 |
1251 | Maximum number of Bins
1252 | 5
1253 |
1254 |
1255 | Delimiter Strategy
1256 | Filename
1257 |
1258 |
1259 | Header File
1260 |
1261 |
1262 | Footer File
1263 |
1264 |
1265 | Demarcator File
1266 |
1267 |
1268 | Compression Level
1269 | 1
1270 |
1271 |
1272 | Keep Path
1273 | false
1274 |
1275 |
1276 | Tar Modified Time
1277 | ${file.lastModifiedTime}
1278 |
1279 |
1280 | 0
1281 | 0 sec
1282 | TIMER_DRIVEN
1283 | 1 sec
1284 |
1285 | MergeContent
1286 |
1287 | true
1288 | failure
1289 |
1290 |
1291 | false
1292 | merged
1293 |
1294 |
1295 | true
1296 | original
1297 |
1298 | RUNNING
1299 |
1300 | org.apache.nifi.processors.standard.MergeContent
1301 |
1302 |
1303 | 4df2fc33-1b7a-32c7-0000-000000000000
1304 | 05d3395a-cb38-3c2b-0000-000000000000
1305 |
1306 | 652.4828684995746
1307 | 252.19331619015213
1308 |
1309 |
1310 | nifi-standard-nar
1311 | org.apache.nifi
1312 | 1.5.0-SNAPSHOT
1313 |
1314 |
1315 | WARN
1316 |
1317 | 1
1318 |
1319 |
1320 | Routing Strategy
1321 |
1322 | Routing Strategy
1323 |
1324 |
1325 |
1326 | isImage
1327 |
1328 | isImage
1329 |
1330 |
1331 |
1332 | notImage
1333 |
1334 | notImage
1335 |
1336 |
1337 |
1338 | ALL
1339 | false
1340 | 30 sec
1341 |
1342 |
1343 | Routing Strategy
1344 | Route to Property name
1345 |
1346 |
1347 | isImage
1348 | ${filename:contains('jpg')}
1349 |
1350 |
1351 | notImage
1352 | ${filename:contains('jpg'):not()}
1353 |
1354 |
1355 | 0
1356 | 0 sec
1357 | TIMER_DRIVEN
1358 | 1 sec
1359 |
1360 | RouteOnAttribute
1361 |
1362 | false
1363 | isImage
1364 |
1365 |
1366 | false
1367 | notImage
1368 |
1369 |
1370 | true
1371 | unmatched
1372 |
1373 | RUNNING
1374 |
1375 | org.apache.nifi.processors.standard.RouteOnAttribute
1376 |
1377 |
1378 | 4e1c12c1-0d8d-38ae-0000-000000000000
1379 | 05d3395a-cb38-3c2b-0000-000000000000
1380 |
1381 | 1173.6556817546639
1382 | 1475.6814956265575
1383 |
1384 |
1385 | nifi-standard-nar
1386 | org.apache.nifi
1387 | 1.5.0-SNAPSHOT
1388 |
1389 |
1390 | WARN
1391 |
1392 | 1
1393 |
1394 |
1395 | Rate Control Criteria
1396 |
1397 | Rate Control Criteria
1398 |
1399 |
1400 |
1401 | Maximum Rate
1402 |
1403 | Maximum Rate
1404 |
1405 |
1406 |
1407 | Rate Controlled Attribute
1408 |
1409 | Rate Controlled Attribute
1410 |
1411 |
1412 |
1413 | Time Duration
1414 |
1415 | Time Duration
1416 |
1417 |
1418 |
1419 | Grouping Attribute
1420 |
1421 | Grouping Attribute
1422 |
1423 |
1424 |
1425 | ALL
1426 | false
1427 | 30 sec
1428 |
1429 |
1430 | Rate Control Criteria
1431 | flowfile count
1432 |
1433 |
1434 | Maximum Rate
1435 | 1
1436 |
1437 |
1438 | Rate Controlled Attribute
1439 |
1440 |
1441 | Time Duration
1442 | 1 min
1443 |
1444 |
1445 | Grouping Attribute
1446 |
1447 |
1448 | 0
1449 | 0 sec
1450 | TIMER_DRIVEN
1451 | 1 sec
1452 |
1453 | ControlRate
1454 |
1455 | true
1456 | failure
1457 |
1458 |
1459 | false
1460 | success
1461 |
1462 | RUNNING
1463 |
1464 | org.apache.nifi.processors.standard.ControlRate
1465 |
1466 |
1467 | Movidius Ingest
1468 |
1469 |
1470 |
1471 | 12/28/2017 16:41:57 EST
1472 |
1473 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenSourceComputerVision
2 |
3 | Open Source Computer Vision with TensorFlow, MiniFi, Apache NiFi, OpenCV, Apache Tika and Python. For processing images from IoT devices like Raspberry Pis, NVidia Jetson TX1s, NanoPi Duos and more, which are equipped with attached cameras or external USB webcams, we use Python to interface via OpenCV and PiCamera. From there we run image processing at the edge on these IoT devices using OpenCV and TensorFlow to determine attributes and image analytics. Apache MiniFi coordinates running these Python scripts and decides when and what to send from that analysis, along with the image, to a remote Apache NiFi server for additional processing. At the Apache NiFi cluster, the flow routes the images to one processing path and the JSON-encoded metadata to another. The JSON data (with its schema referenced from a central Schema Registry) is routed and queried using record processing and SQL; the data is enriched and augmented before conversion to Avro and sent via Apache Kafka to SAM. Streaming Analytics Manager then does deeper processing on this stream and others, including weather and Twitter, to determine what should be done with this data.
4 |
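As a rough sketch of the edge-side capture step described above, the snippet below grabs a single frame with OpenCV and prints a small JSON metadata record for MiniFi to pick up. The camera index, output path, and exact field set are assumptions for illustration (the field names mirror a subset of the MXRECORD schema in this repo); the actual scripts here, such as all.py, add Movidius, MXNet, and Sense HAT fields.

```python
# Minimal sketch of the edge capture step (camera index and output path are assumed).
import cv2
import json
import socket
from time import gmtime, strftime

ts = strftime("%Y%m%d%H%M%S", gmtime())
image_path = '/opt/demo/images/img_{0}.jpg'.format(ts)  # hypothetical output path

cap = cv2.VideoCapture(0)        # first attached USB webcam
ret, frame = cap.read()          # capture a single frame
cap.release()

if ret:
    cv2.imwrite(image_path, frame)   # the JPEG that MiniFi ships to NiFi
    metadata = {
        'filename': image_path,
        'host': socket.gethostname(),
        'ts': strftime("%Y-%m-%d %H:%M:%S", gmtime())
    }
    # MiniFi picks up this JSON and routes it to the metadata flow in NiFi
    print(json.dumps(metadata))
```
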
5 | References
6 |
7 | * https://community.hortonworks.com/articles/103863/using-an-asus-tinkerboard-with-tensorflow-and-pyth.html
8 | * https://community.hortonworks.com/articles/118132/minifi-capturing-converting-tensorflow-inception-t.html
9 | * https://github.com/tspannhw/rpi-noir-screen
10 | * https://community.hortonworks.com/articles/77988/ingest-remote-camera-images-from-raspberry-pi-via.html
11 | * https://community.hortonworks.com/articles/107379/minifi-for-image-capture-and-ingestion-from-raspbe.html
12 | * https://community.hortonworks.com/articles/58265/analyzing-images-in-hdf-20-using-tensorflow.html
13 |
14 |
15 |
16 | pb.py
17 |
18 | License
19 | MIT
20 |
21 | Code used from Mask R-CNN by Matterport, Inc. (MIT-Licensed), with minor alterations and copyright notices retained.
22 |
23 |
24 | MIT License
25 |
26 | Copyright (c) 2018 Max Woolf
27 |
28 | Permission is hereby granted, free of charge, to any person obtaining a copy
29 | of this software and associated documentation files (the "Software"), to deal
30 | in the Software without restriction, including without limitation the rights
31 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
32 | copies of the Software, and to permit persons to whom the Software is
33 | furnished to do so, subject to the following conditions:
34 |
35 | The above copyright notice and this permission notice shall be included in all
36 | copies or substantial portions of the Software.
37 |
38 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
39 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
40 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
41 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
42 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
43 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
44 | SOFTWARE.
45 |
46 | ---
47 |
48 | Mask R-CNN
49 |
50 | The MIT License (MIT)
51 |
52 | Copyright (c) 2017 Matterport, Inc.
53 |
54 |
--------------------------------------------------------------------------------
/Store_Images.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 7c84501d-d10c-407c-b9f3-1d80e38fe36a
5 | Store Images
6 |
7 |
8 | 0501c2d6-38e4-38c9-0000-000000000000
9 | 46f95fe5-3024-31d8-0000-000000000000
10 | 1 GB
11 | 10000
12 |
13 | b157a270-19e5-36e2-0000-000000000000
14 | ff0346d8-0207-3773-0000-000000000000
15 | INPUT_PORT
16 | ff0346d8-0207-3773-be4f-ee2ebe9c84f1
17 |
18 | 0 sec
19 | 1
20 |
21 |
22 | 46f95fe5-3024-31d8-0000-000000000000
23 | 3df7d6b5-6df6-3987-0000-000000000000
24 | INPUT_PORT
25 |
26 | 0
27 |
28 |
29 | 3df7d6b5-6df6-3987-0000-000000000000
30 | 46f95fe5-3024-31d8-0000-000000000000
31 |
32 | 60.0
33 | 0.0
34 |
35 | 1
36 | Images In
37 | RUNNING
38 | false
39 | INPUT_PORT
40 |
41 |
42 | b157a270-19e5-36e2-0000-000000000000
43 | 46f95fe5-3024-31d8-0000-000000000000
44 |
45 | 0.0
46 | 164.0
47 |
48 | b157a270-19e5-36e2-9a0d-6fa3585cea5f
49 |
50 |
51 |
52 | bc941ec9-45af-36fe-0000-000000000000
53 | b157a270-19e5-36e2-0000-000000000000
54 | bc941ec9-45af-36fe-bd16-7dc279068652
55 | 1 GB
56 | 10000
57 |
58 | b157a270-19e5-36e2-0000-000000000000
59 | 903d027c-cade-3fff-0000-000000000000
60 | PROCESSOR
61 | 903d027c-cade-3fff-9777-e7a8b317dd55
62 |
63 | 0 sec
64 | 1
65 |
66 | success
67 |
68 | b157a270-19e5-36e2-0000-000000000000
69 | 3d8e69f2-c2eb-33f5-0000-000000000000
70 | PROCESSOR
71 | 3d8e69f2-c2eb-33f5-b1d3-7cfbf641734c
72 |
73 | 0
74 |
75 |
76 | f9480b18-c560-3310-0000-000000000000
77 | b157a270-19e5-36e2-0000-000000000000
78 | f9480b18-c560-3310-ba2b-43ddc597914e
79 | 1 GB
80 | 10000
81 |
82 | b157a270-19e5-36e2-0000-000000000000
83 | 585c4a89-b766-34a2-0000-000000000000
84 | PROCESSOR
85 | 585c4a89-b766-34a2-9376-62553ee13457
86 |
87 | 0 sec
88 | 1
89 |
90 |
91 | b157a270-19e5-36e2-0000-000000000000
92 | ff0346d8-0207-3773-0000-000000000000
93 | INPUT_PORT
94 | ff0346d8-0207-3773-be4f-ee2ebe9c84f1
95 |
96 | 0
97 |
98 |
99 | fb4b2454-9ea8-3cf9-0000-000000000000
100 | b157a270-19e5-36e2-0000-000000000000
101 | fb4b2454-9ea8-3cf9-bbb1-1d4c0910025a
102 | 1 GB
103 | 10000
104 |
105 | b157a270-19e5-36e2-0000-000000000000
106 | 3d8e69f2-c2eb-33f5-0000-000000000000
107 | PROCESSOR
108 | 3d8e69f2-c2eb-33f5-b1d3-7cfbf641734c
109 |
110 | 0 sec
111 | 1
112 |
113 | success
114 |
115 | b157a270-19e5-36e2-0000-000000000000
116 | 585c4a89-b766-34a2-0000-000000000000
117 | PROCESSOR
118 | 585c4a89-b766-34a2-9376-62553ee13457
119 |
120 | 0
121 |
122 |
123 | 2beff2ef-5946-337f-0000-000000000000
124 | b157a270-19e5-36e2-0000-000000000000
125 | 2beff2ef-5946-337f-8fd7-44f1576b4590
126 | 1 GB
127 | 10000
128 |
129 | b157a270-19e5-36e2-0000-000000000000
130 | 5ae30d8b-947f-3022-0000-000000000000
131 | PROCESSOR
132 | 5ae30d8b-947f-3022-b101-f1999d1d1908
133 |
134 | 0 sec
135 | 1
136 |
137 | success
138 |
139 | b157a270-19e5-36e2-0000-000000000000
140 | 903d027c-cade-3fff-0000-000000000000
141 | PROCESSOR
142 | 903d027c-cade-3fff-9777-e7a8b317dd55
143 |
144 | 0
145 |
146 |
147 | ff0346d8-0207-3773-0000-000000000000
148 | b157a270-19e5-36e2-0000-000000000000
149 |
150 | 926.0000400816652
151 | 388.01390580296265
152 |
153 | ff0346d8-0207-3773-be4f-ee2ebe9c84f1
154 | 1
155 | Store Images
156 | RUNNING
157 | INPUT_PORT
158 |
159 |
160 | 903d027c-cade-3fff-0000-000000000000
161 | b157a270-19e5-36e2-0000-000000000000
162 |
163 | 899.0000400816652
164 | 951.0138447678064
165 |
166 | 903d027c-cade-3fff-9777-e7a8b317dd55
167 |
168 | nifi-update-attribute-nar
169 | org.apache.nifi
170 | 1.5.0.3.1.0.0-564
171 |
172 |
173 | WARN
174 |
175 | 1
176 |
177 |
178 | Delete Attributes Expression
179 |
180 | Delete Attributes Expression
181 |
182 |
183 |
184 | Store State
185 |
186 | Store State
187 |
188 |
189 |
190 | Stateful Variables Initial Value
191 |
192 | Stateful Variables Initial Value
193 |
194 |
195 |
196 | filename
197 |
198 | filename
199 |
200 |
201 |
202 | ALL
203 | false
204 | 30 sec
205 |
206 |
207 | Delete Attributes Expression
208 |
209 |
210 | Store State
211 | Do not store state
212 |
213 |
214 | Stateful Variables Initial Value
215 |
216 |
217 | filename
218 | current.jpg
219 |
220 |
221 | 0
222 | 0 sec
223 | TIMER_DRIVEN
224 | 1 sec
225 |
226 | UpdateAttribute
227 |
228 | false
229 | success
230 |
231 | RUNNING
232 |
233 | org.apache.nifi.processors.attributes.UpdateAttribute
234 |
235 |
236 | 3d8e69f2-c2eb-33f5-0000-000000000000
237 | b157a270-19e5-36e2-0000-000000000000
238 |
239 | 894.0000400816652
240 | 749.0139668381189
241 |
242 | 3d8e69f2-c2eb-33f5-b1d3-7cfbf641734c
243 |
244 | nifi-standard-nar
245 | org.apache.nifi
246 | 1.5.0.3.1.0.0-564
247 |
248 |
249 | WARN
250 |
251 | 1
252 |
253 |
254 | Directory
255 |
256 | Directory
257 |
258 |
259 |
260 | Conflict Resolution Strategy
261 |
262 | Conflict Resolution Strategy
263 |
264 |
265 |
266 | Create Missing Directories
267 |
268 | Create Missing Directories
269 |
270 |
271 |
272 | Maximum File Count
273 |
274 | Maximum File Count
275 |
276 |
277 |
278 | Last Modified Time
279 |
280 | Last Modified Time
281 |
282 |
283 |
284 | Permissions
285 |
286 | Permissions
287 |
288 |
289 |
290 | Owner
291 |
292 | Owner
293 |
294 |
295 |
296 | Group
297 |
298 | Group
299 |
300 |
301 |
302 | ALL
303 | false
304 | 30 sec
305 |
306 |
307 | Directory
308 | /opt/demo/images2
309 |
310 |
311 | Conflict Resolution Strategy
312 | replace
313 |
314 |
315 | Create Missing Directories
316 | true
317 |
318 |
319 | Maximum File Count
320 |
321 |
322 | Last Modified Time
323 |
324 |
325 | Permissions
326 |
327 |
328 | Owner
329 |
330 |
331 | Group
332 |
333 |
334 | 0
335 | 0 sec
336 | TIMER_DRIVEN
337 | 1 sec
338 |
339 | PutFile
340 |
341 | true
342 | failure
343 |
344 |
345 | false
346 | success
347 |
348 | RUNNING
349 |
350 | org.apache.nifi.processors.standard.PutFile
351 |
352 |
353 | 585c4a89-b766-34a2-0000-000000000000
354 | b157a270-19e5-36e2-0000-000000000000
355 |
356 | 883.0000400816652
357 | 537.0139058029627
358 |
359 | 585c4a89-b766-34a2-9376-62553ee13457
360 |
361 | nifi-update-attribute-nar
362 | org.apache.nifi
363 | 1.5.0.3.1.0.0-564
364 |
365 |
366 | WARN
367 |
368 | 1
369 |
370 |
371 | Delete Attributes Expression
372 |
373 | Delete Attributes Expression
374 |
375 |
376 |
377 | Store State
378 |
379 | Store State
380 |
381 |
382 |
383 | Stateful Variables Initial Value
384 |
385 | Stateful Variables Initial Value
386 |
387 |
388 |
389 | ALL
390 | false
391 | 30 sec
392 |
393 |
394 | Delete Attributes Expression
395 |
396 |
397 | Store State
398 | Do not store state
399 |
400 |
401 | Stateful Variables Initial Value
402 |
403 |
404 | 0
405 | 0 sec
406 | TIMER_DRIVEN
407 | 1 sec
408 |
409 | UpdateAttribute
410 |
411 | false
412 | success
413 |
414 | RUNNING
415 |
416 | org.apache.nifi.processors.attributes.UpdateAttribute
417 |
418 |
419 | 5ae30d8b-947f-3022-0000-000000000000
420 | b157a270-19e5-36e2-0000-000000000000
421 |
422 | 892.0000400816652
423 | 1176.013966838119
424 |
425 | 5ae30d8b-947f-3022-b101-f1999d1d1908
426 |
427 | nifi-standard-nar
428 | org.apache.nifi
429 | 1.5.0.3.1.0.0-564
430 |
431 |
432 | WARN
433 |
434 | 1
435 |
436 |
437 | Directory
438 |
439 | Directory
440 |
441 |
442 |
443 | Conflict Resolution Strategy
444 |
445 | Conflict Resolution Strategy
446 |
447 |
448 |
449 | Create Missing Directories
450 |
451 | Create Missing Directories
452 |
453 |
454 |
455 | Maximum File Count
456 |
457 | Maximum File Count
458 |
459 |
460 |
461 | Last Modified Time
462 |
463 | Last Modified Time
464 |
465 |
466 |
467 | Permissions
468 |
469 | Permissions
470 |
471 |
472 |
473 | Owner
474 |
475 | Owner
476 |
477 |
478 |
479 | Group
480 |
481 | Group
482 |
483 |
484 |
485 | ALL
486 | false
487 | 30 sec
488 |
489 |
490 | Directory
491 | /opt/demo/images2
492 |
493 |
494 | Conflict Resolution Strategy
495 | replace
496 |
497 |
498 | Create Missing Directories
499 | true
500 |
501 |
502 | Maximum File Count
503 |
504 |
505 | Last Modified Time
506 |
507 |
508 | Permissions
509 |
510 |
511 | Owner
512 |
513 |
514 | Group
515 |
516 |
517 | 0
518 | 0 sec
519 | TIMER_DRIVEN
520 | 1 sec
521 |
522 | PutFile Current
523 |
524 | true
525 | failure
526 |
527 |
528 | true
529 | success
530 |
531 | RUNNING
532 |
533 | org.apache.nifi.processors.standard.PutFile
534 |
535 |
536 | Store Images
537 |
538 |
539 | 77af1571-e1a4-435f-af74-7b814f33c565
540 | conference
541 |
542 | 7cd581b6-249e-40d0-95b8-5fd553e4ab80
543 | Store Images in File System
544 | adc0435b-3eb5-1e1a-ffff-fffff521cba3
545 | nipyapi_console_reg_client_0
546 | UP_TO_DATE
547 | Flow version is current
548 | 1
549 |
550 |
551 |
552 | 03/28/2018 20:39:44 UTC
553 |
554 |
--------------------------------------------------------------------------------
/all.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 |
3 | # ****************************************************************************
4 | # Copyright(c) 2017 Intel Corporation.
5 | # License: MIT See LICENSE file in root directory.
6 | # ****************************************************************************
7 | # How to classify images using DNNs on Intel Neural Compute Stick (NCS)
8 |
9 | # Forked by Tim Spann and added Sense Hat Code and JSON
10 | # 2017-December-28
11 |
12 | from sense_hat import SenseHat
13 | import json
14 | import sys, socket
15 | import os
16 | import psutil
17 | import subprocess
18 | import time
19 | import datetime
20 | from time import sleep
21 | from time import gmtime, strftime
22 | import mvnc.mvncapi as mvnc
23 | import skimage
24 | from skimage import io, transform
25 | import numpy
26 | import json
27 | import traceback
28 | import math
29 | import random, string
30 | import base64
31 | import json
32 | import mxnet as mx
33 | import numpy as np
34 | import time
35 | import cv2, os, urllib.request
36 | from collections import namedtuple
37 |
38 | start = time.time()
39 | starttime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
40 |
41 | # User modifiable input parameters
42 | NCAPPZOO_PATH = os.path.expanduser( '~/workspace/ncappzoo' )
43 | GRAPH_PATH = NCAPPZOO_PATH + '/caffe/GoogLeNet/graph'
44 | IMAGE_PATH = sys.argv[1]
45 | LABELS_FILE_PATH = NCAPPZOO_PATH + '/data/ilsvrc12/synset_words.txt'
46 | IMAGE_MEAN = [ 104.00698793, 116.66876762, 122.67891434]
47 | IMAGE_STDDEV = 1
48 | IMAGE_DIM = ( 224, 224 )
49 |
50 | Batch = namedtuple('Batch', ['data'])
51 |
52 | # Load the symbols for the networks
53 | with open('/opt/demo/incubator-mxnet/synset.txt', 'r') as f:
54 |     synsets = [l.rstrip() for l in f]
55 |
56 | # Load the network parameters
57 | sym, arg_params, aux_params = mx.model.load_checkpoint('/opt/demo/incubator-mxnet/Inception-BN', 0)
58 |
59 | # Load the network into an MXNet module and bind the corresponding parameters
60 | mod = mx.mod.Module(symbol=sym, context=mx.cpu())
61 | mod.bind(for_training=False, data_shapes=[('data', (1,3,224,224))])
62 | mod.set_params(arg_params, aux_params)
63 |
64 | '''
65 | Function to predict objects by giving the model a pointer to an image file and running a forward pass through the model.
66 | inputs:
67 | filename = jpeg file of image to classify objects in
68 | mod = the module object representing the loaded model
69 | synsets = the list of symbols representing the model
70 | N = Optional parameter denoting how many predictions to return (default is top 5)
71 | outputs:
72 | python list of top N predicted objects and corresponding probabilities
73 | '''
74 | def predict(filename, mod, synsets, N=5):
75 | tic = time.time()
76 | img = cv2.imread(filename)  # returns None if the file cannot be read
77 | if img is None:
78 | return None
79 | img = cv2.resize(cv2.cvtColor(img, cv2.COLOR_BGR2RGB), (224, 224))
80 | img = np.swapaxes(img, 0, 2)
81 | img = np.swapaxes(img, 1, 2)
82 | img = img[np.newaxis, :]
83 |
84 | toc = time.time()
85 | mod.forward(Batch([mx.nd.array(img)]))
86 | prob = mod.get_outputs()[0].asnumpy()
87 | prob = np.squeeze(prob)
88 |
89 | topN = []
90 | a = np.argsort(prob)[::-1]
91 | for i in a[0:N]:
92 | topN.append((prob[i], synsets[i]))
93 | return topN
94 |
95 |
96 | # Code to download an image from the internet and run a prediction on it
97 | def predict_from_url(url, N=5):
98 | filename = url.split("/")[-1]
99 | urllib.request.urlretrieve(url, filename)
100 | img = cv2.imread(filename)
101 | if img is None:
102 | print( "Failed to download" )
103 | else:
104 | return predict(filename, mod, synsets, N)
105 |
106 | # Code to predict on a local file
107 | def predict_from_local_file(filename, N=5):
108 | return predict(filename, mod, synsets, N)
109 |
110 | packet_size=3000
111 |
112 | # Create unique image name
113 | uniqueid = 'mxnet_uuid_{0}_{1}.json'.format('json',strftime("%Y%m%d%H%M%S",gmtime()))
114 |
115 | filename = IMAGE_PATH
116 | topn = []
117 | # Run inception prediction on image
118 | try:
119 | topn = predict_from_local_file(filename, N=5)
120 | except:
121 | print("Error")
122 | errorcondition = "true"
123 |
124 | # ---- Step 1: Open the enumerated device and get a handle to it -------------
125 |
126 | # Look for enumerated NCS device(s); quit program if none found.
127 | devices = mvnc.EnumerateDevices()
128 | if len( devices ) == 0:
129 | print( 'No devices found' )
130 | quit()
131 |
132 | # Get a handle to the first enumerated device and open it
133 | device = mvnc.Device( devices[0] )
134 | device.OpenDevice()
135 |
136 | # ---- Step 2: Load a graph file onto the NCS device -------------------------
137 |
138 | # Read the graph file into a buffer
139 | with open( GRAPH_PATH, mode='rb' ) as f:
140 | blob = f.read()
141 |
142 | # Load the graph buffer into the NCS
143 | graph = device.AllocateGraph( blob )
144 |
145 | # ---- Step 3: Offload image onto the NCS to run inference -------------------
146 |
147 | # Read & resize image [Image size is defined during training]
148 | img = print_img = skimage.io.imread( IMAGE_PATH )
149 | img = skimage.transform.resize( img, IMAGE_DIM, preserve_range=True )
150 |
151 | # Convert RGB to BGR [skimage reads image in RGB, but Caffe uses BGR]
152 | img = img[:, :, ::-1]
153 |
154 | # Mean subtraction & scaling [A common technique used to center the data]
155 | img = img.astype( numpy.float32 )
156 | img = ( img - IMAGE_MEAN ) * IMAGE_STDDEV
157 |
158 | # Load the image as a half-precision floating point array
159 | graph.LoadTensor( img.astype( numpy.float16 ), 'user object' )
160 |
161 | # ---- Step 4: Read & print inference results from the NCS -------------------
162 |
163 | # Get the results from NCS
164 | output, userobj = graph.GetResult()
165 |
166 | labels = numpy.loadtxt( LABELS_FILE_PATH, str, delimiter = '\t' )
167 |
168 | order = output.argsort()[::-1][:6]
169 |
170 | #### Initialization
171 |
172 | external_IP_and_port = ('198.41.0.4', 53) # a.root-servers.net
173 | socket_family = socket.AF_INET
174 |
175 | host = os.uname()[1]
176 |
177 | def getCPUtemperature():
178 | res = os.popen('vcgencmd measure_temp').readline()
179 | return(res.replace("temp=","").replace("'C\n",""))
180 |
181 | def IP_address():
182 | try:
183 | s = socket.socket(socket_family, socket.SOCK_DGRAM)
184 | s.connect(external_IP_and_port)
185 | answer = s.getsockname()
186 | s.close()
187 | return answer[0] if answer else None
188 | except socket.error:
189 | return None
190 |
191 | cpuTemp=int(float(getCPUtemperature()))
192 | ipaddress = IP_address()
193 |
194 | host = os.uname()[1]
195 | rasp = ('armv' in os.uname()[4])
196 | cpu = psutil.cpu_percent(interval=1)
197 | if rasp:
198 | f = open('/sys/class/thermal/thermal_zone0/temp', 'r')
199 | l = f.readline()
200 | ctemp = 1.0 * float(l)/1000
201 | usage = psutil.disk_usage("/")
202 | mem = psutil.virtual_memory()
203 | diskrootfree = "{:.1f} MB".format(float(usage.free) / 1024 / 1024)
204 | mempercent = mem.percent
205 | external_IP_and_port = ('198.41.0.4', 53) # a.root-servers.net
206 | socket_family = socket.AF_INET
207 |
208 | ipaddress = IP_address()
209 |
210 | # Sense Hat
211 | sense = SenseHat()
212 | sense.clear()
213 | temp = sense.get_temperature()
214 | temp = round(temp, 2)
215 | humidity = sense.get_humidity()
216 | humidity = round(humidity, 1)
217 | pressure = sense.get_pressure()
218 | pressure = round(pressure, 1)
219 | orientation = sense.get_orientation()
220 | pitch = orientation['pitch']
221 | roll = orientation['roll']
222 | yaw = orientation['yaw']
223 | acceleration = sense.get_accelerometer_raw()
224 | x = acceleration['x']
225 | y = acceleration['y']
226 | z = acceleration['z']
227 | #cputemp = out
228 | x=round(x, 0)
229 | y=round(y, 0)
230 | z=round(z, 0)
231 | pitch=round(pitch,0)
232 | roll=round(roll,0)
233 | yaw=round(yaw,0)
234 |
235 | try:
236 | # 5 MXNET Analysis
237 | top1 = str(topn[0][1])
238 | top1pct = str(round(topn[0][0],3) * 100)
239 |
240 | top2 = str(topn[1][1])
241 | top2pct = str(round(topn[1][0],3) * 100)
242 |
243 | top3 = str(topn[2][1])
244 | top3pct = str(round(topn[2][0],3) * 100)
245 |
246 | top4 = str(topn[3][1])
247 | top4pct = str(round(topn[3][0],3) * 100)
248 |
249 | top5 = str(topn[4][1])
250 | top5pct = str(round(topn[4][0],3) * 100)
251 |
252 | end = time.time()
253 | currenttime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
254 |
255 | row = { 'uuid': uniqueid, 'top1pct': top1pct, 'top1': top1, 'top2pct': top2pct, 'top2': top2,'top3pct': top3pct, 'top3': top3,'top4pct': top4pct,'top4': top4, 'top5pct': top5pct,'top5': top5, 'imagefilename': filename, 'runtime': str(round(end - start)),
256 | 'cputemp2': round(ctemp,2), 'temp': temp, 'tempf': round(((temp * 1.8) + 12),2), 'humidity': humidity, 'pressure': pressure, 'pitch': pitch, 'roll': roll, 'yaw': yaw, 'x': x, 'y': y, 'z': z,'memory': mempercent, 'diskfree': diskrootfree, 'label1': labels[order[0]],
257 | 'label2': labels[order[1]], 'label3': labels[order[2]], 'label4': labels[order[3]], 'label5': labels[order[4]], 'currenttime': currenttime, 'host': host, 'cputemp': round(cpuTemp,2), 'ipaddress': ipaddress, 'starttime': starttime }
258 |
259 | json_string = json.dumps(row)
260 | print (json_string)
261 |
262 | except:
263 | print("{\"message\": \"Failed to run\"}")
264 |
265 |
266 | # ---- Step 5: Unload the graph and close the device -------------------------
267 |
268 | graph.DeallocateGraph()
269 | device.CloseDevice()
270 |
271 | # ==== End of file ===========================================================
272 |
--------------------------------------------------------------------------------
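Note on all.py: predict() and predict_from_local_file() return a list of (probability, label) tuples. A minimal sketch of reusing them on a single image, assuming the synset and Inception-BN checkpoint paths referenced in the script are present; the image path below is only an example:

    # Sketch: reuse the MXNet helpers defined in all.py on one image (paths are examples).
    topn = predict_from_local_file('/opt/demo/images/example.jpg', N=5)
    if topn:
        for prob, label in topn:
            print('{:.1f}% {}'.format(prob * 100, label))
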
/analyze.py:
--------------------------------------------------------------------------------
1 | # 2017 load pictures and analyze
2 | # https://github.com/tspannhw/mxnet_rpi/blob/master/analyze.py
3 | import time
4 | import sys
5 | import datetime
6 | import subprocess
7 | import urllib2
8 | import os
9 | import datetime
10 | import traceback
11 | import math
12 | import random, string
13 | import base64
14 | import json
15 | import mxnet as mx
16 | import inception_predict
17 | import numpy as np
18 | import cv2
19 | import random, string
20 | import socket
21 | import psutil
22 | from time import sleep
23 | from string import Template
24 | from time import gmtime, strftime
25 |
26 | # Time
27 | start = time.time()
28 | currenttime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
29 | host = os.uname()[1]
30 | cpu = psutil.cpu_percent(interval=1)
31 | if 1==1:
32 | f = open('/sys/class/thermal/thermal_zone0/temp', 'r')
33 | l = f.readline()
34 | ctemp = 1.0 * float(l)/1000
35 | usage = psutil.disk_usage("/")
36 | mem = psutil.virtual_memory()
37 | diskrootfree = "{:.1f} MB".format(float(usage.free) / 1024 / 1024)
38 | mempercent = mem.percent
39 | external_IP_and_port = ('198.41.0.4', 53) # a.root-servers.net
40 | socket_family = socket.AF_INET
41 |
42 | def IP_address():
43 | try:
44 | s = socket.socket(socket_family, socket.SOCK_DGRAM)
45 | s.connect(external_IP_and_port)
46 | answer = s.getsockname()
47 | s.close()
48 | return answer[0] if answer else None
49 | except socket.error:
50 | return None
51 | ipaddress = IP_address()
52 |
53 | face_cascade_path = '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/haarcascade_frontalface_default.xml'
54 | face_cascade = cv2.CascadeClassifier(os.path.expanduser(face_cascade_path))
55 |
56 | scale_factor = 1.1
57 | min_neighbors = 3
58 | min_size = (30, 30)
59 |
60 | cap = cv2.VideoCapture(0)
61 | packet_size=3000
62 |
63 | def randomword(length):
64 | return ''.join(random.choice(string.lowercase) for i in range(length))
65 |
66 | #while True:
67 |
68 | # Create unique image name
69 | uniqueid = 'mxnet_uuid_{0}_{1}'.format(randomword(3),strftime("%Y%m%d%H%M%S",gmtime()))
70 |
71 | ret, frame = cap.read()
72 |
73 | imgdir = 'images/'
74 | filename = 'tx1_image_{0}_{1}.jpg'.format(randomword(3),strftime("%Y%m%d%H%M%S",gmtime()))
75 | cv2.imwrite(imgdir + filename, frame)
76 |
77 | # Run inception prediction on image
78 | try:
79 | topn = inception_predict.predict_from_local_file(imgdir + filename, N=5)
80 | except:
81 | errorcondition = "true"
82 |
83 | # CPU Temp
84 | f = open("/sys/devices/virtual/thermal/thermal_zone1/temp","r")
85 | cputemp = str( f.readline() )
86 | cputemp = cputemp.replace('\n','')
87 | cputemp = cputemp.strip()
88 | cputemp = str(round(float(cputemp)) / 1000)
89 | cputempf = str(round(9.0/5.0 * float(cputemp) + 32))
90 | f.close()
91 |
92 | # GPU Temp
93 | f = open("/sys/devices/virtual/thermal/thermal_zone2/temp","r")
94 | gputemp = str( f.readline() )
95 | gputemp = gputemp.replace('\n','')
96 | gputemp = gputemp.strip()
97 | gputemp = str(round(float(gputemp)) / 1000)
98 | gputempf = str(round(9.0/5.0 * float(gputemp) + 32))
99 | f.close()
100 |
101 | # NVidia Face Detect
102 | p = os.popen('/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/jetson-inference/build/aarch64/bin/facedetect.sh ' + filename).read()
103 | face = p.replace('\n','|')
104 | face = face.strip()
105 |
106 | # NVidia Image Net Classify
107 | p2 = os.popen('/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/jetson-inference/build/aarch64/bin/runclassify.sh ' + filename).read()
108 | imagenet = p2.replace('\n','|')
109 | imagenet = imagenet.strip()
110 |
111 | # 5 MXNET Analysis
112 | top1 = str(topn[0][1])
113 | top1pct = str(round(topn[0][0],3) * 100)
114 |
115 | top2 = str(topn[1][1])
116 | top2pct = str(round(topn[1][0],3) * 100)
117 |
118 | top3 = str(topn[2][1])
119 | top3pct = str(round(topn[2][0],3) * 100)
120 |
121 | top4 = str(topn[3][1])
122 | top4pct = str(round(topn[3][0],3) * 100)
123 |
124 | top5 = str(topn[4][1])
125 | top5pct = str(round(topn[4][0],3) * 100)
126 |
127 | # OpenCV
128 |
129 | infname = "/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images" + filename
130 | flags = cv2.CASCADE_SCALE_IMAGE
131 | #image_path = os.path.expanduser(infname)
132 | image = cv2.imread(imgdir + filename)
133 | #frame
134 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
135 | faces = face_cascade.detectMultiScale(gray, scaleFactor = scale_factor, minNeighbors = min_neighbors, minSize = min_size, flags = flags)
136 |
137 | # Create Face Images
138 |
139 | x = 0
140 | y = 0
141 | w = 0
142 | h = 0
143 | outfilename = filename
144 | outfname = filename
145 | cvface = ''
146 | cvfilename = ''
147 |
148 | for( x1, y1, w1, h1 ) in faces:
149 | cv2.rectangle(image, (x1, y1), (x1 + w1, y1 + h1), (255, 255, 0), 2)
150 | outfname = "/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/%s.faces.jpg" % os.path.basename(infname)
151 | cv2.imwrite(os.path.expanduser(outfname), image)
152 | cvfilename += outfname
153 | cvface += 'Face {0}'.format(faces)
154 | outfilename = outfname
155 | x = x1
156 | y = y1
157 | w = w1
158 | h = h1
159 |
160 | endtime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
161 | end = time.time()
162 | row = { 'uuid': uniqueid, 'top1pct': top1pct, 'top1': top1, 'top2pct': top2pct, 'top2': top2,'top3pct': top3pct, 'top3': top3,'top4pct': top4pct,'top4': top4, 'top5pct': top5pct,'top5': top5, 'gputemp': gputemp, 'imagefilename': filename, 'gputempf': gputempf, 'cputempf': cputempf, 'runtime': str(round(end - start)), 'facedetect': face, 'imagenet': imagenet, 'ts': currenttime, 'endtime': endtime, 'host': host, 'memory': mempercent, 'diskfree': diskrootfree, 'cputemp': round(ctemp,2), 'ipaddress': ipaddress, 'x': str(x), 'y': str(y), 'w': str(w), 'h': str(h), 'filename': outfname, 'cvface': cvface, 'cvfilename': cvfilename }
163 |
164 | json_string = json.dumps(row)
165 |
166 | print (json_string )
167 |
--------------------------------------------------------------------------------
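Note on analyze.py: it combines an MXNet Inception prediction, the Jetson inference scripts, and an OpenCV Haar-cascade face pass. A standalone sketch of just the cascade step, with placeholder cascade and image paths:

    # Sketch of the Haar-cascade face detection step used in analyze.py; paths are placeholders.
    import cv2

    face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
    image = cv2.imread('example.jpg')
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=3,
                                          minSize=(30, 30), flags=cv2.CASCADE_SCALE_IMAGE)
    for (x, y, w, h) in faces:
        cv2.rectangle(image, (x, y), (x + w, y + h), (255, 255, 0), 2)
    cv2.imwrite('example.faces.jpg', image)
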
/buildconfig.sh:
--------------------------------------------------------------------------------
1 | /Volumes/seagate/IoTFusion/minifi-toolkit-0.4.0/bin/config.sh transform $1 config.yml
2 | scp config.yml pi@192.168.1.156:/opt/demo/minifi-0.4.0/conf/
3 |
--------------------------------------------------------------------------------
/buildpage.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | echo '
NiFi List Images
List Images
'
4 | sed 's/^.*/&<\/a>
/'
5 | echo ''
6 |
--------------------------------------------------------------------------------
/classify_image.py:
--------------------------------------------------------------------------------
1 | #
2 | #Copyright 2015 The TensorFlow Authors. All Rights Reserved.
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | # ==============================================================================
15 |
16 |
17 | """Simple image classification with Inception.
18 |
19 |
20 | Run image classification with Inception trained on ImageNet 2012 Challenge data
21 | set.
22 |
23 |
24 | This program creates a graph from a saved GraphDef protocol buffer,
25 | and runs inference on an input JPEG image. It outputs human readable
26 | strings of the top 5 predictions along with their probabilities.
27 |
28 |
29 | Change the --image_file argument to any jpg image to compute a
30 | classification of that image.
31 |
32 |
33 | Please see the tutorial and website for a detailed description of how
34 | to use this script to perform image recognition.
35 |
36 |
37 | https://tensorflow.org/tutorials/image_recognition/
38 | """
39 |
40 |
41 | from __future__ import absolute_import
42 | from __future__ import division
43 | from __future__ import print_function
44 |
45 |
46 | import argparse
47 | import os.path
48 | import re
49 | import sys
50 | import tarfile
51 | import os
52 | import datetime
53 | import math
54 | import random, string
55 | import base64
56 | import json
57 | import time
58 | from time import sleep
59 | from time import gmtime, strftime
60 |
61 |
62 | import numpy as np
63 | from six.moves import urllib
64 | import tensorflow as tf
65 |
66 |
67 | tf.logging.set_verbosity(tf.logging.ERROR)
68 |
69 |
70 | FLAGS = None
71 |
72 |
73 | # pylint: disable=line-too-long
74 | DATA_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz'
75 | # pylint: enable=line-too-long
76 |
77 |
78 | # yyyy-mm-dd hh:mm:ss
79 | currenttime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
80 |
81 |
82 | host = os.uname()[1]
83 |
84 |
85 | def randomword(length):
86 | return ''.join(random.choice(string.lowercase) for i in range(length))
87 |
88 |
89 | class NodeLookup(object):
90 | """Converts integer node ID's to human readable labels."""
91 |
92 |
93 | def __init__(self,
94 | label_lookup_path=None,
95 | uid_lookup_path=None):
96 | if not label_lookup_path:
97 | label_lookup_path = os.path.join(
98 | FLAGS.model_dir, 'imagenet_2012_challenge_label_map_proto.pbtxt')
99 | if not uid_lookup_path:
100 | uid_lookup_path = os.path.join(
101 | FLAGS.model_dir, 'imagenet_synset_to_human_label_map.txt')
102 | self.node_lookup = self.load(label_lookup_path, uid_lookup_path)
103 |
104 |
105 |
106 |
107 | def load(self, label_lookup_path, uid_lookup_path):
108 | """Loads a human readable English name for each softmax node.
109 |
110 |
111 | Args:
112 | label_lookup_path: string UID to integer node ID.
113 | uid_lookup_path: string UID to human-readable string.
114 |
115 |
116 | Returns:
117 | dict from integer node ID to human-readable string.
118 | """
119 | if not tf.gfile.Exists(uid_lookup_path):
120 | tf.logging.fatal('File does not exist %s', uid_lookup_path)
121 | if not tf.gfile.Exists(label_lookup_path):
122 | tf.logging.fatal('File does not exist %s', label_lookup_path)
123 |
124 |
125 | # Loads mapping from string UID to human-readable string
126 | proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
127 | uid_to_human = {}
128 | p = re.compile(r'[n\d]*[ \S,]*')
129 | for line in proto_as_ascii_lines:
130 | parsed_items = p.findall(line)
131 | uid = parsed_items[0]
132 | human_string = parsed_items[2]
133 | uid_to_human[uid] = human_string
134 |
135 |
136 | # Loads mapping from string UID to integer node ID.
137 | node_id_to_uid = {}
138 | proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
139 | for line in proto_as_ascii:
140 | if line.startswith(' target_class:'):
141 | target_class = int(line.split(': ')[1])
142 | if line.startswith(' target_class_string:'):
143 | target_class_string = line.split(': ')[1]
144 | node_id_to_uid[target_class] = target_class_string[1:-2]
145 |
146 |
147 | # Loads the final mapping of integer node ID to human-readable string
148 | node_id_to_name = {}
149 | for key, val in node_id_to_uid.items():
150 | if val not in uid_to_human:
151 | tf.logging.fatal('Failed to locate: %s', val)
152 | name = uid_to_human[val]
153 | node_id_to_name[key] = name
154 |
155 |
156 | return node_id_to_name
157 |
158 |
159 | def id_to_string(self, node_id):
160 | if node_id not in self.node_lookup:
161 | return ''
162 | return self.node_lookup[node_id]
163 |
164 |
165 |
166 |
167 | def create_graph():
168 | """Creates a graph from saved GraphDef file and returns a saver."""
169 | # Creates graph from saved graph_def.pb.
170 | with tf.gfile.FastGFile(os.path.join(
171 | FLAGS.model_dir, 'classify_image_graph_def.pb'), 'rb') as f:
172 | graph_def = tf.GraphDef()
173 | graph_def.ParseFromString(f.read())
174 | _ = tf.import_graph_def(graph_def, name='')
175 |
176 |
177 |
178 |
179 | def run_inference_on_image(image):
180 | """Runs inference on an image.
181 |
182 |
183 | Args:
184 | image: Image file name.
185 |
186 |
187 | Returns:
188 | Nothing
189 | """
190 | if not tf.gfile.Exists(image):
191 | tf.logging.fatal('File does not exist %s', image)
192 | image_data = tf.gfile.FastGFile(image, 'rb').read()
193 |
194 |
195 | # Creates graph from saved GraphDef.
196 | create_graph()
197 |
198 |
199 | with tf.Session() as sess:
200 | # Some useful tensors:
201 | # 'softmax:0': A tensor containing the normalized prediction across
202 | # 1000 labels.
203 | # 'pool_3:0': A tensor containing the next-to-last layer containing 2048
204 | # float description of the image.
205 | # 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
206 | # encoding of the image.
207 | # Runs the softmax tensor by feeding the image_data as input to the graph.
208 | softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
209 | predictions = sess.run(softmax_tensor,
210 | {'DecodeJpeg/contents:0': image_data})
211 | predictions = np.squeeze(predictions)
212 |
213 |
214 | # Creates node ID --> English string lookup.
215 | node_lookup = NodeLookup()
216 |
217 |
218 | top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]
219 | row = []
220 | for node_id in top_k:
221 | human_string = node_lookup.id_to_string(node_id)
222 | score = predictions[node_id]
223 | row.append( { 'node_id': node_id, 'image': image, 'host': host, 'ts': currenttime, 'human_string': str(human_string), 'score': str(score)} )
224 |
225 | json_string = json.dumps(row)
226 | print( json_string )
227 |
228 | def maybe_download_and_extract():
229 | """Download and extract model tar file."""
230 | dest_directory = FLAGS.model_dir
231 | if not os.path.exists(dest_directory):
232 | os.makedirs(dest_directory)
233 | filename = DATA_URL.split('/')[-1]
234 | filepath = os.path.join(dest_directory, filename)
235 | if not os.path.exists(filepath):
236 | def _progress(count, block_size, total_size):
237 | sys.stdout.write('\r>> Downloading %s %.1f%%' % (
238 | filename, float(count * block_size) / float(total_size) * 100.0))
239 | sys.stdout.flush()
240 | filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)
241 | print()
242 | statinfo = os.stat(filepath)
243 | print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
244 | tarfile.open(filepath, 'r:gz').extractall(dest_directory)
245 |
246 |
247 |
248 |
249 | def main(_):
250 | maybe_download_and_extract()
251 |
252 |
253 | # Create unique image name
254 | # img_name = '/opt/demo/images/pi_image_{0}_{1}.jpg'.format(randomword(3),strftime("%Y%m%d%H%M%S",gmtime()))
255 |
256 | IMAGE_PATH = sys.argv[1]
257 |
258 | img_name = IMAGE_PATH
259 |
260 | run_inference_on_image(img_name)
261 |
262 |
263 | if __name__ == '__main__':
264 | parser = argparse.ArgumentParser()
265 | # classify_image_graph_def.pb:
266 | # Binary representation of the GraphDef protocol buffer.
267 | # imagenet_synset_to_human_label_map.txt:
268 | # Map from synset ID to a human readable string.
269 | # imagenet_2012_challenge_label_map_proto.pbtxt:
270 | # Text representation of a protocol buffer mapping a label to synset ID.
271 | parser.add_argument(
272 | '--model_dir',
273 | type=str,
274 | default='/tmp/imagenet',
275 | help="""\
276 | Path to classify_image_graph_def.pb,
277 | imagenet_synset_to_human_label_map.txt, and
278 | imagenet_2012_challenge_label_map_proto.pbtxt.\
279 | """
280 | )
281 | parser.add_argument(
282 | '--image_file',
283 | type=str,
284 | default='',
285 | help='Absolute path to image file.'
286 | )
287 | parser.add_argument(
288 | '--num_top_predictions',
289 | type=int,
290 | default=5,
291 | help='Display this many predictions.'
292 | )
293 | FLAGS, unparsed = parser.parse_known_args()
294 | tf.app.run(main=main)
295 |
--------------------------------------------------------------------------------
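Note on classify_image.py: it reads the image path from its first positional argument and prints the top predictions as a JSON list (node_id, human_string, score, plus host and timestamp). A hedged sketch of invoking it the way the MiNiFi ExecuteProcess flow would, assuming the Inception model has already been downloaded to /tmp/imagenet; the script and image paths are examples only:

    # Sketch: call classify_image.py as a subprocess and parse its JSON output.
    import json
    import subprocess

    out = subprocess.check_output(
        ['python', '/opt/demo/classify_image.py', '/opt/demo/images/example.jpg'])
    for prediction in json.loads(out.decode('utf-8')):
        print(prediction['human_string'], prediction['score'])
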
/config.yml:
--------------------------------------------------------------------------------
1 | MiNiFi Config Version: 3
2 | Flow Controller:
3 | name: MiniFiSenseHatDevieFlowMiniFiTimers
4 | comment: ''
5 | Core Properties:
6 | flow controller graceful shutdown period: 10 sec
7 | flow service write delay interval: 500 ms
8 | administrative yield duration: 30 sec
9 | bored yield duration: 10 millis
10 | max concurrent threads: 1
11 | variable registry properties: ''
12 | FlowFile Repository:
13 | partitions: 256
14 | checkpoint interval: 2 mins
15 | always sync: false
16 | Swap:
17 | threshold: 20000
18 | in period: 5 sec
19 | in threads: 1
20 | out period: 5 sec
21 | out threads: 4
22 | Content Repository:
23 | content claim max appendable size: 10 MB
24 | content claim max flow files: 100
25 | always sync: false
26 | Provenance Repository:
27 | provenance rollover time: 1 min
28 | implementation: org.apache.nifi.provenance.MiNiFiPersistentProvenanceRepository
29 | Component Status Repository:
30 | buffer size: 1440
31 | snapshot frequency: 1 min
32 | Security Properties:
33 | keystore: ''
34 | keystore type: ''
35 | keystore password: ''
36 | key password: ''
37 | truststore: ''
38 | truststore type: ''
39 | truststore password: ''
40 | ssl protocol: ''
41 | Sensitive Props:
42 | key:
43 | algorithm: PBEWITHMD5AND256BITAES-CBC-OPENSSL
44 | provider: BC
45 | Processors: []
46 | Controller Services: []
47 | Process Groups:
48 | - id: f828c0b4-3a39-3d1b-0000-000000000000
49 | name: Movidius Sensehat Device Flow MiniFi
50 | Processors:
51 | - id: 630180d0-6e3b-3dd2-0000-000000000000
52 | name: Capture Photo and NCS Image Classifier
53 | class: org.apache.nifi.processors.standard.ExecuteProcess
54 | max concurrent tasks: 1
55 | scheduling strategy: CRON_DRIVEN
56 | scheduling period: 0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,47,50,52,54,56,58 * * * * ?
57 | penalization period: 30 sec
58 | yield period: 1 sec
59 | run duration nanos: 0
60 | auto-terminated relationships list: []
61 | Properties:
62 | Argument Delimiter: ' '
63 | Batch Duration:
64 | Command: /opt/demo/run2.sh
65 | Command Arguments:
66 | Redirect Error Stream: 'false'
67 | Working Directory:
68 | - id: 359adfdb-2393-3b49-0000-000000000000
69 | name: Capture Photo and TensorFlow
70 | class: org.apache.nifi.processors.standard.ExecuteProcess
71 | max concurrent tasks: 1
72 | scheduling strategy: CRON_DRIVEN
73 | scheduling period: 1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,57,59 * * * * ?
74 | penalization period: 30 sec
75 | yield period: 1 sec
76 | run duration nanos: 0
77 | auto-terminated relationships list: []
78 | Properties:
79 | Argument Delimiter: ' '
80 | Batch Duration:
81 | Command: /opt/demo/run3.sh
82 | Command Arguments:
83 | Redirect Error Stream: 'false'
84 | Working Directory:
85 | - id: d24ff86a-530a-3617-0000-000000000000
86 | name: GetFile
87 | class: org.apache.nifi.processors.standard.GetFile
88 | max concurrent tasks: 1
89 | scheduling strategy: TIMER_DRIVEN
90 | scheduling period: 30 sec
91 | penalization period: 30 sec
92 | yield period: 1 sec
93 | run duration nanos: 0
94 | auto-terminated relationships list: []
95 | Properties:
96 | Batch Size: '10'
97 | File Filter: '[^\.].*'
98 | Ignore Hidden Files: 'false'
99 | Input Directory: /opt/demo/images/
100 | Keep Source File: 'false'
101 | Maximum File Age:
102 | Maximum File Size:
103 | Minimum File Age: 90 sec
104 | Minimum File Size: 120 B
105 | Path Filter:
106 | Polling Interval: 0 sec
107 | Recurse Subdirectories: 'true'
108 | Controller Services: []
109 | Process Groups: []
110 | Input Ports: []
111 | Output Ports: []
112 | Funnels: []
113 | Connections:
114 | - id: 2bde6057-2014-3456-0000-000000000000
115 | name: Capture Photo and NCS Image Classifier/success/e3af35aa-c8a9-189e-40e8-6170aeb5eff6
116 | source id: 630180d0-6e3b-3dd2-0000-000000000000
117 | source relationship names:
118 | - success
119 | destination id: e3af35aa-c8a9-189e-40e8-6170aeb5eff6
120 | max work queue size: 10000
121 | max work queue data size: 1 GB
122 | flowfile expiration: 0 sec
123 | queue prioritizer class: ''
124 | - id: 0a79b384-a17a-3ac2-0000-000000000000
125 | name: Capture Photo and TensorFlow/success/e3af35aa-c8a9-189e-40e8-6170aeb5eff6
126 | source id: 359adfdb-2393-3b49-0000-000000000000
127 | source relationship names:
128 | - success
129 | destination id: e3af35aa-c8a9-189e-40e8-6170aeb5eff6
130 | max work queue size: 10000
131 | max work queue data size: 1 GB
132 | flowfile expiration: 0 sec
133 | queue prioritizer class: ''
134 | - id: ce29f4f8-d06e-31d2-0000-000000000000
135 | name: GetFile/success/e3af35aa-c8a9-189e-40e8-6170aeb5eff6
136 | source id: d24ff86a-530a-3617-0000-000000000000
137 | source relationship names:
138 | - success
139 | destination id: e3af35aa-c8a9-189e-40e8-6170aeb5eff6
140 | max work queue size: 10000
141 | max work queue data size: 1 GB
142 | flowfile expiration: 0 sec
143 | queue prioritizer class: ''
144 | Remote Process Groups:
145 | - id: ba036c2c-9b89-3494-0000-000000000000
146 | name: ''
147 | url: http://hw13125.local:8080/nifi
148 | comment: ''
149 | timeout: 60 sec
150 | yield period: 10 sec
151 | transport protocol: HTTP
152 | proxy host: ''
153 | proxy port: ''
154 | proxy user: ''
155 | proxy password: ''
156 | local network interface: ''
157 | Input Ports:
158 | - id: e3af35aa-c8a9-189e-40e8-6170aeb5eff6
159 | name: Movidius Input
160 | comment: ''
161 | max concurrent tasks: 1
162 | use compression: false
163 | Output Ports: []
164 | Input Ports: []
165 | Output Ports: []
166 | Funnels: []
167 | Connections: []
168 | Remote Process Groups: []
169 | NiFi Properties Overrides: {}
170 |
--------------------------------------------------------------------------------
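Note on config.yml: the MiNiFi flow alternates two ExecuteProcess processors on even and odd minutes (CRON_DRIVEN) and tails /opt/demo/images/ with GetFile, all routed to the remote Movidius Input port. A small sketch, assuming PyYAML is installed, for sanity-checking the processor schedules before shipping the file to the device with buildconfig.sh:

    # Sketch: list each processor's scheduling strategy and period from config.yml.
    import yaml

    with open('config.yml') as f:
        cfg = yaml.safe_load(f)

    for group in cfg.get('Process Groups', []):
        for proc in group.get('Processors', []):
            print(proc['name'], proc['scheduling strategy'], proc['scheduling period'])
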
/gluon2.avsc:
--------------------------------------------------------------------------------
1 | { "type" : "record", "name" : "gluon2", "fields" : [ { "name" : "top1pct", "type" : "string", "doc" : "Type inferred from '\"48.4\"'" }, { "name" : "top2pct", "type" : "string", "doc" : "Type inferred from '\"14.3\"'" }, { "name" : "top3pct", "type" : "string", "doc" : "Type inferred from '\"9.1\"'" }, { "name" : "top4pct", "type" : "string", "doc" : "Type inferred from '\"5.2\"'" }, { "name" : "top5pct", "type" : "string", "doc" : "Type inferred from '\"4.7\"'" }, { "name" : "top1", "type" : "string", "doc" : "Type inferred from '\"cellular telephone, cellular phone, cellphone, cell, mobile phone\"'" }, { "name" : "top2", "type" : "string", "doc" : "Type inferred from '\"Polaroid camera, Polaroid Land camera\"'" }, { "name" : "top3", "type" : "string", "doc" : "Type inferred from '\"joystick\"'" }, { "name" : "top4", "type" : "string", "doc" : "Type inferred from '\"remote control, remote\"'" }, { "name" : "top5", "type" : "string", "doc" : "Type inferred from '\"iPod\"'" }, { "name" : "imgname", "type" : "string", "doc" : "Type inferred from '\"images/gluon_image_20180615142650_6f6f93b4-637c-4c5a-b169-7c50efda32b6.jpg\"'" }, { "name" : "host", "type" : "string", "doc" : "Type inferred from '\"HW13125.local\"'" }, { "name" : "end", "type" : "string", "doc" : "Type inferred from '\"1529072810.261188\"'" }, { "name" : "te", "type" : "string", "doc" : "Type inferred from '\"2.0492031574249268\"'" }, { "name" : "battery", "type" : "int", "doc" : "Type inferred from '100'" }, { "name" : "systemtime", "type" : "string", "doc" : "Type inferred from '\"06/15/2018 10:26:50\"'" }, { "name" : "cpu", "type" : "double", "doc" : "Type inferred from '47.9'" }, { "name" : "diskusage", "type" : "string", "doc" : "Type inferred from '\"110708.1 MB\"'" }, { "name" : "memory", "type" : "double", "doc" : "Type inferred from '88.8'" }, { "name" : "id", "type" : "string", "doc" : "Type inferred from '\"20180615142650_6f6f93b4-637c-4c5a-b169-7c50efda32b6\"'" } ] }
2 |
--------------------------------------------------------------------------------
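Note on gluon2.avsc: it describes the JSON rows produced by nifigluon2.py. For illustration, a record shaped like that schema, built only from the example values already embedded in the schema's doc annotations:

    # Example record matching gluon2.avsc, using the values from the schema's doc strings.
    import json

    record = {
        "top1pct": "48.4", "top2pct": "14.3", "top3pct": "9.1", "top4pct": "5.2", "top5pct": "4.7",
        "top1": "cellular telephone, cellular phone, cellphone, cell, mobile phone",
        "top2": "Polaroid camera, Polaroid Land camera",
        "top3": "joystick", "top4": "remote control, remote", "top5": "iPod",
        "imgname": "images/gluon_image_20180615142650_6f6f93b4-637c-4c5a-b169-7c50efda32b6.jpg",
        "host": "HW13125.local", "end": "1529072810.261188", "te": "2.0492031574249268",
        "battery": 100, "systemtime": "06/15/2018 10:26:50", "cpu": 47.9,
        "diskusage": "110708.1 MB", "memory": 88.8,
        "id": "20180615142650_6f6f93b4-637c-4c5a-b169-7c50efda32b6"
    }
    print(json.dumps(record))
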
/gluon2.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | from mxnet import nd, gluon, init, autograd
3 | from mxnet.gluon import nn
4 | from mxnet.gluon.data.vision import datasets, transforms
5 | import matplotlib.pyplot as plt
6 | from time import time
7 | from mxnet.gluon.model_zoo import vision as models
8 | from mxnet.gluon.utils import download
9 | from mxnet import image
10 | import time
11 | import sys
12 | import datetime
13 | import subprocess
14 | import sys
15 | import os
16 | import datetime
17 | import traceback
18 | import math
19 | import random, string
20 | import base64
21 | import json
22 | from time import gmtime, strftime
23 | import mxnet as mx
24 | import inception_predict
25 | import numpy as np
26 | import cv2
27 | import math
28 | import random, string
29 | import time
30 | import numpy
31 |
32 | import numpy
33 | from time import gmtime, strftime
34 | start = time.time()
35 | cap = cv2.VideoCapture(1)
36 |
37 | # http://gluon-crash-course.mxnet.io/predict.html
38 | def transform(data):
39 | data = data.transpose((2,0,1)).expand_dims(axis=0)
40 | rgb_mean = nd.array([0.485, 0.456, 0.406]).reshape((1,3,1,1))
41 | rgb_std = nd.array([0.229, 0.224, 0.225]).reshape((1,3,1,1))
42 | return (data.astype('float32') / 255 - rgb_mean) / rgb_std
43 |
44 |
45 | net = models.resnet50_v2(pretrained=True)
46 |
47 |
48 | url = 'http://data.mxnet.io/models/imagenet/synset.txt'
49 | fname = download(url)
50 | with open(fname, 'r') as f:
51 | text_labels = [' '.join(l.split()[1:]) for l in f]
52 |
53 | url = 'https://upload.wikimedia.org/wikipedia/commons/thumb/b/b5/\
54 | Golden_Retriever_medium-to-light-coat.jpg/\
55 | 365px-Golden_Retriever_medium-to-light-coat.jpg'
56 | fname = download(url)
57 |
58 | ret, frame = cap.read()
59 | filename = 'images/gluon_image_{0}_{1}.jpg'.format('img',strftime("%Y%m%d%H%M%S",gmtime()))
60 | cv2.imwrite(filename, frame)
61 |
62 | x = image.imread(filename)
63 |
64 | x = image.resize_short(x, 256)
65 | x, _ = image.center_crop(x, (224,224))
66 |
67 | prob = net(transform(x)).softmax()
68 | idx = prob.topk(k=5)[0]
69 | for i in idx:
70 | i = int(i.asscalar())
71 | print('prob=%.5f, %s' % (
72 | prob[0,i].asscalar() * 100, text_labels[i]))
73 |
--------------------------------------------------------------------------------
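Note on gluon2.py: it follows the Gluon crash-course prediction example: capture a frame, apply the standard ImageNet mean/std normalization in transform(), and print the top five ResNet50 classes. A small helper, sketched against the objects defined in that script (net, transform, text_labels), that returns the same top-k readout as (percentage, label) pairs instead of printing:

    # Sketch: wrap the top-k readout from gluon2.py for reuse.
    def top_k(img_nd, k=5):
        prob = net(transform(img_nd)).softmax()
        idx = prob.topk(k=k)[0]
        results = []
        for i in idx:
            i = int(i.asscalar())
            results.append((float(prob[0, i].asscalar()) * 100, text_labels[i]))
        return results
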
/gluoncv.avsc:
--------------------------------------------------------------------------------
1 | {
2 | "type": "record",
3 | "name": "gluoncv",
4 | "fields": [
5 | {
6 | "name": "imgname",
7 | "type": "string",
8 | "doc": "Type inferred from '\"images/gluoncv_image_20180615203319_6e0e5f0b-d2aa-4e94-b7e9-8bb7f29c9512.jpg\"'"
9 | },
10 | {
11 | "name": "host",
12 | "type": "string",
13 | "doc": "Type inferred from '\"HW13125.local\"'"
14 | },
15 | {
16 | "name": "shape",
17 | "type": "string",
18 | "doc": "Type inferred from '\"(1, 3, 512, 910)\"'"
19 | },
20 | {
21 | "name": "end",
22 | "type": "string",
23 | "doc": "Type inferred from '\"1529094800.88097\"'"
24 | },
25 | {
26 | "name": "te",
27 | "type": "string",
28 | "doc": "Type inferred from '\"2.4256367683410645\"'"
29 | },
30 | {
31 | "name": "battery",
32 | "type": "int",
33 | "doc": "Type inferred from '100'"
34 | },
35 | {
36 | "name": "systemtime",
37 | "type": "string",
38 | "doc": "Type inferred from '\"06/15/2018 16:33:20\"'"
39 | },
40 | {
41 | "name": "cpu",
42 | "type": "double",
43 | "doc": "Type inferred from '23.2'"
44 | },
45 | {
46 | "name": "diskusage",
47 | "type": "string",
48 | "doc": "Type inferred from '\"112000.8 MB\"'"
49 | },
50 | {
51 | "name": "memory",
52 | "type": "double",
53 | "doc": "Type inferred from '65.8'"
54 | },
55 | {
56 | "name": "id",
57 | "type": "string",
58 | "doc": "Type inferred from '\"20180615203319_6e0e5f0b-d2aa-4e94-b7e9-8bb7f29c9512\"'"
59 | }
60 | ]
61 | }
62 |
--------------------------------------------------------------------------------
/gluoncv_image_processed_20180615202056_8eb9c885-2cf7-4591-8d4a-f0962b2e9cb1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tspannhw/OpenSourceComputerVision/5c654a9025354257cfbf0b352e5b95e15223b595/gluoncv_image_processed_20180615202056_8eb9c885-2cf7-4591-8d4a-f0962b2e9cb1.jpg
--------------------------------------------------------------------------------
/gluoncvexample.json:
--------------------------------------------------------------------------------
1 | {"imgname": "images/gluoncv_image_20180615203615_c83fed6f-2ec8-4841-97e3-40985f7859ad.jpg",
2 | "host": "HW13125.local", "shape": "(1, 3, 512, 910)", "end": "1529094976.237143",
3 | "te": "1.8907802104949951", "battery": 100, "systemtime": "06/15/2018 16:36:16",
4 | "cpu": 29.3, "diskusage": "112008.6 MB", "memory": 66.5,
5 | "id": "20180615203615_c83fed6f-2ec8-4841-97e3-40985f7859ad"}
6 |
--------------------------------------------------------------------------------
/gluoncvflow1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tspannhw/OpenSourceComputerVision/5c654a9025354257cfbf0b352e5b95e15223b595/gluoncvflow1.png
--------------------------------------------------------------------------------
/httpserver.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 6ceb3410-0162-1000-9671-611f5033539e
5 | httpserver
6 |
7 |
8 | bf0545ad-83d6-3232-0000-000000000000
9 | a519e49c-0695-3dc0-0000-000000000000
10 | 1 GB
11 | 10000
12 |
13 | a519e49c-0695-3dc0-0000-000000000000
14 | 3702027e-33fc-3ac7-0000-000000000000
15 | PROCESSOR
16 |
17 | 0 sec
18 | 1
19 |
20 | success
21 |
22 | a519e49c-0695-3dc0-0000-000000000000
23 | 58afed83-f8ce-3575-0000-000000000000
24 | PROCESSOR
25 |
26 | 0
27 |
28 |
29 | 13577ac3-be4a-33fc-0000-000000000000
30 | a519e49c-0695-3dc0-0000-000000000000
31 | 1 GB
32 | 10000
33 |
34 | a519e49c-0695-3dc0-0000-000000000000
35 | 58afed83-f8ce-3575-0000-000000000000
36 | PROCESSOR
37 |
38 | 0 sec
39 | 1
40 |
41 | unmatched
42 |
43 | a519e49c-0695-3dc0-0000-000000000000
44 | 9e77fda7-9adb-3163-0000-000000000000
45 | PROCESSOR
46 |
47 | 0
48 |
49 |
50 | 1a3663c1-d984-3b4d-0000-000000000000
51 | a519e49c-0695-3dc0-0000-000000000000
52 | 1 GB
53 | 10000
54 |
55 | a519e49c-0695-3dc0-0000-000000000000
56 | 9e77fda7-9adb-3163-0000-000000000000
57 | PROCESSOR
58 |
59 | 0 sec
60 | 1
61 |
62 | success
63 |
64 | a519e49c-0695-3dc0-0000-000000000000
65 | 03ac86dc-2e8b-3f1d-0000-000000000000
66 | PROCESSOR
67 |
68 | 0
69 |
70 |
71 | c3ac3fea-fef1-3592-0000-000000000000
72 | a519e49c-0695-3dc0-0000-000000000000
73 | c7e39625-0f28-3727-a2e3-701a3cee21c9
74 |
75 | nifi-ssl-context-service-nar
76 | org.apache.nifi
77 | 1.5.0.3.1.1.0-35
78 |
79 |
80 |
81 |
82 | Keystore Filename
83 |
84 | Keystore Filename
85 |
86 |
87 |
88 | Keystore Password
89 |
90 | Keystore Password
91 |
92 |
93 |
94 | key-password
95 |
96 | key-password
97 |
98 |
99 |
100 | Keystore Type
101 |
102 | Keystore Type
103 |
104 |
105 |
106 | Truststore Filename
107 |
108 | Truststore Filename
109 |
110 |
111 |
112 | Truststore Password
113 |
114 | Truststore Password
115 |
116 |
117 |
118 | Truststore Type
119 |
120 | Truststore Type
121 |
122 |
123 |
124 | SSL Protocol
125 |
126 | SSL Protocol
127 |
128 |
129 |
130 | StandardRestrictedSSLContextService
131 | false
132 |
133 |
134 | Keystore Filename
135 | /Volumes/seagate/clients/demos/key.pem
136 |
137 |
138 | Keystore Password
139 |
140 |
141 | key-password
142 |
143 |
144 | Keystore Type
145 | PKCS12
146 |
147 |
148 | Truststore Filename
149 |
150 |
151 | Truststore Password
152 |
153 |
154 | Truststore Type
155 |
156 |
157 | SSL Protocol
158 | TLS
159 |
160 |
161 | DISABLED
162 | org.apache.nifi.ssl.StandardRestrictedSSLContextService
163 |
164 |
165 | 434bed6c-1777-3cf9-0000-000000000000
166 | a519e49c-0695-3dc0-0000-000000000000
167 | 434bed6c-1777-3cf9-b4ec-1695e95cecb0
168 |
169 | nifi-http-context-map-nar
170 | org.apache.nifi
171 | 1.5.0.3.1.1.0-35
172 |
173 |
174 |
175 | Maximum Outstanding Requests
176 |
177 | Maximum Outstanding Requests
178 |
179 |
180 |
181 | Request Expiration
182 |
183 | Request Expiration
184 |
185 |
186 |
187 | StandardHttpContextMap
188 | false
189 |
190 |
191 | Maximum Outstanding Requests
192 |
193 |
194 | Request Expiration
195 |
196 |
197 | ENABLED
198 | org.apache.nifi.http.StandardHttpContextMap
199 |
200 |
201 | 9e77fda7-9adb-3163-0000-000000000000
202 | a519e49c-0695-3dc0-0000-000000000000
203 |
204 | 281.2112438800391
205 | 249.4000601504663
206 |
207 |
208 | nifi-standard-nar
209 | org.apache.nifi
210 | 1.5.0.3.1.1.0-35
211 |
212 |
213 | WARN
214 |
215 | 1
216 |
217 |
218 | Routing Strategy
219 |
220 | Routing Strategy
221 |
222 |
223 |
224 | isjsonpost
225 |
226 | isjsonpost
227 |
228 |
229 |
230 | ALL
231 | false
232 | 30 sec
233 |
234 |
235 | Routing Strategy
236 | Route to Property name
237 |
238 |
239 | isjsonpost
240 | ${mime.type:startsWith('application/json')}
241 |
242 |
243 | 0
244 | 0 sec
245 | TIMER_DRIVEN
246 | 1 sec
247 |
248 | Send Page or Parse Post
249 |
250 | false
251 | isjsonpost
252 |
253 |
254 | false
255 | unmatched
256 |
257 | STOPPED
258 |
259 | org.apache.nifi.processors.standard.RouteOnAttribute
260 |
261 |
262 | 03ac86dc-2e8b-3f1d-0000-000000000000
263 | a519e49c-0695-3dc0-0000-000000000000
264 |
265 | 271.9875611603684
266 | 0.0
267 |
268 |
269 | nifi-standard-nar
270 | org.apache.nifi
271 | 1.5.0.3.1.1.0-35
272 |
273 |
274 | WARN
275 |
276 | 1
277 |
278 |
279 | Listening Port
280 |
281 | Listening Port
282 |
283 |
284 |
285 | Hostname
286 |
287 | Hostname
288 |
289 |
290 |
291 | SSL Context Service
292 |
293 | org.apache.nifi.ssl.RestrictedSSLContextService
294 | SSL Context Service
295 |
296 |
297 |
298 | HTTP Context Map
299 |
300 | org.apache.nifi.http.HttpContextMap
301 | HTTP Context Map
302 |
303 |
304 |
305 | Allowed Paths
306 |
307 | Allowed Paths
308 |
309 |
310 |
311 | Default URL Character Set
312 |
313 | Default URL Character Set
314 |
315 |
316 |
317 | Allow GET
318 |
319 | Allow GET
320 |
321 |
322 |
323 | Allow POST
324 |
325 | Allow POST
326 |
327 |
328 |
329 | Allow PUT
330 |
331 | Allow PUT
332 |
333 |
334 |
335 | Allow DELETE
336 |
337 | Allow DELETE
338 |
339 |
340 |
341 | Allow HEAD
342 |
343 | Allow HEAD
344 |
345 |
346 |
347 | Allow OPTIONS
348 |
349 | Allow OPTIONS
350 |
351 |
352 |
353 | Additional HTTP Methods
354 |
355 | Additional HTTP Methods
356 |
357 |
358 |
359 | Client Authentication
360 |
361 | Client Authentication
362 |
363 |
364 |
365 | container-queue-size
366 |
367 | container-queue-size
368 |
369 |
370 |
371 | ALL
372 | false
373 | 30 sec
374 |
375 |
376 | Listening Port
377 | 9089
378 |
379 |
380 | Hostname
381 | hw13125.local
382 |
383 |
384 | SSL Context Service
385 | c3ac3fea-fef1-3592-0000-000000000000
386 |
387 |
388 | HTTP Context Map
389 | 434bed6c-1777-3cf9-0000-000000000000
390 |
391 |
392 | Allowed Paths
393 |
394 |
395 | Default URL Character Set
396 | UTF-8
397 |
398 |
399 | Allow GET
400 | true
401 |
402 |
403 | Allow POST
404 | true
405 |
406 |
407 | Allow PUT
408 | true
409 |
410 |
411 | Allow DELETE
412 | true
413 |
414 |
415 | Allow HEAD
416 | true
417 |
418 |
419 | Allow OPTIONS
420 | true
421 |
422 |
423 | Additional HTTP Methods
424 |
425 |
426 | Client Authentication
427 | No Authentication
428 |
429 |
430 | container-queue-size
431 | 50
432 |
433 |
434 | 0
435 | 0 sec
436 | TIMER_DRIVEN
437 | 1 sec
438 |
439 | Server Static Page and Get Post Backs
440 |
441 | false
442 | success
443 |
444 | STOPPED
445 |
446 | org.apache.nifi.processors.standard.HandleHttpRequest
447 |
448 |
449 | 3702027e-33fc-3ac7-0000-000000000000
450 | a519e49c-0695-3dc0-0000-000000000000
451 |
452 | 0.0
453 | 742.7707486540578
454 |
455 |
456 | nifi-standard-nar
457 | org.apache.nifi
458 | 1.5.0.3.1.1.0-35
459 |
460 |
461 | WARN
462 |
463 | 1
464 |
465 |
466 | HTTP Status Code
467 |
468 | HTTP Status Code
469 |
470 |
471 |
472 | HTTP Context Map
473 |
474 | org.apache.nifi.http.HttpContextMap
475 | HTTP Context Map
476 |
477 |
478 |
479 | ALL
480 | false
481 | 30 sec
482 |
483 |
484 | HTTP Status Code
485 | 200
486 |
487 |
488 | HTTP Context Map
489 | 610e6151-e44c-349b-0000-000000000000
490 |
491 |
492 | 0
493 | 0 sec
494 | TIMER_DRIVEN
495 | 1 sec
496 |
497 | HandleHttpResponse
498 |
499 | true
500 | failure
501 |
502 |
503 | true
504 | success
505 |
506 | STOPPED
507 |
508 | org.apache.nifi.processors.standard.HandleHttpResponse
509 |
510 |
511 | 58afed83-f8ce-3575-0000-000000000000
512 | a519e49c-0695-3dc0-0000-000000000000
513 |
514 | 1.522712834158483
515 | 512.9918312402287
516 |
517 |
518 | nifi-standard-nar
519 | org.apache.nifi
520 | 1.5.0.3.1.1.0-35
521 |
522 |
523 | WARN
524 |
525 | 1
526 |
527 |
528 | File to Fetch
529 |
530 | File to Fetch
531 |
532 |
533 |
534 | Completion Strategy
535 |
536 | Completion Strategy
537 |
538 |
539 |
540 | Move Destination Directory
541 |
542 | Move Destination Directory
543 |
544 |
545 |
546 | Move Conflict Strategy
547 |
548 | Move Conflict Strategy
549 |
550 |
551 |
552 | Log level when file not found
553 |
554 | Log level when file not found
555 |
556 |
557 |
558 | Log level when permission denied
559 |
560 | Log level when permission denied
561 |
562 |
563 |
564 | ALL
565 | false
566 | 30 sec
567 |
568 |
569 | File to Fetch
570 | /volumes/seagate/clients/demos/index.html
571 |
572 |
573 | Completion Strategy
574 | None
575 |
576 |
577 | Move Destination Directory
578 |
579 |
580 | Move Conflict Strategy
581 | Keep Existing
582 |
583 |
584 | Log level when file not found
585 | ERROR
586 |
587 |
588 | Log level when permission denied
589 | ERROR
590 |
591 |
592 | 0
593 | 0 sec
594 | TIMER_DRIVEN
595 | 1 sec
596 |
597 | FetchFile
598 |
599 | true
600 | failure
601 |
602 |
603 | true
604 | not.found
605 |
606 |
607 | true
608 | permission.denied
609 |
610 |
611 | false
612 | success
613 |
614 | STOPPED
615 |
616 | org.apache.nifi.processors.standard.FetchFile
617 |
618 |
619 | 03/28/2018 16:11:15 EDT
620 |
621 |
--------------------------------------------------------------------------------
/images.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | NiFi Browser Images
5 |
6 |
7 |
255 |
256 |
257 |
258 | DEMO: Send Data to HDF / Apache NiFi via HandleHTTPRequest
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
277 |
278 |
279 |
280 |
281 |
282 |
283 | © 2018 Timothy Spann
284 |
285 |
286 |
--------------------------------------------------------------------------------
/index.sh:
--------------------------------------------------------------------------------
1 | ls /opt/demo/images2/ | /opt/demo/buildpage.sh
2 |
--------------------------------------------------------------------------------
/minifimovidius.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | 1019117c-1fb6-1358-9f00-ab7b2f3a0a8d
5 | minifimovidius
6 |
7 |
8 | 1bd103a9-09dc-3121-0000-000000000000
9 | 9fdb6f99-2e9f-3d86-0000-000000000000
10 | 1 GB
11 | 10000
12 |
13 | c8b85dc1-0dfb-3f01-0000-000000000000
14 | 0d6447ee-be36-3ec4-b896-1d57e374580f
15 | REMOTE_INPUT_PORT
16 |
17 | 0 sec
18 | 1
19 |
20 | success
21 |
22 | 9fdb6f99-2e9f-3d86-0000-000000000000
23 | ceab4537-43cb-31c8-0000-000000000000
24 | PROCESSOR
25 |
26 | 0
27 |
28 |
29 | 3cdac0a3-aabf-3a15-0000-000000000000
30 | 9fdb6f99-2e9f-3d86-0000-000000000000
31 | 1 GB
32 | 10000
33 |
34 | c8b85dc1-0dfb-3f01-0000-000000000000
35 | 0d6447ee-be36-3ec4-b896-1d57e374580f
36 | REMOTE_INPUT_PORT
37 |
38 | 0 sec
39 | 1
40 |
41 | success
42 |
43 | 9fdb6f99-2e9f-3d86-0000-000000000000
44 | 9fbeac0f-1c3f-3535-0000-000000000000
45 | PROCESSOR
46 |
47 | 0
48 |
49 |
50 | 9fbeac0f-1c3f-3535-0000-000000000000
51 | 9fdb6f99-2e9f-3d86-0000-000000000000
52 |
53 | 658.8581065828637
54 | 5.3328413496026315
55 |
56 |
57 | nifi-standard-nar
58 | org.apache.nifi
59 | 1.5.0-SNAPSHOT
60 |
61 |
62 | WARN
63 |
64 | 1
65 |
66 |
67 | Input Directory
68 |
69 | Input Directory
70 |
71 |
72 |
73 | File Filter
74 |
75 | File Filter
76 |
77 |
78 |
79 | Path Filter
80 |
81 | Path Filter
82 |
83 |
84 |
85 | Batch Size
86 |
87 | Batch Size
88 |
89 |
90 |
91 | Keep Source File
92 |
93 | Keep Source File
94 |
95 |
96 |
97 | Recurse Subdirectories
98 |
99 | Recurse Subdirectories
100 |
101 |
102 |
103 | Polling Interval
104 |
105 | Polling Interval
106 |
107 |
108 |
109 | Ignore Hidden Files
110 |
111 | Ignore Hidden Files
112 |
113 |
114 |
115 | Minimum File Age
116 |
117 | Minimum File Age
118 |
119 |
120 |
121 | Maximum File Age
122 |
123 | Maximum File Age
124 |
125 |
126 |
127 | Minimum File Size
128 |
129 | Minimum File Size
130 |
131 |
132 |
133 | Maximum File Size
134 |
135 | Maximum File Size
136 |
137 |
138 |
139 | ALL
140 | false
141 | 30 sec
142 |
143 |
144 | Input Directory
145 | /opt/demo/images/
146 |
147 |
148 | File Filter
149 | [^\.].*
150 |
151 |
152 | Path Filter
153 |
154 |
155 | Batch Size
156 | 10
157 |
158 |
159 | Keep Source File
160 | false
161 |
162 |
163 | Recurse Subdirectories
164 | true
165 |
166 |
167 | Polling Interval
168 | 0 sec
169 |
170 |
171 | Ignore Hidden Files
172 | false
173 |
174 |
175 | Minimum File Age
176 | 300 sec
177 |
178 |
179 | Maximum File Age
180 |
181 |
182 | Minimum File Size
183 | 10 B
184 |
185 |
186 | Maximum File Size
187 |
188 |
189 | 0
190 | 0 sec
191 | TIMER_DRIVEN
192 | 1 sec
193 |
194 | GetFile
195 |
196 | false
197 | success
198 |
199 | STOPPED
200 |
201 | org.apache.nifi.processors.standard.GetFile
202 |
203 |
204 | ceab4537-43cb-31c8-0000-000000000000
205 | 9fdb6f99-2e9f-3d86-0000-000000000000
206 |
207 | 0.0
208 | 0.0
209 |
210 |
211 | nifi-standard-nar
212 | org.apache.nifi
213 | 1.5.0-SNAPSHOT
214 |
215 |
216 | WARN
217 |
218 | 1
219 |
220 |
221 | Command
222 |
223 | Command
224 |
225 |
226 |
227 | Command Arguments
228 |
229 | Command Arguments
230 |
231 |
232 |
233 | Batch Duration
234 |
235 | Batch Duration
236 |
237 |
238 |
239 | Redirect Error Stream
240 |
241 | Redirect Error Stream
242 |
243 |
244 |
245 | Working Directory
246 |
247 | Working Directory
248 |
249 |
250 |
251 | Argument Delimiter
252 |
253 | Argument Delimiter
254 |
255 |
256 |
257 | ALL
258 | false
259 | 30 sec
260 |
261 |
262 | Command
263 | /root/workspace/ncappzoo/apps/image-classifier/run.sh
264 |
265 |
266 | Command Arguments
267 |
268 |
269 | Batch Duration
270 |
271 |
272 | Redirect Error Stream
273 | false
274 |
275 |
276 | Working Directory
277 |
278 |
279 | Argument Delimiter
280 |
281 |
282 |
283 | 0
284 | 60 sec
285 | TIMER_DRIVEN
286 | 1 sec
287 |
288 | Capture Photo and NCS Image Classifier
289 |
290 | false
291 | success
292 |
293 | STOPPED
294 |
295 | org.apache.nifi.processors.standard.ExecuteProcess
296 |
297 |
298 | c8b85dc1-0dfb-3f01-0000-000000000000
299 | 9fdb6f99-2e9f-3d86-0000-000000000000
300 |
301 | 210.87661495669886
302 | 405.82958221435547
303 |
304 | 60 sec
305 |
306 |
307 |
308 | 1
309 | false
310 | true
311 | befe3bd8-b4ac-326c-becf-fc448e4a8ff6
312 | MiniFi From TX1 Jetson
313 | 6f466132-3548-35c6-e8e8-facb1b5b60f9
314 | false
315 | false
316 | false
317 |
318 |
319 | 1
320 | true
321 | true
322 | 0d6447ee-be36-3ec4-b896-1d57e374580f
323 | Movidius Input
324 | b0af3ba0-6f65-1a2d-8461-55aa9d4f6243
325 | true
326 | true
327 | false
328 |
329 |
330 |
331 | 1
332 | false
333 | true
334 | e43fd055-eb0c-37b2-ad38-91aeea9380cd
335 | ChristmasTreeInput
336 | 5b09ecbd-0160-1000-0589-15fe48da826c
337 | true
338 | false
339 | false
340 |
341 |
342 |
343 | 1
344 | false
345 | true
346 | 4689a182-d7e1-3371-bef2-24e102ed9350
347 | From ADP Remote Partition 1
348 | dba83df1-d9d6-1b9b-5abc-809eacc93f44
349 | true
350 | false
351 | false
352 |
353 |
354 |
355 |
356 | http://hw13125.local:8080/nifi
357 | http://hw13125.local:8080/nifi
358 | HTTP
359 | 10 sec
360 |
361 |
362 | 12/28/2017 15:03:32 EST
363 |
364 |
--------------------------------------------------------------------------------
/mxclassify.sh:
--------------------------------------------------------------------------------
1 | cd /media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/
2 | python -W ignore /media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/analyze.py 2>/dev/null
3 |
--------------------------------------------------------------------------------
/mxrecordschemawithopencv.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tspannhw/OpenSourceComputerVision/5c654a9025354257cfbf0b352e5b95e15223b595/mxrecordschemawithopencv.png
--------------------------------------------------------------------------------
/nifi_gluoncv_sd.py:
--------------------------------------------------------------------------------
1 | # Based on https://gluon-cv.mxnet.io/build/examples_detection/demo_ssd.html#sphx-glr-build-examples-detection-demo-ssd-py
2 | from gluoncv import model_zoo, data, utils
3 | from matplotlib import pyplot as plt
4 | import numpy
5 | import base64
6 | import uuid
7 | from mxnet import nd, gluon, init, autograd
8 | from mxnet.gluon import nn
9 | from mxnet.gluon.data.vision import datasets, transforms
10 | import matplotlib.pyplot as plt
11 | from time import time
12 | from mxnet.gluon.model_zoo import vision as models
13 | from mxnet.gluon.utils import download
14 | from mxnet import image
15 | import time
16 | import sys
17 | import datetime
18 | import subprocess
19 | import os
20 | from PIL import Image
21 | import datetime
22 | import traceback
23 | import math
24 | import random, string
25 | import base64
26 | import json
27 | from time import gmtime, strftime
28 | import mxnet as mx
29 | import inception_predict
30 | import numpy as np
31 | import cv2
32 | import math
33 | import random, string
34 | import time
35 | import numpy
36 | import random, string
37 | import time
38 | import psutil
39 | import paho.mqtt.client as mqtt
40 | import scipy.misc
41 | from time import gmtime, strftime
42 | start = time.time()
43 | cap = cv2.VideoCapture(1) # 0 - laptop #1 - monitor
44 | ret, frame = cap.read()
45 | uuid = '{0}_{1}'.format(strftime("%Y%m%d%H%M%S",gmtime()),uuid.uuid4())
46 | filename = 'images/gluoncv_image_{0}.jpg'.format(uuid)
47 | filename2 = 'images/gluoncv_image_processed_{0}.jpg'.format(uuid)
48 | cv2.imwrite(filename, frame)
49 |
50 | # model zoo for SSD 512 RESNET 50 v1 VOC
51 | net = model_zoo.get_model('ssd_512_resnet50_v1_voc', pretrained=True)
52 |
53 | #im_fname = utils.download('https://github.com/dmlc/web-data/blob/master/' +
54 | # 'gluoncv/detection/street_small.jpg?raw=true',
55 | # path='street_small.jpg')
56 |
57 | x, img = data.transforms.presets.ssd.load_test(filename, short=512)
58 |
59 | end = time.time()
60 | row = { }
61 | row['imgname'] = filename
62 | row['host'] = os.uname()[1]
63 | row['shape'] = str(x.shape)
64 | row['end'] = '{0}'.format( str(end ))
65 | row['te'] = '{0}'.format(str(end-start))
66 | row['battery'] = psutil.sensors_battery()[0]
67 | row['systemtime'] = datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S')
68 | row['cpu'] = psutil.cpu_percent(interval=1)
69 | usage = psutil.disk_usage("/")
70 | row['diskusage'] = "{:.1f} MB".format(float(usage.free) / 1024 / 1024)
71 | row['memory'] = psutil.virtual_memory().percent
72 | row['id'] = str(uuid)
73 | json_string = json.dumps(row)
74 | # print(json_string)
75 |
76 | # MQTT
77 | client = mqtt.Client()
78 | client.username_pw_set("user","pass")
79 | client.connect("server", 17769, 60)
80 | client.publish("gluoncv", payload=json_string, qos=0, retain=True)
81 |
82 | class_IDs, scores, bounding_boxs = net(x)
83 |
84 | ax = utils.viz.plot_bbox(img, bounding_boxs[0], scores[0], class_IDs[0], class_names=net.classes)
85 |
86 | plt.savefig(filename2)
87 | # plt.show()
88 |
--------------------------------------------------------------------------------
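Note on nifi_gluoncv_sd.py: it publishes its JSON row to the gluoncv MQTT topic, then runs the SSD forward pass and saves the annotated image. A quick subscriber sketch (paho-mqtt) for verifying those messages arrive; the broker host, port and credentials below are placeholders, mirroring the placeholders in the script:

    # Sketch: subscribe to the "gluoncv" topic and print each JSON payload.
    import paho.mqtt.client as mqtt

    def on_message(client, userdata, msg):
        print(msg.topic, msg.payload.decode('utf-8'))

    client = mqtt.Client()
    client.username_pw_set("user", "pass")
    client.on_message = on_message
    client.connect("server", 17769, 60)
    client.subscribe("gluoncv", qos=0)
    client.loop_forever()
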
/nifigluon2.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import base64
3 | import uuid
4 | from mxnet import nd, gluon, init, autograd
5 | from mxnet.gluon import nn
6 | from mxnet.gluon.data.vision import datasets, transforms
7 | import matplotlib.pyplot as plt
8 | from time import time
9 | from mxnet.gluon.model_zoo import vision as models
10 | from mxnet.gluon.utils import download
11 | from mxnet import image
12 | import time
13 | import sys
14 | import datetime
15 | import subprocess
16 | import sys
17 | import os
18 | import datetime
19 | import traceback
20 | import math
21 | import random, string
22 | import base64
23 | import json
24 | from time import gmtime, strftime
25 | import mxnet as mx
26 | import inception_predict
27 | import numpy as np
28 | import cv2
29 | import math
30 | import random, string
31 | import time
32 | import numpy
33 | import random, string
34 | import time
35 | import psutil
36 | import paho.mqtt.client as mqtt
37 | from time import gmtime, strftime
38 | start = time.time()
39 | cap = cv2.VideoCapture(1) # 0 - laptop #1 - monitor
40 |
41 | # http://gluon-crash-course.mxnet.io/predict.html
42 | def transform(data):
43 | data = data.transpose((2,0,1)).expand_dims(axis=0)
44 | rgb_mean = nd.array([0.485, 0.456, 0.406]).reshape((1,3,1,1))
45 | rgb_std = nd.array([0.229, 0.224, 0.225]).reshape((1,3,1,1))
46 | return (data.astype('float32') / 255 - rgb_mean) / rgb_std
47 |
48 |
49 | net = models.resnet50_v2(pretrained=True)
50 |
51 |
52 | url = 'http://data.mxnet.io/models/imagenet/synset.txt'
53 | fname = download(url)
54 | with open(fname, 'r') as f:
55 | text_labels = [' '.join(l.split()[1:]) for l in f]
56 |
57 | ret, frame = cap.read()
58 | unique_id = '{0}_{1}'.format(strftime("%Y%m%d%H%M%S",gmtime()), uuid.uuid4())
59 | filename = 'images/gluon_image_{0}.jpg'.format(unique_id)
60 | cv2.imwrite(filename, frame)
61 |
62 | x = image.imread(filename)
63 | x = image.resize_short(x, 256)
64 | x, _ = image.center_crop(x, (224,224))
65 |
66 | prob = net(transform(x)).softmax()  # class probabilities for the captured frame
67 | idx = prob.topk(k=5)[0]  # indices of the five most likely ImageNet classes
68 | row = { }
69 |
70 | #for i in idx:
71 | # i = int(i.asscalar())
72 | # print(i)
73 | # print('prob=%.5f, %s' % ( prob[0,i].asscalar() * 100, text_labels[i]))
74 | try:
75 | end = time.time()
76 | row['top1pct'] = '{:.1f}'.format(prob[0,int(idx[0].asscalar())].asscalar()*100)
77 | row['top2pct'] = '{:.1f}'.format(prob[0,int(idx[1].asscalar())].asscalar()*100)
78 | row['top3pct'] = '{:.1f}'.format(prob[0,int(idx[2].asscalar())].asscalar()*100)
79 | row['top4pct'] = '{:.1f}'.format(prob[0,int(idx[3].asscalar())].asscalar()*100)
80 | row['top5pct'] = '{:.1f}'.format(prob[0,int(idx[4].asscalar())].asscalar()*100)
81 | row['top1'] = str(text_labels[int(idx[0].asscalar())])
82 | row['top2'] = str(text_labels[int(idx[1].asscalar())])
83 | row['top3'] = str(text_labels[int(idx[2].asscalar())])
84 | row['top4'] = str(text_labels[int(idx[3].asscalar())])
85 | row['top5'] = str(text_labels[int(idx[4].asscalar())])
86 | row['imgname'] = filename
87 | row['host'] = os.uname()[1]
88 | row['end'] = '{0}'.format( str(end ))
89 | row['te'] = '{0}'.format(str(end-start))
90 | row['battery'] = psutil.sensors_battery()[0]
91 | row['systemtime'] = datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S')
92 | row['cpu'] = psutil.cpu_percent(interval=1)
93 | usage = psutil.disk_usage("/")
94 | row['diskusage'] = "{:.1f} MB".format(float(usage.free) / 1024 / 1024)
95 | row['memory'] = psutil.virtual_memory().percent
96 | row['id'] = str(unique_id)
97 | json_string = json.dumps(row)
98 | #print(json_string)
99 | # MQTT
100 | client = mqtt.Client()
101 | client.username_pw_set("user","pass")
102 | client.connect("server", 17769, 60)
103 | client.publish("gluon", payload=json_string, qos=0, retain=True)
104 | except Exception:
105 | print("{\"message\": \"Failed to run\"}")
106 |
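107 | # Sketch of the JSON message published to the "gluon" MQTT topic above; the
108 | # field values here are illustrative, not captured output:
109 | #
110 | #   {"top1": "notebook, notebook computer", "top1pct": "62.4", ...,
111 | #    "imgname": "images/gluon_image_<timestamp>_<uuid4>.jpg",
112 | #    "cpu": 23.4, "memory": 41.0, "diskusage": "10240.0 MB",
113 | #    "host": "hostname", "id": "<timestamp>_<uuid4>"}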
--------------------------------------------------------------------------------
/pb.py:
--------------------------------------------------------------------------------
1 | # Forked From https://github.com/minimaxir/person-blocker
2 | # License
3 | #MIT
4 | #
5 | #Code used from Mask R-CNN by Matterport, Inc. (MIT-Licensed), with minor alterations and copyright notices retained.
6 | #MIT License
7 | #
8 | #Copyright (c) 2018 Max Woolf
9 | #
10 | #Permission is hereby granted, free of charge, to any person obtaining a copy
11 | #of this software and associated documentation files (the "Software"), to deal
12 | #in the Software without restriction, including without limitation the rights
13 | #to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | #copies of the Software, and to permit persons to whom the Software is
15 | #furnished to do so, subject to the following conditions:#
16 | #
17 | #The above copyright notice and this permission notice shall be included in all
18 | #copies or substantial portions of the Software.
19 | #
20 | #THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21 | #IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22 | #FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23 | #AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24 | #LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25 | #OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26 | #SOFTWARE.
27 | #
28 | #---
29 | #
30 | #Mask R-CNN
31 | #
32 | #The MIT License (MIT)
33 | #
34 | #Copyright (c) 2017 Matterport, Inc.
35 |
36 | import os
37 | import sys
38 | import argparse
39 | import numpy as np
40 | import coco
41 | import utils
42 | import model as modellib
43 | from classes import get_class_names, InferenceConfig
44 | from ast import literal_eval as make_tuple
45 | import imageio
46 | import visualize
47 | import os.path
48 | import re
49 | import datetime
50 | import math
51 | import random, string
52 | import base64
53 | import json
54 | import socket
55 | import psutil
56 | import subprocess
57 | import time
58 | import uuid
59 | import cv2
60 |
61 |
62 | # grab a single webcam frame for the person blocker to process
63 | from time import gmtime, strftime
64 |
65 | cap = cv2.VideoCapture(1)  # camera index: 0 = built-in webcam, 1 = external webcam
66 | packet_size=3000
67 | ret, frame = cap.read()
68 | filename = 'images2/tx1_image_{0}_{1}.jpg'.format(uuid.uuid4(),strftime("%Y%m%d%H%M%S",gmtime()))
69 | cv2.imwrite(filename, frame)
70 |
71 | from time import sleep
72 | from time import gmtime, strftime
73 | start = time.time()
74 |
75 | # minor fork by tim spann for nifi usage
76 | # 2018-april-4
77 | # Creates a color layer and adds Gaussian noise.
78 | # For each pixel, the same noise value is added to each channel
79 | # to mitigate hue shifting.
80 |
81 | external_IP_and_port = ('198.41.0.4', 53) # a.root-servers.net
82 | socket_family = socket.AF_INET
83 |
84 | def IP_address():
85 | try:
86 | s = socket.socket(socket_family, socket.SOCK_DGRAM)
87 | s.connect(external_IP_and_port)
88 | answer = s.getsockname()
89 | s.close()
90 | return answer[0] if answer else None
91 | except socket.error:
92 | return None
93 |
94 | def create_noisy_color(image, color):
95 | color_mask = np.full(shape=(image.shape[0], image.shape[1], 3),
96 | fill_value=color)
97 |
98 | noise = np.random.normal(0, 25, (image.shape[0], image.shape[1]))
99 | noise = np.repeat(np.expand_dims(noise, axis=2), repeats=3, axis=2)
100 | mask_noise = np.clip(color_mask + noise, 0., 255.)
101 | return mask_noise
102 |
103 |
104 | # Helper function to allow both RGB triplet + hex CL input
105 |
106 | def string_to_rgb_triplet(triplet):
107 |
108 | if '#' in triplet:
109 | # http://stackoverflow.com/a/4296727
110 | triplet = triplet.lstrip('#')
111 | _NUMERALS = '0123456789abcdefABCDEF'
112 | _HEXDEC = {v: int(v, 16)
113 | for v in (x + y for x in _NUMERALS for y in _NUMERALS)}
114 | return (_HEXDEC[triplet[0:2]], _HEXDEC[triplet[2:4]],
115 | _HEXDEC[triplet[4:6]])
116 |
117 | else:
118 | # https://stackoverflow.com/a/9763133
119 | triplet = make_tuple(triplet)
120 | return triplet
121 |
122 |
123 | def person_blocker(args):
124 |
125 | # Required to load model, but otherwise unused
126 | ROOT_DIR = os.getcwd()
127 | COCO_MODEL_PATH = args.model or os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
128 |
129 | MODEL_DIR = os.path.join(ROOT_DIR, "logs") # Required to load model
130 |
131 | if not os.path.exists(COCO_MODEL_PATH):
132 | utils.download_trained_weights(COCO_MODEL_PATH)
133 |
134 | # Load model and config
135 | config = InferenceConfig()
136 | model = modellib.MaskRCNN(mode="inference",
137 | model_dir=MODEL_DIR, config=config)
138 | model.load_weights(COCO_MODEL_PATH, by_name=True)
139 |
140 | image = imageio.imread(filename)
141 |
142 | # Create masks for all objects
143 | results = model.detect([image], verbose=0)
144 | r = results[0]
145 |
146 | if args.labeled:
147 | position_ids = ['[{}]'.format(x)
148 | for x in range(r['class_ids'].shape[0])]
149 | visualize.display_instances(image, r['rois'],
150 | r['masks'], r['class_ids'],
151 | get_class_names(), position_ids)
152 | sys.exit()
153 |
154 | # Filter masks to only the selected objects
155 | objects = np.array(args.objects)
156 |
157 | # Object IDs:
158 | if np.all(np.chararray.isnumeric(objects)):
159 | object_indices = objects.astype(int)
160 | # Types of objects:
161 | else:
162 | selected_class_ids = np.flatnonzero(np.in1d(get_class_names(),
163 | objects))
164 | object_indices = np.flatnonzero(
165 | np.in1d(r['class_ids'], selected_class_ids))
166 |
167 | mask_selected = np.sum(r['masks'][:, :, object_indices], axis=2)
168 |
169 | # Replace object masks with noise
170 | mask_color = string_to_rgb_triplet(args.color)
171 | image_masked = image.copy()
172 | noisy_color = create_noisy_color(image, mask_color)
173 | image_masked[mask_selected > 0] = noisy_color[mask_selected > 0]
174 |
175 | img_dir = '/Volumes/seagate/projects/person-blocker/images2/'
176 | img_pre = 'person_blocked_{0}'.format(strftime("%Y%m%d%H%M%S",gmtime()))
177 | img_name = img_dir + img_pre + '.png'
178 | gif_name = img_dir + img_pre + '.gif'
179 |
180 | imageio.imwrite(img_name, image_masked)
181 |
182 | # Create GIF. The noise will be random for each frame,
183 | # which creates a "static" effect
184 | # this works great, but takes some time and produces 7+ meg file
185 | #images = [image_masked]
186 | #num_images = 10 # should be a divisor of 30
187 | #
188 | #for _ in range(num_images - 1):
189 | # new_image = image.copy()
190 | # noisy_color = create_noisy_color(image, mask_color)
191 | # new_image[mask_selected > 0] = noisy_color[mask_selected > 0]
192 | # images.append(new_image)
193 | #
194 | #imageio.mimsave(gif_name, images, fps=30., subrectangles=True)
195 |
196 | # print json
197 | try:
198 | # Create unique image name
199 |         uniqueid = 'person_uuid_{0}_{1}'.format(strftime("%Y%m%d%H%M%S",gmtime()),uuid.uuid4())
200 | host = os.uname()[1]
201 | currenttime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
202 | ipaddress = IP_address()
203 | end = time.time()
204 | row = { 'uuid': uniqueid, 'runtime': str(round(end - start)), 'host': host, 'ts': currenttime, 'ipaddress': ipaddress, 'imagefilename': img_pre, 'originalfilename': filename }
205 | print( json.dumps(row) )
206 |
207 |     except Exception:
208 | print("{\"message\": \"Failed to run\"}")
209 |
210 |
211 | if __name__ == '__main__':
212 | parser = argparse.ArgumentParser(
213 | description='Person Blocker - Automatically "block" people '
214 | 'in images using a neural network.')
215 | parser.add_argument('-i', '--image', help='Image file name.',
216 | required=False)
217 | parser.add_argument(
218 | '-m', '--model', help='path to COCO model', default=None)
219 | parser.add_argument('-o',
220 | '--objects', nargs='+',
221 | help='object(s)/object ID(s) to block. ' +
222 | 'Use the -names flag to print a list of ' +
223 | 'valid objects',
224 | default='person')
225 | parser.add_argument('-c',
226 | '--color', nargs='?', default='(255, 255, 255)',
227 | help='color of the "block"')
228 | parser.add_argument('-l',
229 | '--labeled', dest='labeled',
230 | action='store_true',
231 | help='generate labeled image instead')
232 | parser.add_argument('-n',
233 | '--names', dest='names',
234 | action='store_true',
235 | help='prints class names and exits.')
236 | parser.set_defaults(labeled=False, names=False)
237 | args = parser.parse_args()
238 |
239 | if args.names:
240 | print(get_class_names())
241 | sys.exit()
242 |
243 | person_blocker(args)
244 |
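245 | # Example invocations (see run.sh; the input frame is always captured from the
246 | # webcam above, so the -i/--image flag is effectively unused in this fork):
247 | #
248 | #   python3 -W ignore pb.py                              # block people (default)
249 | #   python3 -W ignore pb.py -o person dog -c '#00ff00'   # block people and dogs in green
250 | #   python3 -W ignore pb.py -n                           # print valid class names and exit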
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
1 | cd /Volumes/seagate/projects/person-blocker
2 | python3 -W ignore pb.py 2>/dev/null
3 |
--------------------------------------------------------------------------------
/run2.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | DATE=$(date +"%Y-%m-%d_%H%M")
4 |
5 | fswebcam -q -r 1280x720 --no-banner /opt/demo/images/$DATE.jpg
6 |
7 | python3 -W ignore /opt/demo/all.py /opt/demo/images/$DATE.jpg 2>/dev/null
8 |
--------------------------------------------------------------------------------
/run3.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | DATE=$(date +"%Y-%m-%d_%H%M")
4 |
5 | fswebcam -q -r 1280x720 --no-banner /opt/demo/images/$DATE.jpg
6 |
7 | python2 -W ignore /opt/demo/classify_image.py /opt/demo/images/$DATE.jpg 2>/dev/null
8 |
--------------------------------------------------------------------------------
/rungluon2.sh:
--------------------------------------------------------------------------------
1 | python3.6 -W ignore /Volumes/seagate/projects/incubator-mxnet/gluon2.py 2>/dev/null
2 |
--------------------------------------------------------------------------------
/testcv.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import os
3 | import sys
4 | import json
5 | import socket
6 | import psutil
7 | import subprocess
8 | import time
9 | import datetime
10 | from time import sleep
11 | from time import gmtime, strftime
12 | from string import Template
13 | # forked from https://gist.github.com/dannguyen/cfa2fb49b28c82a1068f
14 | # image file paths are passed as command-line arguments; the haarcascade path is hard-coded below
15 |
16 | currenttime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
17 | host = os.uname()[1]
18 | #print(os.uname())
19 | cpu = psutil.cpu_percent(interval=1)
20 | # read the SoC temperature from the sysfs thermal zone (Jetson/Raspberry Pi style)
21 | f = open('/sys/class/thermal/thermal_zone0/temp', 'r')
22 | l = f.readline()
23 | ctemp = 1.0 * float(l)/1000
24 | usage = psutil.disk_usage("/")
25 | mem = psutil.virtual_memory()
26 | diskrootfree = "{:.1f} MB".format(float(usage.free) / 1024 / 1024)
27 | mempercent = mem.percent
28 | external_IP_and_port = ('198.41.0.4', 53) # a.root-servers.net
29 | socket_family = socket.AF_INET
30 | #p = subprocess.Popen(['/opt/vc/bin/vcgencmd','measure_temp'], stdout=subprocess.PIPE,
31 | # stderr=subprocess.PIPE)
32 | #out, err = p.communicate()
33 | def IP_address():
34 | try:
35 | s = socket.socket(socket_family, socket.SOCK_DGRAM)
36 | s.connect(external_IP_and_port)
37 | answer = s.getsockname()
38 | s.close()
39 | return answer[0] if answer else None
40 | except socket.error:
41 | return None
42 | ipaddress = IP_address()
43 |
44 | face_cascade_path = '/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/haarcascade_frontalface_default.xml'
45 | face_cascade = cv2.CascadeClassifier(os.path.expanduser(face_cascade_path))
46 |
47 | scale_factor = 1.1
48 | min_neighbors = 3
49 | min_size = (30, 30)
50 | flags = cv2.CASCADE_SCALE_IMAGE
51 |
52 | print('[')
53 | for infname in sys.argv[1:]:
54 | image_path = os.path.expanduser(infname)
55 | image = cv2.imread(image_path)
56 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
57 | faces = face_cascade.detectMultiScale(gray, scaleFactor = scale_factor, minNeighbors = min_neighbors, minSize = min_size, flags = flags)
58 |     print('Faces: {0} {1}'.format(len(faces), faces))
59 |     x = y = w = h = 0  # defaults so the summary row below is defined when no face is found
60 |     for( x, y, w, h ) in faces:
61 | cv2.rectangle(image, (x, y), (x + w, y + h), (255, 255, 0), 2)
62 | outfname = "/media/nvidia/96ed93f9-7c40-4999-85ba-3eb24262d0a5/images/%s.faces.jpg" % os.path.basename(infname)
63 | cv2.imwrite(os.path.expanduser(outfname), image)
64 | endtime= strftime("%Y-%m-%d %H:%M:%S",gmtime())
65 | # row = { 'ts': currenttime, 'endtime': endtime, 'host': host, 'memory': mempercent, 'diskfree': diskrootfree, 'cputemp': round(ctemp,2), 'ipaddress': ipaddress, 'x': x, 'y': y, 'w': w, 'h': h, 'filename': outfname }
66 | row = { 'ts': currenttime, 'endtime': endtime, 'host': host, 'memory': mempercent, 'diskfree': diskrootfree, 'cputemp': round(ctemp,2), 'ipaddress': ipaddress, 'x': str(x), 'y': str(y), 'w': str(w), 'h': str(h), 'filename': outfname }
67 |
68 | json_string = json.dumps(row)
69 | print(json_string)
70 | print(']')
71 |
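72 | # Example invocation (any number of image paths may be passed; the cascade file
73 | # and output locations above are specific to this Jetson setup, and the image
74 | # path below is only illustrative):
75 | #
76 | #   python3 testcv.py /opt/demo/images/2018-06-15_1200.jpg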
--------------------------------------------------------------------------------