├── .eslintignore
├── .gitignore
├── .npmignore
├── CONTRIBUTING.md
├── LICENSE
├── MAINTAINERS.md
├── README.md
├── backend
├── .env.template
├── app.js
├── bin
│ └── www
├── package-lock.json
├── package.json
└── routes
│ ├── index.js
│ └── users.js
├── frontend
├── README.md
├── app.js
├── babel.config.js
├── index.html
├── package-lock.json
├── package.json
├── public
│ ├── favicon.ico
│ └── index.html
├── src
│ ├── .eslintrc.js
│ ├── App.vue
│ ├── assets
│ │ ├── logo.png
│ │ └── logo.svg
│ ├── components
│ │ ├── DetailedInferenceView.vue
│ │ ├── Drag.vue
│ │ ├── HelloWorld.vue
│ │ └── Modal.vue
│ ├── dist
│ │ ├── json-tree.css
│ │ ├── vfc.css
│ │ ├── vuetable-2.css
│ │ └── vuetable-2.js
│ ├── main.js
│ └── plugins
│ │ └── vuetify.js
└── vue.config.js
├── images
├── GettingStartedWComposer-arch-diagram.png
├── addtowallet.png
├── admintab.png
├── archi.png
├── checkcompleted.png
├── composerplayground.png
├── createparticipantbtn.png
├── developer-analytical-dashboard-ai-powerai-flow-11.png
├── developer-analytical-dashboards-ai-powerai-flow.png
├── generateNewId.png
├── idstowallet.png
├── importbtn.png
├── productListing.png
├── retailer.png
├── retailerPL.png
└── selectid.png
├── lib
├── foodSupply.js
└── foodSupplyFabric.js
├── models
├── base.cto
└── foodSupply.cto
├── package.json
└── permissions.acl
/.eslintignore:
--------------------------------------------------------------------------------
1 | coverage
2 | dist
3 | go
4 | lib
5 | node_modules
6 | out
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Emacs backup files
2 | *~
3 | *#
4 | .#*
5 | # Vim file artifacts
6 | .*.sw*
7 | # installed platform-specific binaries
8 | .DS_Store
9 | .env
10 | local/crypto-config/
11 | backend/hfc-key-store/
12 | backend/.env
13 | frontend/.env
14 | */.env
15 |
16 | .project
17 |
18 | # Logs
19 | logs
20 | *.log
21 | npm-debug.log*
22 |
23 | # Runtime data
24 | pids
25 | *.pid
26 | *.seed
27 |
28 | # Directory for instrumented libs generated by jscoverage/JSCover
29 | lib-cov
30 |
31 | # Coverage directory used by tools like istanbul
32 | coverage
33 |
34 | # nyc test coverage
35 | .nyc_output
36 |
37 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
38 | .grunt
39 |
40 | # node-waf configuration
41 | .lock-wscript
42 |
43 | # Compiled binary addons (http://nodejs.org/api/addons.html)
44 | build/Release
45 |
46 | # Dependency directories
47 | node_modules
48 | jspm_packages
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional REPL history
54 | .node_repl_history
55 |
56 | # JSDoc
57 | out
58 |
59 | # Mac files.
60 | **/.DS_Store
61 |
62 | *.swp
63 |
64 | # Build generated files should be ignored by git, but not by npm.
65 | #dist
66 |
67 | node_modules
68 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 |
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 |
11 | # Directory for instrumented libs generated by jscoverage/JSCover
12 | lib-cov
13 |
14 | # Coverage directory used by tools like istanbul
15 | coverage
16 |
17 | # nyc test coverage
18 | .nyc_output
19 |
20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
21 | .grunt
22 |
23 | # node-waf configuration
24 | .lock-wscript
25 |
26 | # Compiled binary addons (http://nodejs.org/api/addons.html)
27 | build/Release
28 |
29 | # Dependency directories
30 | node_modules
31 | jspm_packages
32 |
33 | # Optional npm cache directory
34 | .npm
35 |
36 | # Optional REPL history
37 | .node_repl_history
38 |
39 | # JSDoc
40 | out
41 |
42 | # Mac files.
43 | **/.DS_Store
44 |
45 | *.swp
46 |
47 | # Build generated files should be ignored by git, but not by npm.
48 | # dist
49 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | This is an open source project, and we appreciate your help!
4 |
5 | We use the GitHub issue tracker to discuss new features and non-trivial bugs.
6 |
7 | In addition to the issue tracker, [#journeys on
8 | Slack](https://dwopen.slack.com) is the best way to get into contact with the
9 | project's maintainers.
10 |
11 | To contribute code, documentation, or tests, please submit a pull request to
12 | the GitHub repository. Generally, we expect two maintainers to review your pull
13 | request before it is approved for merging. For more details, see the
14 | [MAINTAINERS](MAINTAINERS.md) page.
15 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/MAINTAINERS.md:
--------------------------------------------------------------------------------
1 | # Maintainers Guide
2 |
3 | This guide is intended for maintainers - anybody with commit access to one or
4 | more Code Pattern repositories.
5 |
6 | ## Methodology
7 |
8 | This repository does not have a traditional release management cycle, but
 9 | should instead be maintained as a useful, working, and polished reference at
10 | all times. While all work can therefore be focused on the master branch, the
11 | quality of this branch should never be compromised.
12 |
13 | The remainder of this document details how to merge pull requests to the
14 | repositories.
15 |
16 | ## Merge approval
17 |
18 | The project maintainers use LGTM (Looks Good To Me) in comments on the pull
19 | request to indicate acceptance prior to merging. A change requires LGTMs from
20 | two project maintainers. If the code is written by a maintainer, the change
21 | only requires one additional LGTM.
22 |
23 | ## Reviewing Pull Requests
24 |
25 | We recommend reviewing pull requests directly within GitHub. This allows a
26 | public commentary on changes, providing transparency for all users. When
27 | providing feedback be civil, courteous, and kind. Disagreement is fine, so long
28 | as the discourse is carried out politely. If we see a record of uncivil or
29 | abusive comments, we will revoke your commit privileges and invite you to leave
30 | the project.
31 |
32 | During your review, consider the following points:
33 |
34 | ### Does the change have positive impact?
35 |
36 | Some proposed changes may not represent a positive impact to the project. Ask
37 | whether or not the change will make understanding the code easier, or if it
38 | could simply be a personal preference on the part of the author (see
39 | [bikeshedding](https://en.wiktionary.org/wiki/bikeshedding)).
40 |
41 | Pull requests that do not have a clear positive impact should be closed without
42 | merging.
43 |
44 | ### Do the changes make sense?
45 |
46 | If you do not understand what the changes are or what they accomplish, ask the
47 | author for clarification. Ask the author to add comments and/or clarify test
48 | case names to make the intentions clear.
49 |
50 | At times, such clarification will reveal that the author may not be using the
51 | code correctly, or is unaware of features that accommodate their needs. If you
52 | feel this is the case, work up a code sample that would address the pull
53 | request for them, and feel free to close the pull request once they confirm.
54 |
55 | ### Does the change introduce a new feature?
56 |
57 | For any given pull request, ask yourself "is this a new feature?" If so, does
58 | the pull request (or associated issue) contain narrative indicating the need
59 | for the feature? If not, ask them to provide that information.
60 |
61 | Are new unit tests in place that test all new behaviors introduced? If not, do
62 | not merge the feature until they are! Is documentation in place for the new
63 | feature? (See the documentation guidelines). If not do not merge the feature
64 | until it is! Is the feature necessary for general use cases? Try and keep the
65 | scope of any given component narrow. If a proposed feature does not fit that
66 | scope, recommend to the user that they maintain the feature on their own, and
67 | close the request. You may also recommend that they see if the feature gains
68 | traction among other users, and suggest they re-submit when they can show such
69 | support.
70 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # WARNING: This repository is no longer maintained
2 |
3 | > This repository will not be updated. The repository will be kept available in read-only mode.
4 |
5 | # Generate and visualize video analytics using IBM Maximo Visual Inspection
6 |
7 | In this Code Pattern we will show how to deploy a customizable dashboard to visualize video/image analytics. This dashboard enables users to upload images to be processed by IBM Maximo Visual Inspection (object recognition, image classification), download the analyzed results, and view analytics via interactive graphs.
8 |
9 | When the reader has completed this Code Pattern, they will understand how to build a dashboard using Vue.js and Maximo Visual Inspection APIs to generate and visualize image analytics.
10 |
11 |
12 |
13 |
14 |
15 | 
16 |
17 |
18 | # Components
19 |
20 | * [IBM Maximo Visual Inspection](https://www.ibm.com/us-en/marketplace/ibm-powerai-vision). This is an image analysis platform that allows you to build and manage computer vision models, upload and annotate images, and deploy apis to analyze images and videos.
21 |
 22 |     Sign up for a trial account of IBM Maximo Visual Inspection [here](https://developer.ibm.com/linuxonpower/deep-learning-powerai/try-powerai/). This link includes options to provision an IBM Maximo Visual Inspection instance either locally or in the cloud.
23 |
24 |
25 |
26 | # Flow
27 |
28 | 1. Upload images to IBM Maximo Visual Inspection
29 | 2. Label uploaded images to train model
30 | 3. Deploy model
31 | 4. Upload image via dashboard
32 | 5. View processed image and graphs in dashboard
33 |
34 | # Prerequisites
35 |
36 | * An account on IBM Marketplace that has access to IBM Maximo Visual Inspection. This service can be provisioned [here](https://developer.ibm.com/linuxonpower/deep-learning-powerai/vision/access-registration-form/)
37 |
38 | # Steps
39 |
40 | Follow these steps to setup and run this Code Pattern.
41 |
42 | 1. [Upload training images to IBM Maximo Visual Inspection ](#1-upload-training-images-to-IBM-Visual-Insights)
43 | 2. [Train and deploy model in IBM Maximo Visual Inspection](#2-Train-and-deploy-model-in-IBM-Visual-Insights)
44 | 3. [Clone repository](#3-clone-repository)
45 | 4. [Deploy dashboard](#4-Deploy-dashboard)
46 | 5. [Upload images to be processed via dashboard](#5-Upload-images-to-be-processed-via-dashboard)
47 | 6. [View processed images and graphs in dashboard](#6-View-images-and-graphs-in-dashboard)
48 |
49 |
50 |
51 | ## 1. Upload training images to IBM Maximo Visual Inspection
52 |
53 | Login to IBM Maximo Visual Inspection Dashboard
54 |
55 |
56 |
57 |
58 | To build a model, we'll first need to upload a set of images. Click "Datasets" in the upper menu. Then, click "Create New Data Set", and enter a name. We'll use "traffic_long" here
59 |
60 |
61 |
62 |
63 |
64 | Drag and drop one or more images to build your dataset.
65 |
66 |
67 |
68 |
69 |
70 | ## 2. Train and deploy model in IBM Maximo Visual Inspection
71 |
72 | In this example, we'll build an object recognition model to identify specific objects in each frame of a video. After the images have completed uploading to the dataset, select one or more images in the dataset, and then select "Label Objects".
73 |
74 |
75 |
76 |
77 |
78 |
79 | Next, we'll split the training video into multiple frames. We'll label objects in a few of the frames manually. After generating a model, we can automatically label the rest of the frames to increase accuracy.
80 |
81 |
82 |
83 |
84 |
85 | Identify what kinds of objects will need to be recognized. Click "Add Label", and type the name of each object. In this case, we're detecting traffic on a freeway, so we'll set our objects as "car", "truck", and "bus".
86 |
87 |
88 |
89 |
90 |
91 | We can then manually annotate objects by
92 | 1. Selecting a video frame
93 | 2. Selecting an object type
94 | 3. Drawing a rectangle (or custom shape) around object in frame
95 |
96 |
97 |
98 |
99 |
100 | After annotating a few frames, we can then build a model. Do so by going back to the "Datasets" view, selecting your dataset, and then selecting "Train Model"
101 |
102 |
103 |
104 |
105 | Select type of Model you'd like to build. In this case, we'll use "Object Detection" as our model type, and "Detectron" as our model optimizer. Then, click the "Train Model" button.
106 |
107 |
108 |
109 | After the model completes training, click the "Models" button in the upper menu. Then, select the model and then click the "Deploy Model" button.
110 |
111 |
112 |
113 |
114 | Deploying the custom model will establish an endpoint where images and videos can be uploaded, either through the UI or through a REST API endpoint.
115 |
116 |
117 |
118 |
119 |
120 | ## 3. Clone repository
121 |
122 | Clone repository using the git cli
123 |
124 | ```
125 | git clone https://github.com/IBM/power-ai-dashboard
126 | ```
127 |
128 | ### Install Node.js packages
129 |
130 | If expecting to run this application locally, please install [Node.js](https://nodejs.org/en/) and NPM. Windows users can use the installer at the link [here](https://nodejs.org/en/download/)
131 |
132 | If you're using Mac OS X or Linux, and your system requires additional versions of node for other projects, we'd suggest using [nvm](https://github.com/creationix/nvm) to easily switch between node versions. Install nvm with the following commands
133 |
134 | ```bash
135 | curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.11/install.sh | bash
136 | # Place next three lines in ~/.bash_profile
137 | export NVM_DIR="$HOME/.nvm"
138 | [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
139 | [ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
140 | ```
141 |
142 |
143 | ```
144 | nvm install v8.9.0
145 | nvm use 8.9.0
146 | ```
147 |
148 | Also install [ffmpeg](https://www.ffmpeg.org/) using one of the following commands, depending on your operating system. ffmpeg enables the app to receive metadata describing the analyzed videos.
149 |
150 | This may take a while (10-15 minutes).
151 |
152 | ```
153 | # OS X
154 | brew install ffmpeg
155 |
156 | # Linux
157 | sudo apt install ffmpeg -y
158 | ```
159 |
160 | To run the dashboard locally, we'll need to install a few node libraries which are listed in our `package.json` file.
161 | - [Vue.js](https://vuejs.org/): Used to simplify the generation of front-end components
162 | - [Express.js](https://expressjs.com/): Used to provide custom api endpoints
163 |
164 | These libraries can be installed by entering the following commands in a terminal.
165 |
166 | ```
167 | cd visual_insights_dashboard
168 | ```
169 |
170 | ```
171 | cd backend
172 | npm install
173 | cd ..
174 | cd frontend
175 | npm install
176 | ```
177 |
178 |
179 | ## 4. Deploy dashboard
180 |
181 | After installing the prerequisites, we can start the dashboard application.
182 |
183 |
184 |
185 | Run the following to start the backend
186 | ```
187 | cd backend
188 | npm start
189 | ```
190 |
191 | In a separate terminal, run the following to start the frontend UI
192 | ```
193 | cd frontend
194 | npm run serve
195 | ```
196 |
197 |
198 | Confirm you can access the Dashboard UI at [http://localhost:8080](http://localhost:8080).
199 |
200 |
201 |
202 |
203 |
204 | Click the Login button at the top and enter your IBM Maximo Visual Inspection credentials. These credentials should be included in the welcome letter sent when your PowerAI instance was provisioned. This input form requires a username, password, and url where the instance can be accessed.
205 |
206 |
207 |
208 | ## 5. Upload images to be processed via dashboard
209 |
210 | After providing our IBM Maximo Visual Inspection credentials, we can then use the dashboard to analyze images and videos.
211 | Let's upload a video or image to be processed by our custom model. We'll do this by clicking the "Upload Image(s)" button in the upper menu. Then, drag and drop images that need to be analyzed. Select a model from the selection dropbox, and then click the "Upload" button.
212 |
213 |
214 |
215 |
216 | ## 6. View processed images and graphs in dashboard
217 |
218 | As images are uploaded to the IBM Maximo Visual Inspection service, they'll be shown in a grid in the main dashboard view. We can use the "Search" input to filter the image analysis results by time, model id, object type, etc. Also, the annotated images can be downloaded as a zip file by clicking the "Download Images" button.
219 |
220 | Select any of the images to open a detailed view for a video/image. This detailed view will show the original image/video, as well as a few graphs showing basic video analytics, such as a breakdown of objects detected per second (line graph), and a comparison of total detected objects by type (circle graph).
221 |
222 |
223 |
224 |
225 | # Learn more
226 |
227 |
228 |
229 |
230 |
231 | # License
232 |
233 | This code pattern is licensed under the Apache Software License, Version 2. Separate third party code objects invoked within this code pattern are licensed by their respective providers pursuant to their own separate licenses. Contributions are subject to the [Developer Certificate of Origin, Version 1.1 (DCO)](https://developercertificate.org/) and the [Apache Software License, Version 2](https://www.apache.org/licenses/LICENSE-2.0.txt).
234 |
235 | [Apache Software License (ASL) FAQ](https://www.apache.org/foundation/license-faq.html#WhatDoesItMEAN)
236 |
--------------------------------------------------------------------------------
/backend/.env.template:
--------------------------------------------------------------------------------
1 | url=""
2 | user=""
3 | password=""
4 |
--------------------------------------------------------------------------------
/backend/app.js:
--------------------------------------------------------------------------------
1 | var createError = require('http-errors');
2 | var express = require('express');
3 | var path = require('path');
4 | var cookieParser = require('cookie-parser');
5 | var logger = require('morgan');
6 | var fs = require('fs')
7 | var indexRouter = require('./routes/index');
8 | var usersRouter = require('./routes/users');
9 |
10 | var app = express();
11 | var cors = require('cors')
12 | var multer = require('multer')
13 | const proxy = require('express-http-proxy');
14 | require('dotenv').config()
15 |
console.log("Backend Server Started.")
// app.use(cors({credentials: false, origin: true}))
app.use(cors())
app.options('*', cors())
// Short-circuit CORS preflight (OPTIONS) requests with a 200 response.
// FIX: the previous version passed an undefined `headers` variable to
// res.writeHead(), which threw a ReferenceError on every OPTIONS request,
// and then fell through to next() after the response had already ended.
app.use(function(req, res, next) {
  if (req.method === 'OPTIONS') {
    console.log('!OPTIONS');
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
    res.header("Access-Control-Allow-Methods", "POST, GET, PUT, DELETE, OPTIONS")
    res.header("Access-Control-Allow-Credentials", "true")
    // End the preflight here; do not run the rest of the middleware chain.
    res.status(200).end();
    return;
  }
  next();
});
32 |
const bodyParser = require('body-parser')
app.use(bodyParser.json());
// bodyParser.raw({ type: 'application/vnd.custom-type' })

// GET proxy: forwards any request under /proxyget to the Maximo Visual
// Inspection instance named in the `x-proxy-url` request header, passing the
// caller's headers along and streaming the upstream response straight back.
app.use('/proxyget', function(req, res) {
  console.log(req)
  const incomingHeaders = req.headers
  console.log("headers")
  console.log(incomingHeaders)
  // Target = base URL from the custom header + the remainder of the path.
  const targetUrl = incomingHeaders['x-proxy-url'] + req.url
  console.log("sending proxy get request to " + targetUrl)
  const requestOptions = {
    url: targetUrl,
    headers: incomingHeaders
  }
  console.log("options")
  console.log(requestOptions)
  require('request')
    .get(requestOptions)
    .on('error', function(err) {
      console.error(err)
    })
    .pipe(res)
});
55 |
// app.use(bodyParser.raw({ type: 'application/vnd.custom-type' }));



// NOTE(review): this views path is overridden below by
// app.set('views', path.join(__dirname, 'views')) — confirm this earlier
// setting can be removed.
app.set('views', path.join(__dirname, ''));
// Parse multipart/form-data bodies; uploaded files land in req.files.
app.use(require("express-form-data").parse())
// app.use(formData.stream())
// POST proxy: take the uploaded file from the multipart field `blob`
// (parsed by express-form-data above) and re-post it as form data to the
// URL given in the `x-proxy-url` header, streaming the upstream response
// back to the client.
app.use('/proxypost', function(req, res) {
  console.log("received post request")
  var url = req.headers['x-proxy-url']
  if (!url) {
    return res.status(400).send('missing x-proxy-url header')
  }
  var paiv_url = url + req.url;
  console.log(`posting to ${paiv_url}`)
  // The original dereferenced req.files['blob']['path'] unconditionally and
  // crashed with a TypeError when no file was uploaded.
  if (!req.files || !req.files['blob']) {
    return res.status(400).send('missing uploaded file field "blob"')
  }
  var filePath = req.files['blob']['path']
  var readStream = fs.createReadStream(filePath)
  var formData = {
    files: readStream,
    containHeatMap: "true"  // presumably asks the upstream service for heat maps — TODO confirm
  }
  require('request').post({url: paiv_url, formData: formData}).on('error', function(err) {
    // Surface upstream failures instead of logging-and-hanging.
    console.error(err)
    if (!res.headersSent) {
      res.status(502).send('proxy request failed')
    }
  }).pipe(res)
});
90 |
91 |
92 | // view engine setup
93 | app.set('views', path.join(__dirname, 'views'));
94 | app.set('view engine', 'jade');
95 | // var url = process.env.url
96 | // console.log("url")
97 | // console.log(url)
98 | // app.use('/proxy', proxy(url));
99 |
100 | // app.use('/token', function(req, res) {
101 | // var url = req.url
102 | // console.log(url)
103 | // console.log(req)
104 | // console.log(`setting up proxy for ${url}`)
105 | // app.use('/proxy', proxy(url))
106 | // })
107 |
108 |
109 |
110 | app.use(logger('dev'));
111 | app.use(express.json());
112 | app.use(express.urlencoded({ extended: false }));
113 | app.use(cookieParser());
114 | app.use(express.static(path.join(__dirname, 'public')));
115 | app.use('/', indexRouter);
116 | app.use('/users', usersRouter);
117 |
// Anything not handled above is a 404; hand it to the error handler below.
app.use((req, res, next) => next(createError(404)));
122 |
// Final error handler: expose full error details only in development,
// then render the Jade error page with the failure's status code.
app.use(function(error, req, res, next) {
  res.locals.message = error.message;
  // Hide the stack trace outside of development.
  res.locals.error = req.app.get('env') === 'development' ? error : {};
  res.status(error.status || 500);
  res.render('error');
});

module.exports = app;
--------------------------------------------------------------------------------
/backend/bin/www:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * Module dependencies.
5 | */
6 |
7 | var app = require('../app');
8 | var debug = require('debug')('react-backend:server');
9 | var http = require('http');
10 |
11 | /**
12 | * Get port from environment and store in Express.
13 | */
14 |
var port = normalizePort(process.env.PORT || '30000');
app.set('port', port);

// NOTE(review): this file boots the Express *backend* (package
// "react-backend"); app.js logs "Backend Server Started." — the label
// below is misleading; confirm and rename.
console.log("Vue Server Started")
/**
 * Create HTTP server.
 */

var server = http.createServer(app);

/**
 * Listen on provided port, on all network interfaces.
 */

server.listen(port);
server.on('error', onError);
server.on('listening', onListening);
32 |
33 | /**
34 | * Normalize a port into a number, string, or false.
35 | */
36 |
/**
 * Normalize a port value: return the numeric port when it parses as a
 * non-negative integer, the raw string for named pipes, and false for
 * negative numbers.
 */
function normalizePort(val) {
  const parsed = parseInt(val, 10);
  if (isNaN(parsed)) {
    // Not numeric: treat the value as a named pipe.
    return val;
  }
  return parsed >= 0 ? parsed : false;
}
52 |
53 | /**
54 | * Event listener for HTTP server "error" event.
55 | */
56 |
/**
 * "error" listener for the HTTP server: translate the common listen()
 * failures into friendly messages and exit; rethrow anything else.
 */
function onError(error) {
  if (error.syscall !== 'listen') {
    throw error;
  }

  const bind = (typeof port === 'string' ? 'Pipe ' : 'Port ') + port;

  if (error.code === 'EACCES') {
    console.error(bind + ' requires elevated privileges');
    process.exit(1);
  } else if (error.code === 'EADDRINUSE') {
    console.error(bind + ' is already in use');
    process.exit(1);
  } else {
    throw error;
  }
}
80 |
81 | /**
82 | * Event listener for HTTP server "listening" event.
83 | */
84 |
/**
 * "listening" listener for the HTTP server: log the bound pipe or port
 * via the debug logger.
 */
function onListening() {
  const addr = server.address();
  const bind = typeof addr === 'string' ? 'pipe ' + addr : 'port ' + addr.port;
  debug('Listening on ' + bind);
}
92 |
--------------------------------------------------------------------------------
/backend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "react-backend",
3 | "version": "0.0.0",
4 | "private": true,
5 | "scripts": {
6 | "start": "node ./bin/www"
7 | },
8 | "dependencies": {
9 | "async": "2.6.1",
10 | "body-parser": "^1.19.0",
11 | "cookie-parser": "~1.4.3",
12 | "cors": "2.8.4",
13 | "debug": "~2.6.9",
14 | "dotenv": "^8.2.0",
15 | "express": "~4.16.0",
16 | "express-form-data": "^2.0.10",
17 | "express-http-proxy": "^1.6.0",
18 | "fabric-ca-client": "1.2.0",
19 | "fabric-client": "1.2.0",
20 | "fluent-ffmpeg": "^2.1.2",
21 | "grpc": "1.11.0",
22 | "http-errors": "~1.6.2",
23 | "jade": "~1.11.0",
24 | "morgan": "~1.9.0",
25 | "multer": "^1.4.2",
26 | "node-fetch": "^2.6.0",
27 | "nopt": "^4.0.1",
28 | "underscore": "x.x.x",
29 | "url": "^0.11.0",
30 | "vuetify": "^2.1.10"
31 | },
32 | "devDependencies": {
33 | "vue-card": "^1.1.0"
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/backend/routes/index.js:
--------------------------------------------------------------------------------
1 | const express = require('express');
2 | const router = express.Router();
3 | const request = require('request')
4 | const hfc = require('fabric-client')
5 | const CAClient = require('fabric-ca-client')
6 | const fs = require('fs')
7 | const cors = require('cors')
8 | const _ = require('underscore')
9 | const util = require('util')
10 | const async = require('async')
11 | const exec = require('child_process').exec;
12 | const glob = require("glob")
13 | const path = require('path');
14 | const os = require('os');
15 | const fetch = require('node-fetch')
16 | const proxy = require('express-http-proxy');
17 | const ffmpeg = require('fluent-ffmpeg');
18 |
19 | require('dotenv').config()
20 |
// Apply CORS to every route on this router.
router.all('*', cors())
// Exporting up front is safe: `router` is mutated in place by the route
// registrations below, and app.js holds the same object reference.
module.exports = router;

// TODO, this line is temporary
// NOTE(review): this disables TLS certificate validation for the whole
// process — a security risk outside development; confirm before shipping.
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";

// Authenticate to powerai
// Module-level credential / token cache shared by the routes below.
// NOTE(review): `tokenRefreshTime` is assigned in getToken() without a
// declaration (implicit global) — it should be declared here too.
var username, password, url, token

// When credentials come from the environment (.env), eagerly fetch an
// auth token at startup.
if ( process.env.user ) {
  username = process.env.user
  password = process.env.password
  url = process.env.url
  console.log(username, password, url)
  getToken(username, password, url)
  // token = getToken(username, password, url)
}
38 |
// var tokenRefreshTime =
// Request an auth token from the target server's /api/tokens endpoint
// using username/password credentials. On success, caches the token and a
// refresh timestamp in module scope and resolves with the token string.
// NOTE(review): `tokenRefreshTime` is assigned without a declaration,
// creating an implicit global; a failure inside r.json() is never
// rejected, which can leave the returned promise pending forever —
// confirm and fix.
function getToken(username = undefined, password = undefined, url = undefined) {
  return new Promise( (resolve, reject) => {
    console.log("requesting auth token")
    var options = {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        username: username,
        password: password,
        grant_type: "password"
      })
    }
    console.log("token options")
    console.log(options)
    fetch( url + "/api/tokens", options ).then ( (r) => {
      console.log(r)
      r.json().then( (t) => {
        console.log("token received: " + t.token)
        token = t.token                // module-level cache
        tokenRefreshTime = Date.now()  // implicit global — see note above
        resolve(t.token)
      } )
    }).catch ( (err) => {
      console.log("error finding token")
      console.log(err)
      reject(err)
    })
  })
}
71 |
// POST /token — return the cached auth token if it is still fresh,
// otherwise request a new one with the credentials from the request body;
// responds 401 when no credentials are available, 500 when the upstream
// token request fails.
router.post('/token', function(req, res) {
  // The original compared against 60*60*100 ms (6 minutes) — almost
  // certainly a typo for one hour (60*60*1000).
  var ONE_HOUR_MS = 60 * 60 * 1000;
  if (token && tokenRefreshTime && (Date.now() - ONE_HOUR_MS < tokenRefreshTime)) {
    console.log("token hasn't expired yet, sending existing token")
    res.send(token)
  } else if (req.body.username && req.body.password) {
    // Do not log the password itself.
    console.log(`requesting new token for ${req.body.username} from ${req.body.url}`)
    username = req.body.username
    password = req.body.password
    getToken(req.body.username, req.body.password, req.body.url).then((t) => {
      token = t
      res.send(token)
    }).catch((err) => {
      // The original had no rejection handler; a failed token request
      // left the client hanging.
      console.log(err)
      res.sendStatus(500)
    })
  } else {
    console.log("username / password not set")
    // res.send(401) is deprecated and ambiguous; send a real 401 status.
    res.sendStatus(401)
  }
});
109 |
110 |
111 | // app.use('/download', function(req, res) {
112 | // proxy(url)
113 | // // res.send(200)
114 | // });
115 |
// Fetch the inference list from `${url}/api/inferences` using the given
// auth token. Resolves with the parsed JSON body; rejects on network or
// JSON-parse errors (the original logged those and left the promise
// pending forever, so callers' .then() chains silently stalled).
var getInferences = function(url, token) {
  var options = {
    method: "GET",
    headers: {
      "X-Auth-Token": token
    }
  }
  console.log(`posting ${url}/api/inferences`)
  return fetch(url + "/api/inferences", options)
    .then((r) => r.json())
    .then((e) => {
      console.log("inferences received")
      inferences = e  // module-level cache (caller overwrites with e.inferences_list)
      return e
    })
    .catch((err) => {
      console.log("error fetching inferences")
      console.log(err)
      throw err  // propagate so callers can react
    })
}
143 |
var datasets = []
// Fetch every dataset from `${url}/api/datasets`, then fetch each
// dataset's file list and accumulate all files into the module-level
// `datasets` array. Resolves with the accumulated array.
// (The original had its promise wrapper commented out, so the inner
// resolve(...) would have thrown a ReferenceError, and its completion
// check `idx == ds.length` could never be true for a 0-based index.)
var getDatasets = function() {
  var options = {
    method: "GET",
    headers: {
      "X-Auth-Token": token
    }
  }
  return fetch(url + "/api/datasets", options)
    .then((r) => r.json())
    .then((ds) => {
      console.log("datasets received")
      // Fetch all file lists in parallel and wait for every one.
      var fileFetches = ds.map((set) =>
        fetch(`${url}/api/datasets/${set._id}/files`, options)
          .then((r) => {
            console.log("receiving files from set " + set._id)
            return r.json()
          })
          .then((files) => {
            datasets = datasets.concat(files)
          })
      )
      return Promise.all(fileFetches).then(() => datasets)
    })
    .catch((err) => {
      console.log(err)
      throw err
    })
  // /datasets
  // /datasets/{id}/files
}
192 |
// GET /inferences — fetch the inference list from the server named in the
// request headers, return it to the caller, then kick off background
// processing of each inference's detections.
router.get('/inferences', function(req, res) {
  var proxyUrl = req.headers['x-proxy-url'];
  var authToken = req.headers['x-auth-token'];
  getInferences(proxyUrl, authToken).then(function(result) {
    inferences = result.inferences_list;  // module-level cache
    res.send(result.inferences_list);
    // Populate inferenceData asynchronously, one inference at a time.
    result.inferences_list.forEach(function(inference) {
      processInferences(proxyUrl, authToken, inference);
    });
  });
});
208 |
// Promise-based delay helper: resolves (with undefined) after `ms` ms.
function sleep(ms) {
  return new Promise(function (resolve) {
    setTimeout(resolve, ms);
  });
}
212 |
// NOTE(review): dead/incomplete code — the returned promise never resolves
// or rejects, and the loop below only schedules `retries` console.log
// timeouts; the commented-out body suggests it was meant to poll until
// inferenceData catches up with inferences. Confirm whether this helper is
// still needed or can be deleted.
var checkInferences = function() {
  return new Promise( (resolve, reject) => {
    //
    var retries = 5
    var r = [...Array(retries).keys()]
    r.map((idx) => {
      setTimeout( () => {
        console.log("printing")
      }, 1000)
    })
    // console.log(Object.keys(inferenceData).sort())
    // console.log(Object.keys(inferences).sort())
    // inferences.map(i => i._id)
    // sleep(500).then(() => {
    // if (Object.keys(inferenceData).sort() == Object.keys(inferences).sort()) {
    // resolve()
    // } else {
    // console.log("sleeping")
    // }
    // }
    // })
    // if (idx == retries) {
    // reject()
    // }
    // })

  })
}
241 |
// GET /inferencedetailed — dump the processed analytics for every
// inference seen so far (populated asynchronously by processInferences).
router.get('/inferencedetailed', (req, res) => {
  console.log("requesting detections for all inferences ")
  console.log(inferenceData)
  res.json(inferenceData)
})
248 | // Flow,
249 | // User loads main dashboard page
250 | // Request is sent to receive inferences
251 | // Process list of inferences
252 | // Once processing is done, return processed results to frontend
253 |
254 |
255 | // inference_id = "7afb7810-bdfa-4968-aafc-06a8bd758f5b"
256 | // 1. Split frames based off fps modulus
257 | // 2. Get array of count of each
258 |
259 | // var countObjects = function( ) {
260 | //
261 | // }
// NOTE(review): duplicate of the NODE_TLS_REJECT_UNAUTHORIZED override
// near the top of this file — disables TLS validation process-wide.
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = 0
// Module-level caches keyed by inference id.
var inferenceData = {} // object to keep inference analytics
var fps = {} // frames-per-second per inference id (set by getFPS)
var seconds = {} // video duration in whole seconds per inference id (set by getFPS)
266 |
// Probe the video at `url + endpoint` with ffprobe and compute its
// rounded frames-per-second and duration (whole seconds). Results are
// cached in the module-level `fps` and `seconds` maps under `id`;
// resolves with the fps value, rejects on probe failure.
var getFPS = function(url, endpoint, id) {
  return new Promise ( (resolve, reject) => {
    console.log("ffprobe analyzing " + `${url}${endpoint}`)
    ffmpeg.ffprobe(url + endpoint, (err, data) => {
      // The original ignored `err`, crashing on `data.streams` whenever
      // the probe failed.
      if (err) {
        return reject(err)
      }
      var videoStream = data.streams.filter(s => s.codec_type == 'video')[0]
      if (!videoStream) {
        return reject(new Error('no video stream found in ' + url + endpoint))
      }
      var fps_reading = videoStream['avg_frame_rate']  // e.g. "30000/1001"
      var vid_length = videoStream['duration']
      var fps_nums = fps_reading.split('/')
      var fps_calc = Math.round(fps_nums[0] / fps_nums[1])
      fps[id] = fps_calc
      seconds[id] = Math.round(vid_length)
      console.log(fps_calc)
      resolve(fps_calc)
    })
  })
}
294 |
// For each whole second of video, pick a representative frame number:
// the frame exactly on the second boundary if it appears in
// `frameNumbers`, otherwise the first listed frame within that second,
// falling back to the boundary frame when none was detected.
// Returns one entry per second (length === seconds).
var matchFrames = function(frameNumbers, seconds, framesPerSecond) {
  var matches = []

  Array.from({length: seconds}, (x, i) => i).map((s) => {
    var start = framesPerSecond * s
    if (frameNumbers.includes(start)) {
      matches.push(start)
    } else {
      // Scan forward through this second for the first detected frame.
      // `let` fixes the original's undeclared loop index (implicit global).
      for (let i = start; i <= (start + framesPerSecond); i++) {
        if (frameNumbers.includes(i)) {
          matches.push(i)
          break
        }
        if (i == (start + framesPerSecond)) {
          // Nothing detected this second; use the boundary frame.
          matches.push(start)
          break
        }
      }
    }
  })
  return matches
}
326 |
327 |
328 | // m = matchFrames(frames, seconds, framesPerSecond)
329 |
330 |
// Fetch the detailed detection list for one inference (`i`) and compute
// per-label object counts sampled at roughly one frame per second of
// video. The result is stored in the module-level `inferenceData` map
// under the inference id — nothing is returned.
// NOTE(review): `processing` and `objectCount` are assigned without
// declarations (implicit globals); the author's own TODO below says this
// should be async instead — confirm before relying on these globals.
var processInferences = function(url, token, i) {
  // var infDetails = JSON.parse(fs.readFileSync('../inference.json').toString())
  // var d = JSON.parse(data) //JSON.parse(fs.readFileSync('../detections.json').toString())
  // get detections.json
  // reduce detections, select frames by modulus
  var options = {
    method: "GET",
    headers: {
      "X-Auth-Token": token
    }
  }
  // request inference detections
  console.log("calling url")
  console.log(url + "/api/inferences/" + i['_id'])
  processing = true
  fetch( url + "/api/inferences/" + i['_id'], options ).then ( (r) => {
    // result = r
    console.log("r")
    console.log(JSON.stringify(r))
    console.log("r.status")
    console.log(r.status)
    r.json().then((det) => {
      console.log(`detections received for inf ${det._id}`)
      var d = det.classified
      var totalFrames = det.total_frames
      var times = det.classified.map(f => f.time_offset)
      var allFrames = det.classified.map(f => f.frame_number)
      console.log(`allFrames ${allFrames}`)
      console.log("starting ffprobe")
      // Unique, numerically sorted frame numbers of all detections.
      var frameNumbers = Array.from(new Set(det.classified.map(f => f.frame_number))).sort((a, b) => a - b)
      console.log(`frameNumbers ${frameNumbers}`)
      var endpoint = encodeURI(det.video_in)
      console.log(endpoint)
      // getFPS populates the fps/seconds caches for this inference id.
      getFPS(url, endpoint, det['_id']).then(() => {
        console.log("received fps")
        var framesPerSecond = fps[det._id]
        console.log("framesPerSecond")
        console.log(framesPerSecond)
        console.log(`allFrames ${allFrames}`)
        console.log(`seconds[det['_id']] ${seconds[det['_id']]}`)
        // One representative frame number per second of video.
        var m = matchFrames(frameNumbers, seconds[det['_id']], framesPerSecond)
        console.log(`m ${m}`)
        console.log(`m.length ${m.length}`)
        // after getting an array of frame numbers, select a frame number corresponding to each second. this is needed because paiv doesn't process every single frame, likely skips those that are very similar

        // var frameNumbers = [...new Set(reducedFrames.map(e => Number(e.frame_number)))].reverse()

        // Keep only detections landing exactly on a second boundary.
        var reducedFrames = d.filter( e => e.frame_number % framesPerSecond == 0 )
        var allFrames = d.map( e => e.frame_number)

        console.log("totalFrames " + String(totalFrames))
        console.log("d length: " + String(d.length))
        console.log("times length " + String(times.length))
        console.log("reducedFrames.length")
        console.log(reducedFrames.length)

        // get unique classes
        var labels = [...new Set(reducedFrames.map(e => e.label))]
        // get list of unique frame numbers
        console.log("frameNumbers")
        console.log(frameNumbers)
        // get total number of objects found
        var totalObjectCount = []
        m.map((num) => {
          totalObjectCount.push(reducedFrames.filter(f => f.frame_number == num).length)
        })

        // get number of classes by frame number
        objectCount = {} // TODO, function should be async instead of using global
        labels.map((label, lIdx) => {
          objectCount[label] = []
          m.map( (num, frameIdx) => {
            var numInstances = reducedFrames.filter(f => (f.frame_number == num && f.label == label )).length
            objectCount[label].push(numInstances)
            // Publish the result only after the final (label, frame) pair.
            if ( (frameIdx == (m.length - 1 )) && (lIdx == (labels.length - 1)) ) {
              console.log("processing complete")
              console.log(objectCount)
              processing = false
              inferenceData[i['_id']] = objectCount
              // return objectCount
            }
          })
        })
      })
      // var totalTime = (Math.max(...times) * .001) // in seconds
      // var proFrames = det.processed_frames
      // var framesPerSecond = Math.round(proFrames / totalTime)
    } ).catch((err) => {
      console.log("error parsing json")
      console.log(err)
    } )
  }).catch( (err) => {
    console.log(err)
  })


}
428 |
429 | // get detections from powerai with
430 | // powerai-vision-ny/api/inferences/7afb7810-bdfa-4968-aafc-06a8bd758f5b
431 |
432 | // Global dashboard view
433 | // List of inferences
434 | // List of all pic/vid thumbnails
435 |
436 |
437 |
438 | // Detailed View
439 | // Get and process detections for a given inference
440 | // Should give a response to render the following
441 | // Draw a graph (line, circle)
442 | //
443 |
// GET /inference/:infId — return the processed analytics for a single
// inference, or 404 if it has not been processed yet.
router.get('/inference/:infId', function(req, res) {
  console.log("requesting detections for id: " + req.params.infId)
  // The original tested `infId in inferenceData.keys()`; plain objects
  // have no .keys() method, so every request threw a TypeError. Use the
  // `in` operator directly on the object instead.
  if (req.params.infId in inferenceData) {
    res.send(inferenceData[req.params.infId])
  } else {
    // res.send(404) is deprecated; send a real 404 status.
    res.sendStatus(404)
  }
});
475 |
476 |
477 | /*
478 | router.post('/api/chaincode', function(req, res) {
479 | console.log("chaincode request received")
480 | console.log(req.body)
481 | var chaincode = req.body.params.ctorMsg
482 | var chaincode_query = JSON.stringify({
483 | "Args": [chaincode.function].concat(chaincode.args)
484 | })
485 | if (typeof(client) !== 'undefined') {
486 | console.log("invoking chaincode with hfc client")
487 | console.log("req")
488 | console.log(req.body)
489 | console.log("req.body.method")
490 | console.log(req.body.method)
491 | if (req.body.method && req.body.method.includes('invoke')) {
492 | console.log("invoking request")
493 | var transaction_id = client.newTransactionID(true)
494 | var txRequest = {
495 | chaincodeId: sec_chaincode.name,
496 | chaincodeVersion: sec_chaincode.version,
497 | txId: transaction_id,
498 | fcn: req.body.params.ctorMsg.function,
499 | args: req.body.params.ctorMsg.args
500 | }
501 | console.log(txRequest)
502 | var txResult = proposeAndSubmitTransaction(txRequest)
503 | res.send(200)
504 | } else {
505 | console.log("querying chaincode with hfc client")
506 | var txRequest = {
507 | chaincodeId: sec_chaincode.name,
508 | chaincodeVersion: sec_chaincode.version,
509 | fcn: req.body.params.ctorMsg.function,
510 | args: req.body.params.ctorMsg.args
511 | }
512 | console.log("txRequest")
513 | console.log(txRequest)
514 | channel.queryByChaincode(txRequest).then((cc_response) => {
515 | console.log("cc query response received")
516 | console.log(cc_response[0].toString())
517 | res.json(cc_response[0].toString())
518 | }).catch((err) => {
519 | console.log("cc query failed")
520 | console.log(err)
521 | res.json(err)
522 | })
523 | }
524 | }
525 |
526 | });
527 |
528 | function submitTransaction(txRequest) {
529 | console.log(util.format('Successfully sent Proposal and received ProposalResponse: Status - %s, message - "%s"', proposalResponses[0].response.status, proposalResponses[0].response.message));
530 | var promises = []
531 | var sendPromise = channel.sendTransaction({
532 | proposalResponses: proposalResponses,
533 | proposal: proposal
534 | })
535 | sendPromise.then((result) => {
536 | console.log("transaction result")
537 | console.log(result)
538 | res.send(result)
539 | })
540 | }
541 |
542 | function proposeAndSubmitTransaction(txRequest) {
543 | console.log("sending transaction proposal")
544 | channel.sendTransactionProposal(txRequest).then((proposalRes) => {
545 | console.log("response received")
546 | var proposalResponses = proposalRes[0];
547 | var proposal = proposalRes[1];
548 | let isProposalGood = false;
549 | console.log("proposalResponses[0].response")
550 | console.log(proposalResponses[0].response)
551 | if (proposalResponses && proposalResponses[0].response && proposalResponses[0].response.status === 200) {
552 | console.log('Transaction proposal was accepted');
553 | channel.sendTransaction({
554 | proposalResponses: proposalResponses,
555 | proposal: proposal
556 | }).then((res) => {
557 | console.log("Transaction result was accepted")
558 | return true
559 | })
560 | } else {
561 | console.log('Transaction proposal was rejected');
562 | return false
563 | }
564 | }).catch((err) => {
565 | return false
566 | console.log(err)
567 | });
568 | }
569 |
570 | function uploadAdminCert(req, mspId) {
571 | var uploadAdminCertReq = {
572 | "msp_id": mspId,
573 | "adminCertName": "admin_cert" + Math.floor(Math.random() * 1000),
574 | "adminCertificate": user._signingIdentity._certificate,
575 | "peer_names": Object.keys(client._network_config._network_config.peers),
576 | "SKIP_CACHE": true
577 | }
578 | if (! req.body.api_endpoint.includes('/api/v1')) {
579 | var api_endpoint = req.body.api_endpoint + '/api/v1'
580 | } else {
581 | var api_endpoint = req.body.api_endpoint
582 | }
583 | var options = {
584 | url: api_endpoint + '/networks/' + req.body.network_id + '/certificates',
585 | method: 'POST',
586 | headers: {
587 | 'Accept': 'application/json',
588 | 'Content-Type': 'application/json',
589 | 'Accept-Charset': 'utf-8',
590 | "Authorization": "Basic " + new Buffer(req.body.key + ":" + req.body.secret, "utf8").toString("base64")
591 | },
592 | body: uploadAdminCertReq
593 | }
594 | console.log("uploading admin cert")
595 | request(options, function(err, res, body) {
596 | console.log("res")
597 | console.log(res)
598 | if (err) {
599 | console.log(err)
600 | }
601 | })
602 | }
603 | */
604 |
--------------------------------------------------------------------------------
/backend/routes/users.js:
--------------------------------------------------------------------------------
// Express sub-router mounted at /users by app.js.
var express = require('express');
var router = express.Router();

/* GET users listing. */
// Placeholder endpoint from the express generator; returns a stub string.
router.get('/', function(req, res, next) {
  res.send('respond with a resource');
});

module.exports = router;
10 |
11 |
--------------------------------------------------------------------------------
/frontend/README.md:
--------------------------------------------------------------------------------
1 | # hello-world
2 |
3 | ## Project setup
4 | ```
5 | npm install
6 | ```
7 |
8 | ### Compiles and hot-reloads for development
9 | ```
10 | npm run serve
11 | ```
12 |
13 | ### Compiles and minifies for production
14 | ```
15 | npm run build
16 | ```
17 |
18 | ### Run your tests
19 | ```
20 | npm run test
21 | ```
22 |
23 | ### Lints and fixes files
24 | ```
25 | npm run lint
26 | ```
27 |
28 | ### Customize configuration
29 | See [Configuration Reference](https://cli.vuejs.org/config/).
30 |
--------------------------------------------------------------------------------
/frontend/app.js:
--------------------------------------------------------------------------------
// Minimal static file server for the frontend.
var express = require('express');
var path = require('path');
var serveStatic = require('serve-static');

// `var` added — the original assigned `app` as an implicit global.
var app = express();
// NOTE(review): serve-static expects a *directory* root; pointing it at
// index.html is almost certainly wrong — the commented-out line serving
// the dist/ build output looks like the intended configuration. Confirm.
// app.use(serveStatic(path.join(__dirname, 'dist')));
app.use(serveStatic(path.join(__dirname, 'index.html')));
var port = process.env.PORT || 30000;
var hostname = '127.0.0.1';

app.listen(port, hostname, () => {
  console.log(`Server running at http://${hostname}:${port}/`);
});
--------------------------------------------------------------------------------
/frontend/babel.config.js:
--------------------------------------------------------------------------------
// Babel configuration for the Vue CLI build; '@vue/app' is the preset
// bundled with @vue/cli-plugin-babel.
module.exports = {
  presets: [
    '@vue/app'
  ]
}
--------------------------------------------------------------------------------
/frontend/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | My first Vue app
5 |
6 |
7 |
8 |