├── .github └── workflows │ └── npm-publish.yml ├── .gitignore ├── .vscode └── launch.json ├── Dockerfile.alpine ├── Dockerfile.debian ├── Images ├── Example Flow.PNG ├── bbt1.jpg ├── face-api-input-node-menu.JPG └── face-api-recognise-node-menu.JPG ├── README.md ├── _config.yml ├── app ├── endpoints │ ├── add_descriptors.js │ ├── check_descriptors.js │ └── delete_descriptors.js ├── helpers │ ├── buffer2canvas.js │ ├── create_descriptor_location.js │ ├── detect_faces.js │ ├── detect_on_fork.js │ ├── draw_faces.js │ ├── load_models.js │ ├── load_tfjs.js │ ├── parse_detection_options.js │ ├── parse_model_options.js │ ├── save_image.js │ └── type_checks.js ├── nodes │ ├── inference_node.js │ ├── input_node.js │ └── recognise_node.js └── weights │ ├── age_gender_model-shard1 │ ├── age_gender_model-weights_manifest.json │ ├── face_expression_model-shard1 │ ├── face_expression_model-weights_manifest.json │ ├── face_landmark_68_model-shard1 │ ├── face_landmark_68_model-weights_manifest.json │ ├── face_landmark_68_tiny_model-shard1 │ ├── face_landmark_68_tiny_model-weights_manifest.json │ ├── face_recognition_model-shard1 │ ├── face_recognition_model-shard2 │ ├── face_recognition_model-weights_manifest.json │ ├── mtcnn_model-shard1 │ ├── mtcnn_model-weights_manifest.json │ ├── ssd_mobilenetv1_model-shard1 │ ├── ssd_mobilenetv1_model-shard2 │ ├── ssd_mobilenetv1_model-weights_manifest.json │ ├── tiny_face_detector_model-shard1 │ └── tiny_face_detector_model-weights_manifest.json ├── face-api.html ├── face-api.js ├── package-lock.json ├── package.json └── tests └── test.js /.github/workflows/npm-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will run tests using node and then publish a package to GitHub Packages when a release is created 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages 3 | 4 | name: Node.js Package 5 | 6 | on: 7 | 
release: 8 | types: [created] 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - uses: actions/setup-node@v2 16 | with: 17 | node-version: 12 18 | - run: npm ci 19 | - run: npm test 20 | 21 | publish-npm: 22 | needs: build 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v2 26 | - uses: actions/setup-node@v2 27 | with: 28 | node-version: 12 29 | registry-url: https://registry.npmjs.org/ 30 | - run: npm ci 31 | - run: npm publish 32 | env: 33 | NODE_AUTH_TOKEN: ${{secrets.npm_token}} 34 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | descriptors -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "console": "integratedTerminal", 9 | "internalConsoleOptions": "neverOpen", 10 | "name": "nodemon", 11 | "program": "${workspaceFolder}\\node_modules\\node-red\\red.js", 12 | "request": "launch", 13 | "restart": true, 14 | "runtimeExecutable": "nodemon", 15 | "skipFiles": [ 16 | "/**", 17 | "${workspaceFolder}/node_modules/**/*.js", 18 | ], 19 | "type": "pwa-node", 20 | "args": ["--ignore", "./app/descriptors/*.json"] 21 | }, 22 | { 23 | "console": "integratedTerminal", 24 | "internalConsoleOptions": "neverOpen", 25 | "name": "profile", 26 | "program": "${workspaceFolder}\\node_modules\\node-red\\red.js", 27 | "request": "launch", 28 | "restart": true, 29 | "runtimeExecutable": "node", 30 | "skipFiles": [ 31 | "/**", 32 | "${workspaceFolder}/node_modules/**/*.js", 33 | ], 34 | "type": "pwa-node", 35 | "runtimeArgs": ["--prof"] 36 | 37 | } 38 | ] 39 | } -------------------------------------------------------------------------------- /Dockerfile.alpine: -------------------------------------------------------------------------------- 1 | # This is an example docker file on how to install the package successfully 2 | # You can either build a custom image using this file or inject the 3 | # dependancies using the following one line command on a running instance: 4 | 5 | # Find the container name using docker ps and insert where is 6 | # docker exec -it --user=root apk add python g++ build-base cairo-dev jpeg-dev pango-dev musl-dev giflib-dev pixman-dev pangomm-dev libjpeg-turbo-dev freetype-dev 7 | 8 | # Pull the latest 9 | FROM nodered/node-red 10 | 11 | # Change the user to root to install packages 12 | USER root 13 | 14 | # Install required alpine packages 15 | RUN apk add python \ 16 | g++ \ 17 | build-base \ 18 | cairo-dev \ 19 | jpeg-dev \ 20 | pango-dev \ 21 | musl-dev \ 22 | giflib-dev \ 23 | pixman-dev \ 24 | pangomm-dev \ 
25 | libjpeg-turbo-dev \ 26 | freetype-dev 27 | 28 | # Finally install the face recognition package 29 | RUN npm i node-red-contrib-face-recognition 30 | 31 | # Change the user back to node-red 32 | USER node-red 33 | -------------------------------------------------------------------------------- /Dockerfile.debian: -------------------------------------------------------------------------------- 1 | # This is an example docker file on how to install the package successfully 2 | # You can either build a custom image using this file or inject the 3 | # dependencies using the following one line command on a running instance: 4 | 5 | # To start you need to build the official debian docker image found on the official repo: 6 | # https://github.com/node-red/node-red-docker/blob/master/docker-custom/Dockerfile.debian 7 | 8 | 9 | # Pull the latest 10 | FROM testing:node-red-build 11 | 12 | # Change the user to root to install packages 13 | USER root 14 | 15 | # Install required debian packages 16 | RUN apt-get install -y python \ 17 | g++ \ 18 | build-essential \ 19 | libcairo2-dev \ 20 | libjpeg-dev 21 | 22 | # Optionally if your architecture supports it 23 | # RUN npm install @tensorflow/tfjs-core@1.2.11 \ 24 | # @tensorflow/tfjs-node@1.2.11 \ 25 | 26 | # Finally install the face recognition package 27 | RUN npm install node-red-contrib-face-recognition 28 | 29 | # Change the user back to node-red 30 | USER node-red -------------------------------------------------------------------------------- /Images/Example Flow.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/Images/Example Flow.PNG -------------------------------------------------------------------------------- /Images/bbt1.jpg: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/Images/bbt1.jpg -------------------------------------------------------------------------------- /Images/face-api-input-node-menu.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/Images/face-api-input-node-menu.JPG -------------------------------------------------------------------------------- /Images/face-api-recognise-node-menu.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/Images/face-api-recognise-node-menu.JPG -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # node-red-contrib-face-recognition 2 | 3 | [![GitHub release](https://img.shields.io/npm/v/node-red-contrib-face-recognition)](https://www.npmjs.com/package/node-red-contrib-face-recognition) 4 | [![GitHub release](https://img.shields.io/npm/dw/node-red-contrib-face-recognition)](https://www.npmjs.com/package/node-red-contrib-face-recognition) 5 | [![GitHub release](https://img.shields.io/npm/l/node-red-contrib-face-recognition)](https://www.npmjs.com/package/node-red-contrib-face-recognition) 6 | 7 | ## Version 2 Out Now! 8 | 9 | Version 2.0.0 is now officially released bringing performance increases, better usability, and more support across systems. Testing is still ongoing, so if you come across any bugs please open an issue or a discussion here.
10 | 11 | ## Overview 12 | 13 | This node aims to wrap the epic [Face-API.js library](https://github.com/justadudewhohacks/face-api.js) from [justadudewhohacks](https://github.com/justadudewhohacks) into a simple to import and use node in Node-Red. If you like anything in this repo be sure to also check out the original. 14 | 15 | ![bbt1](Images/bbt1.jpg) 16 | 17 | Usage of this node is designed to be very easy while allowing the user to choose any number of options exposed by the original face-api.js module. These are currently; 18 | 19 | - __Face detection__ 20 | - __Facial Landmarks__ 21 | - __Facial Expressions__ 22 | - __Age and Gender Predictions__ 23 | - __Facial Recognition__ 24 | 25 | This module also utilizes the `child_process` module of Node.js to offload the complex calculations required to a separate thread. In turn, the offloaded task will not block the main event loop and allow Node-Red to continue other tasks. Each input node spawns a new fork, which will consume memory, so this may want to be limited on resource restricted environments. 26 | 27 | ## Installation 28 | 29 | From your `.node-red` directory, you can run; 30 | 31 | ``` bash 32 | npm install node-red-contrib-face-recognition 33 | ``` 34 | 35 | or you can go to the pallette manager in Node-Red and find `node-red-contrib-face-recognition` in the install tab. 36 | 37 | ### Canvas 38 | 39 | Canvas will be installed correctly providing when using either installation method, however the required packages need to be installed on the system. 
These are as follows for several common OS's; 40 | 41 | #### Windows 42 | 43 | No requirements 44 | 45 | #### Mac 46 | 47 | `¯\_(ツ)_/¯` 48 | 49 | #### Linux (Debian Based) 50 | 51 | ``` bash 52 | apt-get install -y python \ 53 | g++ \ 54 | build-essential \ 55 | libcairo2-dev \ 56 | libjpeg-dev 57 | ``` 58 | 59 | #### Official Docker Image 60 | 61 | ``` bash 62 | apk add python \ 63 | g++ \ 64 | build-base \ 65 | cairo-dev \ 66 | jpeg-dev \ 67 | pango-dev \ 68 | musl-dev \ 69 | giflib-dev \ 70 | pixman-dev \ 71 | pangomm-dev \ 72 | libjpeg-turbo-dev \ 73 | freetype-dev 74 | ``` 75 | 76 | ### TensorFlow for Node.js (Optional) 77 | 78 | You can also optionally install TensorFlow for Node.js to make this package run faster. If you do not, the node will still run albeit much slower. To install TensorFlow navigate to your `.node-red` folder and run the following command. This will install TensorFlow in your Node-Red directory for use by the node. 79 | 80 | `npm install @tensorflow/tfjs-node` 81 | 82 | > There are known issues with the working combinations version of Node.js, @tensorflow/tfjs-node and face-api.js. At the time of writing this, these were found to be; 83 | > - Node.js: 10.16.3 84 | > - @tensorflow/tfjs-node: 1.2.11 85 | > - face-api.js: 0.21.0 86 | 87 | > Please install these to gain the speed of the tf c++ backend and keep up to date on the face-api.js GitHib page for any errors relating to this. 88 | 89 | tfjs-node is unfortunatley not supported on all OS's and all architectures. Below is a table of where they are supported; 90 | 91 | |OS | x86 | armv7 | arm64v8 | 92 | |------------------------|-----|-------|---------| 93 | |Windows | yes | - | - | 94 | |Mac | ? | - | ? | 95 | |Linux (Debian based) | yes | no | yes | 96 | |Official Docker Image | no | no | no | 97 | |[Unofficial Docker Image](https://github.com/thebigpotatoe/node-red-docker) | yes! | soon | yes! 
| 98 | 99 | ## Example Flow 100 | 101 | As an example on how to use the node, below is a flow that grabs an image from the internet and runs inference over it. Copy and paste it into Node-Red to use, but make sure to install the following nodes from the pallet manager; 102 | 103 | - node-red-contrib-image-output 104 | 105 | > Note: In order to recognise faces you will need to add the recognise config nodes yourself as these cannot be exported across instances. 106 | 107 | ![Example](Images/Example%20Flow.PNG) 108 | 109 | ``` JSON 110 | [{"id":"c08c9d7b.c2377","type":"image","z":"4eb4b426.c9cfcc","name":"","width":"320","data":"payload","dataType":"msg","thumbnail":false,"pass":false,"outputs":0,"x":120,"y":100,"wires":[]},{"id":"461f82e0.80fc8c","type":"image","z":"4eb4b426.c9cfcc","name":"","width":"640","data":"payload.labelled_img","dataType":"msg","thumbnail":false,"pass":false,"outputs":0,"x":440,"y":100,"wires":[]},{"id":"453418e3.520f28","type":"face-api-input","z":"4eb4b426.c9cfcc","name":"TBBT Recognition","model":"SSD","confidence":50,"input_size":"128","landmarks":false,"expressions":true,"age_gender":true,"descriptors":true,"match_metric":"Mean Squared Error","match_confidence":"2500","recognise_nodes":["a88d60e.9ca13a","5d6c06f7.11d2c8","71bfb897.3b8ef8","e09c0d5.ca5acf","dc3c3afc.04e708","eb7ecb3c.a1cbb8","b4b62a6d.fc5c18"],"recognise_node_editor":"b4b62a6d.fc5c18","x":450,"y":40,"wires":[["461f82e0.80fc8c","4d4a98a1.c04008"]]},{"id":"d7b82011.6cfd1","type":"http 
request","z":"4eb4b426.c9cfcc","name":"TBBT","method":"GET","ret":"bin","paytoqs":"ignore","url":"https://www.etonline.com/sites/default/files/images/2019-05/bigbangtheory.jpg","tls":"","persist":false,"proxy":"","authType":"","x":230,"y":40,"wires":[["453418e3.520f28","c08c9d7b.c2377"]]},{"id":"492a9534.4d694c","type":"inject","z":"4eb4b426.c9cfcc","name":"","props":[{"p":"payload","v":"","vt":"str"},{"p":"topic","v":"","vt":"string"}],"repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"","payloadType":"str","x":90,"y":40,"wires":[["d7b82011.6cfd1"]]},{"id":"4d4a98a1.c04008","type":"debug","z":"4eb4b426.c9cfcc","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"false","statusVal":"","statusType":"auto","x":650,"y":40,"wires":[]},{"id":"b4b62a6d.fc5c18","type":"face-api-recognise","name":"Leonard"}] 111 | ``` 112 | 113 | ## Included Nodes 114 | 115 | This module comes with two nodes; the `face-api-input` node and `face-api-recognise` node. 116 | 117 | ### Input node 118 | 119 | ![Input Node](Images/face-api-input-node-menu.JPG) 120 | 121 | #### Description 122 | 123 | The `face-api-input` node is the main node that runs inference over an input image and optionally utilised recognition nodes to recognise faces. Each input node spawns a `fork` which is a seperate nodejs instance to run the recognition on to avoid blocking the main node-red event loop. This takes resources, be be conservative on resource constrained environments. 124 | 125 | #### General Settings 126 | 127 | - __Name__: The name of this specific node. Useful to change when you have multiple config nodes and need to know the difference between them. (defaults to "face-api-input") 128 | 129 | #### Detection Settings 130 | 131 | - __Detection Type__: The detection type to use. This is either `SSD` or `Yolo`, as `MTCNN` is not currently supported. (Defaults to SSD) 132 | 133 | - __Input Size__: The input size of the Yolo algorithm. 
This option is only available when using Yolo as the type. (Defaults to 416) 134 | 135 | - __Detection Confidence__: The minimum confidence score that each detected face must be above to be counted as a face. This option is available for both SSD and Yolo. (Defaults to 50%, Limited between 0 and 100) 136 | 137 | - __Facial Landmarks__: Select this if you would like to add facial landmarks of each face to the output. (Defaults to false) 138 | 139 | - __Facial Expressions__: Select this if you would like to add a prediction of facial expressions of each face to the output. (Defaults to false) 140 | 141 | - __Age and Gender__: Select this if you would like to add a prediction of the age and gender of each face to the output. (Defaults to false) 142 | 143 | - __Descriptors__: Select this if you would like to output the computed descriptors for each found face. (Defaults to false) 144 | 145 | #### Recognition Settings 146 | 147 | - __Recognition Metric__: Select the type of recognition metric to use when comparing faces with the recognition option. This option is only shown when the recognise option is selected. (Defaults to Mean Squared Error) 148 | 149 | - __Matched Confidence__: This is the minimum cutoff value for recognition for each of the metrics. Keep in mind that the different metrics will produce different ranges of values for recognition. This option is only shown when the recognise option is selected. (Typical cutoffs are around 2000) 150 | 151 | - __Recognise Nodes List__: A list of recognition nodes to run recognition against for each face found in an input image. Simply add and remove as many as required, there are no limits 152 | 153 | - __Add/Edit Recognise Nodes__: Use this dropdown menu to add and edit new recognise nodes which you can then add to the input node list. Keep in mind you will have to deploy the node before being able to find it in the list.
154 | 155 | #### Compute Node 156 | 157 | ![Input Node](Images/face-api-recognise-node-menu.JPG) 158 | 159 | - __Name__: The name of this specific face to recognise. (defaults to "Unknown") 160 | 161 | - __Add Images__: Use this button to add images to create a descriptors from. These descriptors will then be used in the input node to predict against an image. When adding images, the descriptors will take a while to compute. 162 | 163 | - __Remove Descriptors__: Use this button to remove the currently stored descriptors. This is irreversible. 164 | 165 | ## Adding a face descriptor 166 | 167 | In order to use the facial recognition, facial descriptors must be calculated first to allow a comparison between them and any input image. To do this, create a recognition node through the input node menu, then click `Add Images`. Once selected all images will be computed immediately in the background. 168 | 169 | These descriptors are then saved to disk allowing it to survive restarts of Node-Red. The saved file will then be loaded on startup of Node-Red. Saving the descriptor is also safer than saving an image if your Node-Red instance is online as no data about the original image is stored. 170 | 171 | ## The Recognition Metric 172 | 173 | The original Face-api.js library only supports the Euclidean distance method of comparison between descriptors when matching faces. To extend this, this node also supports 3 more type of metrics. These are; __Manhattan__, __Chebyshev__, and __Mean Squared Error__. 174 | 175 | The outputs of these metrics from the node are all mapped roughly to the same output values of around 0 to 10000. This was to make it easier to compare the usefulness of each and allow the user to set a cutoff value within a similar range for each metric. 176 | 177 | From testing, __Mean Squared Error__ gives the highest contrasting results from known to unknown faces. 
If you have another metric you feel may be useful, feel free to submit a pull request or create a discussion as I can now implement it along with the others. 178 | 179 | ## Contributing 180 | 181 | If you like this node and want to contribute feel free to fork this repo and make a pull request. New features and suggestions are welcome, and there are several features I would like to implement, but lack the Javascript knowledge in Node.js. These are; 182 | 183 | - [ ] Help on implementing tfjs-node on all platforms. 184 | - [ ] General code improvements and clean ups 185 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /app/endpoints/add_descriptors.js: -------------------------------------------------------------------------------- 1 | // Load required modules 2 | const { readFile, writeFile, stat } = require("fs").promises; 3 | const { join } = require("path"); 4 | const faceapi = require('face-api.js'); 5 | const formidable = require('formidable'); 6 | const detect_on_fork = require("../helpers/detect_on_fork"); 7 | 8 | // Create supporting functions 9 | function parse_input_form(req) { 10 | return new Promise(function (resolve, reject) { 11 | // Use formidable to parse the files in the request 12 | var form = new formidable.IncomingForm(); 13 | form.parse(req, function (err, fields, files) { 14 | (form.openedFiles.length > 0) ? 
resolve(files) : reject(err || "No files sent with request"); 15 | }); 16 | }); 17 | } 18 | function get_file_data(files) { 19 | return Promise.all(Object.keys(files).map((number) => { 20 | // Get each of the files data from the POST and put into an array 21 | return new Promise((resolve, reject) => { 22 | stat(files[number].path) 23 | .then(readFile.bind(this, files[number].path)) 24 | .then(resolve) 25 | .catch(reject); 26 | }); 27 | })); 28 | } 29 | function get_descriptors(name, data) { 30 | return new Promise(async function (resolve, reject) { 31 | // Create a new array to store each of the calculated descriptors from the files 32 | let descriptors = []; 33 | 34 | // Inference each file to get the descriptor 35 | for (i = 0; i < data.length; i++) { 36 | // Get the inference data from a child fork 37 | let results = await detect_on_fork({ 38 | "image": data[i], 39 | "model_options": { 40 | "model": "ssd", 41 | "minConfidence": 0.6, 42 | "maxResults": 1 43 | }, 44 | "detect_options": { 45 | "landmarks": false, 46 | "age_gender": false, 47 | "expressions": false, 48 | "descriptors": true 49 | } 50 | }); 51 | 52 | // Check if there was a detected face 53 | if (results.detected_faces.length) { 54 | // Convert the descriptor data back to float32 after child inference 55 | let descriptor_array = new Float32Array(128); 56 | for (const [key, value] of Object.entries(results.detected_faces[0].descriptor)) { 57 | descriptor_array[key] = value; 58 | } 59 | 60 | // Add descriptor to complete descriptor array 61 | descriptors.push(descriptor_array); 62 | } 63 | } 64 | 65 | // Resolve with a labelled face descriptor 66 | resolve(new faceapi.LabeledFaceDescriptors(name, descriptors)); 67 | }); 68 | } 69 | function update_node_descriptors(RED, id) { 70 | return new Promise((resolve, reject) => { 71 | try { 72 | let node = RED.nodes.getNode(id) 73 | if (node) node.load_descriptor(); 74 | resolve(); 75 | } 76 | catch (err) { 77 | reject(err); 78 | } 79 | }); 80 | } 81 | 82 | // 
Export the main function 83 | module.exports = function (RED, req, res) { 84 | return new Promise((resolve, reject) => { 85 | // Get all of the file data, inference each on a fork, and store the descriptors in a labelled file 86 | parse_input_form(req) 87 | .then(get_file_data, resolve.bind(null, 202)) 88 | .then(get_descriptors.bind(null, req.params.id)) 89 | .then(JSON.stringify) 90 | .then(writeFile.bind(null, join(global.descriptor_location, req.params.id + '.json'))) 91 | .then(update_node_descriptors.bind(null, RED, req.params.id)) 92 | .then(resolve.bind(null, 201)) 93 | .catch(reject); 94 | }); 95 | } -------------------------------------------------------------------------------- /app/endpoints/check_descriptors.js: -------------------------------------------------------------------------------- 1 | // Load required modules 2 | const { readFile } = require("fs").promises 3 | const { join } = require("path") 4 | 5 | const get_descriptor_location = function (RED, id) { 6 | return new Promise((resolve, reject) => { 7 | let node = RED.nodes.getNode(id); 8 | (node) ? 
resolve(join(global.descriptor_location, id + '.json')) : reject(); 9 | }); 10 | } 11 | 12 | // Export the main endpoint 13 | module.exports = async function (RED, id) { 14 | return new Promise(async function (resolve, reject) { 15 | get_descriptor_location(RED, id) 16 | .then(readFile) 17 | .then(JSON.parse) 18 | .then((array) => {return array.descriptors.length || 0}) 19 | .then(resolve) 20 | .catch(reject); 21 | }); 22 | } -------------------------------------------------------------------------------- /app/endpoints/delete_descriptors.js: -------------------------------------------------------------------------------- 1 | // Export main function 2 | module.exports = async function (RED, id) { 3 | return new Promise(async function (resolve, reject) { 4 | try { 5 | let node = RED.nodes.getNode(id).delete_descriptor().then(() => { 6 | resolve(201); 7 | }).catch(() => { 8 | reject(400); 9 | }); 10 | } 11 | catch (err) { 12 | reject(404); 13 | } 14 | }); 15 | } -------------------------------------------------------------------------------- /app/helpers/buffer2canvas.js: -------------------------------------------------------------------------------- 1 | // Import Canvas and face-api.js 2 | const fs = require('fs'); 3 | const canvas = require('canvas'); 4 | const faceapi = require('face-api.js'); 5 | const { Canvas, Image, ImageData } = canvas 6 | faceapi.env.monkeyPatch({ Canvas, Image, ImageData }) 7 | 8 | // Export main function 9 | module.exports = async function (buffer) { 10 | return new Promise(async function (resolve, reject) { 11 | try { 12 | let img = new Image; 13 | img.onload = function () { 14 | const canvas_img = faceapi.createCanvasFromMedia(img); 15 | resolve(canvas_img); 16 | }; 17 | img.onerror = function (err) { 18 | reject(err) 19 | } 20 | img.src = buffer; 21 | } 22 | catch (err) { 23 | reject(err); 24 | } 25 | }); 26 | } -------------------------------------------------------------------------------- /app/helpers/create_descriptor_location.js: 
-------------------------------------------------------------------------------- 1 | // Import Canvas and face-api.js 2 | const { stat, mkdir } = require('fs').promises 3 | 4 | // Export main function 5 | module.exports = function (location) { 6 | return new Promise(async function (resolve, reject) { 7 | stat(location) 8 | .then(resolve, mkdir.bind(null, location)) 9 | .catch(reject); 10 | }); 11 | } -------------------------------------------------------------------------------- /app/helpers/detect_faces.js: -------------------------------------------------------------------------------- 1 | // Import Canvas and face-api.js 2 | const canvas = require('canvas'); 3 | const faceapi = require('face-api.js'); 4 | const { Canvas, Image, ImageData } = canvas; 5 | faceapi.env.monkeyPatch({ Canvas, Image, ImageData }); 6 | 7 | // Import Helpers 8 | const { isObject, isType } = require('./type_checks'); 9 | 10 | // Export main function 11 | module.exports = async function (model_options, detect_options, img) { 12 | return new Promise(async function (resolve, reject) { 13 | try { 14 | // Ensure that the img and options are valid 15 | if (isType(img, Canvas)) { 16 | let detections = []; 17 | 18 | // Check the model_options are valid or use a default 19 | if (!(isType(model_options, faceapi.SsdMobilenetv1Options) || !isType(model_options, faceapi.TinyFaceDetectorOptions))) { 20 | model_options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.6 }); 21 | } 22 | 23 | // Check detect_options are valid else set to default 24 | if (!isObject(detect_options)) { 25 | detect_options = { 26 | 'landmarks': false, 27 | 'expressions': false, 28 | 'age_gender': false, 29 | 'descriptors': true 30 | } 31 | } 32 | 33 | // If descriptors are requested, landmarks are required 34 | detect_options.landmarks = (detect_options.descriptors) ? 
true : detect_options.landmarks; 35 | 36 | // Face detection only 37 | if (!detect_options.landmarks && !detect_options.expressions && !detect_options.age_gender && !detect_options.descriptors) { 38 | detections = await faceapi.detectAllFaces(img, model_options); 39 | } 40 | 41 | // Face detection with either landmarks, expressions, AAG, or descriptors 42 | else if (detect_options.landmarks && !detect_options.expressions && !detect_options.age_gender && !detect_options.descriptors) { 43 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks(); 44 | } 45 | else if (!detect_options.landmarks && detect_options.expressions && !detect_options.age_gender && !detect_options.descriptors) { 46 | detections = await faceapi.detectAllFaces(img, model_options).withFaceExpressions(); 47 | } 48 | else if (!detect_options.landmarks && !detect_options.expressions && detect_options.age_gender && !detect_options.descriptors) { 49 | detections = await faceapi.detectAllFaces(img, model_options).withAgeAndGender(); 50 | } 51 | else if (!detect_options.landmarks && !detect_options.expressions && !detect_options.age_gender && detect_options.descriptors) { 52 | // invalid without landmarks 53 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withFaceDescriptors(); 54 | } 55 | 56 | // Face detection with landmarks and either expressions, AAG, or descriptors 57 | else if (detect_options.landmarks && detect_options.expressions && !detect_options.age_gender && !detect_options.descriptors) { 58 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withFaceExpressions(); 59 | } 60 | else if (detect_options.landmarks && !detect_options.expressions && detect_options.age_gender && !detect_options.descriptors) { 61 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withAgeAndGender(); 62 | } 63 | else if (detect_options.landmarks && !detect_options.expressions && 
!detect_options.age_gender && detect_options.descriptors) { 64 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withFaceDescriptors(); 65 | } 66 | 67 | // Face detection with landmarks and expressions with either AAG, or descriptors 68 | else if (detect_options.landmarks && detect_options.expressions && detect_options.age_gender && !detect_options.descriptors) { 69 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withFaceExpressions().withAgeAndGender(); 70 | } 71 | else if (detect_options.landmarks && detect_options.expressions && !detect_options.age_gender && detect_options.descriptors) { 72 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withFaceExpressions().withFaceDescriptors(); 73 | } 74 | 75 | // Face detection with landmarks, AAG, and descriptors, but not expressions 76 | else if (detect_options.landmarks && !detect_options.expressions && detect_options.age_gender && detect_options.descriptors) { 77 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withAgeAndGender().withFaceDescriptors(); 78 | } 79 | 80 | // Face detection wihout landmarks or descriptors but with expressions and age and gender 81 | else if (!detect_options.landmarks && detect_options.expressions && detect_options.age_gender && !detect_options.descriptors) { 82 | detections = await faceapi.detectAllFaces(img, model_options).withFaceExpressions().withAgeAndGender(); 83 | } 84 | 85 | // All possible options 86 | else if (detect_options.landmarks && detect_options.expressions && detect_options.age_gender && detect_options.descriptors) { 87 | detections = await faceapi.detectAllFaces(img, model_options).withFaceLandmarks().withFaceExpressions().withAgeAndGender().withFaceDescriptors() 88 | } 89 | 90 | // Else not supported 91 | else { 92 | reject(detections); 93 | } 94 | 95 | // If the detection worked, resolve with detections 96 | resolve(detections); 97 
| } 98 | else { 99 | throw 'Image object passed was not a Canvas object'; 100 | } 101 | } 102 | catch (err) { 103 | reject(err); 104 | } 105 | }); 106 | } -------------------------------------------------------------------------------- /app/helpers/detect_on_fork.js: -------------------------------------------------------------------------------- 1 | // Import required modules 2 | const { join } = require("path"); 3 | const { fork } = require('child_process'); 4 | const { isObject, isType, isArray, isNull, isUndefined } = require("./type_checks"); 5 | 6 | // Create a timer for timing out the child process 7 | let child_timer = null; 8 | 9 | module.exports = function (msg, external_fork) { 10 | return new Promise((resolve, reject) => { 11 | // Check the image 12 | if (!isType(msg.image, Buffer)) { 13 | reject("Image was not a buffer"); 14 | return; 15 | } 16 | 17 | // Check the model_options 18 | if (!isObject(msg.model_options)) { 19 | msg.model_options = { 20 | "model": "ssd", 21 | "minConfidence": 0.6, 22 | "maxResults": 100 23 | }; 24 | } 25 | 26 | // Check the detect_options 27 | if (!isObject(msg.detect_options)) { 28 | msg.detect_options = { 29 | "landmarks": true, 30 | "age_gender": true, 31 | "expressions": true, 32 | "descriptors": true 33 | }; 34 | } 35 | 36 | // Check if any descriptors have been added properly 37 | if (!isArray(msg.descriptors)) { 38 | msg.descriptors = []; 39 | } 40 | 41 | // Use external fork or create a new one 42 | let child_process = null; 43 | if (isUndefined(external_fork) || isNull(external_fork)) { 44 | // Create a child process using the script for the node 45 | const args = []; 46 | const options = { stdio: "pipe" }; 47 | child_process = fork(join(__dirname, '../nodes/inference_node.js'), args, options); 48 | 49 | // Create a callback to handle a error events 50 | child_process.on('error', (err, signal) => { 51 | reject("Child - " + err); 52 | }); 53 | 54 | // Create callback to handle a exit events 55 | 
child_process.on('exit', (code, signal) => { 56 | reject(`Child - child_process exited with code ${code} and signal ${signal}`); 57 | }); 58 | 59 | child_process.stderr.on('data', (data) => { 60 | console.debug(data.toString()); 61 | }) 62 | } 63 | else { 64 | child_process = external_fork; 65 | } 66 | 67 | // Set up the message resolver callback 68 | child_process.on('message', (msg) => { 69 | if (isUndefined(external_fork) || isNull(external_fork)) { 70 | child_process.kill('SIGINT'); 71 | child_process = null; 72 | } 73 | clearTimeout(child_timer); 74 | resolve(msg); 75 | }); 76 | 77 | // Send the image and configuration to the child process 78 | child_process.send(msg); 79 | 80 | // Set a timeout if the inference fails 81 | // child_timer = setTimeout(() => { reject("Timed out") }, 30000); 82 | }); 83 | }; -------------------------------------------------------------------------------- /app/helpers/draw_faces.js: -------------------------------------------------------------------------------- 1 | // Import Canvas and face-api.js 2 | const canvas = require('canvas'); 3 | const faceapi = require('face-api.js'); 4 | const { Canvas, Image, ImageData } = canvas 5 | faceapi.env.monkeyPatch({ Canvas, Image, ImageData }) 6 | 7 | // Import Helpers 8 | const { isObject } = require('./type_checks'); 9 | 10 | // Export main function 11 | module.exports = async function (detection_options, detections, img) { 12 | return new Promise(async function (resolve, reject) { 13 | try { 14 | // Check img is a canvas object or detections are valid or detection_options are valid 15 | if (!img instanceof Canvas || !isObject(detection_options)) { 16 | reject(); 17 | } 18 | else if (!detections.length) { 19 | resolve(img); 20 | } 21 | else { 22 | // Draw Rectangle around face as default 23 | faceapi.draw.drawDetections(img, detections); 24 | 25 | // Draw Landmarks if required 26 | if (detection_options.landmarks) faceapi.draw.drawFaceLandmarks(img, detections); 27 | 28 | // Loop each face 
for the other types of detection 29 | detections.forEach(face => { 30 | try { 31 | // Draw age and gender if required 32 | const { age, gender, genderProbability } = face 33 | const age_gender_label = (detection_options.age_gender && age && gender && genderProbability) ? [ 34 | `${gender} : ${Math.round(genderProbability * 100, 0)}%`, 35 | `${Math.round(age, 0)} years` 36 | ] : [] 37 | 38 | // Draw expersions if required 39 | const { expressions } = face; 40 | const expressionMaxKey = (detection_options.expressions && expressions) ? Object.keys(expressions).reduce(function (a, b) { 41 | return expressions[a] > expressions[b] ? a : b 42 | }) : null 43 | const expressions_label = (detection_options.expressions && expressions && expressionMaxKey) ? [ 44 | `${expressionMaxKey} : ${Math.round(expressions[expressionMaxKey] * 100, 0)}%` 45 | ] : [] 46 | 47 | // Draw recognised face names and confidence value if required 48 | const { name, distance } = face; 49 | const matched_face_label = (name && distance) ? 
[ 50 | `${name} (${Math.round(distance * 100, 2) / 100})` 51 | ] : []; 52 | 53 | // Concat the labels and draw them on the face 54 | if (age_gender_label.length || expressions_label.length || matched_face_label.length) 55 | new faceapi.draw.DrawTextField( 56 | [ 57 | ...age_gender_label, 58 | ...expressions_label, 59 | ...matched_face_label 60 | ], 61 | face.detection.box.bottomLeft 62 | ).draw(img); 63 | } 64 | catch (err) { 65 | reject(err); 66 | } 67 | }); 68 | 69 | // Return image 70 | resolve(img); 71 | } 72 | } 73 | catch (err) { 74 | reject(err); 75 | } 76 | }); 77 | } -------------------------------------------------------------------------------- /app/helpers/load_models.js: -------------------------------------------------------------------------------- 1 | // Import Canvas, tensorflow, and face-api.js 2 | const fs = require('fs'); 3 | const canvas = require('canvas'); 4 | const faceapi = require('face-api.js'); 5 | const { Canvas, Image, ImageData } = canvas 6 | faceapi.env.monkeyPatch({ Canvas, Image, ImageData }) 7 | 8 | // Export main function 9 | module.exports = async function (weights_path, cb) { 10 | return new Promise(async function (resolve, reject) { 11 | try { 12 | // Start to load the models 13 | const ssdMobilenetv1Method = faceapi.nets.ssdMobilenetv1.loadFromDisk(weights_path); 14 | const tinyFaceDetectorMethod = faceapi.nets.tinyFaceDetector.loadFromDisk(weights_path); 15 | const faceLandmark68NetMethod = faceapi.nets.faceLandmark68Net.loadFromDisk(weights_path); 16 | const faceLandmark68TinyNetMethod = faceapi.nets.faceLandmark68TinyNet.loadFromDisk(weights_path); 17 | const faceExpressionNetMethod = faceapi.nets.faceExpressionNet.loadFromDisk(weights_path); 18 | const ageGenderNetMethod = faceapi.nets.ageGenderNet.loadFromDisk(weights_path); 19 | const faceRecognitionNetMethod = faceapi.nets.faceRecognitionNet.loadFromDisk(weights_path); 20 | 21 | // Wait for models to load 22 | await ssdMobilenetv1Method; 23 | await 
tinyFaceDetectorMethod; 24 | await faceLandmark68NetMethod; 25 | await faceLandmark68TinyNetMethod; 26 | await faceExpressionNetMethod; 27 | await ageGenderNetMethod; 28 | await faceRecognitionNetMethod; 29 | 30 | // Return true when done 31 | resolve(true); 32 | } 33 | catch (err) { 34 | reject(err); 35 | } 36 | }); 37 | } 38 | -------------------------------------------------------------------------------- /app/helpers/load_tfjs.js: -------------------------------------------------------------------------------- 1 | // Export the main function 2 | module.exports = async function () { 3 | return new Promise((resolve, reject) => { 4 | // Try load in Tfjs-node if it is installed 5 | try { 6 | require('@tensorflow/tfjs-node'); 7 | resolve(); 8 | } 9 | catch (err) { 10 | if (err instanceof Error && err.code === "MODULE_NOT_FOUND") 11 | reject("[Face-api.js] - TensorFlow.js for Node.js was not found, running without it"); 12 | else 13 | reject(err); 14 | } 15 | }) 16 | } -------------------------------------------------------------------------------- /app/helpers/parse_detection_options.js: -------------------------------------------------------------------------------- 1 | // Import helpers 2 | const { isString } = require("./type_checks"); 3 | 4 | // Export main function 5 | module.exports = function (options) { 6 | // Parse input string as JSON if supplied 7 | if (isString(options)) { 8 | options = JSON.parse(options); 9 | } 10 | 11 | // Create a default options template 12 | let detect_options = {} 13 | 14 | // Check for landmarks 15 | if ('landmarks' in options) detect_options['landmarks'] = true; 16 | 17 | // Check for age and gender 18 | if ('age_gender' in options) detect_options['age_gender'] = true; 19 | 20 | // Check for expressions 21 | if ('expressions' in options) detect_options['expressions'] = true; 22 | 23 | // Check for descriptors 24 | if ('descriptors' in options) detect_options['descriptors'] = true; 25 | 26 | // Return options 27 | return 
detect_options; 28 | } -------------------------------------------------------------------------------- /app/helpers/parse_model_options.js: -------------------------------------------------------------------------------- 1 | // Load in face-api.js 2 | const faceapi = require('face-api.js'); 3 | 4 | // Import helpers 5 | const { isString } = require("./type_checks"); 6 | 7 | // Export main function 8 | module.exports = function (options) { 9 | // Parse input string as JSON if supplied 10 | if (isString(options)) { 11 | options = JSON.parse(options); 12 | } 13 | 14 | // Check if model is valid 15 | if ('model' in options && (typeof options.model === 'string' || options.model instanceof String)) { 16 | // Setup the model depending on which one is chosen 17 | let model_options = {} 18 | if (options.model.toLowerCase() === 'ssd') { 19 | let model_min_confidence = parseFloat(options.minConfidence); 20 | let model_max_results = parseInt(options.maxResults); 21 | if (typeof model_min_confidence === 'number' && isFinite(model_min_confidence)) model_options['minConfidence'] = model_min_confidence; 22 | if (typeof model_max_results === 'number' && isFinite(model_max_results)) model_options['maxResults'] = model_max_results; 23 | return new faceapi.SsdMobilenetv1Options(model_options); 24 | } 25 | else if (options.model.toLowerCase() === 'tiny') { 26 | let model_score_threshold = parseFloat(options.scoreThreshold); 27 | let model_input_size = parseInt(options.inputSize); 28 | if (typeof model_score_threshold === 'number' && isFinite(model_score_threshold)) model_options['scoreThreshold'] = model_score_threshold; 29 | if (typeof model_input_size === 'number' && isFinite(model_input_size)) model_options['inputSize'] = model_input_size; 30 | return new faceapi.TinyFaceDetectorOptions(model_options); 31 | } 32 | } 33 | 34 | // Return nothing if failed 35 | return; 36 | } -------------------------------------------------------------------------------- /app/helpers/save_image.js: 
-------------------------------------------------------------------------------- 1 | // Import main modules 2 | const fs = require('fs'); 3 | const path = require('path'); 4 | 5 | // Import Canvas 6 | const canvas = require('canvas'); 7 | const { Canvas } = canvas; 8 | 9 | // Export main function 10 | module.exports = async function (img, location, name) { 11 | return new Promise(async function (resolve, reject) { 12 | try { 13 | // Check img is a canvas object 14 | if (!img instanceof Canvas) throw 'Image object passed was not a Canvas object'; 15 | else { 16 | // Check if folder exists or create it 17 | if (!fs.existsSync(location)) fs.mkdirSync(location); 18 | 19 | // Write to img to file 20 | fs.writeFile(path.join(location, name), img.toBuffer(), (err) => { 21 | if (err) reject(err); 22 | else resolve(); 23 | }); 24 | } 25 | } 26 | catch (err) { 27 | reject(err); 28 | } 29 | }); 30 | } -------------------------------------------------------------------------------- /app/helpers/type_checks.js: -------------------------------------------------------------------------------- 1 | // isString 2 | module.exports.isString = function (value) { 3 | return typeof value === 'string' || value instanceof String; 4 | } 5 | 6 | // isNumber 7 | module.exports.isNumber = function (value) { 8 | return typeof value === 'number' && isFinite(value); 9 | } 10 | 11 | // isArray 12 | module.exports.isArray = function (value) { 13 | return value && typeof value === 'object' && value.constructor === Array; 14 | } 15 | 16 | // isFunction 17 | module.exports.isFunction = function (value) { 18 | return typeof value === 'function'; 19 | } 20 | 21 | // isObject 22 | module.exports.isObject = function (value) { 23 | return value && typeof value === 'object' && value.constructor === Object; 24 | } 25 | 26 | // isNull 27 | module.exports.isNull = function (value) { 28 | return value === null; 29 | } 30 | 31 | // isUndefined 32 | module.exports.isUndefined = function (value) { 33 | return 
typeof value === 'undefined'; 34 | } 35 | 36 | // isBoolean 37 | module.exports.isBoolean = function (value) { 38 | return typeof value === 'boolean'; 39 | } 40 | 41 | // isRegExp 42 | module.exports.isRegExp = function (value) { 43 | return value && typeof value === 'object' && value.constructor === RegExp; 44 | } 45 | 46 | // isError 47 | module.exports.isError = function (value) { 48 | return value instanceof Error && typeof value.message !== 'undefined'; 49 | } 50 | 51 | // isDate 52 | module.exports.isDate = function (value) { 53 | return value instanceof Date; 54 | } 55 | 56 | // isSymbol 57 | module.exports.isSymbol = function (value) { 58 | return typeof value === 'symbol'; 59 | } 60 | 61 | // isType 62 | module.exports.isType = function (value, type) { 63 | return value && value instanceof type; 64 | } -------------------------------------------------------------------------------- /app/nodes/inference_node.js: -------------------------------------------------------------------------------- 1 | // Load required modules 2 | const { join, resolve } = require("path") 3 | 4 | const load_models = require('../helpers/load_models'); 5 | const load_tfjs = require('../helpers/load_tfjs'); 6 | 7 | const faceapi = require('face-api.js'); 8 | 9 | const parse_model_options = require('../helpers/parse_model_options'); 10 | const parse_detection_options = require('../helpers/parse_detection_options'); 11 | 12 | const buffer2canvas = require('../helpers/buffer2canvas'); 13 | const detect_faces = require('../helpers/detect_faces'); 14 | const draw_faces = require("../helpers/draw_faces"); 15 | 16 | // Load in models and tf js globally 17 | let models_loaded = false; 18 | (async function () { load_tfjs(join()); })(); 19 | (async function () { models_loaded = await load_models(join(__dirname, '../weights')) })(); 20 | 21 | const { Image } = require('canvas'); 22 | let global_img = new Image; 23 | 24 | // Create supporting functions 25 | let convert_img_to_buffer = function 
(data) { 26 | return new Promise((resolve, reject) => { 27 | // Cast the input to a buffer 28 | const buffer = Buffer.from(data); 29 | 30 | // Check the cast went successfully 31 | (!Buffer.isBuffer(buffer)) ? reject() : resolve(buffer); 32 | }); 33 | } 34 | let compare_individual_descriptors = function (metric, labelled_descriptor, unknown_descriptor) { 35 | // Compare the incoming descriptor against this nodes descriptors 36 | let calculate_distances = Promise.all(labelled_descriptor._descriptors.map((known_descriptor) => { 37 | return new Promise((resolve, reject) => { 38 | let calculated_distance = Infinity; 39 | switch (metric) { 40 | case "Euclidean": 41 | calculated_distance = Math.round(euclideanDistance(known_descriptor, unknown_descriptor) * 10000) 42 | break; 43 | case "Manhattan": 44 | calculated_distance = Math.round(manhattanDistance(known_descriptor, unknown_descriptor) * 1000) 45 | break; 46 | case "Chebyshev": 47 | calculated_distance = Math.round(chebyshevDistance(known_descriptor, unknown_descriptor) * 100000) 48 | break; 49 | case "Mean Squared Error": 50 | let sum = 0; 51 | let length = 128; 52 | for (i = 0; i < length; i += 1) { 53 | var error = unknown_descriptor[i] - known_descriptor[i]; 54 | sum += error * error; 55 | } 56 | calculated_distance = Math.round(sum / length * 1000000) 57 | break; 58 | } 59 | resolve(calculated_distance); 60 | }); 61 | })) 62 | 63 | // Get the smallest value from the array of results and return the name based on score 64 | return calculate_distances.then((results) => { 65 | return { 'name': labelled_descriptor._label, 'distance': Math.min.apply(Math, results) }; 66 | }) 67 | } 68 | let compare_with_labelled_descriptors = function (metric, labelled_descriptors, unknown_descriptor) { 69 | return Promise.all(labelled_descriptors.map((labelled_descriptor) => { 70 | return new Promise((resolve, reject) => { 71 | compare_individual_descriptors(metric, labelled_descriptor, unknown_descriptor) 72 | .then(resolve) 73 | 
.catch(reject); 74 | }); 75 | })); 76 | } 77 | let find_best_match = function (match_confidence, face, matches) { 78 | return new Promise((resolve, reject) => { 79 | // Setup a best match initial object 80 | let best_match = { 81 | "distance": Infinity 82 | }; 83 | 84 | // Find the best match of all the recognise nodes 85 | for (const current_match of matches) { 86 | best_match = (best_match.distance > current_match.distance) ? current_match : best_match; 87 | } 88 | 89 | // Check if the best match is below the confidence level 90 | best_match.name = (best_match.distance < match_confidence) ? best_match.name : "Unknown"; 91 | 92 | // Set the name and distance of the face from the best match 93 | face = { ...face, ...best_match }; 94 | 95 | // Resolve the promise 96 | resolve(face); 97 | }) 98 | } 99 | let recognise_faces = function (metric, match_confidence, labelled_descriptors, detections) { 100 | return Promise.all(detections.map((face) => { 101 | return new Promise((resolve, reject) => { 102 | return compare_with_labelled_descriptors(metric, labelled_descriptors, face.descriptor) 103 | .then(find_best_match.bind(null, match_confidence, face)) 104 | .then(resolve) 105 | .catch(reject); 106 | }); 107 | })); 108 | } 109 | 110 | process.on('message', async function (data) { 111 | try { 112 | // Log thr start time of inference 113 | let start_time = Date.now(); 114 | 115 | // Extract the information from the data 116 | const { image, model_options, detect_options, descriptors, metric, match_confidence } = data; 117 | 118 | // If the models are yet to load, wait till they do, or kill the process if they don't 119 | let model_load_timer = setTimeout(() => { process.exit(); }, 30000); 120 | while (!models_loaded) { await new Promise(resolve => setTimeout(resolve, 10)); } 121 | clearTimeout(model_load_timer); 122 | 123 | // Convert the image from an array to canvas 124 | global_img = await convert_img_to_buffer(image.data) 125 | .then(buffer2canvas); 126 | 127 | // 
Parse input options from parent node 128 | let parsed_model_options = parse_model_options(model_options); 129 | let parsed_detect_options = parse_detection_options(detect_options); 130 | 131 | // Parse the descriptors from the passed json 132 | let parsed_descriptors = descriptors.map(x => faceapi.LabeledFaceDescriptors.fromJSON(x)); 133 | 134 | // Convert the input to buffer, detect faces, and send back the results 135 | data['detected_faces'] = await detect_faces(parsed_model_options, parsed_detect_options, global_img) 136 | .then(recognise_faces.bind(null, metric, match_confidence, parsed_descriptors)) 137 | 138 | // Draw on image 139 | global_img = await draw_faces(detect_options, data.detected_faces, global_img) 140 | data.labelled_img = global_img.toBuffer('image/jpeg'); 141 | 142 | // Log the total time for inference 143 | data['inference_time'] = Date.now() - start_time; 144 | 145 | // Send the data back as an object 146 | process.send(data); 147 | } 148 | catch (err) { 149 | console.error(err) 150 | } 151 | }); -------------------------------------------------------------------------------- /app/nodes/input_node.js: -------------------------------------------------------------------------------- 1 | // Import required modules 2 | const { join } = require('path') 3 | const { fork } = require('child_process'); 4 | const detect_on_fork = require("../helpers/detect_on_fork"); 5 | const { isNull } = require('../helpers/type_checks'); 6 | 7 | // Input Node constructor 8 | module.exports = function (RED, config, node) { 9 | // Register the node with node red 10 | RED.nodes.createNode(node, config); 11 | 12 | // Node specific variables 13 | node.RED = RED; 14 | node._busy = false; 15 | node.child = null; 16 | node.match_confidence = parseInt(config.match_confidence) || 2000; 17 | node.match_metric = config.match_metric || "Mean Squared Error"; 18 | 19 | // Get the model and detection parameters from config 20 | node.model_options = { 21 | "model": config.model, 22 
| "minConfidence": Math.min(Math.max(parseInt(config.confidence) / 100, 0), 1), 23 | "inputSize": parseInt(config.input_size), 24 | "maxResults": 100 25 | }; 26 | node.detect_options = { 27 | "landmarks": config.landmarks, 28 | "age_gender": config.age_gender, 29 | "expressions": config.expressions, 30 | "descriptors": config.descriptors 31 | }; 32 | 33 | // Register all compute nodes into an array 34 | node.recognise_nodes = config.recognise_nodes; 35 | 36 | // Set the initial status of the node 37 | node.status({ fill: "green", shape: "dot", text: "ready" }); 38 | 39 | // Create the supporting functions 40 | node._set_status = function (level, colour, msg) { 41 | return new Promise((resolve, reject) => { 42 | let prefix = "[face-api-input:" + node.id + "] - "; 43 | switch (level) { 44 | case "error": 45 | node.status({ fill: colour, shape: "dot", text: msg }); 46 | node.RED.log.error(prefix + msg); 47 | break; 48 | case "warn": 49 | node.status({ fill: colour, shape: "dot", text: msg }); 50 | node.RED.log.warn(prefix + msg); 51 | break; 52 | 53 | case "info": 54 | node.status({ fill: colour, shape: "dot", text: msg }); 55 | node.RED.log.info(prefix + msg); 56 | break; 57 | 58 | default: 59 | node.status({ fill: "green", shape: "dot", text: "ready" }); 60 | node.RED.log.debug(prefix + "Node Ready"); 61 | break; 62 | } 63 | 64 | resolve(); 65 | }); 66 | } 67 | node._is_busy = function () { 68 | return new Promise((resolve, reject) => { 69 | (node._busy) ? reject("Node is busy") : resolve(); 70 | }); 71 | } 72 | node._set_busy = function (state) { 73 | return new Promise((resolve, reject) => { 74 | // node._busy = state; 75 | node._busy = false; 76 | (state) ? 
node._set_status("info", "blue", "Computing...") : node._set_status("info", "green", "Ready"); 77 | resolve(); 78 | }); 79 | } 80 | node._check_recognise_nodes = function () { 81 | return new Promise((resolve, reject) => { 82 | if (node.recognise_nodes.every(element => element === null)) { 83 | reject("No compute nodes selected"); 84 | } 85 | else { 86 | resolve(); 87 | } 88 | }); 89 | } 90 | node._check_payload = function (msg) { 91 | return new Promise((resolve, reject) => { 92 | if (!("payload" in msg)) { 93 | reject("No msg.payload found"); 94 | } 95 | else if (!Buffer.isBuffer(msg.payload)) { 96 | reject("msg.payload was not a buffer"); 97 | } 98 | else { 99 | resolve({ 100 | "image": msg.payload, 101 | "model_options": msg.model_options || node.model_options, 102 | "detect_options": msg.detect_options || node.detect_options, 103 | "metric": msg.metric || node.match_metric, 104 | "match_confidence": msg.match_confidence || node.match_confidence 105 | }); 106 | } 107 | }); 108 | } 109 | node._inject_value = function (msg, name, value) { 110 | return new Promise((resolve, reject) => { 111 | msg[name] = value; 112 | resolve(msg); 113 | }); 114 | } 115 | node._remove_value = function (name, msg) { 116 | return new Promise((resolve, reject) => { 117 | delete msg.name; 118 | resolve(msg); 119 | }); 120 | } 121 | node._prepare_to_send = function (msg) { 122 | return new Promise((resolve, reject) => { 123 | resolve({ "payload": msg }); 124 | }); 125 | } 126 | node._start_inference = function (msg) { 127 | return new Promise((resolve, reject) => { 128 | node._is_busy() 129 | .then(node._set_busy.bind(null, true)) 130 | // .then(node._check_recognise_nodes.bind(null)) 131 | .then(node._check_payload.bind(null, msg)) 132 | .then(resolve) 133 | .catch(reject); 134 | }); 135 | } 136 | node._end_inference = function () { 137 | return new Promise((resolve, reject) => { 138 | node._set_busy(false) 139 | .then(resolve) 140 | .catch(reject); 141 | }); 142 | } 143 | 144 | // 
Create functions for handling child processes 145 | node._get_recognise_node = function (node_id) { 146 | return new Promise((resolve, reject) => { 147 | if (node_id !== null) { 148 | let recognise_node = RED.nodes.getNode(node_id); 149 | if (recognise_node !== null) { 150 | resolve(recognise_node); 151 | return; 152 | } 153 | } 154 | 155 | reject("Recognise node " + node_id + "does not exist"); 156 | }); 157 | } 158 | node._get_comparison_descriptors = function (msg) { 159 | return Promise.all(node.recognise_nodes.map((id) => { 160 | return node._get_recognise_node(id) 161 | .then((recognise_node) => { return recognise_node.get_descriptor(); }) 162 | .then((descriptor) => { return descriptor.toJSON() }) 163 | .catch((err) => { return null; }); 164 | })).then((descriptors) => { 165 | return descriptors.filter(x => x) 166 | }); 167 | } 168 | node._create_fork = function () { 169 | return new Promise(async function (resolve, reject) { 170 | // Return the previous child process if it already exists 171 | if (isNull(node.child)) { 172 | // Create the child process 173 | const args = []; 174 | const options = { stdio: "pipe" }; 175 | node.child = fork(join(__dirname, 'inference_node.js'), args, options); 176 | 177 | // Create a callback to handle a error events 178 | node.child.on('error', (err, signal) => { 179 | node._set_status("error", "red", "Child - " + err); 180 | node.child.kill('SIGINT'); 181 | node.child = null; 182 | }); 183 | 184 | // Create callback to handle a exit events 185 | node.child.on('exit', (code, signal) => { 186 | node._set_status("warn", "yellow", `Child - child_process exited with code ${code} and signal ${signal}`); 187 | }); 188 | 189 | // Add logging 190 | node.child.stderr.on('data', (data) => { 191 | node.RED.log.debug(data.toString()); 192 | }) 193 | } 194 | 195 | // Always return the child fork 196 | resolve(node.child); 197 | }); 198 | } 199 | node._clean_results = function (msg) { 200 | return new Promise((resolve, reject) => { 201 | 
delete msg.descriptors 202 | msg.image = Buffer.from(msg.image) 203 | msg.labelled_img = Buffer.from(msg.labelled_img) 204 | resolve(msg) 205 | }); 206 | } 207 | node._detect_on_fork = function (msg) { 208 | return new Promise(async function (resolve, reject) { 209 | node._get_comparison_descriptors(msg) 210 | .then(node._inject_value.bind(null, msg, "descriptors")) 211 | .then(node._create_fork) 212 | .then(detect_on_fork.bind(null, msg)) 213 | .then(node._clean_results) 214 | .then(resolve); 215 | }); 216 | } 217 | 218 | // message input handle 219 | node.on('input', function (msg, send, done) { 220 | node._start_inference(msg) 221 | .then(node._detect_on_fork) 222 | .then(node._prepare_to_send) 223 | .then(send) 224 | .then(node._end_inference) 225 | .catch(node._set_status.bind(null, "error", "red")) 226 | .finally(done) 227 | }); 228 | 229 | // Clean up the node when closed 230 | node.on('close', function () { 231 | if (!isNull(node.child)) node.child.kill('SIGINT'); 232 | }); 233 | } -------------------------------------------------------------------------------- /app/nodes/recognise_node.js: -------------------------------------------------------------------------------- 1 | // Load used modules 2 | const { join } = require('path') 3 | const { readFile, unlink, stat } = require("fs").promises; 4 | const faceapi = require('face-api.js'); 5 | const euclideanDistance = require('euclidean') 6 | const manhattanDistance = require('manhattan') 7 | const chebyshevDistance = require('chebyshev') 8 | 9 | const { isArray } = require('../helpers/type_checks'); 10 | 11 | // Export the main function class for teh node 12 | module.exports = function (RED, config, node) { 13 | // Register node with node red 14 | RED.nodes.createNode(node, config); 15 | 16 | // Node variables 17 | node.name = config.name || "face-api-compute"; 18 | node.descriptors = null; 19 | node._filename = join(global.descriptor_location, node.id + ".json"); 20 | 21 | // Create supporting functions 22 | 
node.load_descriptor = function () { 23 | stat(node._filename) 24 | .then(readFile.bind(this, node._filename)) 25 | .then(JSON.parse) 26 | .then((file) => { 27 | node.descriptors = faceapi.LabeledFaceDescriptors.fromJSON(file); 28 | node.descriptors._label = node.name; 29 | }) 30 | .catch((err) => { 31 | node.descriptors = null; 32 | RED.log.error("No descriptor file found for " + String(node.id)) 33 | }); 34 | } 35 | node.delete_descriptor = function () { 36 | stat(node._filename) 37 | .then(unlink(node._filename)) 38 | .catch((err) => { 39 | node.descriptors = null; 40 | RED.log.error("No descriptor file to delete for " + String(node.id)) 41 | }); 42 | } 43 | node.get_descriptor = function () { 44 | return new Promise((resolve, reject) => { 45 | resolve(node.descriptors) 46 | }); 47 | } 48 | 49 | // Clean up the node when closed 50 | node.on('close', function (removed, done) { 51 | // delete descriptor here 52 | if (removed) node.delete_descriptor(); 53 | done(); 54 | }); 55 | 56 | // Load in the descriptors at start 57 | node.load_descriptor(); 58 | } -------------------------------------------------------------------------------- /app/weights/age_gender_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/age_gender_model-shard1 -------------------------------------------------------------------------------- /app/weights/age_gender_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | 
[{"weights":[{"name":"entry_flow/conv_in/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005431825039433498,"min":-0.7441600304023892}},{"name":"entry_flow/conv_in/bias","shape":[32],"dtype":"float32"},{"name":"entry_flow/reduction_block_0/separable_conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005691980614381678,"min":-0.6090419257388395}},{"name":"entry_flow/reduction_block_0/separable_conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009089225881239947,"min":-1.1179747833925135}},{"name":"entry_flow/reduction_block_0/separable_conv0/bias","shape":[64],"dtype":"float32"},{"name":"entry_flow/reduction_block_0/separable_conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00683894624897078,"min":-0.8138346036275228}},{"name":"entry_flow/reduction_block_0/separable_conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011632566358528886,"min":-1.3028474321552352}},{"name":"entry_flow/reduction_block_0/separable_conv1/bias","shape":[64],"dtype":"float32"},{"name":"entry_flow/reduction_block_0/expansion_conv/filters","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010254812240600587,"min":-0.9229331016540528}},{"name":"entry_flow/reduction_block_0/expansion_conv/bias","shape":[64],"dtype":"float32"},{"name":"entry_flow/reduction_block_1/separable_conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0052509616403018725,"min":-0.6406173201168285}},{"name":"entry_flow/reduction_block_1/separable_conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010788509424994973,"min":-1.4564487723743214}},{"name":"entry_flow/reduction_block_1/separable_conv0/bias","shape":[128],"dtype":"float32"},{"name":"
entry_flow/reduction_block_1/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00553213918910307,"min":-0.7025816770160899}},{"name":"entry_flow/reduction_block_1/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013602388606351965,"min":-1.6186842441558837}},{"name":"entry_flow/reduction_block_1/separable_conv1/bias","shape":[128],"dtype":"float32"},{"name":"entry_flow/reduction_block_1/expansion_conv/filters","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007571851038465313,"min":-1.158493208885193}},{"name":"entry_flow/reduction_block_1/expansion_conv/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_0/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005766328409606335,"min":-0.6688940955143349}},{"name":"middle_flow/main_block_0/separable_conv0/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012136116214826995,"min":-1.5776951079275094}},{"name":"middle_flow/main_block_0/separable_conv0/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_0/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004314773222979377,"min":-0.5652352922102984}},{"name":"middle_flow/main_block_0/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01107162026798024,"min":-1.2400214700137868}},{"name":"middle_flow/main_block_0/separable_conv1/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_0/separable_conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036451735917259667,"min":-0.4848080876995536}},{"name":"middle_flow/main_block_0/separable_conv2/pointwise_filter","shape":[1,
1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008791744942758598,"min":-1.134135097615859}},{"name":"middle_flow/main_block_0/separable_conv2/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_1/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004915751896652521,"min":-0.6095532351849126}},{"name":"middle_flow/main_block_1/separable_conv0/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010868691463096469,"min":-1.3368490499608656}},{"name":"middle_flow/main_block_1/separable_conv0/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_1/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005010117269029804,"min":-0.6012140722835765}},{"name":"middle_flow/main_block_1/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010311148213405235,"min":-1.3816938605963016}},{"name":"middle_flow/main_block_1/separable_conv1/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_1/separable_conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004911523706772748,"min":-0.7367285560159123}},{"name":"middle_flow/main_block_1/separable_conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008976466047997568,"min":-1.2207993825276693}},{"name":"middle_flow/main_block_1/separable_conv2/bias","shape":[128],"dtype":"float32"},{"name":"exit_flow/reduction_block/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005074804436926748,"min":-0.7104726211697447}},{"name":"exit_flow/reduction_block/separable_conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0114
53078307357489,"min":-1.4545409450344011}},{"name":"exit_flow/reduction_block/separable_conv0/bias","shape":[256],"dtype":"float32"},{"name":"exit_flow/reduction_block/separable_conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007741751390344957,"min":-1.1380374543807086}},{"name":"exit_flow/reduction_block/separable_conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011347713189966538,"min":-1.497898141075583}},{"name":"exit_flow/reduction_block/separable_conv1/bias","shape":[256],"dtype":"float32"},{"name":"exit_flow/reduction_block/expansion_conv/filters","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006717281014311547,"min":-0.8329428457746318}},{"name":"exit_flow/reduction_block/expansion_conv/bias","shape":[256],"dtype":"float32"},{"name":"exit_flow/separable_conv/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027201742518181892,"min":-0.3237007359663645}},{"name":"exit_flow/separable_conv/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010076364348916447,"min":-1.330080094056971}},{"name":"exit_flow/separable_conv/bias","shape":[512],"dtype":"float32"},{"name":"fc/age/weights","shape":[512,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008674054987290326,"min":-1.2664120281443876}},{"name":"fc/age/bias","shape":[1],"dtype":"float32"},{"name":"fc/gender/weights","shape":[512,2],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029948226377075793,"min":-0.34140978069866407}},{"name":"fc/gender/bias","shape":[2],"dtype":"float32"}],"paths":["age_gender_model-shard1"]}] -------------------------------------------------------------------------------- /app/weights/face_expression_model-shard1: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/face_expression_model-shard1 -------------------------------------------------------------------------------- /app/weights/face_expression_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | [{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0057930146946626555,"min":-0.7125408074435067}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32"},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006473719839956246,"min":-0.6408982641556684}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010509579321917366,"min":-1.408283629136927}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32"},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005666389652326995,"min":-0.7252978754978554}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010316079270605948,"min":-1.1760330368490781}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32"},{"name":"dense0/conv3/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0063220320963392074,"min":-0.853474333005793}},{"name":"dense0/conv3/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010322785377502442,"min":-1.4658355236053466}},{"name":"dense0/conv3/bias","shape":[32],"dtype":"float32"},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0042531527724920535,"min":-0.5741756242864272}},{"name":"dense1/conv0/pointwise_filt
er","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010653339647779278,"min":-1.1825207009035}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32"},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005166931012097527,"min":-0.6355325144879957}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011478300188101974,"min":-1.3888743227603388}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32"},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006144821410085641,"min":-0.8479853545918185}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010541967317169788,"min":-1.3809977185492421}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32"},{"name":"dense1/conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005769844849904378,"min":-0.686611537138621}},{"name":"dense1/conv3/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010939095534530341,"min":-1.2689350820055196}},{"name":"dense1/conv3/bias","shape":[64],"dtype":"float32"},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037769308277204924,"min":-0.40790852939381317}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01188667194516051,"min":-1.4382873053644218}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32"},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006497045825509464,"min":-0.8381189114907208}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"
float32","quantization":{"dtype":"uint8","scale":0.011632198913424622,"min":-1.3377028750438316}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32"},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005947182225246056,"min":-0.7969224181829715}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011436844339557722,"min":-1.4524792311238306}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32"},{"name":"dense2/conv3/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006665432686899222,"min":-0.8998334127313949}},{"name":"dense2/conv3/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01283421422920975,"min":-1.642779421338848}},{"name":"dense2/conv3/bias","shape":[128],"dtype":"float32"},{"name":"dense3/conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004711699953266218,"min":-0.6737730933170692}},{"name":"dense3/conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010955964817720302,"min":-1.3914075318504784}},{"name":"dense3/conv0/bias","shape":[256],"dtype":"float32"},{"name":"dense3/conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00554193468654857,"min":-0.7149095745647656}},{"name":"dense3/conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016790372250126858,"min":-2.484975093018775}},{"name":"dense3/conv1/bias","shape":[256],"dtype":"float32"},{"name":"dense3/conv2/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006361540626077091,"min":-0.8142772001378676}},{"name":"dense3/conv2/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantizat
ion":{"dtype":"uint8","scale":0.01777329678628959,"min":-1.7062364914838006}},{"name":"dense3/conv2/bias","shape":[256],"dtype":"float32"},{"name":"dense3/conv3/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006900275922289082,"min":-0.8625344902861353}},{"name":"dense3/conv3/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015449936717164282,"min":-1.9003422162112067}},{"name":"dense3/conv3/bias","shape":[256],"dtype":"float32"},{"name":"fc/weights","shape":[256,7],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004834276554631252,"min":-0.7203072066400565}},{"name":"fc/bias","shape":[7],"dtype":"float32"}],"paths":["face_expression_model-shard1"]}] -------------------------------------------------------------------------------- /app/weights/face_landmark_68_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/face_landmark_68_model-shard1 -------------------------------------------------------------------------------- /app/weights/face_landmark_68_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | 
[{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004853619781194949,"min":-0.5872879935245888}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004396426443960153,"min":-0.7298067896973853}},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00635151559231328,"min":-0.5589333721235686}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009354315552057004,"min":-1.2628325995276957}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029380727048013726,"min":-0.5846764682554731}},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0049374802439820535,"min":-0.6171850304977566}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009941946758943446,"min":-1.3421628124573652}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0030300481062309416,"min":-0.5272283704841838}},{"name":"dense0/conv3/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005672684837790097,"min":-0.7431217137505026}},{"name":"dense0/conv3/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010712201455060173,"min":-1.5639814124387852}},{"name":"dense0/conv3/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0030966934035806097,"min":-0.3839899820439956}},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0039155554537679636,"min":-0.48161332081345953}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":
"float32","quantization":{"dtype":"uint8","scale":0.01023082966898002,"min":-1.094698774580862}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027264176630506327,"min":-0.3871513081531898}},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004583378632863362,"min":-0.5454220573107401}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00915846403907327,"min":-1.117332612766939}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003091680419211294,"min":-0.5966943209077797}},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005407439727409214,"min":-0.708374604290607}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00946493943532308,"min":-1.2399070660273235}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004409168514550901,"min":-0.9788354102303}},{"name":"dense1/conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004478132958505668,"min":-0.6493292789833219}},{"name":"dense1/conv3/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011063695888893277,"min":-1.2501976354449402}},{"name":"dense1/conv3/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003909627596537272,"min":-0.6646366914113363}},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003213915404151468,"min":-0.3374611174359041}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010917326048308728,"min":-1.452
0043644250609}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002800439152063108,"min":-0.38085972468058266}},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0050568851770139206,"min":-0.6927932692509071}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01074961213504567,"min":-1.3222022926106174}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0030654204242369708,"min":-0.5487102559384177}},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00591809165244009,"min":-0.917304206128214}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01092823346455892,"min":-1.366029183069865}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002681120470458386,"min":-0.36463238398234055}},{"name":"dense2/conv3/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0048311497650894465,"min":-0.5797379718107336}},{"name":"dense2/conv3/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011227761062921263,"min":-1.4483811771168429}},{"name":"dense2/conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0034643323982463162,"min":-0.3360402426298927}},{"name":"dense3/conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394978887894574,"min":-0.49227193874471326}},{"name":"dense3/conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010051267287310432,"min":-1.2765109454884247}},{"name":"dense3/conv0/bias","shape":[256],
"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003142924752889895,"min":-0.4588670139219247}},{"name":"dense3/conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00448304671867221,"min":-0.5872791201460595}},{"name":"dense3/conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016063522357566685,"min":-2.3613377865623026}},{"name":"dense3/conv1/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00287135781026354,"min":-0.47664539650374765}},{"name":"dense3/conv2/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006002906724518421,"min":-0.7923836876364315}},{"name":"dense3/conv2/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.017087187019048954,"min":-1.6061955797906016}},{"name":"dense3/conv2/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003124481205846749,"min":-0.46242321846531886}},{"name":"dense3/conv3/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006576311588287353,"min":-1.0193282961845398}},{"name":"dense3/conv3/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015590153955945782,"min":-1.99553970636106}},{"name":"dense3/conv3/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004453541601405424,"min":-0.6546706154065973}},{"name":"fc/weights","shape":[256,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010417488509533453,"min":-1.500118345372817}},{"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0025084222648658005,"min":0.07683877646923065}}],"paths":["face_landmark_68_model-shard1"]}] -------------------------------------------------------------------------------- 
/app/weights/face_landmark_68_tiny_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/face_landmark_68_tiny_model-shard1 -------------------------------------------------------------------------------- /app/weights/face_landmark_68_tiny_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | [{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"
dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min":-1.6001976466646382}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.207715455223532
5}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},{"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},{"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}],"paths":["face_landmark_68_tiny_model-shard1"]}] -------------------------------------------------------------------------------- /app/weights/face_recognition_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/face_recognition_model-shard1 -------------------------------------------------------------------------------- /app/weights/face_recognition_model-shard2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/face_recognition_model-shard2 -------------------------------------------------------------------------------- /app/weights/face_recognition_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | 
[{"weights":[{"name":"conv32_down/conv/filters","shape":[7,7,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0005260649557207145,"min":-0.07101876902229645}},{"name":"conv32_down/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":8.471445956577858e-7,"min":-0.00014740315964445472}},{"name":"conv32_down/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.06814416062598135,"min":5.788674831390381}},{"name":"conv32_down/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008471635042452345,"min":-0.931879854669758}},{"name":"conv32_1/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0007328585666768691,"min":-0.0974701893680236}},{"name":"conv32_1/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.5952091238361e-8,"min":-0.000001978059313556764}},{"name":"conv32_1/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02146628510718252,"min":3.1103382110595703}},{"name":"conv32_1/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0194976619645661,"min":-2.3787147596770644}},{"name":"conv32_1/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0004114975824075587,"min":-0.05267169054816751}},{"name":"conv32_1/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.600177166424806e-9,"min":-5.70421968636676e-7}},{"name":"conv32_1/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03400764932819441,"min":2.1677730083465576}},{"name":"conv32_1/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010974494616190593,"min":-1.240117891629537}},{"name":"conv32_2/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantizati
on":{"dtype":"uint8","scale":0.0005358753251094444,"min":-0.0760942961655411}},{"name":"conv32_2/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.9886454383719385e-9,"min":-7.366033889197485e-7}},{"name":"conv32_2/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014633869657329485,"min":2.769575357437134}},{"name":"conv32_2/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.022131107367721257,"min":-2.5229462399202234}},{"name":"conv32_2/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00030145110452876373,"min":-0.03949009469326805}},{"name":"conv32_2/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":6.8779549306497095e-9,"min":-9.010120959151119e-7}},{"name":"conv32_2/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03929369870354148,"min":4.8010945320129395}},{"name":"conv32_2/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010553357180427103,"min":-1.2452961472903983}},{"name":"conv32_3/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003133527642371608,"min":-0.040735859350830905}},{"name":"conv32_3/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.1064200719547974e-9,"min":-3.0387508532465503e-7}},{"name":"conv32_3/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009252088210161994,"min":2.333256721496582}},{"name":"conv32_3/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007104101251153385,"min":-0.34810096130651585}},{"name":"conv32_3/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00029995629892629733,"min":-0.0311954550883349
23}},{"name":"conv32_3/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.62726418316814e-9,"min":-6.921534945296811e-7}},{"name":"conv32_3/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0467432975769043,"min":5.362040996551514}},{"name":"conv32_3/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010314425300149357,"min":-1.268674311918371}},{"name":"conv64_down/conv1/conv/filters","shape":[3,3,32,64],"dtype":"float32"},{"name":"conv64_down/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":8.373908033218849e-10,"min":-1.172347124650639e-7}},{"name":"conv64_down/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0066875364266189875,"min":2.5088400840759277}},{"name":"conv64_down/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01691421620986041,"min":-2.0973628100226906}},{"name":"conv64_down/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_down/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.3252014483766877e-9,"min":-2.673981665633191e-7}},{"name":"conv64_down/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.032557439804077146,"min":2.6351239681243896}},{"name":"conv64_down/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015429047509735706,"min":-1.5429047509735707}},{"name":"conv64_1/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_1/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.1319172039756998e-9,"min":-1.4941307092479238e-7}},{"name":"conv64_1/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007802607031429515,"min":3.401733160018921}},{"name":
"conv64_1/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01425027146058924,"min":-0.6982633015688727}},{"name":"conv64_1/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_1/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.5635019893325435e-9,"min":-2.717312108692496e-7}},{"name":"conv64_1/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04062801716374416,"min":3.542381525039673}},{"name":"conv64_1/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007973166306813557,"min":-0.7415044665336609}},{"name":"conv64_2/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_2/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.2535732661062331e-9,"min":-1.8302169685151004e-7}},{"name":"conv64_2/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005631206549850164,"min":2.9051668643951416}},{"name":"conv64_2/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01859012585060269,"min":-2.3795361088771445}},{"name":"conv64_2/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_2/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.486726369919351e-9,"min":-3.5311514452854786e-7}},{"name":"conv64_2/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03740917467603497,"min":5.571568965911865}},{"name":"conv64_2/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006418555858088475,"min":-0.5263215803632549}},{"name":"conv64_3/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_3/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":7.432564576875473e-
10,"min":-8.47312361763804e-8}},{"name":"conv64_3/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006400122362024644,"min":2.268010377883911}},{"name":"conv64_3/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010945847922680425,"min":-1.3353934465670119}},{"name":"conv64_3/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_3/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.278228722014533e-9,"min":-3.212302498040492e-7}},{"name":"conv64_3/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029840927498013366,"min":7.038398265838623}},{"name":"conv64_3/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010651412197187834,"min":-1.161003929493474}},{"name":"conv128_down/conv1/conv/filters","shape":[3,3,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00020040544662989823,"min":-0.022245004575918704}},{"name":"conv128_down/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.3550543563576545e-10,"min":-4.311503812794078e-8}},{"name":"conv128_down/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007448580685783835,"min":2.830846071243286}},{"name":"conv128_down/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01211262824488621,"min":-1.6957679542840696}},{"name":"conv128_down/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00022380277514457702,"min":-0.02484210804104805}},{"name":"conv128_down/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.031058637304466e-10,"min":-1.1650065642122761e-7}},{"name":"conv128_down/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8
","scale":0.027663578706629135,"min":3.1111555099487305}},{"name":"conv128_down/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008878476946961646,"min":-1.029903325847551}},{"name":"conv128_1/conv1/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00022380667574265425,"min":-0.032899581334170175}},{"name":"conv128_1/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.4147297756478345e-10,"min":-5.253528433020923e-8}},{"name":"conv128_1/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013599334978589825,"min":3.634530782699585}},{"name":"conv128_1/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014059314073300829,"min":-1.4059314073300828}},{"name":"conv128_1/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00021715293474057143,"min":-0.02909849325523657}},{"name":"conv128_1/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.887046963276768e-10,"min":-1.1370104007768284e-7}},{"name":"conv128_1/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029993299409454943,"min":3.630716562271118}},{"name":"conv128_1/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00782704236460667,"min":-0.7200878975438136}},{"name":"conv128_2/conv1/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00017718105923895743,"min":-0.022324813464108636}},{"name":"conv128_2/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":3.567012027797675e-10,"min":-5.243507680862582e-8}},{"name":"conv128_2/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007940645778880399,"min":4.92776
7753601074}},{"name":"conv128_2/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015933452867994122,"min":-1.5614783810634238}},{"name":"conv128_2/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0001451439717236687,"min":-0.01712698866339291}},{"name":"conv128_2/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.0383988570966347e-9,"min":-1.2356946399449953e-7}},{"name":"conv128_2/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02892604528688917,"min":4.750600814819336}},{"name":"conv128_2/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00797275748907351,"min":-0.7414664464838364}},{"name":"conv256_down/conv1/conv/filters","shape":[3,3,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0002698827827093648,"min":-0.03994265184098599}},{"name":"conv256_down/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.036909834755123e-10,"min":-6.396875490139006e-8}},{"name":"conv256_down/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014870181738161573,"min":4.269900798797607}},{"name":"conv256_down/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.022031106200872685,"min":-3.1063859743230484}},{"name":"conv256_down/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00046430734150549946,"min":-0.03946612402796745}},{"name":"conv256_down/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":6.693064577513153e-10,"min":-7.630093618364995e-8}},{"name":"conv256_down/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03475512242784687,"min":3.608360528945923}},{"name":"conv256_dow
n/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01290142021927179,"min":-1.1482263995151893}},{"name":"conv256_1/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00037147209924810076,"min":-0.04234781931428348}},{"name":"conv256_1/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":3.2105515457510146e-10,"min":-3.467395669411096e-8}},{"name":"conv256_1/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.043242172166412955,"min":5.28542947769165}},{"name":"conv256_1/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01643658619300992,"min":-1.3149268954407936}},{"name":"conv256_1/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003289232651392619,"min":-0.041773254672686264}},{"name":"conv256_1/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.13591691187321e-10,"min":-1.2333487831028833e-7}},{"name":"conv256_1/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0573908618852204,"min":4.360693454742432}},{"name":"conv256_1/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0164216583850337,"min":-1.3958409627278647}},{"name":"conv256_2/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00010476927912118389,"min":-0.015610622589056398}},{"name":"conv256_2/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.418552539068639e-10,"min":-2.539480166022071e-8}},{"name":"conv256_2/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.06024209564807368,"min":6.598613739013672}},{"name":"conv256_2/conv1/scale/biases","shape":[256],"dtype":"float32","q
uantization":{"dtype":"uint8","scale":0.01578534350675695,"min":-1.1049740454729864}},{"name":"conv256_2/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00005543030908002573,"min":-0.007427661416723448}},{"name":"conv256_2/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.0822061852320308e-9,"min":-1.515088659324843e-7}},{"name":"conv256_2/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04302893993901272,"min":2.2855491638183594}},{"name":"conv256_2/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006792667566561232,"min":-0.8083274404207865}},{"name":"conv256_down_out/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000568966465253456,"min":-0.05632768006009214}},{"name":"conv256_down_out/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.5347887884881677e-10,"min":-6.530095855422961e-8}},{"name":"conv256_down_out/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.017565592597512638,"min":4.594101905822754}},{"name":"conv256_down_out/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04850864223405427,"min":-6.306123490427055}},{"name":"conv256_down_out/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003739110687199761,"min":-0.06954745878191555}},{"name":"conv256_down_out/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.2668428328152895e-9,"min":-2.2549802424112154e-7}},{"name":"conv256_down_out/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04351314469879749,"min":4.31956672668457}},{"name":"conv256_down_out/conv2/scale/biases","shape":[256],"dtype":"float32","
quantization":{"dtype":"uint8","scale":0.021499746921015722,"min":-1.2039858275768804}},{"name":"fc","shape":[256,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000357687911566566,"min":-0.04578405268052045}}],"paths":["face_recognition_model-shard1","face_recognition_model-shard2"]}] -------------------------------------------------------------------------------- /app/weights/mtcnn_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/mtcnn_model-shard1 -------------------------------------------------------------------------------- /app/weights/mtcnn_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | [{"paths":["mtcnn_model-shard1"],"weights":[{"dtype":"float32","name":"pnet/conv1/weights","shape":[3,3,3,10]},{"dtype":"float32","name":"pnet/conv1/bias","shape":[10]},{"dtype":"float32","name":"pnet/prelu1_alpha","shape":[10]},{"dtype":"float32","name":"pnet/conv2/weights","shape":[3,3,10,16]},{"dtype":"float32","name":"pnet/conv2/bias","shape":[16]},{"dtype":"float32","name":"pnet/prelu2_alpha","shape":[16]},{"dtype":"float32","name":"pnet/conv3/weights","shape":[3,3,16,32]},{"dtype":"float32","name":"pnet/conv3/bias","shape":[32]},{"dtype":"float32","name":"pnet/prelu3_alpha","shape":[32]},{"dtype":"float32","name":"pnet/conv4_1/weights","shape":[1,1,32,2]},{"dtype":"float32","name":"pnet/conv4_1/bias","shape":[2]},{"dtype":"float32","name":"pnet/conv4_2/weights","shape":[1,1,32,4]},{"dtype":"float32","name":"pnet/conv4_2/bias","shape":[4]},{"dtype":"float32","name":"rnet/conv1/weights","shape":[3,3,3,28]},{"dtype":"float32","name":"rnet/conv1/bias","shape":[28]},{"dtype":"float32","name":"rnet/prelu1_alpha","shape":[28]},{"dtype":"float32","name":"rnet/conv2/weights","shape":[3,3,28,48]},{"dtype":"float32","nam
e":"rnet/conv2/bias","shape":[48]},{"dtype":"float32","name":"rnet/prelu2_alpha","shape":[48]},{"dtype":"float32","name":"rnet/conv3/weights","shape":[2,2,48,64]},{"dtype":"float32","name":"rnet/conv3/bias","shape":[64]},{"dtype":"float32","name":"rnet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"rnet/fc1/weights","shape":[576,128]},{"dtype":"float32","name":"rnet/fc1/bias","shape":[128]},{"dtype":"float32","name":"rnet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"rnet/fc2_1/weights","shape":[128,2]},{"dtype":"float32","name":"rnet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"rnet/fc2_2/weights","shape":[128,4]},{"dtype":"float32","name":"rnet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/conv1/weights","shape":[3,3,3,32]},{"dtype":"float32","name":"onet/conv1/bias","shape":[32]},{"dtype":"float32","name":"onet/prelu1_alpha","shape":[32]},{"dtype":"float32","name":"onet/conv2/weights","shape":[3,3,32,64]},{"dtype":"float32","name":"onet/conv2/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu2_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv3/weights","shape":[3,3,64,64]},{"dtype":"float32","name":"onet/conv3/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv4/weights","shape":[2,2,64,128]},{"dtype":"float32","name":"onet/conv4/bias","shape":[128]},{"dtype":"float32","name":"onet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"onet/fc1/weights","shape":[1152,256]},{"dtype":"float32","name":"onet/fc1/bias","shape":[256]},{"dtype":"float32","name":"onet/prelu5_alpha","shape":[256]},{"dtype":"float32","name":"onet/fc2_1/weights","shape":[256,2]},{"dtype":"float32","name":"onet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"onet/fc2_2/weights","shape":[256,4]},{"dtype":"float32","name":"onet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/fc2_3/weights","shape":[256,10]},{"dtype":"float32","name":"onet/fc2_3/bias","shape":[10]}]}
] -------------------------------------------------------------------------------- /app/weights/ssd_mobilenetv1_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/ssd_mobilenetv1_model-shard1 -------------------------------------------------------------------------------- /app/weights/ssd_mobilenetv1_model-shard2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/ssd_mobilenetv1_model-shard2 -------------------------------------------------------------------------------- /app/weights/ssd_mobilenetv1_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | [{"paths":["ssd_mobilenetv1_model-shard1","ssd_mobilenetv1_model-shard2"],"weights":[{"dtype":"float32","shape":[1,1,512,9],"quantization":{"scale":0.0026856216729856004,"min":-0.34107395246917127,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/ClassPredictor/weights"},{"dtype":"float32","shape":[9],"quantization":{"scale":0.00198518248165355,"min":-0.32159956202787515,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/ClassPredictor/biases"},{"dtype":"float32","shape":[1,1,1024,18],"quantization":{"scale":0.003060340296988394,"min":-0.489654447518143,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/ClassPredictor/weights"},{"dtype":"float32","shape":[18],"quantization":{"scale":0.0008040678851744708,"min":-0.12221831854651957,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/ClassPredictor/biases"},{"dtype":"float32","shape":[1,1,512,18],"quantization":{"scale":0.0012513800578958848,"min":-0.16017664741067325,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/ClassPredictor/weights"},{"dtype":"float32","shape":[18
],"quantization":{"scale":0.000338070518245884,"min":-0.05510549447407909,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/ClassPredictor/biases"},{"dtype":"float32","shape":[1,1,256,18],"quantization":{"scale":0.0011819932975021064,"min":-0.1453851755927591,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/ClassPredictor/weights"},{"dtype":"float32","shape":[18],"quantization":{"scale":0.00015985782386041154,"min":-0.026536398760828316,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/ClassPredictor/biases"},{"dtype":"float32","shape":[1,1,256,18],"quantization":{"scale":0.0007035591438704846,"min":-0.08513065640832863,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/ClassPredictor/weights"},{"dtype":"float32","shape":[18],"quantization":{"scale":0.00008793946574716008,"min":-0.013190919862074012,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/ClassPredictor/biases"},{"dtype":"float32","shape":[1,1,128,18],"quantization":{"scale":0.00081320781918133,"min":-0.11059626340866088,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/ClassPredictor/weights"},{"dtype":"float32","shape":[18],"quantization":{"scale":0.0000980533805547976,"min":-0.014609953702664841,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/ClassPredictor/biases"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":3,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/stack_1/2"},{"dtype":"int32","shape":[3],"quantization":{"scale":0.00392156862745098,"min":0,"dtype":"uint8"},"name":"Postprocessor/Slice/begin"},{"dtype":"int32","shape":[3],"quantization":{"scale":1,"min":-1,"dtype":"uint8"},"name":"Postprocessor/Slice/size"},{"dtype":"float32","shape":[1,1,512,12],"quantization":{"scale":0.003730384859384275,"min":-0.4327246436885759,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/BoxEncodingPredictor/weights"},{"dtype":"float32","shape":[12],"quantization":{"scale":0.0018744708568442102,"min":-0.3917644090804399,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/BoxEncodingPredicto
r/biases"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":3072,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/stack_1/1"},{"dtype":"float32","shape":[1,1,1024,24],"quantization":{"scale":0.00157488017689948,"min":-0.20000978246623397,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/BoxEncodingPredictor/weights"},{"dtype":"float32","shape":[24],"quantization":{"scale":0.0002823906713256649,"min":-0.043488163384152394,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/BoxEncodingPredictor/biases"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":1536,"dtype":"uint8"},"name":"Prediction/BoxPredictor_1/stack_1/1"},{"dtype":"float32","shape":[1,1,512,24],"quantization":{"scale":0.0007974451663447361,"min":-0.11004743295557358,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/BoxEncodingPredictor/weights"},{"dtype":"float32","shape":[24],"quantization":{"scale":0.0001350417988849621,"min":-0.02039131163162928,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/BoxEncodingPredictor/biases"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":384,"dtype":"uint8"},"name":"Prediction/BoxPredictor_2/stack_1/1"},{"dtype":"float32","shape":[1,1,256,24],"quantization":{"scale":0.0007113990246080885,"min":-0.0860792819775787,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/BoxEncodingPredictor/weights"},{"dtype":"float32","shape":[24],"quantization":{"scale":0.000050115815418608046,"min":-0.007617603943628423,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/BoxEncodingPredictor/biases"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":96,"dtype":"uint8"},"name":"Prediction/BoxPredictor_3/stack_1/1"},{"dtype":"float32","shape":[1,1,256,24],"quantization":{"scale":0.000590049314732645,"min":-0.06903576982371946,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/BoxEncodingPredictor/weights"},{"dtype":"float32","shape":[24],"quantization":{"scale":0.00003513663861097074,"min":-0.006359731588585704,"dtype":"uint8"},"name":"Predictio
n/BoxPredictor_4/BoxEncodingPredictor/biases"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":24,"dtype":"uint8"},"name":"Prediction/BoxPredictor_4/stack_1/1"},{"dtype":"float32","shape":[1,1,128,24],"quantization":{"scale":0.0005990567744946948,"min":-0.07907549423329971,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/BoxEncodingPredictor/weights"},{"dtype":"float32","shape":[24],"quantization":{"scale":0.00003392884288640583,"min":-0.006039334033780238,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/BoxEncodingPredictor/biases"},{"dtype":"float32","shape":[],"quantization":{"scale":1,"min":0.007843137718737125,"dtype":"uint8"},"name":"Preprocessor/mul/x"},{"dtype":"int32","shape":[2],"quantization":{"scale":1,"min":512,"dtype":"uint8"},"name":"Preprocessor/ResizeImage/size"},{"dtype":"float32","shape":[],"quantization":{"scale":1,"min":1,"dtype":"uint8"},"name":"Preprocessor/sub/y"},{"dtype":"float32","shape":[3,3,3,32],"quantization":{"scale":0.03948551065781537,"min":-5.014659853542552,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_0_pointwise/weights"},{"dtype":"float32","shape":[32],"quantization":{"scale":0.0498106133704092,"min":-7.371970778820562,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_0_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,32,1],"quantization":{"scale":0.036833542468501075,"min":-4.714693435968138,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/depthwise_weights"},{"dtype":"float32","shape":[32],"quantization":{"scale":0.012173276705046495,"min":-0.012173276705046495,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[32],"quantization":{"scale":0.032182769214405736,"min":-2.4780732295092416,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[32],"quantization":{"scale":0.028287527607936486,"min":-3.366215785344442,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/moving_mean"},{"dtype":
"float32","shape":[32],"quantization":{"scale":0.04716738532571232,"min":3.9071404665769224e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,32,64],"quantization":{"scale":0.04010109433940812,"min":-4.290817094316669,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_pointwise/weights"},{"dtype":"float32","shape":[64],"quantization":{"scale":0.2212210038129021,"min":-34.51047659481273,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_1_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,64,1],"quantization":{"scale":0.010024750933927648,"min":-1.343316625146305,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/depthwise_weights"},{"dtype":"float32","shape":[64],"quantization":{"scale":0.006120916675118839,"min":0.5227176547050476,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[64],"quantization":{"scale":0.02317035385206634,"min":-0.7646216771181892,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[64],"quantization":{"scale":0.04980821422502106,"min":-5.8275610643274645,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[64],"quantization":{"scale":0.051751047022202436,"min":3.916113799002297e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,64,128],"quantization":{"scale":0.021979344124887504,"min":-2.1319963801140878,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_pointwise/weights"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.09958663267247816,"min":-11.054116226645077,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_2_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,128,1],"quantization":{"scale":0.01943492702409333,"min":-2.6237151482525993,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/depthwise_weights"},{"dtype":"float
32","shape":[128],"quantization":{"scale":0.017852897737540452,"min":0.40204083919525146,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.029888209174661076,"min":-1.972621805527631,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.029319268581913967,"min":-5.130872001834945,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.014018708584355373,"min":3.9083178263362604e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,128,128],"quantization":{"scale":0.020776657964669022,"min":-2.5347522716896207,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_pointwise/weights"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.14383157094319662,"min":-9.636715253194174,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_3_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,128,1],"quantization":{"scale":0.004463558571011412,"min":-0.5981168485155293,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/depthwise_weights"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.006487431245691636,"min":0.47910428047180176,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.026542164297664865,"min":-1.2209395576925839,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.05119945675719018,"min":-8.60150873520795,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.03081628388049556,"min":3.911508751095344e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_depthwise/BatchNorm/moving_variance"},{"dtype":"float3
2","shape":[1,1,128,256],"quantization":{"scale":0.010758659886378868,"min":-1.0328313490923713,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_pointwise/weights"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.08058219610476026,"min":-9.34753474815219,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_4_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,256,1],"quantization":{"scale":0.01145936741548426,"min":-1.3292866201961742,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/depthwise_weights"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.0083988838336047,"min":0.36280909180641174,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.02858148649627087,"min":-3.6584302715226715,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.03988401375564874,"min":-7.099354448505476,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.009090481683904049,"min":0.020878996700048447,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,256,256],"quantization":{"scale":0.008951201625898773,"min":-1.1189002032373465,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_pointwise/weights"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.051758006974762565,"min":-5.745138774198645,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_5_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,256,1],"quantization":{"scale":0.004110433190476661,"min":-0.6042336790000691,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/depthwise_weights"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.013170199768216002,"min":0.3386639356613159,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape
":[256],"quantization":{"scale":0.03599378548416437,"min":-3.70735990486893,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.026967673208199296,"min":-3.748506575939702,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.012615410486857097,"min":3.9111388979838637e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,256,512],"quantization":{"scale":0.00822840648538926,"min":-1.1848905338960536,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_pointwise/weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.06608965817619772,"min":-7.468131373910342,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_6_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.008801074355256323,"min":-0.9593171047229393,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/depthwise_weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.030577416513480393,"min":0.3285980224609375,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.04778536441279393,"min":-8.935863145192464,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.04331884945140165,"min":-9.660103427662568,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.04126455444367785,"min":0.000604183878749609,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.009305818408143287,"min":-1.1446156642016243,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_pointwise/weights"},{"dtype":"float32","shape":[512]
,"quantization":{"scale":0.04640720217835669,"min":-4.733534622192383,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_7_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.008138792655047248,"min":-0.9766551186056698,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/depthwise_weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.027351748358969596,"min":0.34030041098594666,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.04415061053107767,"min":-7.019947074441349,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.02476683784933651,"min":-2.9224868662217083,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.02547598832684076,"min":0.00026032101595774293,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.01083052625843123,"min":-1.2563410459780227,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_pointwise/weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.06360894371481503,"min":-7.951117964351878,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_8_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.006704086883395326,"min":-0.8648272079579971,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/depthwise_weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.015343831567203297,"min":0.2711026668548584,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.03378283930759804,"min":-4.797163181678922,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[512],"quantiz
ation":{"scale":0.021910778213949763,"min":-3.987761634938857,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.009284070410007296,"min":0.000021581046894425526,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.012783036979974485,"min":-1.9046725100161983,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_pointwise/weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.07273082733154297,"min":-9.52773838043213,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_9_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.006126228033327589,"min":-0.7351473639993107,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/depthwise_weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.029703759212119908,"min":0.28687000274658203,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.04394429898729511,"min":-6.3279790541704966,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.016566915605582443,"min":-2.7501079905266854,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.012152872833551145,"min":3.913338286370366e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.01354524388032801,"min":-1.7473364605623134,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_pointwise/weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.08566816367355047,"min":-9.937506986131854,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_10_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":
[3,3,512,1],"quantization":{"scale":0.006012305558896532,"min":-0.7876120282154457,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/depthwise_weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.01469323155926723,"min":0.29223933815956116,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.030889174517463234,"min":-3.2433633243336395,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.014836942448335536,"min":-2.047498057870304,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.007234466105343445,"min":0.00013165915152058005,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,512,512],"quantization":{"scale":0.016261722527298274,"min":-1.4798167499841428,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_pointwise/weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.091437328563017,"min":-14.172785927267636,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_11_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,512,1],"quantization":{"scale":0.004750356487199372,"min":-0.650798838746314,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/depthwise_weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.008174965545242907,"min":0.3120670020580292,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.030133422215779623,"min":-2.41067377726237,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.006088157261119169,"min":-0.7853722866843729,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","sh
ape":[512],"quantization":{"scale":0.003668997334498985,"min":3.9124486300013356e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,512,1024],"quantization":{"scale":0.010959514449624454,"min":-1.4028178495519301,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_pointwise/weights"},{"dtype":"float32","shape":[1024],"quantization":{"scale":0.10896045834410424,"min":-14.818622334798176,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_12_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,1024,1],"quantization":{"scale":0.004633033509347953,"min":-0.5652300881404502,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/depthwise_weights"},{"dtype":"float32","shape":[1024],"quantization":{"scale":0.022285057224479377,"min":0.23505790531635284,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/gamma"},{"dtype":"float32","shape":[1024],"quantization":{"scale":0.0324854850769043,"min":-3.9957146644592285,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/beta"},{"dtype":"float32","shape":[1024],"quantization":{"scale":0.014760061806323482,"min":-2.125448900110581,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/moving_mean"},{"dtype":"float32","shape":[1024],"quantization":{"scale":0.0036057423142825855,"min":3.9067056828997994e-36,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_depthwise/BatchNorm/moving_variance"},{"dtype":"float32","shape":[1,1,1024,1024],"quantization":{"scale":0.017311988157384536,"min":-2.094750567043529,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_pointwise/weights"},{"dtype":"float32","shape":[1024],"quantization":{"scale":0.16447528764313343,"min":-25.658144872328815,"dtype":"uint8"},"name":"MobilenetV1/Conv2d_13_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[1,1,1024,256],"quantization":{"scale":0.0026493051472832175,"min":-0.36825341547236723,"dtype":"uint8"},"name":"Prediction/Conv2d_0_pointwise/weight
s"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.012474596734140433,"min":-2.3078003958159803,"dtype":"uint8"},"name":"Prediction/Conv2d_0_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,256,512],"quantization":{"scale":0.014533351449405445,"min":-1.8166689311756807,"dtype":"uint8"},"name":"Prediction/Conv2d_1_pointwise/weights"},{"dtype":"float32","shape":[512],"quantization":{"scale":0.024268776762719248,"min":-2.4754152297973633,"dtype":"uint8"},"name":"Prediction/Conv2d_1_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[1,1,512,128],"quantization":{"scale":0.002208403746287028,"min":-0.28709248701731366,"dtype":"uint8"},"name":"Prediction/Conv2d_2_pointwise/weights"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.012451349052728392,"min":-1.5937726787492341,"dtype":"uint8"},"name":"Prediction/Conv2d_2_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,128,256],"quantization":{"scale":0.026334229637594783,"min":-2.8967652601354263,"dtype":"uint8"},"name":"Prediction/Conv2d_3_pointwise/weights"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.02509917792151956,"min":-1.4055539636050953,"dtype":"uint8"},"name":"Prediction/Conv2d_3_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[1,1,256,128],"quantization":{"scale":0.004565340046789132,"min":-0.3971845840706545,"dtype":"uint8"},"name":"Prediction/Conv2d_4_pointwise/weights"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.017302456556581983,"min":-2.5953684834872974,"dtype":"uint8"},"name":"Prediction/Conv2d_4_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,128,256],"quantization":{"scale":0.025347338470758176,"min":-3.8527954475552426,"dtype":"uint8"},"name":"Prediction/Conv2d_5_pointwise/weights"},{"dtype":"float32","shape":[256],"quantization":{"scale":0.033134659598855414,"min":-2.9158500446992766,"dtype":"uint8"},"name":"Prediction/Conv2d_5_pointwise/convolution_bn_offset"},{"dtype":"flo
at32","shape":[1,1,256,64],"quantization":{"scale":0.002493104397081861,"min":-0.2817207968702503,"dtype":"uint8"},"name":"Prediction/Conv2d_6_pointwise/weights"},{"dtype":"float32","shape":[64],"quantization":{"scale":0.011383360974928912,"min":-1.2749364291920382,"dtype":"uint8"},"name":"Prediction/Conv2d_6_pointwise/convolution_bn_offset"},{"dtype":"float32","shape":[3,3,64,128],"quantization":{"scale":0.020821522731407017,"min":-2.7484410005457263,"dtype":"uint8"},"name":"Prediction/Conv2d_7_pointwise/weights"},{"dtype":"float32","shape":[128],"quantization":{"scale":0.052144218893612135,"min":-3.5979511036592373,"dtype":"uint8"},"name":"Prediction/Conv2d_7_pointwise/convolution_bn_offset"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":6,"dtype":"uint8"},"name":"Prediction/BoxPredictor_5/stack_1/1"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":1,"dtype":"uint8"},"name":"concat_1/axis"},{"dtype":"int32","shape":[1],"quantization":{"scale":1,"min":0,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/strided_slice/stack"},{"dtype":"int32","shape":[1],"quantization":{"scale":1,"min":1,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/strided_slice/stack_1"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":5118,"dtype":"uint8"},"name":"Postprocessor/stack/1"},{"dtype":"int32","shape":[],"quantization":{"scale":1,"min":4,"dtype":"uint8"},"name":"Prediction/BoxPredictor_0/stack/3"},{"dtype":"float32","shape":[1, 5118, 4],"name":"Output/extra_dim"}]}] -------------------------------------------------------------------------------- /app/weights/tiny_face_detector_model-shard1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thebigpotatoe/node-red-contrib-face-recognition/76b9629c2091949257fbeaa8094efb427d214b92/app/weights/tiny_face_detector_model-shard1 -------------------------------------------------------------------------------- 
/app/weights/tiny_face_detector_model-weights_manifest.json: -------------------------------------------------------------------------------- 1 | [{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},{"name":"
conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}],"paths":["tiny_face_detector_model-shard1"]}] -------------------------------------------------------------------------------- /face-api.html: -------------------------------------------------------------------------------- 1 | 2 | 147 | 148 | 261 | 262 | 325 | 326 | 327 | 431 | 432 | 459 | 460 | -------------------------------------------------------------------------------- /face-api.js: -------------------------------------------------------------------------------- 1 | // Import required modules 2 | const input_node = require('./app/nodes/input_node') 3 | const recognise_node = require('./app/nodes/recognise_node') 4 | const add_descriptors = require('./app/endpoints/add_descriptors'); 5 | const check_descriptors = require('./app/endpoints/check_descriptors'); 6 | const delete_descriptors = require('./app/endpoints/delete_descriptors'); 7 | const create_descriptor_location = require('./app/helpers/create_descriptor_location'); 
8 | 9 | // Set the global variables for the app 10 | global.descriptor_location = require('path').join(__dirname, 'app/descriptors') 11 | create_descriptor_location(global.descriptor_location); 12 | 13 | // Export the nodes 14 | module.exports = function (RED) { 15 | // Recognise Node Constructor 16 | function recognise_node_creator(config) { 17 | recognise_node(RED, config, this) 18 | } 19 | RED.nodes.registerType("face-api-recognise", recognise_node_creator); 20 | 21 | // Input Node constructor 22 | function input_node_creator(config) { 23 | input_node(RED, config, this); 24 | } 25 | RED.nodes.registerType("face-api-input", input_node_creator) 26 | 27 | // HTTP Endpoints for use with the front end 28 | RED.httpAdmin.get('/faceapi/:id/check', RED.auth.needsPermission('face-api-recognise.upload'), async function (req, res) { 29 | RED.log.debug("Finding descriptors for " + req.params.id); 30 | check_descriptors(RED, req.params.id) 31 | .then((value) => { 32 | res.status(200).send(String(value)).end(); 33 | }) 34 | .catch((err) => { 35 | RED.log.error(err); 36 | res.status(200).send("0").end(); 37 | }) 38 | }); 39 | RED.httpAdmin.post('/faceapi/:id/create', RED.auth.needsPermission('face-api-recognise.upload'), async function (req, res) { 40 | RED.log.debug("Attempting to create descriptors for " + req.params.id); 41 | add_descriptors(RED, req, res) 42 | .then((code) => { 43 | RED.log.info("Successfully created descriptors for " + req.params.id); 44 | res.status(code).send('OK').end(); 45 | }) 46 | .catch((err) => { 47 | RED.log.error(err); 48 | res.status(400).send(err).end(); 49 | }) 50 | }); 51 | RED.httpAdmin.get('/faceapi/:id/delete', RED.auth.needsPermission('face-api-recognise.upload'), async function (req, res) { 52 | RED.log.debug("Attempting to delete descriptors for " + req.params.id); 53 | delete_descriptors(RED, req.params.id) 54 | .then((code) => { 55 | RED.log.info("Successfully deleted descriptors for " + req.params.id); 56 | 
res.status(201).send('OK').end(); 57 | }) 58 | .catch((err) => { 59 | RED.log.error(err); 60 | switch (err) { 61 | case 400: 62 | res.status(400).send('OK').end(); 63 | break; 64 | case 404: 65 | res.send("No node found matching " + req.params.id).status(404).end(); 66 | break; 67 | } 68 | }) 69 | }); 70 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-red-contrib-face-recognition", 3 | "version": "2.0.4", 4 | "description": "A wrapper node for the epic face-api.js library", 5 | "author": "thebigpotatoe", 6 | "license": "MIT", 7 | "keywords": [ 8 | "node-red", 9 | "face-api.js", 10 | "facial recognition", 11 | "easy", 12 | "fast", 13 | "multiple faces", 14 | "recognition", 15 | "detection" 16 | ], 17 | "repository": { 18 | "type": "git", 19 | "url": "git+https://github.com/thebigpotatoe/node-red-contrib-face-recognition.git" 20 | }, 21 | "bugs": { 22 | "url": "https://github.com/thebigpotatoe/node-red-contrib-face-recognition/issues" 23 | }, 24 | "homepage": "https://github.com/thebigpotatoe/node-red-contrib-face-recognition#readme", 25 | "dependencies": { 26 | "canvas": "^2.7.0", 27 | "chebyshev": "0.2.1", 28 | "euclidean": "0.0.0", 29 | "face-api.js": "^0.21.0", 30 | "formidable": "^1.2.1", 31 | "manhattan": "1.0.0" 32 | }, 33 | "devDependencies": { 34 | "nodemon": "^2.0.7", 35 | "node-red": "^1.3.3" 36 | }, 37 | "peerDependencies": { 38 | "@tensorflow/tfjs-node": "1.2.11" 39 | }, 40 | "files": [ 41 | "app/**", 42 | "face-api.html", 43 | "face-api.js" 44 | ], 45 | "main": "app/face-api.js", 46 | "scripts": { 47 | "test": "node tests/test.js" 48 | }, 49 | "node-red": { 50 | "nodes": { 51 | "face-api": "face-api.js" 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /tests/test.js: 
-------------------------------------------------------------------------------- 1 | console.log("No tests configured yet, skipping..."); 2 | return true; 3 | --------------------------------------------------------------------------------