├── .gitattributes
├── .gitignore
├── .npmignore
├── LICENSE
├── README.md
├── RNSwiftBridge.js
├── cameraregion.js
├── examples
└── Food101
│ ├── .babelrc
│ ├── .buckconfig
│ ├── .flowconfig
│ ├── .gitattributes
│ ├── .gitignore
│ ├── .prettierrc
│ ├── .vscode
│ └── settings.json
│ ├── .watchmanconfig
│ ├── App.js
│ ├── README.md
│ ├── _art
│ └── video.gif
│ ├── android
│ ├── app
│ │ ├── BUCK
│ │ ├── build.gradle
│ │ ├── proguard-rules.pro
│ │ └── src
│ │ │ └── main
│ │ │ ├── AndroidManifest.xml
│ │ │ ├── java
│ │ │ └── com
│ │ │ │ └── food101
│ │ │ │ ├── MainActivity.java
│ │ │ │ └── MainApplication.java
│ │ │ └── res
│ │ │ ├── mipmap-hdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-mdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ ├── mipmap-xxxhdpi
│ │ │ ├── ic_launcher.png
│ │ │ └── ic_launcher_round.png
│ │ │ └── values
│ │ │ ├── strings.xml
│ │ │ └── styles.xml
│ ├── build.gradle
│ ├── gradle.properties
│ ├── gradle
│ │ └── wrapper
│ │ │ ├── gradle-wrapper.jar
│ │ │ └── gradle-wrapper.properties
│ ├── gradlew
│ ├── gradlew.bat
│ ├── keystores
│ │ ├── BUCK
│ │ └── debug.keystore.properties
│ └── settings.gradle
│ ├── app.json
│ ├── fetchModel.js
│ ├── index.js
│ ├── ios
│ ├── Food101-tvOS
│ │ └── Info.plist
│ ├── Food101-tvOSTests
│ │ └── Info.plist
│ ├── Food101.xcodeproj
│ │ ├── project.pbxproj
│ │ └── xcshareddata
│ │ │ └── xcschemes
│ │ │ ├── Food101-tvOS.xcscheme
│ │ │ └── Food101.xcscheme
│ ├── Food101
│ │ ├── AppDelegate.h
│ │ ├── AppDelegate.m
│ │ ├── Base.lproj
│ │ │ └── LaunchScreen.xib
│ │ ├── Images.xcassets
│ │ │ ├── AppIcon.appiconset
│ │ │ │ └── Contents.json
│ │ │ └── Contents.json
│ │ ├── Info.plist
│ │ └── main.m
│ ├── Food101Tests
│ │ ├── Food101Tests.m
│ │ └── Info.plist
│ └── RNPlaceholder.swift
│ └── package.json
├── faceprovider.js
├── facetracker.js
├── imageview.js
├── index.js
├── ios
├── RHDVision.xcodeproj
│ └── project.pbxproj
├── RHDVision
│ ├── RHDVision-Bridging-Header.h
│ ├── RHDVision.h
│ ├── RHDVisionDelegate.swift
│ ├── RHDVisionImageView.swift
│ ├── RHDVisionImageViewManager.swift
│ ├── RHDVisionView.swift
│ └── RHDVisionViewManager.swift
└── rn-swift-bridge.m
├── module.js
├── package.json
├── plugin.js
├── region.js
├── styleview.js
├── view.js
├── visioncamera.js
└── wrapper.js
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 |
8 | # Runtime data
9 | pids
10 | *.pid
11 | *.seed
12 | *.pid.lock
13 |
14 | # Directory for instrumented libs generated by jscoverage/JSCover
15 | lib-cov
16 |
17 | # Coverage directory used by tools like istanbul
18 | coverage
19 |
20 | # nyc test coverage
21 | .nyc_output
22 |
23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
24 | .grunt
25 |
26 | # Bower dependency directory (https://bower.io/)
27 | bower_components
28 |
29 | # node-waf configuration
30 | .lock-wscript
31 |
32 | # Compiled binary addons (http://nodejs.org/api/addons.html)
33 | build/Release
34 |
35 | # Dependency directories
36 | node_modules/
37 | jspm_packages/
38 |
39 | # Typescript v1 declaration files
40 | typings/
41 |
42 | # Optional npm cache directory
43 | .npm
44 |
45 | # Optional eslint cache
46 | .eslintcache
47 |
48 | # Optional REPL history
49 | .node_repl_history
50 |
51 | # Output of 'npm pack'
52 | *.tgz
53 |
54 | # Yarn Integrity file
55 | .yarn-integrity
56 |
57 | # dotenv environment variables file
58 | .env
59 |
60 | *.xcuserstate
61 | *.xcworkspacedata
62 | ios/RHDVision.xcodeproj/xcuserdata/ray.xcuserdatad/xcschemes/xcschememanagement.plist
63 | *.lock
64 | .vscode/settings.json
65 | .watchmanconfig
66 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | examples/
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Ray Deck
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # react-native-vision
2 |
3 | Library for accessing VisionKit and visual applications of CoreML from React Native. **iOS Only**
4 |
5 | Incredibly super-alpha, and endeavors to provide a relatively thin wrapper between the underlying vision functionality and RN. Higher-level abstractions are @TODO and will be in a separate library.
6 |
7 | # Installation
8 |
9 | ```
10 | yarn add react-native-vision react-native-swift
11 | react-native link
12 | ```
13 |
14 | **Note** `react-native-swift` is a peer dependency of `react-native-vision`.
15 |
16 | If you are running on a stock RN deployment (e.g. from `react-native init`) you will need to make sure your app is targeting IOS 11 or higher:
17 |
18 | ```bash
19 | yarn add react-native-fix-ios-version
20 | react-native link
21 | ```
22 |
23 | Since this module uses the camera, it will work much better on a device, and setting up permissions and codesigning in advance will help:
24 |
25 | ```bash
26 | yarn add -D react-native-camera-ios-enable
27 | yarn add -D react-native-setdevteam
28 | react-native link
29 | react-native setdevteam
30 | ```
31 |
32 | Then you are ready to run!
33 |
34 | ```bash
35 | react-native run-ios --device
36 | ```
37 |
38 | # Command line - adding a Machine Learning Model with `add-mlmodel`
39 |
40 | `react-native-vision` makes it easier to bundle a pre-built machine learning model into your app.
41 |
42 | After installing, you will find the following command available:
43 |
44 | ```bash
45 | react-native add-mlmodel /path/to/mymodel.mlmodel
46 | ```
47 |
48 | You may also refere to the model from a URL, which is handy when getting something off the interwebs. For example, to apply the pre-built mobileNet model from apple, you can:
49 |
50 | ```bash
51 | react-native add-mlmodel https://docs-assets.developer.apple.com/coreml/models/MobileNet.mlmodel
52 | ```
53 |
54 | Note that the name of your model in the code will be the same as the filename minus the "mlmodel". In the above case, the model in code can be referenced as "MobileNet"
55 |
56 | # Easy Start 1 : Full Frame Object Detection
57 |
58 | One of the most common easy use cases is just detecting what is in front of you. For this we use the `VisionCamera` component that lets you apply a model and get the classification via render props.
59 |
60 | ## Setup
61 |
62 | ```bash
63 | react-native init imagedetector; cd imagedetector
64 | yarn add react-native-swift react-native-vision
65 | yarn add react-native-fix-ios-version react-native-camera-ios-enable react-native-setdevteam
66 | react-native link
67 | react-native setdevteam
68 | ```
69 |
70 | ## Load your model with MobileNet
71 |
72 | A free download from Apple!
73 |
74 | ```bash
75 | react-native add-mlmodel https://docs-assets.developer.apple.com/coreml/models/MobileNet.mlmodel
76 | ```
77 |
78 | ## Add Some App Code
79 |
80 | ```javascript
81 | import React from "react";
82 | import { Text } from "react-native";
83 | import { VisionCamera } from "react-native-vision";
84 | export default () => (
85 |
86 | {({ label, confidence }) => (
87 |
96 | {label + " :" + (confidence * 100).toFixed(0) + "%"}
97 |
98 | )}
99 |
100 | );
101 | ```
102 |
103 | # Easy Start 2: GeneratorView - for Style Transfer
104 |
105 | Most machine learning application are classifiers. But generators can be useful and a lot of fun. The `GeneratorView` lets you look at style transfer models that show how you can use deep learning techniques for creating whole new experiences.
106 |
107 | ## Setup
108 |
109 | ```bash
110 | react-native init styletest; cd styletest
111 | yarn add react-native-swift react-native-vision
112 | yarn add react-native-fix-ios-version react-native-camera-ios-enable react-native-setdevteam
113 | react-native link
114 | react-native setdevteam
115 | ```
116 |
117 | ## Load your model with `add-mlmodel`
118 |
119 | Apple has not published a style transfer model, but there are a few locations on the web where you can download them. Here is one: https://github.com/mdramos/fast-style-transfer-coreml
120 |
121 | So go to his github, navigate to his google drive, and then download the `la_muse` model to your personal Downloads directory.
122 |
123 | ```bash
124 | react-native add-mlmodel ~/Downloads/la_muse.mlmodel
125 | ```
126 |
127 | ## App Code
128 |
129 | This is the insanely short part. Note that the camera view is not necessary for viewing the style-transferred view: its just for reference.
130 |
131 | ```javascript
132 | import React from "react";
133 | import { GeneratorView, RNVCameraView } from "react-native-vision";
134 | export default () => (
135 |
136 |
146 |
147 | );
148 | ```
149 |
150 | # Easy Start 3: Face Camera
151 |
152 | Detect what faces are where in your camera view!
153 |
154 | Taking a page (and the model!) from (https://github.com/gantman/nicornot)[Gant Laborde's NicOrNot app], here is the entirety of an app that discerns whether the target is nicolas cage.
155 |
156 | ## Setup
157 |
158 | ```bash
159 | react-native init nictest; cd nictest
160 | yarn add react-native-swift react-native-vision
161 | yarn add react-native-fix-ios-version react-native-camera-ios-enable react-native-setdevteam
162 | react-native link
163 | react-native setdevteam
164 | ```
165 |
166 | ## Load your model with `add-mlmodel`
167 |
168 | ```bash
169 | react-native add-mlmodel https://s3.amazonaws.com/despiteallmyrage/MegaNic50_linear_5.mlmodel
170 | ```
171 |
172 | ## App Code
173 |
174 | ```javascript
175 | import React from "react";
176 | import { Text, View } from "react-native";
177 | import { FaceCamera } from "react-native-vision";
178 | import { Identifier } from "react-native-identifier";
179 | export default () => (
180 |
181 | {({ face, faceConfidence, style }) =>
182 | face &&
183 | (face == "nic" ? (
184 |
185 | ) : (
186 |
189 |
190 | X
191 |
192 |
193 | ))
194 | }
195 |
196 | );
197 | ```
198 |
199 | # Face Detection Component Reference
200 |
201 | ## FacesProvider
202 |
203 | Context Provider that extends `` to detect, track, and identify faces.
204 |
205 | ### Props
206 |
207 | Inherits from ``, plus:
208 |
209 | - `interval`: How frequently (in ms) to run the face detection re-check. (Basically lower values here keeps the face tracking more accurate) **Default**: 500
210 | - `classifier`: File URL to compiled MLModel (e.g. mlmodelc) that will be applied to detected faces
211 | - `updateInterval`: How frequently (in ms) to update the detected faces - position, classified face, etc. Smaller values will mean smoother animation, but at the price of processor intensity. **Default**: 100
212 |
213 | ### Example
214 |
215 | ```javascript
216 |
221 | {/* my code for handling detected faces */}
222 |
223 | ```
224 |
225 | ## FacesConsumer
226 |
227 | Consumer of `` context. As such, takes no props and returns a render prop function.
228 |
229 | ### Render Prop Members
230 |
231 | - `faces`: Keyed object of information about the detected face. Elements of each object include:
232 | - `region`: The key associated with this object (e.g. `faces[k].region === k`)
233 | - `x`, `y`, `height`, `width`: Position and size of the bounding box for the detected face.
234 | - `faces`: Array of top-5 results from face classifier, with keys `label` and `confidence`
235 | - `face`: Label of top-scoring result from classifier (e.g. the face this is most likely to be)
236 | - `faceConfidence`: Confidence score of top-scoring result above.
237 |
238 | Note that when there is no classifier specified, `faces`, `face` and `faceConfidence` are undefined
239 |
240 | ## Face
241 |
242 | Render prop generator to provision information about a single detected face.
243 | Can be instantiated by spread-propping the output of a single face value from `` or by appling a `faceID` that maps to the key of a face. Returns null if no match.
244 |
245 | ### Props
246 |
247 | - `faceID`: ID of the face (corresponding to the key of the `faces` object in `FacesConsumer`)
248 |
249 | ### Render Prop Members
250 |
251 | - `region`: The key associated with this object (e.g. `faces[k].region === k`)
252 | - `x`, `y`, `height`, `width`: Position and size of the bounding box for the detected face. **Note** These are adjusted for the visible camera view when you are rendering from that context.
253 | - `faces`: Array of top-5 results from face classifier, with keys `label` and `confidence`
254 | - `face`: Label of top-scoring result from classifier (e.g. the face this is most likely to be)
255 | - `faceConfidence`: Confidence score of top-scoring result above.
256 | **Note** These arguments are the sam
257 |
258 | ## Faces
259 |
260 | A render-prop generator to provision information about all detected faces. Will map all detected faces into `` components and apply the `children` prop to each, so you have one function to generate all your faces. Designed to be similar to `FlatMap` implentation.
261 |
262 | ### Required Provider Context
263 |
264 | This component must be a descendant of a ``
265 |
266 | ### Props
267 |
268 | None
269 |
270 | ### Render Prop Members
271 |
272 | Same as `` above, but output will be mapped across all detected faces.
273 |
274 | Example of use is in the primary Face Recognizer demo code above.
275 |
276 | ### Props
277 |
278 | - `faceID`: ID of the face applied.
279 | - `isCameraView`: Whether the region frame information to generate should be camera-aware (e.g. is it adjusted for a preview window or not)
280 |
281 | ### Render Props
282 |
283 | This largely passes throught the members of the element that you could get from the `faces` collection from `FaceConsumer`, with the additional consideration that when `isCameraView` is set,
284 |
285 | - `style`: A spreadable set of styling members to position the rectangle, in the same style as a `RNVCameraRegion`
286 |
287 | If `faceID` is provided but does not map to a member of the `faces` collection, the function will return null.
288 |
289 | # Core Component References
290 |
291 | The package exports a number of components to facilitate the vision process. Note that the `` needs to be ancestors to any others in the tree. So a simple single-classifier using dominant image would look something like:
292 |
293 | ```javascript
294 |
295 |
296 | {({classifications})=>{
297 | return (
298 |
299 | {classifications[this.state.FileUrlOfClassifier][0].label}
300 |
301 | }}
302 |
303 |
304 | ```
305 |
306 | ## RNVisionProvider
307 |
308 | Context provider for information captured from the camera. Allows the use of regional detection methods to initialize identification of objects in the frame.
309 |
310 | ### Props
311 |
312 | - `isStarted`: Whether the camera should be activated for vision capture. Boolean
313 | - `isCameraFront`: Facing of the camera. False for the back camera, true to use the front. _Note_ only one camera facing can be used at a time. As of now, this is a hardware limitation.
314 | - `regions`: Specified regions on the camera capture frame articulated as `{x,y,width,height}` that should always be returned by the consumer
315 | - `trackedObjects`: Specified regions that should be tracked as objects, so that the regions returned match these object IDs and show current position.
316 | - `onRegionsChanged`: Fires when the list of regions has been altered
317 | - `onDetectedFaces`: Fires when the number of detected faces has changed
318 |
319 | ### Class imperative member
320 |
321 | - `detectFaces`: Triggers one call to detect faces based on current active frame. Directly returns locations.
322 |
323 | ## RNVisionConsumer
324 |
325 | Consumer partner of `RNVisionProvider`. Must be its descendant in the node tree.
326 |
327 | ### Render Prop Members
328 |
329 | - `imageDimensions`: Object representing size of the camera frame in `{width, height}`
330 | - `isCameraFront`: Relaying whether camera is currently in selfie mode. This is important if you plan on displaying camera output, because in selfie mode a preview will be mirrored.
331 | - `regions`: The list of detected rectangles in the most recently captured frame, where detection is driven by the `RNVisionProvider` props
332 |
333 | ## RNVRegion
334 |
335 | ### Props
336 |
337 | - `region`: ID of the region (**Note** the default region, which is the whole frame, has an id of `""` - blank.)
338 | - `classifiers`: CoreML classifiers passed as file URLs to the classifier mlmodelc itself. Array
339 | - `generators`: CoreML image generators passed as file URLs to the classifier mlmodelc itself. Array
340 | - `generators`: CoreML models that generate a collection of output values passed as file URLs to the classifier mlmodelc itself.
341 | - `bottlenecks`: A collection of CoreML models that take other CoreML model outputs as their inputs. Keys are the file URLs of the original models (that take an image as their input) and values are arrays of mdoels that generate the output passed via render props.
342 | - `onFrameCaptured`: Callback to fire when a new image of the current frame in this region has been captured. Making non-null activates frame capture, setting to null turns it off. The callback passes a URL of the saved frame image file.
343 |
344 | ### Render Prop members
345 |
346 | - `key`: ID of the region
347 | - `x, y, width, height`: the elements of the frame containing the region. All values expressed as percentages of the overall frame size, so a 50x100 frame at origin 5,10 in a 500x500 frame would come across as `{x: 0.01, y: 0.02, width: .1, height: .2}`. Changes in these values are often what drives the re-render of the component (and therefore re-run of the render prop)
348 | - `confidence`: If set, the confidence that the object identified as `key` is actually at this location. Used by tracked objects API of iOS Vision. Sometimes null.
349 | - `classifications`: Collection, keyed by the file URL of the classifier passed in props, of collections of labels and probabilities. (e.g. `{"file:///path/to/myclassifier.mlmodelc": {"label1": 0.84, "label2": 0.84}}`)
350 | - `genericResults`: Collection of generic results returned from generic models passed in via props to the region
351 |
352 | ## RNVDefaultRegion
353 |
354 | Convenience region that references the full frame. Same props as `RNVRegion`, except `region` is always set to `""` - the full frame. Useful for simple style transfers or "dominant image" classifiers.
355 |
356 | ### Props
357 |
358 | Same as `RNVRegion`, with the exception that `region` is forced to `""`
359 |
360 | ### Render Prop Members
361 |
362 | Same as `RNVRegion`, with the note that `key` will always be `""`
363 |
364 | ## RNVCameraView
365 |
366 | Preview of the camera captured by the `RNVisionProvider`.
367 | **Note** that the preview is flipped in selfie mode (e.g. when `isCameraFront` is true)
368 |
369 | ### Props
370 |
371 | The properties of a `View` plus:
372 |
373 | - `gravity`: how to scale the captured camera frame in the view. String. Valid values:
374 | - `fill`: Fills the rectangle much like the "cover" in an Image
375 | - `resize`: Leaves transparent (or style:{backgroundColor}) the parts of the rectangle that are left over from a resized version of the image.
376 |
377 | ## RNVCameraConsumer
378 |
379 | Render prop consumer for delivering additional context that regions will find helpful, mostly for rendering rectangles that map to the regions identified.
380 |
381 | ### Render Prop Members
382 |
383 | - `viewPortDimensions`: A collection of `{width, height}` of the view rectangle.
384 | - `viewPortGravity`: A pass-through of the `gravity` prop to help decide how to manage the math converting coordinates.
385 |
386 | ## RNVCameraRegion
387 |
388 | A compound consumer that blends the render prop members of `RNVRegion` and `RNVCameraConsumer` and adds a `style` prop that can position the region on a specified camera preview
389 |
390 | ### Props
391 |
392 | Same as `RNVRegion`
393 |
394 | ### Render Prop Members
395 |
396 | Includes members from `RNVRegion` and `RNVCameraConsumer` and adds:
397 |
398 | - `style`: A pre-built colleciton of style prop members `{position, width, height, left, top}` that are designed to act in the context of the `RNVCameraView` rectangle. Spread-prop with your other style preferences (border? backgroundColor?) for easy on-screen representation.
399 |
400 | ## RNVImageView
401 |
402 | View for displaying output of image generators. Link it to , and the resulting image will display in this view. Useful for style transfer models. More performant because there is no round trip to JavaScript notifying of each frame update.
403 |
404 | ### Props
405 |
406 | - `id`: the ID of an image generator model attached to a region. Usually is the `file:///` URL of the .mlmodelc.
407 |
408 | Otherwise conforms to Image and View API.
409 |
--------------------------------------------------------------------------------
/RNSwiftBridge.js:
--------------------------------------------------------------------------------
1 | import { PropTypes } from "prop-types";
2 | import React, { Component } from "react";
3 | import {
4 | NativeModules,
5 | NativeEventEmitter,
6 | requireNativeComponent,
7 | ViewPropTypes
8 | } from "react-native";
9 | //#region Code for object RHDVisionModule
10 | const NativeRHDVisionModule = NativeModules.RHDVisionModule;
11 | const start = async cameraFront => {
12 | return await NativeRHDVisionModule.start(cameraFront);
13 | };
14 | const stop = async () => {
15 | return await NativeRHDVisionModule.stop();
16 | };
17 | const getImageDimensions = async () => {
18 | return await NativeRHDVisionModule.getImageDimensions();
19 | };
20 | const attachCameraView = async () => {
21 | return await NativeRHDVisionModule.attachCameraView();
22 | };
23 | const detachCameraView = async () => {
24 | return await NativeRHDVisionModule.detachCameraView();
25 | };
26 | const cameraIsView = async newIsCameraView => {
27 | return await NativeRHDVisionModule.cameraIsView(newIsCameraView);
28 | };
29 | const saveFrame = async (disposition, region) => {
30 | return await NativeRHDVisionModule.saveFrame(disposition, region);
31 | };
32 | const removeSaveFrame = async region => {
33 | return await NativeRHDVisionModule.removeSaveFrame(region);
34 | };
35 | const detectFaces = async region => {
36 | return await NativeRHDVisionModule.detectFaces(region);
37 | };
38 | const removeDetectFaces = async region => {
39 | return await NativeRHDVisionModule.removeDetectFaces(region);
40 | };
41 | const trackObject = async (name, region, dict) => {
42 | return await NativeRHDVisionModule.trackObject(name, region, dict);
43 | };
44 | const removeTrackObject = async (name, region) => {
45 | return await NativeRHDVisionModule.removeTrackObject(name, region);
46 | };
47 | const removeTrackObjects = async region => {
48 | return await NativeRHDVisionModule.removeTrackObjects(region);
49 | };
50 | const applyMLClassifier = async (thisURL, field, resultMax) => {
51 | return await NativeRHDVisionModule.applyMLClassifier(
52 | thisURL,
53 | field,
54 | resultMax
55 | );
56 | };
57 | const applyMLGenerator = async (thisURL, field, handler) => {
58 | return await NativeRHDVisionModule.applyMLGenerator(thisURL, field, handler);
59 | };
60 | const applyMLBottleneck = async (thisURL, field) => {
61 | return await NativeRHDVisionModule.applyMLBottleneck(thisURL, field);
62 | };
63 | const applyMLGeneric = async (thisURL, field) => {
64 | return await NativeRHDVisionModule.applyMLGeneric(thisURL, field);
65 | };
66 | const removeML = async (thisURL, field) => {
67 | return await NativeRHDVisionModule.removeML(thisURL, field);
68 | };
69 | const applyBottleneckClassifier = async (
70 | thisURL,
71 | toField,
72 | toModel,
73 | maxResults
74 | ) => {
75 | return await NativeRHDVisionModule.applyBottleneckClassifier(
76 | thisURL,
77 | toField,
78 | toModel,
79 | maxResults
80 | );
81 | };
82 | const applyBottleneckGenerator = async (thisURL, handler, toField, toModel) => {
83 | return await NativeRHDVisionModule.applyBottleneckGenerator(
84 | thisURL,
85 | handler,
86 | toField,
87 | toModel
88 | );
89 | };
90 | const applyBottleneckBottleneck = async (thisURL, toField, toModel) => {
91 | return await NativeRHDVisionModule.applyBottleneckBottleneck(
92 | thisURL,
93 | toField,
94 | toModel
95 | );
96 | };
97 | const applyBottleneckGeneric = async (thisURL, toField, toModel) => {
98 | return await NativeRHDVisionModule.applyBottleneckGeneric(
99 | thisURL,
100 | toField,
101 | toModel
102 | );
103 | };
104 | const removeBottleneck = async (modelURL, fromField, fromModel) => {
105 | return await NativeRHDVisionModule.removeBottleneck(
106 | modelURL,
107 | fromField,
108 | fromModel
109 | );
110 | };
111 | const removeBottlenecks = async (fromField, fromModel) => {
112 | return await NativeRHDVisionModule.removeBottlenecks(fromField, fromModel);
113 | };
114 | const setRegion = async (region, rectDic) => {
115 | return await NativeRHDVisionModule.setRegion(region, rectDic);
116 | };
117 | const removeRegion = async region => {
118 | return await NativeRHDVisionModule.removeRegion(region);
119 | };
120 | const saveMultiArray = async key => {
121 | return await NativeRHDVisionModule.saveMultiArray(key);
122 | };
123 | //#endregion
124 | //#region events for object RHDVisionModule
125 | var _getNativeRHDVisionModuleEventEmitter = null;
126 | const getNativeRHDVisionModuleEventEmitter = () => {
127 | if (!_getNativeRHDVisionModuleEventEmitter)
128 | _getNativeRHDVisionModuleEventEmitter = new NativeEventEmitter(
129 | NativeRHDVisionModule
130 | );
131 | return _getNativeRHDVisionModuleEventEmitter;
132 | };
133 | const subscribeToRNVision = cb => {
134 | return getNativeRHDVisionModuleEventEmitter().addListener("RNVision", cb);
135 | };
136 | const subscribeToRNVMetaData = cb => {
137 | return getNativeRHDVisionModuleEventEmitter().addListener("RNVMetaData", cb);
138 | };
139 | const subscribeToRNVisionImageDim = cb => {
140 | return getNativeRHDVisionModuleEventEmitter().addListener(
141 | "RNVisionImageDim",
142 | cb
143 | );
144 | };
145 | //#endregion
146 | //#region constants for object RHDVisionModule
147 | const bundlePath = NativeRHDVisionModule.bundlePath;
148 | const bundleURL = NativeRHDVisionModule.bundleURL;
149 | //#endregion
150 | const NativeRHDVisionImageView = requireNativeComponent(
151 | "RHDVisionImageView",
152 | RHDVisionImageView
153 | );
154 | class RHDVisionImageView extends Component {
155 | render() {
156 | return ;
157 | }
158 | }
159 | RHDVisionImageView.propTypes = {
160 | isMirrored: PropTypes.bool,
161 | resizeMode: PropTypes.string,
162 | id: PropTypes.string,
163 | interval: PropTypes.number,
164 | ...ViewPropTypes
165 | };
166 | const NativeRHDVisionCameraView = requireNativeComponent(
167 | "RHDVisionCameraView",
168 | RHDVisionCameraView
169 | );
170 | class RHDVisionCameraView extends Component {
171 | render() {
172 | return ;
173 | }
174 | }
175 | RHDVisionCameraView.propTypes = {
176 | gravity: PropTypes.string,
177 | ...ViewPropTypes
178 | };
179 | //#region Event marshalling object
180 | const RNSEvents = {
181 | RNVision: subscribeToRNVision,
182 | RNVMetaData: subscribeToRNVMetaData,
183 | RNVisionImageDim: subscribeToRNVisionImageDim
184 | };
185 | //#endregion
186 | //#region Exports
187 | export {
188 | start,
189 | stop,
190 | getImageDimensions,
191 | attachCameraView,
192 | detachCameraView,
193 | cameraIsView,
194 | saveFrame,
195 | removeSaveFrame,
196 | detectFaces,
197 | removeDetectFaces,
198 | trackObject,
199 | removeTrackObject,
200 | removeTrackObjects,
201 | applyMLClassifier,
202 | applyMLGenerator,
203 | applyMLBottleneck,
204 | applyMLGeneric,
205 | removeML,
206 | applyBottleneckClassifier,
207 | applyBottleneckGenerator,
208 | applyBottleneckBottleneck,
209 | applyBottleneckGeneric,
210 | removeBottleneck,
211 | removeBottlenecks,
212 | setRegion,
213 | removeRegion,
214 | saveMultiArray,
215 | subscribeToRNVision,
216 | subscribeToRNVMetaData,
217 | subscribeToRNVisionImageDim,
218 | bundlePath,
219 | bundleURL,
220 | RHDVisionImageView,
221 | RHDVisionCameraView,
222 | RNSEvents
223 | };
224 | //#endregion
225 |
--------------------------------------------------------------------------------
/cameraregion.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import { RNVCameraConsumer } from "./view";
3 | import Region from "./region";
4 | import PropTypes from "prop-types";
5 | const CameraRegion = props => {
6 | return (
7 |
8 | {regionInfo => {
9 | return (
10 |
11 | {value => {
12 | const newValue = {
13 | ...regionInfo,
14 | ...value,
15 | style: calculateRectangles({ ...regionInfo, ...value })
16 | };
17 | return props.children(newValue);
18 | }}
19 |
20 | );
21 | }}
22 |
23 | );
24 | };
25 | const calculateRectangles = data => {
26 | if (!data) return null;
27 | if (!data.viewPortDimensions) return null;
28 | if (!data.imageDimensions) return null;
29 | const vpRatio =
30 | data.viewPortDimensions.width / data.viewPortDimensions.height;
31 | const iRatio = data.imageDimensions.width / data.imageDimensions.height;
32 | const totalRatio = vpRatio / iRatio;
33 | const subRectangle =
34 | data.viewPortGravity == "fill"
35 | ? totalRatio > 1
36 | ? {
37 | x: 0,
38 | y: (1.0 - 1.0 / totalRatio) / 2,
39 | height: 1.0 / totalRatio,
40 | width: 1,
41 | outerX: 0,
42 | outerY: 0
43 | }
44 | : {
45 | x: (1.0 - totalRatio) / 2,
46 | y: 0,
47 | height: 1,
48 | width: totalRatio,
49 | outerX: 0,
50 | outerY: 0
51 | }
52 | : totalRatio > 1
53 | ? {
54 | x: 0,
55 | y: 0,
56 | height: 1,
57 | width: totalRatio,
58 | outerX: (1 / totalRatio - 1) / 2,
59 | outerY: 0
60 | }
61 | : {
62 | x: 0,
63 | y: 0,
64 | height: 1 / totalRatio,
65 | width: 1,
66 | outerX: 0,
67 | outerY: (totalRatio - 1) / 2
68 | };
69 | if (data.isCameraFront) {
70 | data.originalx = data.x;
71 | data.x = 1 - data.x - data.width;
72 | }
73 | const style = {
74 | position: "absolute",
75 | width: ((data.width / subRectangle.width) * 100).toFixed(2) + "%",
76 | height: ((data.height / subRectangle.height) * 100).toFixed(2) + "%",
77 | left:
78 | (
79 | ((data.x - subRectangle.x) / subRectangle.width - subRectangle.outerX) *
80 | 100
81 | ).toFixed(2) + "%",
82 | top:
83 | (
84 | ((data.y - subRectangle.y) / subRectangle.height -
85 | subRectangle.outerY) *
86 | 100
87 | ).toFixed(2) + "%"
88 | };
89 | return style;
90 | };
91 | CameraRegion.propTypes = {
92 | ...Region.propTypes,
93 | children: PropTypes.func.isRequired
94 | };
95 | export default CameraRegion;
96 | export { CameraRegion, calculateRectangles };
97 |
--------------------------------------------------------------------------------
/examples/Food101/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": ["react-native"]
3 | }
4 |
--------------------------------------------------------------------------------
/examples/Food101/.buckconfig:
--------------------------------------------------------------------------------
1 |
2 | [android]
3 | target = Google Inc.:Google APIs:23
4 |
5 | [maven_repositories]
6 | central = https://repo1.maven.org/maven2
7 |
--------------------------------------------------------------------------------
/examples/Food101/.flowconfig:
--------------------------------------------------------------------------------
1 | [ignore]
2 | ; We fork some components by platform
3 | .*/*[.]android.js
4 |
5 | ; Ignore "BUCK" generated dirs
6 | /\.buckd/
7 |
8 | ; Ignore unexpected extra "@providesModule"
9 | .*/node_modules/.*/node_modules/fbjs/.*
10 |
11 | ; Ignore duplicate module providers
12 | ; For RN Apps installed via npm, "Libraries" folder is inside
13 | ; "node_modules/react-native" but in the source repo it is in the root
14 | .*/Libraries/react-native/React.js
15 |
16 | ; Ignore polyfills
17 | .*/Libraries/polyfills/.*
18 |
19 | ; Ignore metro
20 | .*/node_modules/metro/.*
21 |
22 | [include]
23 |
24 | [libs]
25 | node_modules/react-native/Libraries/react-native/react-native-interface.js
26 | node_modules/react-native/flow/
27 | node_modules/react-native/flow-github/
28 |
29 | [options]
30 | emoji=true
31 |
32 | module.system=haste
33 | module.system.haste.use_name_reducers=true
34 | # get basename
35 | module.system.haste.name_reducers='^.*/\([a-zA-Z0-9$_.-]+\.js\(\.flow\)?\)$' -> '\1'
36 | # strip .js or .js.flow suffix
37 | module.system.haste.name_reducers='^\(.*\)\.js\(\.flow\)?$' -> '\1'
38 | # strip .ios suffix
39 | module.system.haste.name_reducers='^\(.*\)\.ios$' -> '\1'
40 | module.system.haste.name_reducers='^\(.*\)\.android$' -> '\1'
41 | module.system.haste.name_reducers='^\(.*\)\.native$' -> '\1'
42 | module.system.haste.paths.blacklist=.*/__tests__/.*
43 | module.system.haste.paths.blacklist=.*/__mocks__/.*
44 | module.system.haste.paths.blacklist=/node_modules/react-native/Libraries/Animated/src/polyfills/.*
45 | module.system.haste.paths.whitelist=/node_modules/react-native/Libraries/.*
46 |
47 | munge_underscores=true
48 |
49 | module.name_mapper='^[./a-zA-Z0-9$_-]+\.\(bmp\|gif\|jpg\|jpeg\|png\|psd\|svg\|webp\|m4v\|mov\|mp4\|mpeg\|mpg\|webm\|aac\|aiff\|caf\|m4a\|mp3\|wav\|html\|pdf\)$' -> 'RelativeImageStub'
50 |
51 | module.file_ext=.js
52 | module.file_ext=.jsx
53 | module.file_ext=.json
54 | module.file_ext=.native.js
55 |
56 | suppress_type=$FlowIssue
57 | suppress_type=$FlowFixMe
58 | suppress_type=$FlowFixMeProps
59 | suppress_type=$FlowFixMeState
60 |
61 | suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe\\($\\|[^(]\\|(\\(\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)
62 | suppress_comment=\\(.\\|\n\\)*\\$FlowIssue\\((\\(\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)?:? #[0-9]+
63 | suppress_comment=\\(.\\|\n\\)*\\$FlowFixedInNextDeploy
64 | suppress_comment=\\(.\\|\n\\)*\\$FlowExpectedError
65 |
66 | [version]
67 | ^0.75.0
68 |
--------------------------------------------------------------------------------
/examples/Food101/.gitattributes:
--------------------------------------------------------------------------------
1 | *.pbxproj -text
2 |
--------------------------------------------------------------------------------
/examples/Food101/.gitignore:
--------------------------------------------------------------------------------
1 | # OSX
2 | #
3 | .DS_Store
4 |
5 | # Xcode
6 | #
7 | build/
8 | *.pbxuser
9 | !default.pbxuser
10 | *.mode1v3
11 | !default.mode1v3
12 | *.mode2v3
13 | !default.mode2v3
14 | *.perspectivev3
15 | !default.perspectivev3
16 | xcuserdata
17 | *.xccheckout
18 | *.moved-aside
19 | DerivedData
20 | *.hmap
21 | *.ipa
22 | *.xcuserstate
23 | project.xcworkspace
24 |
25 | # Android/IntelliJ
26 | #
27 | build/
28 | .idea
29 | .gradle
30 | local.properties
31 | *.iml
32 |
33 | # node.js
34 | #
35 | node_modules/
36 | npm-debug.log
37 | yarn-error.log
38 |
39 | # BUCK
40 | buck-out/
41 | \.buckd/
42 | *.keystore
43 |
44 | # fastlane
45 | #
46 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
47 | # screenshots whenever they are needed.
48 | # For more information about the recommended setup visit:
49 | # https://docs.fastlane.tools/best-practices/source-control/
50 |
51 | */fastlane/report.xml
52 | */fastlane/Preview.html
53 | */fastlane/screenshots
54 |
55 | # Bundle artifact
56 | *.jsbundle
57 |
--------------------------------------------------------------------------------
/examples/Food101/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 100,
3 | "semi": false,
4 | "singleQuote": false,
5 | "trailingComma": "all"
6 | }
7 |
--------------------------------------------------------------------------------
/examples/Food101/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | }
--------------------------------------------------------------------------------
/examples/Food101/.watchmanconfig:
--------------------------------------------------------------------------------
1 | {}
--------------------------------------------------------------------------------
/examples/Food101/App.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Sample React Native App
3 | * https://github.com/facebook/react-native
4 | *
5 | * @format
6 | * @flow
7 | */
8 |
9 | import React, { Component } from "react"
10 | import { StyleSheet, Text, View, SafeAreaView } from "react-native"
11 | import fetchModel from "./fetchModel"
12 | import { RNVCameraView, RNVisionProvider, RNVDefaultRegion } from "react-native-vision"
13 |
14 | let downloadedModel
15 |
16 | export default class App extends Component {
17 | state = {
18 | classifier: null,
19 | }
20 |
21 | componentDidMount() {
22 | // switch to async/await style
23 | ;(async () => {
24 | downloadedModel = await fetchModel("Food101")
25 | this.setState({ classifier: downloadedModel })
26 | })()
27 | }
28 |
29 | render() {
30 | return (
31 |
32 |
33 | {({ classifications }) => {
34 | return (
35 |
36 | Food 101
37 | Point the camera at some food!
38 |
39 |
40 |
41 |
42 | {classifications && classifications[this.state.classifier]
43 | ? classifications[this.state.classifier][0].label
44 | : "Loading Model"}
45 |
46 |
47 | )
48 | }}
49 |
50 |
51 | )
52 | }
53 | }
54 |
55 | const styles = StyleSheet.create({
56 | container: {
57 | justifyContent: "center",
58 | backgroundColor: "#F5FCFF",
59 | },
60 | welcome: {
61 | fontSize: 20,
62 | textAlign: "center",
63 | margin: 10,
64 | },
65 | explainer: {
66 | alignSelf: "stretch",
67 | textAlign: "center",
68 | width: "100%",
69 | },
70 | foodBlock: {
71 | padding: 20,
72 | fontSize: 20,
73 | textAlign: "center",
74 | backgroundColor: "#333",
75 | color: "#ccc",
76 | },
77 | camera: {
78 | flex: 1,
79 | borderWidth: 2,
80 | borderColor: "#fee",
81 | backgroundColor: "#111",
82 | overflow: "hidden",
83 | },
84 | cameraContainer: {
85 | height: "80%",
86 | },
87 | })
88 |
--------------------------------------------------------------------------------
/examples/Food101/README.md:
--------------------------------------------------------------------------------
1 | # Food 101
2 |
3 | Simply point the camera at a delicious dish and find out what food it is!
4 |
5 | Hopefully a temporary GIF with some Google images instead of real food.
6 | 
7 |
8 | ## Warning
9 |
10 | As of the creation of this repo, Xcode 10 and iOS 12 are in beta. To compile you must first "FAIL" a compilation, then run `react-native fix-xcode`.
11 |
12 | ## Credits
13 |
14 | ```
15 | @inproceedings{bossard14,
16 | title = {Food-101 -- Mining Discriminative Components with Random Forests},
17 | author = {Bossard, Lukas and Guillaumin, Matthieu and Van Gool, Luc},
18 | booktitle = {European Conference on Computer Vision},
19 | year = {2014}
20 | }
21 | ```
22 |
23 | - WEBSITE: https://www.vision.ee.ethz.ch/datasets_extra/food-101/
24 | - DATASET: http://data.vision.ee.ethz.ch/cvl/food-101.tar.gz
25 | - COREML MODEL: https://coreml.store/food101
26 | - REACT NATIVE VISION: https://github.com/rhdeck/react-native-vision
27 |
--------------------------------------------------------------------------------
/examples/Food101/_art/video.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/_art/video.gif
--------------------------------------------------------------------------------
/examples/Food101/android/app/BUCK:
--------------------------------------------------------------------------------
1 | # To learn about Buck see [Docs](https://buckbuild.com/).
2 | # To run your application with Buck:
3 | # - install Buck
4 | # - `npm start` - to start the packager
5 | # - `cd android`
6 | # - `keytool -genkey -v -keystore keystores/debug.keystore -storepass android -alias androiddebugkey -keypass android -dname "CN=Android Debug,O=Android,C=US"`
7 | # - `./gradlew :app:copyDownloadableDepsToLibs` - make all Gradle compile dependencies available to Buck
8 | # - `buck install -r android/app` - compile, install and run application
9 | #
10 |
11 | lib_deps = []
12 |
13 | for jarfile in glob(['libs/*.jar']):
14 | name = 'jars__' + jarfile[jarfile.rindex('/') + 1: jarfile.rindex('.jar')]
15 | lib_deps.append(':' + name)
16 | prebuilt_jar(
17 | name = name,
18 | binary_jar = jarfile,
19 | )
20 |
21 | for aarfile in glob(['libs/*.aar']):
22 | name = 'aars__' + aarfile[aarfile.rindex('/') + 1: aarfile.rindex('.aar')]
23 | lib_deps.append(':' + name)
24 | android_prebuilt_aar(
25 | name = name,
26 | aar = aarfile,
27 | )
28 |
29 | android_library(
30 | name = "all-libs",
31 | exported_deps = lib_deps,
32 | )
33 |
34 | android_library(
35 | name = "app-code",
36 | srcs = glob([
37 | "src/main/java/**/*.java",
38 | ]),
39 | deps = [
40 | ":all-libs",
41 | ":build_config",
42 | ":res",
43 | ],
44 | )
45 |
46 | android_build_config(
47 | name = "build_config",
48 | package = "com.food101",
49 | )
50 |
51 | android_resource(
52 | name = "res",
53 | package = "com.food101",
54 | res = "src/main/res",
55 | )
56 |
57 | android_binary(
58 | name = "app",
59 | keystore = "//android/keystores:debug",
60 | manifest = "src/main/AndroidManifest.xml",
61 | package_type = "debug",
62 | deps = [
63 | ":app-code",
64 | ],
65 | )
66 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: "com.android.application"
2 |
3 | import com.android.build.OutputFile
4 |
5 | /**
6 | * The react.gradle file registers a task for each build variant (e.g. bundleDebugJsAndAssets
7 | * and bundleReleaseJsAndAssets).
8 | * These basically call `react-native bundle` with the correct arguments during the Android build
9 | * cycle. By default, bundleDebugJsAndAssets is skipped, as in debug/dev mode we prefer to load the
10 | * bundle directly from the development server. Below you can see all the possible configurations
11 | * and their defaults. If you decide to add a configuration block, make sure to add it before the
12 | * `apply from: "../../node_modules/react-native/react.gradle"` line.
13 | *
14 | * project.ext.react = [
15 | * // the name of the generated asset file containing your JS bundle
16 | * bundleAssetName: "index.android.bundle",
17 | *
18 | * // the entry file for bundle generation
19 | * entryFile: "index.android.js",
20 | *
21 | * // whether to bundle JS and assets in debug mode
22 | * bundleInDebug: false,
23 | *
24 | * // whether to bundle JS and assets in release mode
25 | * bundleInRelease: true,
26 | *
27 | * // whether to bundle JS and assets in another build variant (if configured).
28 | * // See http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Build-Variants
29 | * // The configuration property can be in the following formats
30 | * // 'bundleIn${productFlavor}${buildType}'
31 | * // 'bundleIn${buildType}'
32 | * // bundleInFreeDebug: true,
33 | * // bundleInPaidRelease: true,
34 | * // bundleInBeta: true,
35 | *
36 | * // whether to disable dev mode in custom build variants (by default only disabled in release)
37 | * // for example: to disable dev mode in the staging build type (if configured)
38 | * devDisabledInStaging: true,
39 | * // The configuration property can be in the following formats
40 | * // 'devDisabledIn${productFlavor}${buildType}'
41 | * // 'devDisabledIn${buildType}'
42 | *
43 | * // the root of your project, i.e. where "package.json" lives
44 | * root: "../../",
45 | *
46 | * // where to put the JS bundle asset in debug mode
47 | * jsBundleDirDebug: "$buildDir/intermediates/assets/debug",
48 | *
49 | * // where to put the JS bundle asset in release mode
50 | * jsBundleDirRelease: "$buildDir/intermediates/assets/release",
51 | *
52 | * // where to put drawable resources / React Native assets, e.g. the ones you use via
53 | * // require('./image.png')), in debug mode
54 | * resourcesDirDebug: "$buildDir/intermediates/res/merged/debug",
55 | *
56 | * // where to put drawable resources / React Native assets, e.g. the ones you use via
57 | * // require('./image.png')), in release mode
58 | * resourcesDirRelease: "$buildDir/intermediates/res/merged/release",
59 | *
60 | * // by default the gradle tasks are skipped if none of the JS files or assets change; this means
61 | * // that we don't look at files in android/ or ios/ to determine whether the tasks are up to
62 | * // date; if you have any other folders that you want to ignore for performance reasons (gradle
63 | * // indexes the entire tree), add them here. Alternatively, if you have JS files in android/
64 | * // for example, you might want to remove it from here.
65 | * inputExcludes: ["android/**", "ios/**"],
66 | *
67 | * // override which node gets called and with what additional arguments
68 | * nodeExecutableAndArgs: ["node"],
69 | *
70 | * // supply additional arguments to the packager
71 | * extraPackagerArgs: []
72 | * ]
73 | */
74 |
75 | project.ext.react = [
76 | entryFile: "index.js"
77 | ]
78 |
79 | apply from: "../../node_modules/react-native/react.gradle"
80 |
81 | /**
82 | * Set this to true to create two separate APKs instead of one:
83 | * - An APK that only works on ARM devices
84 | * - An APK that only works on x86 devices
85 | * The advantage is the size of the APK is reduced by about 4MB.
86 | * Upload all the APKs to the Play Store and people will download
87 | * the correct one based on the CPU architecture of their device.
88 | */
89 | def enableSeparateBuildPerCPUArchitecture = false
90 |
91 | /**
92 | * Run Proguard to shrink the Java bytecode in release builds.
93 | */
94 | def enableProguardInReleaseBuilds = false
95 |
96 | android {
97 | compileSdkVersion rootProject.ext.compileSdkVersion
98 | buildToolsVersion rootProject.ext.buildToolsVersion
99 |
100 | defaultConfig {
101 | applicationId "com.food101"
102 | minSdkVersion rootProject.ext.minSdkVersion
103 | targetSdkVersion rootProject.ext.targetSdkVersion
104 | versionCode 1
105 | versionName "1.0"
106 | ndk {
107 | abiFilters "armeabi-v7a", "x86"
108 | }
109 | }
110 | splits {
111 | abi {
112 | reset()
113 | enable enableSeparateBuildPerCPUArchitecture
114 | universalApk false // If true, also generate a universal APK
115 | include "armeabi-v7a", "x86"
116 | }
117 | }
118 | buildTypes {
119 | release {
120 | minifyEnabled enableProguardInReleaseBuilds
121 | proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro"
122 | }
123 | }
124 | // applicationVariants are e.g. debug, release
125 | applicationVariants.all { variant ->
126 | variant.outputs.each { output ->
127 | // For each separate APK per architecture, set a unique version code as described here:
128 | // http://tools.android.com/tech-docs/new-build-system/user-guide/apk-splits
129 | def versionCodes = ["armeabi-v7a":1, "x86":2]
130 | def abi = output.getFilter(OutputFile.ABI)
131 | if (abi != null) { // null for the universal-debug, universal-release variants
132 | output.versionCodeOverride =
133 | versionCodes.get(abi) * 1048576 + defaultConfig.versionCode
134 | }
135 | }
136 | }
137 | }
138 |
139 | dependencies {
140 | compile project(':react-native-fs')
141 | compile fileTree(dir: "libs", include: ["*.jar"])
142 | compile "com.android.support:appcompat-v7:${rootProject.ext.supportLibVersion}"
143 | compile "com.facebook.react:react-native:+" // From node_modules
144 | }
145 |
146 | // Run this once to be able to run the application with BUCK
147 | // puts all compile dependencies into folder libs for BUCK to use
148 | task copyDownloadableDepsToLibs(type: Copy) {
149 | from configurations.compile
150 | into 'libs'
151 | }
152 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /usr/local/Cellar/android-sdk/24.3.3/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
7 |
13 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/java/com/food101/MainActivity.java:
--------------------------------------------------------------------------------
1 | package com.food101;
2 |
3 | import com.facebook.react.ReactActivity;
4 |
5 | public class MainActivity extends ReactActivity {
6 |
7 | /**
8 | * Returns the name of the main component registered from JavaScript.
9 | * This is used to schedule rendering of the component.
10 | */
11 | @Override
12 | protected String getMainComponentName() {
13 | return "Food101";
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/java/com/food101/MainApplication.java:
--------------------------------------------------------------------------------
1 | package com.food101;
2 |
3 | import android.app.Application;
4 |
5 | import com.facebook.react.ReactApplication;
6 | import com.rnfs.RNFSPackage;
7 | import com.facebook.react.ReactNativeHost;
8 | import com.facebook.react.ReactPackage;
9 | import com.facebook.react.shell.MainReactPackage;
10 | import com.facebook.soloader.SoLoader;
11 |
12 | import java.util.Arrays;
13 | import java.util.List;
14 |
15 | public class MainApplication extends Application implements ReactApplication {
16 |
17 | private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) {
18 | @Override
19 | public boolean getUseDeveloperSupport() {
20 | return BuildConfig.DEBUG;
21 | }
22 |
23 | @Override
24 | protected List getPackages() {
25 | return Arrays.asList(
26 | new MainReactPackage(),
27 | new RNFSPackage()
28 | );
29 | }
30 |
31 | @Override
32 | protected String getJSMainModuleName() {
33 | return "index";
34 | }
35 | };
36 |
37 | @Override
38 | public ReactNativeHost getReactNativeHost() {
39 | return mReactNativeHost;
40 | }
41 |
42 | @Override
43 | public void onCreate() {
44 | super.onCreate();
45 | SoLoader.init(this, /* native exopackage */ false);
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | Food101
3 |
4 |
--------------------------------------------------------------------------------
/examples/Food101/android/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/examples/Food101/android/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | jcenter()
6 | maven {
7 | url 'https://maven.google.com/'
8 | name 'Google'
9 | }
10 | }
11 | dependencies {
12 | classpath 'com.android.tools.build:gradle:2.3.3'
13 |
14 | // NOTE: Do not place your application dependencies here; they belong
15 | // in the individual module build.gradle files
16 | }
17 | }
18 |
19 | allprojects {
20 | repositories {
21 | mavenLocal()
22 | jcenter()
23 | maven {
24 | // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm
25 | url "$rootDir/../node_modules/react-native/android"
26 | }
27 | maven {
28 | url 'https://maven.google.com/'
29 | name 'Google'
30 | }
31 | }
32 | }
33 |
34 | ext {
35 | buildToolsVersion = "26.0.3"
36 | minSdkVersion = 16
37 | compileSdkVersion = 26
38 | targetSdkVersion = 26
39 | supportLibVersion = "26.1.0"
40 | }
41 |
--------------------------------------------------------------------------------
/examples/Food101/android/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | # Default value: -Xmx10248m -XX:MaxPermSize=256m
13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
14 |
15 | # When configured, Gradle will run in incubating parallel mode.
16 | # This option should only be used with decoupled projects. More details, visit
17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
18 | # org.gradle.parallel=true
19 |
20 | android.useDeprecatedNdk=true
21 |
--------------------------------------------------------------------------------
/examples/Food101/android/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/android/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/examples/Food101/android/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | zipStoreBase=GRADLE_USER_HOME
4 | zipStorePath=wrapper/dists
5 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.5.1-all.zip
6 |
--------------------------------------------------------------------------------
/examples/Food101/android/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # For Cygwin, ensure paths are in UNIX format before anything is touched.
46 | if $cygwin ; then
47 | [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
48 | fi
49 |
50 | # Attempt to set APP_HOME
51 | # Resolve links: $0 may be a link
52 | PRG="$0"
53 | # Need this for relative symlinks.
54 | while [ -h "$PRG" ] ; do
55 | ls=`ls -ld "$PRG"`
56 | link=`expr "$ls" : '.*-> \(.*\)$'`
57 | if expr "$link" : '/.*' > /dev/null; then
58 | PRG="$link"
59 | else
60 | PRG=`dirname "$PRG"`"/$link"
61 | fi
62 | done
63 | SAVED="`pwd`"
64 | cd "`dirname \"$PRG\"`/" >&-
65 | APP_HOME="`pwd -P`"
66 | cd "$SAVED" >&-
67 |
68 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
69 |
70 | # Determine the Java command to use to start the JVM.
71 | if [ -n "$JAVA_HOME" ] ; then
72 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
73 | # IBM's JDK on AIX uses strange locations for the executables
74 | JAVACMD="$JAVA_HOME/jre/sh/java"
75 | else
76 | JAVACMD="$JAVA_HOME/bin/java"
77 | fi
78 | if [ ! -x "$JAVACMD" ] ; then
79 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
80 |
81 | Please set the JAVA_HOME variable in your environment to match the
82 | location of your Java installation."
83 | fi
84 | else
85 | JAVACMD="java"
86 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
87 |
88 | Please set the JAVA_HOME variable in your environment to match the
89 | location of your Java installation."
90 | fi
91 |
92 | # Increase the maximum file descriptors if we can.
93 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
94 | MAX_FD_LIMIT=`ulimit -H -n`
95 | if [ $? -eq 0 ] ; then
96 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
97 | MAX_FD="$MAX_FD_LIMIT"
98 | fi
99 | ulimit -n $MAX_FD
100 | if [ $? -ne 0 ] ; then
101 | warn "Could not set maximum file descriptor limit: $MAX_FD"
102 | fi
103 | else
104 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
105 | fi
106 | fi
107 |
108 | # For Darwin, add options to specify how the application appears in the dock
109 | if $darwin; then
110 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
111 | fi
112 |
113 | # For Cygwin, switch paths to Windows format before running java
114 | if $cygwin ; then
115 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
116 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
158 | function splitJvmOpts() {
159 | JVM_OPTS=("$@")
160 | }
161 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
162 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
163 |
164 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
165 |
--------------------------------------------------------------------------------
/examples/Food101/android/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windowz variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/examples/Food101/android/keystores/BUCK:
--------------------------------------------------------------------------------
1 | keystore(
2 | name = "debug",
3 | properties = "debug.keystore.properties",
4 | store = "debug.keystore",
5 | visibility = [
6 | "PUBLIC",
7 | ],
8 | )
9 |
--------------------------------------------------------------------------------
/examples/Food101/android/keystores/debug.keystore.properties:
--------------------------------------------------------------------------------
1 | key.store=debug.keystore
2 | key.alias=androiddebugkey
3 | key.store.password=android
4 | key.alias.password=android
5 |
--------------------------------------------------------------------------------
/examples/Food101/android/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'Food101'
2 | include ':react-native-fs'
3 | project(':react-native-fs').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-fs/android')
4 |
5 | include ':app'
6 |
--------------------------------------------------------------------------------
/examples/Food101/app.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Food101",
3 | "displayName": "Food101"
4 | }
--------------------------------------------------------------------------------
/examples/Food101/fetchModel.js:
--------------------------------------------------------------------------------
1 | import RNFS from "react-native-fs"
2 | import { compileModel } from "react-native-coreml"
3 | const fetchModel = async modelName => {
4 | const compiledPath = RNFS.DocumentDirectoryPath + "/" + modelName + ".mlmodelc"
5 | if (await RNFS.exists(compiledPath)) {
6 | return compiledPath
7 | }
8 |
9 | const sourceURL = `https://s3-us-west-2.amazonaws.com/coreml-models/${modelName}.mlmodel`
10 | const toFile = RNFS.TemporaryDirectoryPath + modelName + ".mlmodel"
11 | const { promise, jobId } = RNFS.downloadFile({
12 | fromUrl: sourceURL,
13 | toFile: toFile,
14 | })
15 | await promise
16 | const tempPath = await compileModel(toFile)
17 | await RNFS.moveFile(tempPath, compiledPath)
18 | return "file://" + compiledPath
19 | }
20 |
21 | export default fetchModel
22 |
--------------------------------------------------------------------------------
/examples/Food101/index.js:
--------------------------------------------------------------------------------
1 | /** @format */
2 |
3 | import {AppRegistry} from 'react-native';
4 | import App from './App';
5 | import {name as appName} from './app.json';
6 |
7 | AppRegistry.registerComponent(appName, () => App);
8 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101-tvOS/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1
23 | LSRequiresIPhoneOS
24 |
25 | UILaunchStoryboardName
26 | LaunchScreen
27 | UIRequiredDeviceCapabilities
28 |
29 | armv7
30 |
31 | UISupportedInterfaceOrientations
32 |
33 | UIInterfaceOrientationPortrait
34 | UIInterfaceOrientationLandscapeLeft
35 | UIInterfaceOrientationLandscapeRight
36 |
37 | UIViewControllerBasedStatusBarAppearance
38 |
39 | NSLocationWhenInUseUsageDescription
40 |
41 | NSAppTransportSecurity
42 |
43 | NSExceptionDomains
44 |
45 | localhost
46 |
47 | NSExceptionAllowsInsecureHTTPLoads
48 |
49 |
50 |
51 |
52 | NSCameraUsageDescription
53 | This app requires the camera to function
54 |
55 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101-tvOSTests/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | BNDL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1
23 | NSCameraUsageDescription
24 | This app requires the camera to function
25 |
26 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101.xcodeproj/xcshareddata/xcschemes/Food101-tvOS.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
29 |
35 |
36 |
37 |
43 |
49 |
50 |
51 |
52 |
53 |
58 |
59 |
61 |
67 |
68 |
69 |
70 |
71 |
77 |
78 |
79 |
80 |
81 |
82 |
92 |
94 |
100 |
101 |
102 |
103 |
104 |
105 |
111 |
113 |
119 |
120 |
121 |
122 |
124 |
125 |
128 |
129 |
130 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101.xcodeproj/xcshareddata/xcschemes/Food101.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
29 |
35 |
36 |
37 |
43 |
49 |
50 |
51 |
52 |
53 |
58 |
59 |
61 |
67 |
68 |
69 |
70 |
71 |
77 |
78 |
79 |
80 |
81 |
82 |
92 |
94 |
100 |
101 |
102 |
103 |
104 |
105 |
111 |
113 |
119 |
120 |
121 |
122 |
124 |
125 |
128 |
129 |
130 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/AppDelegate.h:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2015-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | #import
9 |
10 | @interface AppDelegate : UIResponder
11 |
12 | @property (nonatomic, strong) UIWindow *window;
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/AppDelegate.m:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2015-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | #import "AppDelegate.h"
9 |
10 | #import
11 | #import
12 |
13 | @implementation AppDelegate
14 |
15 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
16 | {
17 | NSURL *jsCodeLocation;
18 |
19 | jsCodeLocation = [[RCTBundleURLProvider sharedSettings] jsBundleURLForBundleRoot:@"index" fallbackResource:nil];
20 |
21 | RCTRootView *rootView = [[RCTRootView alloc] initWithBundleURL:jsCodeLocation
22 | moduleName:@"Food101"
23 | initialProperties:nil
24 | launchOptions:launchOptions];
25 | rootView.backgroundColor = [[UIColor alloc] initWithRed:1.0f green:1.0f blue:1.0f alpha:1];
26 |
27 | self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];
28 | UIViewController *rootViewController = [UIViewController new];
29 | rootViewController.view = rootView;
30 | self.window.rootViewController = rootViewController;
31 | [self.window makeKeyAndVisible];
32 | return YES;
33 | }
34 |
35 | @end
36 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/Base.lproj/LaunchScreen.xib:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
21 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | }
33 | ],
34 | "info" : {
35 | "version" : 1,
36 | "author" : "xcode"
37 | }
38 | }
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/Images.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleDisplayName
8 | Food101
9 | CFBundleExecutable
10 | $(EXECUTABLE_NAME)
11 | CFBundleIdentifier
12 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | $(PRODUCT_NAME)
17 | CFBundlePackageType
18 | APPL
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleSignature
22 | ????
23 | CFBundleVersion
24 | 1
25 | LSRequiresIPhoneOS
26 |
27 | NSAppTransportSecurity
28 |
29 | NSExceptionDomains
30 |
31 | localhost
32 |
33 | NSExceptionAllowsInsecureHTTPLoads
34 |
35 |
36 |
37 |
38 | NSCameraUsageDescription
39 | This app requires the camera to function
40 | NSLocationWhenInUseUsageDescription
41 |
42 | UILaunchStoryboardName
43 | LaunchScreen
44 | UIRequiredDeviceCapabilities
45 |
46 | armv7
47 |
48 | UISupportedInterfaceOrientations
49 |
50 | UIInterfaceOrientationPortrait
51 |
52 | UIViewControllerBasedStatusBarAppearance
53 |
54 |
55 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101/main.m:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2015-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | #import
9 |
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101Tests/Food101Tests.m:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2015-present, Facebook, Inc.
3 | *
4 | * This source code is licensed under the MIT license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | */
7 |
8 | #import
9 | #import
10 |
11 | #import
12 | #import
13 |
14 | #define TIMEOUT_SECONDS 600
15 | #define TEXT_TO_LOOK_FOR @"Welcome to React Native!"
16 |
17 | @interface Food101Tests : XCTestCase
18 |
19 | @end
20 |
21 | @implementation Food101Tests
22 |
23 | - (BOOL)findSubviewInView:(UIView *)view matching:(BOOL(^)(UIView *view))test
24 | {
25 | if (test(view)) {
26 | return YES;
27 | }
28 | for (UIView *subview in [view subviews]) {
29 | if ([self findSubviewInView:subview matching:test]) {
30 | return YES;
31 | }
32 | }
33 | return NO;
34 | }
35 |
36 | - (void)testRendersWelcomeScreen
37 | {
38 | UIViewController *vc = [[[RCTSharedApplication() delegate] window] rootViewController];
39 | NSDate *date = [NSDate dateWithTimeIntervalSinceNow:TIMEOUT_SECONDS];
40 | BOOL foundElement = NO;
41 |
42 | __block NSString *redboxError = nil;
43 | RCTSetLogFunction(^(RCTLogLevel level, RCTLogSource source, NSString *fileName, NSNumber *lineNumber, NSString *message) {
44 | if (level >= RCTLogLevelError) {
45 | redboxError = message;
46 | }
47 | });
48 |
49 | while ([date timeIntervalSinceNow] > 0 && !foundElement && !redboxError) {
50 | [[NSRunLoop mainRunLoop] runMode:NSDefaultRunLoopMode beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
51 | [[NSRunLoop mainRunLoop] runMode:NSRunLoopCommonModes beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
52 |
53 | foundElement = [self findSubviewInView:vc.view matching:^BOOL(UIView *view) {
54 | if ([view.accessibilityLabel isEqualToString:TEXT_TO_LOOK_FOR]) {
55 | return YES;
56 | }
57 | return NO;
58 | }];
59 | }
60 |
61 | RCTSetLogFunction(RCTDefaultLogFunction);
62 |
63 | XCTAssertNil(redboxError, @"RedBox error: %@", redboxError);
64 | XCTAssertTrue(foundElement, @"Couldn't find element with text '%@' in %d seconds", TEXT_TO_LOOK_FOR, TIMEOUT_SECONDS);
65 | }
66 |
67 |
68 | @end
69 |
--------------------------------------------------------------------------------
/examples/Food101/ios/Food101Tests/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | BNDL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1
23 | NSCameraUsageDescription
24 | This app requires the camera to function
25 |
26 |
--------------------------------------------------------------------------------
/examples/Food101/ios/RNPlaceholder.swift:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhdeck/react-native-vision/0598c9889db146f88319a6ce3dd660ad6edc512a/examples/Food101/ios/RNPlaceholder.swift
--------------------------------------------------------------------------------
/examples/Food101/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Food101",
3 | "version": "0.0.1",
4 | "private": true,
5 | "scripts": {
6 | "start": "node node_modules/react-native/local-cli/cli.js start",
7 | "test": "jest"
8 | },
9 | "dependencies": {
10 | "@babel/core": "^7.0.0-beta.47",
11 | "babel-core": "^7.0.0-beta.47",
12 | "react": "16.3.1",
13 | "react-native": "0.55.4",
14 | "react-native-camera-ios-enable": "^1.0.1",
15 | "react-native-coreml": "^1.2.0",
16 | "react-native-fs": "^2.11.15",
17 | "react-native-swift": "^1.2.1",
18 | "react-native-vision": "^1.2.0"
19 | },
20 | "devDependencies": {
21 | "babel-jest": "23.4.2",
22 | "babel-preset-react-native": "4.0.0",
23 | "jest": "23.5.0",
24 | "prettier": "^1.14.2",
25 | "react-native-fix-xcode-10": "rhdeck/react-native-fix-xcode-10",
26 | "react-test-renderer": "16.4.1"
27 | },
28 | "jest": {
29 | "preset": "react-native"
30 | },
31 | "isSwift": true
32 | }
33 |
--------------------------------------------------------------------------------
/faceprovider.js:
--------------------------------------------------------------------------------
1 | import React, { createContext, Component } from "react";
2 | import { PropTypes } from "prop-types";
3 | import FaceTracker from "./facetracker";
4 | import { RNVisionConsumer } from "./wrapper";
5 | import { RNVRegion } from "./region";
6 | import { calculateRectangles } from "./cameraregion";
7 | import { RNVCameraConsumer, RNVCameraView } from "./view";
8 | const { Provider, Consumer: FacesConsumer } = createContext({ faces: {} });
9 | class FaceInfo extends Component {
10 | state = {
11 | faces: {}
12 | };
13 | componentDidMount() {
14 | this.timer = setInterval(() => {
15 | this.checkOldFaces();
16 | }, this.props.timeout);
17 | }
18 | checkOldFaces() {
19 | try {
20 | const keys = Object.entries(this.state.faces)
21 | .filter(
22 | ([_, { lastUpdate }]) => lastUpdate < Date.now() - this.props.timeout
23 | )
24 | .map(([key]) => key);
25 | if (keys.length) {
26 | this.setState(({ faces }) => {
27 | keys.forEach(k => delete faces[k]);
28 | return { faces: { ...faces } };
29 | });
30 | }
31 | } catch (e) {}
32 | }
33 | componentWillUnmount() {
34 | if (this.timer) clearInterval(this.timer);
35 | if (this.faceTimer) clearInterval(this.faceTimer);
36 | }
37 | faceTimer = null;
38 | myFaceInfo = {};
39 | setFaceInfo(k, info) {
40 | info.lastUpdate = Date.now();
41 | this.myFaceInfo[k] = info;
42 | if (!this.faceTimer)
43 | this.faceTimer = setTimeout(() => {
44 | if (this.myFaceInfo) {
45 | this.setState(
46 | ({ faces }) => {
47 | return {
48 | faces: { ...faces, ...this.myFaceInfo }
49 | };
50 | },
51 | () => {
52 | this.myFaceInfo = {};
53 | this.checkOldFaces();
54 | }
55 | );
56 | }
57 | if (this.faceTimer) clearTimeout(this.faceTimer);
58 | this.faceTimer = null;
59 | }, this.props.updateInterval);
60 | }
61 | render() {
62 | return (
63 |
64 | {this.props.children({ setFaceInfo: this.setFaceInfo.bind(this) })}
65 |
66 | );
67 | }
68 | }
69 | const FacesProvider = props => {
70 | return (
71 |
72 |
73 | {({ setFaceInfo }) => (
74 |
75 | {data => {
76 | if (!data) return null;
77 | const regions = data.regions;
78 | return [
79 | regions
80 | ? [
81 | ...Object.keys(regions)
82 | .filter(k => k.length)
83 | .map(k => (
84 |
93 | {({ classifications }) => {
94 | if (typeof classifications == "object") {
95 | const fk = Object.keys(classifications).shift();
96 | if (!fk) {
97 | setFaceInfo(k, {
98 | region: k,
99 | ...regions[k]
100 | });
101 | } else {
102 | const firstClassifier = classifications[fk];
103 | setFaceInfo(k, {
104 | region: k,
105 | ...regions[k],
106 | face: [...firstClassifier].shift().label,
107 | faceConfidence: [...firstClassifier].shift()
108 | .confidence,
109 | faces: firstClassifier
110 | });
111 | }
112 | } else {
113 | setFaceInfo(k, {
114 | region: k,
115 | ...regions[k]
116 | });
117 | }
118 | }}
119 |
120 | ))
121 | ]
122 | : null,
123 | typeof props.children == "function" ? (
124 | {props.children}
125 | ) : (
126 | props.children
127 | )
128 | ];
129 | }}
130 |
131 | )}
132 |
133 |
134 | );
135 | };
136 | FacesProvider.propTypes = {
137 | ...FaceTracker.propTypes,
138 | classifier: PropTypes.string,
139 | updateInterval: PropTypes.number
140 | };
141 | FacesProvider.defaultProps = {
142 | isCameraFront: true,
143 | isStarted: true,
144 | interval: 500,
145 | updateInterval: 100
146 | };
147 | const Face = props =>
148 | props.faceID ? (
149 |
150 | {({ faces: { [props.faceID]: faceObj } }) =>
151 | faceObj ? : null
152 | }
153 |
154 | ) : (
155 |
156 | {value =>
157 | !!value ? (
158 |
159 | {({ imageDimensions, isCameraFront }) => {
160 | const newValue = {
161 | ...props,
162 | ...value,
163 | style: calculateRectangles({
164 | ...props,
165 | ...value,
166 | imageDimensions,
167 | isCameraFront
168 | }),
169 | children: null
170 | };
171 | return props.children(newValue);
172 | }}
173 |
174 | ) : (
175 | props.children({ ...props, children: null })
176 | )
177 | }
178 |
179 | );
180 | Face.propTypes = {
181 | faceID: PropTypes.string
182 | };
183 | Face.defaultProps = {
184 | faceID: null
185 | };
186 | const Faces = props => (
187 |
188 | {({ faces }) =>
189 | faces &&
190 | Object.keys(faces).map(k => (
191 |
192 | {value => props.children({ key: k, ...value })}
193 |
194 | ))
195 | }
196 |
197 | );
198 |
199 | Faces.propTypes = {
200 | children: PropTypes.func.isRequired
201 | };
202 | const FaceCamera = props => (
203 |
204 |
205 | {typeof props.children === "function" ? (
206 | {props.children}
207 | ) : (
208 | props.children
209 | )}
210 |
211 |
212 | );
213 | export { FacesProvider, FacesConsumer, Face, Faces, FaceCamera };
214 |
--------------------------------------------------------------------------------
/facetracker.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import { RNVisionProvider, RNVisionConsumer } from "./wrapper";
3 | import PropTypes from "prop-types";
4 |
5 | const intersect = (a, b) => {
6 | if (!a && !b) return false;
7 | return !(
8 | b.x > a.x + a.width ||
9 | a.x > b.x + b.width ||
10 | b.y > a.y + a.height ||
11 | a.y > b.y + b.height
12 | );
13 | };
14 | class FaceTracker extends Component {
15 | state = {
16 | interval: null,
17 | intervalDuration: 0,
18 | onDetectedFaces: null,
19 | regions: null
20 | };
21 | static getDerivedStateFromProps(nextProps, prevState) {
22 | var ret = prevState;
23 | if (
24 | nextProps.interval &&
25 | nextProps.interval != prevState.intervalDuration
26 | ) {
27 | ret.intervalDuration = nextProps.interval;
28 | }
29 | return ret;
30 | }
31 | updateInterval() {
32 | clearInterval(this.state.interval);
33 | this.setState({
34 | interval: setInterval(() => {
35 | this.setState({
36 | onDetectedFaces: faces => {
37 | var out = {};
38 | var doneIndexes = [];
39 | if (faces && faces.length) {
40 | if (this.state.regions) {
41 | faces.forEach((v, i) => {
42 | var done = false;
43 | const newRect = {
44 | x: v.x - v.width * 0.1,
45 | y: v.y - v.height * 0.2,
46 | height: v.height * 1.4,
47 | width: v.width * 1.2
48 | };
49 | Object.keys(this.state.regions).forEach(region => {
50 | if (done) return;
51 | if (region == "") return;
52 | const rect = this.state.regions[region];
53 | if (intersect(newRect, rect)) {
54 | out[region] = newRect;
55 | doneIndexes.push(i);
56 | done = true;
57 | }
58 | });
59 | });
60 | }
61 | faces.forEach((v, i) => {
62 | if (doneIndexes.indexOf(i) > -1) return;
63 | var i = 0;
64 | var key;
65 | do {
66 | key = "face" + String(i);
67 | i++;
68 | } while (Object.keys(out).indexOf(key) > -1);
69 | const newRect = {
70 | x: v.x - v.width * 0.1,
71 | y: v.y - v.height * 0.2,
72 | height: v.height * 1.4,
73 | width: v.width * 1.2
74 | };
75 | out[key] = newRect;
76 | });
77 | this.setState({ trackedObjects: out, onDetectedFaces: null });
78 | } else {
79 | this.setState({ trackedObjects: null, onDetectedFaces: null });
80 | }
81 | }
82 | });
83 | }, this.state.intervalDuration)
84 | });
85 | }
86 | componentDidUpdate(prevProps, prevState) {
87 | if (prevState.intervalDuration != prevState.intervalDuration) {
88 | this.updateInterval();
89 | }
90 | }
91 | componentDidMount() {
92 | this.updateInterval();
93 | }
94 | componentWillUnmount() {
95 | if (this.state.interval) clearInterval(this.state.interval);
96 | }
97 | render() {
98 | return (
99 | {
107 | this.setState({ regions: regions });
108 | if (this.props.onRegionsChange) this.props.onRegionsChange(regions);
109 | }}
110 | children={
111 | typeof this.props.children == "function" ? (
112 | {this.props.children}
113 | ) : (
114 | this.props.children
115 | )
116 | }
117 | />
118 | );
119 | }
120 | }
121 | FaceTracker.propTypes = {
122 | ...RNVisionProvider.propTypes,
123 | interval: PropTypes.number.isRequired
124 | };
125 | export default FaceTracker;
126 |
--------------------------------------------------------------------------------
/imageview.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import PropTypes from "prop-types";
3 | import { RHDVisionImageView } from "./RNSwiftBridge";
4 | class RNVImageView extends Component {
5 | render() {
6 | return (
7 |
17 | );
18 | }
19 | }
20 | RNVImageView.propTypes = {
21 | id: PropTypes.string.isRequired
22 | };
23 | export default RNVImageView;
24 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | import { RNVCameraView, RNVCameraConsumer } from "./view";
2 | import Delegate from "./module";
3 | import RNVImageView from "./imageview";
4 | import { RNVisionProvider, RNVisionConsumer } from "./wrapper";
5 | import { RNVRegion, RNVDefaultRegion } from "./region";
6 | import {
7 | CameraRegion as RNVCameraRegion,
8 | calculateRectangles
9 | } from "./cameraregion";
10 | import {
11 | FacesProvider,
12 | FacesConsumer,
13 | Face,
14 | Faces,
15 | FaceCamera
16 | } from "./faceprovider";
17 | import { VisionCamera } from "./visioncamera";
18 | import { StyleView } from "./styleview";
19 | export {
20 | RNVCameraView,
21 | Delegate,
22 | RNVisionProvider,
23 | RNVisionConsumer,
24 | RNVImageView,
25 | RNVRegion,
26 | RNVCameraRegion,
27 | RNVCameraConsumer,
28 | calculateRectangles,
29 | RNVDefaultRegion,
30 | FacesProvider,
31 | FacesConsumer,
32 | Face,
33 | Faces,
34 | FaceCamera,
35 | VisionCamera,
36 | StyleView as GeneratorView
37 | };
38 |
--------------------------------------------------------------------------------
/ios/RHDVision.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 48;
7 | objects = {
8 | /* Begin PBXBuildFile section */
9 | 7172008220A276FB00DEACEB /* RHDVisionImageViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7172008120A276FB00DEACEB /* RHDVisionImageViewManager.swift */; };
10 | 7172008420A2770A00DEACEB /* RHDVisionImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7172008320A2770A00DEACEB /* RHDVisionImageView.swift */; };
11 | 71BBE5711FEE99600085603D /* RHDVisionDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 71BBE5701FEE99600085603D /* RHDVisionDelegate.swift */; };
12 | 71D018061FED85F600939074 /* RHDVisionViewManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 71D018051FED85F600939074 /* RHDVisionViewManager.swift */; };
13 | 71D018381FED8A5A00939074 /* RHDVisionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 71D018371FED8A5A00939074 /* RHDVisionView.swift */; };
14 | E36AA68C8B1D4C0DB395FB12 /* rn-swift-bridge.m in Sources */ = {isa = PBXBuildFile; fileRef = AAC91471FCBB43E18A0037E9 /* rn-swift-bridge.m */; };
15 | /* End PBXBuildFile section */
16 |
17 | /* Begin PBXCopyFilesBuildPhase section */
18 | 71D017F61FED85D600939074 /* CopyFiles */ = {
19 | isa = PBXCopyFilesBuildPhase;
20 | buildActionMask = 2147483647;
21 | dstPath = "include/$(PRODUCT_NAME)";
22 | dstSubfolderSpec = 16;
23 | files = (
24 | );
25 | runOnlyForDeploymentPostprocessing = 0;
26 | };
27 | /* End PBXCopyFilesBuildPhase section */
28 |
29 | /* Begin PBXFileReference section */
30 | 7172008120A276FB00DEACEB /* RHDVisionImageViewManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RHDVisionImageViewManager.swift; sourceTree = ""; };
31 | 7172008320A2770A00DEACEB /* RHDVisionImageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RHDVisionImageView.swift; sourceTree = ""; };
32 | 71BBE5701FEE99600085603D /* RHDVisionDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RHDVisionDelegate.swift; sourceTree = ""; };
33 | 71D017F81FED85D600939074 /* libRHDVision.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRHDVision.a; sourceTree = BUILT_PRODUCTS_DIR; };
34 | 71D018041FED85F600939074 /* RHDVision-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "RHDVision-Bridging-Header.h"; sourceTree = ""; };
35 | 71D018051FED85F600939074 /* RHDVisionViewManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RHDVisionViewManager.swift; sourceTree = ""; };
36 | 71D018371FED8A5A00939074 /* RHDVisionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RHDVisionView.swift; sourceTree = ""; };
37 | AAC91471FCBB43E18A0037E9 /* rn-swift-bridge.m */ = {isa = PBXFileReference; explicitFileType = undefined; fileEncoding = 4; includeInIndex = 0; lastKnownFileType = sourcecode.c.objc; path = "rn-swift-bridge.m"; sourceTree = ""; };
38 | /* End PBXFileReference section */
39 |
40 | /* Begin PBXFrameworksBuildPhase section */
41 | 71D017F51FED85D600939074 /* Frameworks */ = {
42 | isa = PBXFrameworksBuildPhase;
43 | buildActionMask = 2147483647;
44 | files = (
45 | );
46 | runOnlyForDeploymentPostprocessing = 0;
47 | };
48 | /* End PBXFrameworksBuildPhase section */
49 |
50 | /* Begin PBXGroup section */
51 | 71D017EF1FED85D600939074 = {
52 | isa = PBXGroup;
53 | children = (
54 | 71D017FA1FED85D600939074 /* RHDVision */,
55 | 71D017F91FED85D600939074 /* Products */,
56 | 71F4EEE9209E82F100997296 /* Recovered References */,
57 | );
58 | sourceTree = "";
59 | };
60 | 71D017F91FED85D600939074 /* Products */ = {
61 | isa = PBXGroup;
62 | children = (
63 | 71D017F81FED85D600939074 /* libRHDVision.a */,
64 | );
65 | name = Products;
66 | sourceTree = "";
67 | };
68 | 71D017FA1FED85D600939074 /* RHDVision */ = {
69 | isa = PBXGroup;
70 | children = (
71 | 71D018051FED85F600939074 /* RHDVisionViewManager.swift */,
72 | 71D018041FED85F600939074 /* RHDVision-Bridging-Header.h */,
73 | 71D018371FED8A5A00939074 /* RHDVisionView.swift */,
74 | 71BBE5701FEE99600085603D /* RHDVisionDelegate.swift */,
75 | 7172008120A276FB00DEACEB /* RHDVisionImageViewManager.swift */,
76 | 7172008320A2770A00DEACEB /* RHDVisionImageView.swift */,
77 | );
78 | path = RHDVision;
79 | sourceTree = "";
80 | };
81 | 71F4EEE9209E82F100997296 /* Recovered References */ = {
82 | isa = PBXGroup;
83 | children = (
84 | AAC91471FCBB43E18A0037E9 /* rn-swift-bridge.m */,
85 | );
86 | name = "Recovered References";
87 | sourceTree = "";
88 | };
89 | /* End PBXGroup section */
90 |
91 | /* Begin PBXNativeTarget section */
92 | 71D017F71FED85D600939074 /* RHDVision */ = {
93 | isa = PBXNativeTarget;
94 | buildConfigurationList = 71D018011FED85D600939074 /* Build configuration list for PBXNativeTarget "RHDVision" */;
95 | buildPhases = (
96 | 71D017F41FED85D600939074 /* Sources */,
97 | 71D017F51FED85D600939074 /* Frameworks */,
98 | 71D017F61FED85D600939074 /* CopyFiles */,
99 | );
100 | buildRules = (
101 | );
102 | dependencies = (
103 | );
104 | name = RHDVision;
105 | productName = RHDVision;
106 | productReference = 71D017F81FED85D600939074 /* libRHDVision.a */;
107 | productType = "com.apple.product-type.library.static";
108 | };
109 | /* End PBXNativeTarget section */
110 |
111 | /* Begin PBXProject section */
112 | 71D017F01FED85D600939074 /* Project object */ = {
113 | isa = PBXProject;
114 | attributes = {
115 | LastUpgradeCheck = 920;
116 | ORGANIZATIONNAME = "Ray Deck";
117 | TargetAttributes = {
118 | 71D017F71FED85D600939074 = {
119 | CreatedOnToolsVersion = 9.2;
120 | LastSwiftMigration = 920;
121 | ProvisioningStyle = Automatic;
122 | };
123 | };
124 | };
125 | buildConfigurationList = 71D017F31FED85D600939074 /* Build configuration list for PBXProject "RHDVision" */;
126 | compatibilityVersion = "Xcode 8.0";
127 | developmentRegion = en;
128 | hasScannedForEncodings = 0;
129 | knownRegions = (
130 | en,
131 | );
132 | mainGroup = 71D017EF1FED85D600939074;
133 | productRefGroup = 71D017F91FED85D600939074 /* Products */;
134 | projectDirPath = "";
135 | projectRoot = "";
136 | targets = (
137 | 71D017F71FED85D600939074 /* RHDVision */,
138 | );
139 | };
140 | /* End PBXProject section */
141 |
142 | /* Begin PBXSourcesBuildPhase section */
143 | 71D017F41FED85D600939074 /* Sources */ = {
144 | isa = PBXSourcesBuildPhase;
145 | buildActionMask = 2147483647;
146 | files = (
147 | 7172008220A276FB00DEACEB /* RHDVisionImageViewManager.swift in Sources */,
148 | 7172008420A2770A00DEACEB /* RHDVisionImageView.swift in Sources */,
149 | 71D018061FED85F600939074 /* RHDVisionViewManager.swift in Sources */,
150 | 71D018381FED8A5A00939074 /* RHDVisionView.swift in Sources */,
151 | 71BBE5711FEE99600085603D /* RHDVisionDelegate.swift in Sources */,
152 | E36AA68C8B1D4C0DB395FB12 /* rn-swift-bridge.m in Sources */,
153 | );
154 | runOnlyForDeploymentPostprocessing = 0;
155 | };
156 | /* End PBXSourcesBuildPhase section */
157 |
158 | /* Begin XCBuildConfiguration section */
159 | 71D017FF1FED85D600939074 /* Debug */ = {
160 | isa = XCBuildConfiguration;
161 | buildSettings = {
162 | ALWAYS_SEARCH_USER_PATHS = NO;
163 | CLANG_ANALYZER_NONNULL = YES;
164 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
165 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
166 | CLANG_CXX_LIBRARY = "libc++";
167 | CLANG_ENABLE_MODULES = YES;
168 | CLANG_ENABLE_OBJC_ARC = YES;
169 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
170 | CLANG_WARN_BOOL_CONVERSION = YES;
171 | CLANG_WARN_COMMA = YES;
172 | CLANG_WARN_CONSTANT_CONVERSION = YES;
173 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
174 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
175 | CLANG_WARN_EMPTY_BODY = YES;
176 | CLANG_WARN_ENUM_CONVERSION = YES;
177 | CLANG_WARN_INFINITE_RECURSION = YES;
178 | CLANG_WARN_INT_CONVERSION = YES;
179 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
180 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
181 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
182 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
183 | CLANG_WARN_STRICT_PROTOTYPES = YES;
184 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
185 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
186 | CLANG_WARN_UNREACHABLE_CODE = YES;
187 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
188 | CODE_SIGN_IDENTITY = "iPhone Developer";
189 | COPY_PHASE_STRIP = NO;
190 | DEBUG_INFORMATION_FORMAT = dwarf;
191 | ENABLE_STRICT_OBJC_MSGSEND = YES;
192 | ENABLE_TESTABILITY = YES;
193 | GCC_C_LANGUAGE_STANDARD = gnu11;
194 | GCC_DYNAMIC_NO_PIC = NO;
195 | GCC_NO_COMMON_BLOCKS = YES;
196 | GCC_OPTIMIZATION_LEVEL = 0;
197 | GCC_PREPROCESSOR_DEFINITIONS = (
198 | "DEBUG=1",
199 | "$(inherited)",
200 | );
201 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
202 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
203 | GCC_WARN_UNDECLARED_SELECTOR = YES;
204 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
205 | GCC_WARN_UNUSED_FUNCTION = YES;
206 | GCC_WARN_UNUSED_VARIABLE = YES;
207 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
208 | MTL_ENABLE_DEBUG_INFO = YES;
209 | ONLY_ACTIVE_ARCH = YES;
210 | SDKROOT = iphoneos;
211 | };
212 | name = Debug;
213 | };
214 | 71D018001FED85D600939074 /* Release */ = {
215 | isa = XCBuildConfiguration;
216 | buildSettings = {
217 | ALWAYS_SEARCH_USER_PATHS = NO;
218 | CLANG_ANALYZER_NONNULL = YES;
219 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
220 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
221 | CLANG_CXX_LIBRARY = "libc++";
222 | CLANG_ENABLE_MODULES = YES;
223 | CLANG_ENABLE_OBJC_ARC = YES;
224 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
225 | CLANG_WARN_BOOL_CONVERSION = YES;
226 | CLANG_WARN_COMMA = YES;
227 | CLANG_WARN_CONSTANT_CONVERSION = YES;
228 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
229 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
230 | CLANG_WARN_EMPTY_BODY = YES;
231 | CLANG_WARN_ENUM_CONVERSION = YES;
232 | CLANG_WARN_INFINITE_RECURSION = YES;
233 | CLANG_WARN_INT_CONVERSION = YES;
234 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
235 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
236 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
237 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
238 | CLANG_WARN_STRICT_PROTOTYPES = YES;
239 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
240 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
241 | CLANG_WARN_UNREACHABLE_CODE = YES;
242 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
243 | CODE_SIGN_IDENTITY = "iPhone Developer";
244 | COPY_PHASE_STRIP = NO;
245 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
246 | ENABLE_NS_ASSERTIONS = NO;
247 | ENABLE_STRICT_OBJC_MSGSEND = YES;
248 | GCC_C_LANGUAGE_STANDARD = gnu11;
249 | GCC_NO_COMMON_BLOCKS = YES;
250 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
251 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
252 | GCC_WARN_UNDECLARED_SELECTOR = YES;
253 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
254 | GCC_WARN_UNUSED_FUNCTION = YES;
255 | GCC_WARN_UNUSED_VARIABLE = YES;
256 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
257 | MTL_ENABLE_DEBUG_INFO = NO;
258 | SDKROOT = iphoneos;
259 | VALIDATE_PRODUCT = YES;
260 | };
261 | name = Release;
262 | };
263 | 71D018021FED85D600939074 /* Debug */ = {
264 | isa = XCBuildConfiguration;
265 | buildSettings = {
266 | CLANG_ENABLE_MODULES = YES;
267 | CODE_SIGN_STYLE = Automatic;
268 | DEVELOPMENT_TEAM = 3UK2WGC8YX;
269 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
270 | OTHER_LDFLAGS = "-ObjC";
271 | PRODUCT_NAME = "$(TARGET_NAME)";
272 | SKIP_INSTALL = YES;
273 | SWIFT_OBJC_BRIDGING_HEADER = "RHDVision/RHDVision-Bridging-Header.h";
274 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
275 | SWIFT_VERSION = 3.0;
276 | TARGETED_DEVICE_FAMILY = "1,2";
277 | };
278 | name = Debug;
279 | };
280 | 71D018031FED85D600939074 /* Release */ = {
281 | isa = XCBuildConfiguration;
282 | buildSettings = {
283 | CLANG_ENABLE_MODULES = YES;
284 | CODE_SIGN_STYLE = Automatic;
285 | DEVELOPMENT_TEAM = 3UK2WGC8YX;
286 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
287 | OTHER_LDFLAGS = "-ObjC";
288 | PRODUCT_NAME = "$(TARGET_NAME)";
289 | SKIP_INSTALL = YES;
290 | SWIFT_OBJC_BRIDGING_HEADER = "RHDVision/RHDVision-Bridging-Header.h";
291 | SWIFT_VERSION = 3.0;
292 | TARGETED_DEVICE_FAMILY = "1,2";
293 | };
294 | name = Release;
295 | };
296 | /* End XCBuildConfiguration section */
297 |
298 | /* Begin XCConfigurationList section */
299 | 71D017F31FED85D600939074 /* Build configuration list for PBXProject "RHDVision" */ = {
300 | isa = XCConfigurationList;
301 | buildConfigurations = (
302 | 71D017FF1FED85D600939074 /* Debug */,
303 | 71D018001FED85D600939074 /* Release */,
304 | );
305 | defaultConfigurationIsVisible = 0;
306 | defaultConfigurationName = Release;
307 | };
308 | 71D018011FED85D600939074 /* Build configuration list for PBXNativeTarget "RHDVision" */ = {
309 | isa = XCConfigurationList;
310 | buildConfigurations = (
311 | 71D018021FED85D600939074 /* Debug */,
312 | 71D018031FED85D600939074 /* Release */,
313 | );
314 | defaultConfigurationIsVisible = 0;
315 | defaultConfigurationName = Release;
316 | };
317 | /* End XCConfigurationList section */
318 | };
319 | rootObject = 71D017F01FED85D600939074 /* Project object */;
320 | }
321 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVision-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import
3 | #import
4 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVision.h:
--------------------------------------------------------------------------------
1 | //
2 | // RHDVision.h
3 | // RHDVision
4 | //
5 | // Created by Raymond Deck on 12/22/17.
6 | // Copyright © 2017 Ray Deck. All rights reserved.
7 | //
8 |
9 | #import
10 |
11 | @interface RHDVision : NSObject
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVisionDelegate.swift:
--------------------------------------------------------------------------------
1 | import AVKit
2 | import Vision
3 | import Photos
4 | import Foundation
5 | import Accelerate
6 | typealias SFCallback = (UIImage)->Void
7 | typealias VNRGenerator = ()->VNRequest?
8 | typealias BottleneckCB = (MLFeatureProvider, MLModel)->Void
9 | struct ModelStruct {
10 | var model: MLModel
11 | var callback: BottleneckCB
12 | }
13 | enum visionErrors:Error {
14 | case NoModelError
15 | }
16 | var session:AVCaptureSession?
17 | // Don't know if I want this typealias VNReqMaker = () -> VNRequest
18 | @objc(RHDVisionModule)
19 | class RHDVisionModule: RCTEventEmitter, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureMetadataOutputObjectsDelegate {
20 | static var instance:RHDVisionModule?
21 | //MARK: Private Collections
22 | var sr:[String: [String:VNRequest]] = [:] //Sequence Requests
23 | var srg:[String:[String:VNRGenerator]] = [:] // Sequence Request Generators
24 | var srobs:[String:[String:VNDetectedObjectObservation]] = [:]
25 | var ir:[String:[String:VNRequest]] = [:] // Image Requests - by region
26 | var irg:[String:[String:VNRGenerator]] = [:] // Image Request Generators - by region
27 | var sf: [String: SFCallback] = [:] //SaveFrame Callbacks - by region
28 | var br: [String:[String: [String:ModelStruct]]] = [:] //Bottleneck Requests by bottleneck model (does not require a region, since that is specied in the orignal bottlenecking model)
29 | var regions: [String: CGRect] = [:] // Identified regions, organized as relative position. Note that "" is reserved for the whole visible field
30 | //MARK: Private Properties
31 | var pl:AVCaptureVideoPreviewLayer?
32 | var connection: AVCaptureConnection?
33 | var srh:[String: VNSequenceRequestHandler] = [:]
34 | var imageHeight = 0
35 | var imageWidth = 0
36 | var doAttachCamera = false
37 | //MARK: Private Methods
38 | override init() {
39 | super.init()
40 | RHDVisionModule.instance = self
41 | multiArrays = [:] // Prevent memory leaks
42 | //Kill previous connections
43 | if let i = RHDVisionModule.instance, let thispl = i.pl {
44 | thispl.session.outputs.forEach() { o in
45 | if let os = o as? AVCaptureVideoDataOutput , let _ = os.sampleBufferDelegate {
46 | thispl.session.removeOutput(os)
47 | }
48 | }
49 | }
50 | RHDVisionModule.instance = self
51 | }
52 | override class func requiresMainQueueSetup() -> Bool {
53 | return true
54 | }
55 | //MARK: Lifecycle management
56 |
57 | @objc func start(_ cameraFront: Bool, resolve: RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
58 | AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { success in
59 | guard success else { reject("no_permission", "Permission denied for Video Capture", nil); return }
60 | guard
61 | let device = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: cameraFront ? AVCaptureDevice.Position.front : AVCaptureDevice.Position.back),
62 | let input = try? AVCaptureDeviceInput(device: device)
63 | else { return }
64 | if let olds = session {
65 | olds.inputs.forEach() { i in
66 | olds.removeInput(i as! AVCaptureInput)
67 | }
68 | olds.outputs.forEach() { o in
69 | olds.removeOutput(o as! AVCaptureOutput)
70 | }
71 | session = nil
72 | }
73 | let s = AVCaptureSession()
74 | s.addInput(input)
75 | s.startRunning()
76 | session = s
77 | let o = AVCaptureVideoDataOutput()
78 | o.setSampleBufferDelegate(self, queue: DispatchQueue(label:"RHDVisionModuleQueue"))
79 | o.alwaysDiscardsLateVideoFrames = true
80 | o.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
81 | s.addOutput(o)
82 | /*
83 | let o2 = AVCaptureMetadataOutput()
84 | o2.setMetadataObjectsDelegate(self, queue:DispatchQueue.main)
85 | o2.metadataObjectTypes = o2.availableMetadataObjectTypes
86 | s.addOutput(o2)
87 | */
88 | if let conn = o.connection(withMediaType: AVMediaTypeVideo) {
89 | conn.videoOrientation = deviceOrientationtoAVOrientation(UIDevice.current.orientation)
90 | }
91 | if(self.doAttachCamera) {
92 | RHDVisionCameraViewManager.currentView?.attach(s)
93 | }
94 | }
95 | }
96 | @objc func stop(_ resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
97 | guard let s = session else { resolve(true); return }
98 | s.stopRunning()
99 | s.inputs.forEach() { i in
100 | s.removeInput(i as! AVCaptureInput)
101 | }
102 | s.outputs.forEach() { o in
103 | s.removeOutput(o as! AVCaptureOutput)
104 | }
105 | resolve(true)
106 | }
107 | @objc func getImageDimensions(_ resolve:RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
108 | resolve(["height": imageHeight, "width": imageWidth])
109 | }
110 | @objc func attachCameraView(_ resolve:RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
111 | //Look for current vision object
112 | doAttachCamera = true
113 | guard let view = RHDVisionCameraViewManager.currentView else { reject("no_view", "No view instantiated", nil); return }
114 | guard let s = session else { resolve(false); return }
115 | view.attach(s)
116 | resolve(true)
117 | }
118 | @objc func detachCameraView(_ resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
119 | doAttachCamera = false
120 | RHDVisionCameraViewManager.currentView?.detach()
121 | }
122 | var isCameraView: Bool = false
123 | @objc func cameraIsView(_ newIsCameraView: Bool, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
124 | guard isCameraView == newIsCameraView else { resolve(true); return }
125 | isCameraView = newIsCameraView
126 | resolve(true)
127 | }
128 | //MARK:Delegate Methods
129 | func captureOutput(_ output: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
130 | self.connection = connection
131 | if isCameraView, let v = RHDVisionCameraViewManager.currentView, let ao = v.pl?.connection.videoOrientation { connection.videoOrientation = ao }
132 | guard sr.count > 0
133 | || srg.count > 0
134 | || ir.count > 0
135 | || irg.count > 0
136 | || sf.count > 0
137 | || imageHeight == 0,
138 | let cvp = CMSampleBufferGetImageBuffer(sampleBuffer)
139 | else { return }
140 |
141 | let newImageHeight = CVPixelBufferGetHeight(cvp)
142 | let newImageWidth = CVPixelBufferGetWidth(cvp)
143 | if newImageHeight != imageHeight || newImageWidth != imageWidth {
144 | imageHeight = newImageHeight
145 | imageWidth = newImageWidth
146 | sendEvent(withName: "RNVisionImageDim", body: ["height": newImageHeight, "width": newImageWidth])
147 | }
148 | if newImageWidth==0 || newImageHeight==0 { return }
149 | analyzePixelBuffer(cvp, key: "") //Image Analysis as applied to the whole visible region
150 | regions.forEach() { region, rect in
151 | guard sr[region] != nil || srg[region] != nil || ir[region] != nil || irg[region] != nil || sf[region] != nil else { return }
152 | guard let slicedCVP = slicePixelBuffer(cvp, toRect: rect) else { return }
153 | analyzePixelBuffer(slicedCVP, key: region)
154 | }
155 | }
156 | func analyzePixelBuffer(_ cvp: CVPixelBuffer, key: String) {
157 | var irs:[VNRequest] = []
158 | if let i = ir[key] {
159 | i.forEach() { k, v in
160 | irs.append(v)
161 | }
162 | }
163 | if let i = irg[key] {
164 | i.forEach() { k, v in
165 | if let r = v() { irs.append(r) }
166 | }
167 | }
168 | if irs.count > 0 {
169 | let irh = VNImageRequestHandler(cvPixelBuffer: cvp, options: [:])
170 | try? irh.perform(irs)
171 | }
172 | if let cb = sf[key] {
173 | if let i = CVPtoUIImage(cvp) { cb(i) }
174 | }
175 | var srs:[VNRequest] = []
176 | if let s = sr[key] {
177 | s.forEach() { k, v in
178 | srs.append(v)
179 | }
180 | }
181 | if let s = srg[key] {
182 | s.forEach() { k, generator in
183 | if let r = generator() { srs.append(r) }
184 | }
185 | }
186 | if srh[key] == nil {
187 | srh[key] = VNSequenceRequestHandler()
188 | }
189 |
190 | if srs.count > 0 {
191 | try? srh[key]!.perform(srs, on: cvp)
192 | }
193 | }
194 | //MARK: SaveFrame Code
195 | let defaultDisposition:String = "file"
196 | var savedFrames:[String: UIImage] = [:]
197 | @objc func saveFrame(_ disposition: String?, region: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
198 | sf[region] = { i in
199 | let d = disposition ?? self.defaultDisposition
200 | switch(d) {
201 | case "file":
202 | guard let d = UIImageJPEGRepresentation(i, 1.0) else { return }
203 | let u = UUID().uuidString
204 | let t = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(u).appendingPathExtension("jpg")
205 | do {
206 | try d.write(to: t)
207 | self.sendEvent(withName: "RNVision", body: ["key": "saveFrame", "region": region, "event": "savedFile", "fileURL": t.absoluteString])
208 | return
209 | } catch {
210 | return
211 | }
212 | case "roll":
213 | switch PHPhotoLibrary.authorizationStatus() {
214 | case PHAuthorizationStatus.notDetermined:
215 | PHPhotoLibrary.requestAuthorization(){ success in
216 | switch PHPhotoLibrary.authorizationStatus() {
217 | case PHAuthorizationStatus.denied:
218 | return
219 | default:
220 | UIImageWriteToSavedPhotosAlbum(i, nil, nil, nil)
221 | self.sendEvent(withName: "RNVision", body:["region": region, "key": "saveFrame", "event":"savedToRoll"])
222 | }
223 | }
224 | case PHAuthorizationStatus.denied:
225 | return
226 | default:
227 | UIImageWriteToSavedPhotosAlbum(i, nil, nil, nil)
228 | self.sendEvent(withName: "RNVision", body:["region": region, "key": "saveFrame", "event":"savedToRoll"])
229 | }
230 | case "memory":
231 | let u = UUID().uuidString
232 | self.savedFrames[u] = i
233 | self.sendEvent(withName: "RNVision", body:["region": region, "key": "saveFrame", "event":"savedToMemory", "savedFrameID": u])
234 | default:
235 | return
236 | }
237 | }
238 | resolve(region)
239 | }
240 | @objc func removeSaveFrame(_ region:String, resolve:RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
241 | sf.removeValue(forKey: region)
242 | resolve(true)
243 | }
244 | //MARK:Face Detection
245 | @objc func detectFaces(_ region: String, resolve: RCTPromiseResolveBlock, reject:RCTPromiseRejectBlock) {
246 | guard ir[region]?["detectFaces"] == nil else { resolve("detectFaces"); return }
247 | if ir[region] == nil { ir[region] = [:] }
248 | ir[region]!["detectFaces"] = VNDetectFaceRectanglesRequest() { request, error in
249 | var data:[Any] = []
250 | guard error == nil else { return }
251 | guard let r = request.results else { return }
252 | r.forEach() { result in
253 | guard let rs = result as? VNDetectedObjectObservation else {return}
254 | let bb = rs.boundingBox
255 | let normalRect = visionRectToNormal(bb)
256 | data.append(rectToDictionary(normalRect))
257 | }
258 | self.sendEvent(withName: "RNVision", body: ["region": region, "key": "detectFaces", "data":data])
259 | }
260 | resolve("detectFaces");
261 | }
262 | @objc func removeDetectFaces(_ region: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
263 | guard let _ = ir[region]?["detectFaces"] else { reject("not_running", "Not running detect faces",nil); return}
264 | ir[region]?.removeValue(forKey: "detectFaces")
265 | resolve(true)
266 | }
267 | //MARK: Object Tracking
268 | @objc func trackObject(_ name: String, region: String, dict: [String: Any], resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
269 | guard let r = dictionaryToRect(dict) else { reject("no_rectangle", "Dictionary does not represent a usable rectangle", nil); return }
270 | if srg[region] == nil { srg[region] = [:] }
271 | if srobs[region] == nil { srobs[region] = [:] }
272 | let visionRect = normalRectToVision(r)
273 | let obs = VNDetectedObjectObservation(boundingBox: visionRect)
274 | srobs[region]![name] = obs
275 | regions[name] = r
276 | srg[region]![name] = {
277 | guard let o = self.srobs[region]?[name] else { return nil }
278 | let r = VNTrackObjectRequest(detectedObjectObservation: o) {request, error in
279 | guard
280 | error == nil,
281 | let newobs = request.results?.first as? VNDetectedObjectObservation
282 | else { print("TO ERROR", error); self.srh[region] = nil; return }
283 | let newBox = visionRectToNormal(newobs.boundingBox)
284 | let oldobsQ = self.srobs[region]![name]
285 | self.srobs[region]![name] = newobs
286 | if let oldobs = oldobsQ {
287 | guard newobs.boundingBox != oldobs.boundingBox else { return }
288 | }
289 | self.regions[name] = visionRectToNormal(newobs.boundingBox)
290 | self.sendEvent(withName: "RNVision", body: ["key": name, "region": region, "frame": rectToDictionary(newBox), "confidence": newobs.confidence])
291 | }
292 | // r.preferBackgroundProcessing = true
293 | r.trackingLevel = .accurate
294 | return r
295 | }
296 | resolve(region)
297 | }
298 | @objc func removeTrackObject(_ name: String, region: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
299 | srobs[region]?.removeValue(forKey: name)
300 | srg[region]?.removeValue(forKey: name)
301 | resolve(true)
302 | }
303 | @objc func removeTrackObjects(_ region: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
304 | let keys = srobs[region]?.keys
305 | srobs[region]?.removeAll()
306 | srg[region]?.removeAll()
307 | resolve(["removedKeys": keys])
308 | }
309 | //MARK: CoreML Model Application
310 | var ms:[String: MLModel] = [:]
311 | func applyML(_ thisURL: String, field: String, resolve:RCTPromiseResolveBlock, reject:RCTPromiseRejectBlock, cb:@escaping VNRequestCompletionHandler) {
312 | var origmodel:MLModel? = ms[thisURL]
313 | if origmodel == nil {
314 | guard let modelURL = URL(string: thisURL) else { reject("bad_url", "This is a bad URL: " + thisURL, nil); return }
315 | guard let o = try? MLModel(contentsOf: modelURL) else { reject("no_model", "No model at " + thisURL, nil); return }
316 | ms[thisURL] = o
317 | origmodel = o
318 | }
319 | guard let m = origmodel else { reject("no_model", "Unable to load model at URL " + thisURL, nil); return }
320 | guard let vnmodel = try? VNCoreMLModel(for: m) else { reject("not_vision_model", "Model is not vision model: " + thisURL, nil); return }
321 | let r = VNCoreMLRequest(model: vnmodel, completionHandler: cb)
322 | if ir[field] == nil { ir[field] = [:] }
323 | ir[field]![thisURL] = r
324 | resolve(thisURL)
325 | }
326 | //Note that resultMax is basically ignored if it is 0 or less
327 | @objc func applyMLClassifier(_ thisURL: String, field: String, resultMax: Int, resolve:RCTPromiseResolveBlock, reject:RCTPromiseRejectBlock) {
328 | applyML(thisURL, field: field, resolve: resolve, reject: reject) { request, error in
329 | guard error == nil, let results = request.results else { return }
330 | var out:[[String:Any]] = []
331 | for result in results {
332 | if let co = result as? VNClassificationObservation {
333 | let obj:[String: Any] = ["label": co.identifier, "confidence": co.confidence];
334 | out.append(obj)
335 | if(resultMax > 0 && ((out.count + 1) > resultMax)) {
336 | break;
337 | }
338 | }
339 | }
340 | self.sendEvent(withName: "RNVision", body:["region": field, "key": thisURL, "data": out])
341 | }
342 | }
343 | var pixelBuffers:[String:CVPixelBuffer] = [:]
344 | @objc func applyMLGenerator(_ thisURL: String, field: String, handler: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
345 | applyML(thisURL, field: field, resolve: resolve, reject: reject) { request, error in
346 | guard error == nil, let results = request.results else { return }
347 | for result in results {
348 | if let pbo = result as? VNPixelBufferObservation {
349 | switch handler {
350 | case "memory":
351 | self.pixelBuffers[thisURL] = pbo.pixelBuffer;
352 | self.sendEvent(withName: "RNVision", body: ["region": field, "key": thisURL, "data": ["memorykey": thisURL]])
353 | case "file":
354 | if let i = CVPtoUIImage(pbo.pixelBuffer) {
355 | if let d = UIImageJPEGRepresentation(i, 1.0) {
356 | do {
357 | let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent((thisURL as NSString).lastPathComponent).appendingPathExtension("jpg")
358 | try d.write(to: url)
359 | self.sendEvent(withName: "RNVision", body: ["region": field, "key": thisURL, "data": ["url": url.absoluteString]])
360 | } catch {
361 | NSLog(error.localizedDescription)
362 | self.sendEvent(withName: "RNVision", body: ["region": field, "key": thisURL, "data": ["error": "No save"]])
363 | }
364 | } else {
365 | self.sendEvent(withName: "RNVision", body: ["region": field, "key": thisURL, "data": ["error": "No save"]])
366 | }
367 | }
368 | case "view":
369 | //do nothing
370 | guard let v = RHDVisionImageViewManager.instance?.views[thisURL] else { continue; }
371 | if !v.shouldUpdateImage() { continue }
372 | //Let's ask some questions about pbo
373 | guard let ui = CVPtoUIImage(pbo.pixelBuffer) else { continue }
374 | DispatchQueue.main.async() {
375 | v.addImage(ui)
376 | }
377 | default:
378 | NSLog("Unhandled generator handler key: " + handler)
379 | }
380 | }
381 | }
382 | }
383 | }
384 | @objc func applyMLBottleneck(_ thisURL: String, field: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
385 | applyML(thisURL, field: field, resolve: resolve, reject: reject) { request, error in
386 | guard error == nil, let results = request.results else { return }
387 | if let m = self.ms[thisURL], let models = self.br[field]?[thisURL] {
388 | //build bottlenecks for each of them
389 | let input = MLDictionaryFeatureProvider();
390 | if m.modelDescription.outputDescriptionsByName.count == 0 {
391 | if(results.count == 1 ) {
392 | let label = m.modelDescription.predictedFeatureName ?? "bottleneck"
393 | if let fvo = results.first as? VNCoreMLFeatureValueObservation {
394 | input.setValue(fvo.featureValue, forKey: label)
395 | }
396 | } else {
397 | //Boned we are
398 | }
399 | } else {
400 | //@TODO this is a flier to see if we can walk the descriptions to get this, boyo
401 | let names = Array(m.modelDescription.outputDescriptionsByName.keys)
402 | for i in 1...results.count {
403 | let result = results[i]
404 | guard let fvo = result as? VNCoreMLFeatureValueObservation else { continue }
405 | input.setValue(fvo.featureValue, forKey: names[i])
406 | }
407 | }
408 | if input.featureNames.count > 0 {
409 | for (_, info) in models {
410 | guard let out = try? info.model.prediction(from: input) else { continue }
411 | info.callback(out, info.model)
412 | }
413 | }
414 | }
415 | }
416 | }
417 | @objc func applyMLGeneric(_ thisURL: String, field: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
418 | applyML(thisURL, field: field, resolve: resolve, reject: reject) { request, error in
419 | guard error == nil, let results = request.results else { return }
420 | if let m = self.ms[thisURL] {
421 | var input:[String: Any] = [:]
422 | if m.modelDescription.outputDescriptionsByName.count == 0 {
423 | if(results.count == 1 ) {
424 | let label = m.modelDescription.predictedFeatureName ?? "bottleneck"
425 | if let fvo = results.first as? VNCoreMLFeatureValueObservation {
426 | (_, input[label]) = convertFeatureValue(fvo.featureValue)
427 | }
428 | } else {
429 | //Boned we are
430 | }
431 | } else {
432 | //@TODO this is a flier to see if we can walk the descriptions to get this, boyo
433 | let names = Array(m.modelDescription.outputDescriptionsByName.keys)
434 | for i in 1...results.count {
435 | let result = results[i]
436 | guard let fvo = result as? VNCoreMLFeatureValueObservation else { continue }
437 | (_, input[names[i]]) = convertFeatureValue(fvo.featureValue)
438 | }
439 | }
440 | if input.count > 0 {
441 | self.sendEvent(withName: "RNVision", body: ["region": field, "key": thisURL, "data": input])
442 | }
443 | }
444 |
445 | }
446 | }
447 | @objc func removeML(_ thisURL: String, field: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
448 | ir[field]?.removeValue(forKey: thisURL)
449 | if ir[field]?.keys.count == 0 {
450 | ir.removeValue(forKey: field)
451 | }
452 | resolve(true)
453 | }
454 | func applyBottleneck(_ thisURL: String, toField: String, toModel: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock, callback: @escaping BottleneckCB) {
455 | var origmodel:MLModel?
456 | if origmodel == nil {
457 | guard let modelURL = URL(string: thisURL) else { reject("bad_url", "This is a bad URL", nil); return }
458 | guard let o = try? MLModel(contentsOf: modelURL) else { reject("no_model", "No model at " + thisURL, nil); return }
459 | ms[thisURL] = o
460 | origmodel = o
461 | }
462 | guard let o = origmodel else { reject("no_mode", "Could not make/find a model at " + thisURL, nil); return }
463 | let info = ModelStruct(model: o, callback: callback)
464 | if br[toField] == nil {
465 | br[toField] = [:]
466 | }
467 | if br[toField]![toModel] == nil {
468 | br[toField]![toModel] = [:]
469 | }
470 | br[toField]![toModel]![thisURL] = info
471 | resolve(thisURL)
472 | }
473 | @objc func applyBottleneckClassifier(_ thisURL: String, toField: String, toModel: String, maxResults: Int, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
474 | applyBottleneck(thisURL, toField: toField, toModel: toModel, resolve: resolve, reject: reject) { fp, model in
475 | //Find the classifer
476 | for name in fp.featureNames {
477 | guard let fv = fp.featureValue(for: name), fv.type == .dictionary, let original = fp.featureValue(for: name)?.dictionaryValue else { continue }
478 | let dic = original.sorted() { a, b in
479 | return Float(b.value) < Float(a.value) // Reversing the order - want highest values at the top
480 | }
481 | var out:[[String: Any]] = []
482 | for (key, value) in dic {
483 | out.append(["label": key, "confidence": value])
484 | }
485 | self.sendEvent(withName: "RNVision", body: ["key": thisURL, "bottleneck": toModel, "data": out])
486 | break
487 | }
488 | }
489 | }
490 | @objc func applyBottleneckGenerator(_ thisURL: String, handler: String, toField: String, toModel: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
491 | applyBottleneck(thisURL, toField: toField, toModel: toModel, resolve: resolve, reject: reject) { fp, model in
492 | for name in fp.featureNames {
493 | guard let fv = fp.featureValue(for: name), fv.type == .image, let i = fv.imageBufferValue else { continue }
494 | self.pixelBuffers[thisURL] = i
495 | if handler == "sendEvent" {
496 | self.sendEvent(withName: "RNVision", body: ["key": thisURL])
497 | }
498 | break
499 | }
500 | }
501 | }
502 | @objc func applyBottleneckBottleneck(_ thisURL: String, toField: String, toModel: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
503 | applyBottleneck(thisURL, toField: toField, toModel: toModel, resolve: resolve, reject: reject) { fp, model in
504 | guard let bottlenecks = self.br[toField]?[thisURL] else { return }
505 | for (_, ms) in bottlenecks {
506 | let m = ms.model
507 | let cb = ms.callback
508 | cb(fp, m)
509 | }
510 | }
511 | }
512 | @objc func applyBottleneckGeneric(_ thisURL: String, toField: String, toModel: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
513 | applyBottleneck(thisURL, toField: toField, toModel: toModel, resolve: resolve, reject: reject) { fp, model in
514 | var out:[String: Any] = [:]
515 | for n in fp.featureNames {
516 | guard let fv = fp.featureValue(for: n) else { continue }
517 | (_, out[n]) = convertFeatureValue(fv)
518 | }
519 | if out.count > 0 {
520 | self.sendEvent(withName: "RNVML", body: ["key": thisURL, "bottleneck": toModel, "data": out])
521 | }
522 | }
523 | }
524 | @objc func removeBottleneck(_ modelURL:String, fromField: String, fromModel: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
525 | guard let _ = br[fromField] else { reject("no_parent_field", "There is no parent field " + fromField, nil); return }
526 | guard let _ = br[fromField]![fromModel] else { reject("no_parent_model", "There is no parent model " + fromModel, nil); return }
527 | br[fromField]![fromModel]!.removeValue(forKey: modelURL)
528 | resolve(true)
529 | }
530 | @objc func removeBottlenecks(_ fromField:String, fromModel: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
531 | guard let keys = br[fromField]?[fromModel]?.keys else { resolve(["removedBottlenecks": 0]); return }
532 | br[fromField]!.removeValue(forKey: fromModel)
533 | resolve(["removedBottlenecks": keys])
534 | }
535 | //MARK: Region Management
536 | @objc func setRegion(_ region: String, rectDic: [String: Any], resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
537 | regions[region] = dictionaryToRect(rectDic);
538 | resolve(region)
539 | }
540 | @objc func removeRegion(_ region: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
541 | regions.removeValue(forKey: region)
542 | resolve(true)
543 | }
544 | //MARK: MultiArray access
545 | @objc func saveMultiArray(key: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
546 | guard let m = multiArrays[key] else { reject("no_multiarray", "No Multiarrays with key " + key, nil); return }
547 | let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(key).appendingPathExtension(makeMultiArrayExtension(multiArray: m))
548 | if RHDVision.saveMultiArray(multiArray: m, url: url) {
549 | resolve(url.absoluteString)
550 | } else { resolve(false) }
551 | }
552 | //MARK: Metadata Capture
553 | func captureOutput(_ output: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
554 | metadataObjects.forEach() {obj in
555 | if let ro = obj as? AVMetadataMachineReadableCodeObject {
556 | guard let sv = ro.stringValue else {return}
557 | self.foundMetaData(sv)
558 | }
559 | }
560 | }
561 | func foundMetaData(_ stringValue:String) {
562 | sendEvent(withName: "RNVMetaData", body:["string": stringValue])
563 | }
564 | //MARK: RCTEventEmitter Support
565 |
566 | override func supportedEvents() -> [String]! {
567 | return ["RNVision", "RNVMetaData", "RNVisionImageDim"] //@RNSEvents
568 | }
569 | //MARK: Constants
570 | //@RNSConstants bundlePath bundleURL
571 | override func constantsToExport() -> [AnyHashable : Any]! {
572 | return ["bundlePath": Bundle.main.bundlePath, "bundleURL": Bundle.main.bundleURL.absoluteString]
573 | }
574 | }
575 | //MARK: Orientation Conversion
576 | func deviceOrientationtoAVOrientation(_ uiorientation:UIDeviceOrientation) -> AVCaptureVideoOrientation {
577 | switch uiorientation {
578 | case .landscapeLeft:
579 | return .landscapeRight //Note left and right get flipped
580 | case .landscapeRight:
581 | return .landscapeLeft //Note Left and Right get flipped
582 | case .portrait:
583 | return .portrait
584 | case .portraitUpsideDown:
585 | return .portraitUpsideDown
586 | case .unknown:
587 | return .portrait
588 | default:
589 | return .portrait
590 | }
591 | }
592 | func AVOrientationToDeviceOrientation(_ avorientation:AVCaptureVideoOrientation) -> UIDeviceOrientation {
593 | switch avorientation {
594 | case .landscapeLeft: return .landscapeLeft
595 | case .landscapeRight: return .landscapeRight
596 | case .portrait: return .portrait
597 | case .portraitUpsideDown: return .portraitUpsideDown
598 | }
599 |
600 | }
601 | //MARK: Feature Management and MultiArrays
602 | var multiArrays:[String:MLMultiArray] = [:]
603 | func convertFeatureValue(_ v:MLFeatureValue) -> (String, Any?) {
604 | let t:MLFeatureType = v.type
605 | var o:Any?;
606 | var ts:String = "";
607 | switch t {
608 | case .string:
609 | ts = "string";
610 | o = v.stringValue;
611 | case .double:
612 | ts = "double";
613 | o = v.doubleValue;
614 | case .int64:
615 | ts = "int64";
616 | o = v.int64Value;
617 | case .dictionary:
618 | ts = "dictionary";
619 | o = v.dictionaryValue
620 | case .image:
621 | if let cvib:CVImageBuffer = v.imageBufferValue {
622 | let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg")
623 | let ci = CIImage(cvImageBuffer: cvib)
624 | let ui = UIImage(ciImage: ci)
625 | if let _ = try? UIImageJPEGRepresentation(ui, 1.0)?.write(to: tempURL) {
626 | o = tempURL.absoluteString
627 | } else { o = "COULDNOTWRITE" }
628 | ts = "image";
629 | }
630 | case .invalid:
631 | print("This was an invalid answer");
632 | case .multiArray:
633 | if let m = v.multiArrayValue {
634 | ts = "multiarray"
635 | let k = UUID().uuidString
636 | multiArrays[k] = m
637 | o = k
638 | }/* - commented for iOS 11 */
639 | case .sequence:
640 | //Not sure what to do with this
641 | ts = "sequence"
642 | if #available(iOS 12.0, *) {
643 | let s = v as! MLSequence
644 | switch s.type {
645 | case MLFeatureType.string:
646 | o = s.stringValues
647 | case MLFeatureType.int64:
648 | o = s.int64Values
649 | default:
650 | o = s.stringValues
651 | }
652 | } else {
653 | // Fallback on earlier versions
654 | }
655 | /**/
656 | }
657 | return (ts, o)
658 | }
659 | func saveMultiArray(multiArray: MLMultiArray, path:String) -> Bool {
660 | let url = URL(fileURLWithPath: path)
661 | return saveMultiArray(multiArray: multiArray, url: url);
662 | }
663 | func saveMultiArray(multiArray: MLMultiArray, url: URL) -> Bool {
664 | var size:Int = 1;
665 | var unitSize:Int
666 | switch multiArray.dataType {
667 | case .double: unitSize = 8;
668 | case .float32: unitSize = 4;
669 | case .int32: unitSize = 4;
670 | }
671 | for dim in 1...multiArray.shape.count {
672 | size = size * (multiArray.shape[dim] as! Int) * (multiArray.strides[dim] as! Int) * unitSize
673 | }
674 | let d = NSData(bytes: multiArray.dataPointer, length: size)
675 | do {
676 | try d.write(to: url, options: .atomic)
677 | return true
678 | } catch {
679 | return false
680 | }
681 | }
682 | func makeMultiArrayExtension(multiArray: MLMultiArray) -> String {
683 | var t:String
684 | switch multiArray.dataType {
685 | case .double: t = "double";
686 | case .float32: t = "float32";
687 | case .int32: t = "int32";
688 | }
689 | for n in multiArray.shape {
690 | t = t.appending(".").appending(n.stringValue)
691 | }
692 | t = t.appending(".").appending("s")
693 | for n in multiArray.strides {
694 | t = t.appending(".").appending(n.stringValue)
695 | }
696 | return t
697 | }
698 | //MARK: Editing PixelBuffers
699 | public func resizePixelBuffer(_ srcPixelBuffer: CVPixelBuffer,
700 | cropX: Int,
701 | cropY: Int,
702 | cropWidth: Int,
703 | cropHeight: Int,
704 | scaleWidth: Int,
705 | scaleHeight: Int) -> CVPixelBuffer? {
706 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
707 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
708 | print("Error: could not get pixel buffer base address")
709 | return nil
710 | }
711 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
712 | let offset = cropY*srcBytesPerRow + cropX*4
713 | var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
714 | height: vImagePixelCount(cropHeight),
715 | width: vImagePixelCount(cropWidth),
716 | rowBytes: srcBytesPerRow)
717 | let destBytesPerRow = scaleWidth*4
718 | guard let destData = malloc(scaleHeight*destBytesPerRow) else {
719 | print("Error: out of memory")
720 | return nil
721 | }
722 | var destBuffer = vImage_Buffer(data: destData,
723 | height: vImagePixelCount(scaleHeight),
724 | width: vImagePixelCount(scaleWidth),
725 | rowBytes: destBytesPerRow)
726 | let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImage_Flags(0))
727 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
728 | if error != kvImageNoError {
729 | print("Error:", error)
730 | free(destData)
731 | return nil
732 | }
733 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
734 | if let ptr = ptr {
735 | free(UnsafeMutableRawPointer(mutating: ptr))
736 | }
737 | }
738 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
739 | var dstPixelBuffer: CVPixelBuffer?
740 | let status = CVPixelBufferCreateWithBytes(nil, scaleWidth, scaleHeight,
741 | pixelFormat, destData,
742 | destBytesPerRow, releaseCallback,
743 | nil, nil, &dstPixelBuffer)
744 | if status != kCVReturnSuccess {
745 | print("Error: could not create new pixel buffer")
746 | free(destData)
747 | return nil
748 | }
749 | return dstPixelBuffer
750 | }
751 | func slicePixelBuffer(_ cvp: CVPixelBuffer, toRect: CGRect) -> CVPixelBuffer? {
752 | //Rect time! let's get the rectangle we represent from the buffer
753 | let sourceHeight:Int = CVPixelBufferGetHeight(cvp)
754 | let sourceWidth:Int = CVPixelBufferGetWidth(cvp)
755 | let cropWidth = Int(toRect.width * CGFloat(sourceWidth))
756 | let cropHeight = Int(toRect.height * CGFloat(sourceHeight))
757 | let cropX = Int(toRect.origin.x * CGFloat(sourceWidth))
758 | let cropY = Int(toRect.origin.y * CGFloat(sourceHeight))
759 | return resizePixelBuffer(cvp, cropX: cropX, cropY: cropY, cropWidth: cropWidth, cropHeight: cropHeight, scaleWidth: cropHeight, scaleHeight: cropWidth)
760 | }
761 |
762 | //MARK: Rectangle Conversion
763 | func visionRectToNormal(_ visionRect: CGRect)->CGRect {
764 | var newRect = visionRect
765 | newRect.origin.y = 1 - visionRect.origin.y - visionRect.size.height
766 | return newRect
767 | }
768 | func normalRectToVision(_ normalRect: CGRect) -> CGRect {
769 | return CGRect(
770 | x: normalRect.origin.x,
771 | y: 1 - (normalRect.origin.y + normalRect.size.height),
772 | width: normalRect.size.width,
773 | height: normalRect.size.height
774 | )
775 | }
776 | func rectToDictionary(_ rect:CGRect) -> [String: Any] {
777 | return [
778 | "x": rect.origin.x,
779 | "y": rect.origin.y,
780 | "height": rect.size.height,
781 | "width": rect.size.width]
782 | }
783 | func dictionaryToRect(_ dic:[String: Any]) -> CGRect? {
784 | guard
785 | let x = dic["x"] as? CGFloat,
786 | let y = dic["y"] as? CGFloat,
787 | let height = dic["height"] as? CGFloat,
788 | let width = dic["width"] as? CGFloat
789 | else { return nil }
790 | return CGRect(x: x, y: y, width: width,height: height)
791 | }
792 | func CVPtoUIImage(_ cvp: CVPixelBuffer) -> UIImage? {
793 | let h = CVPixelBufferGetHeight(cvp)
794 | let w = CVPixelBufferGetWidth(cvp)
795 | let ci = CIImage(cvPixelBuffer: cvp)
796 | let tc = CIContext(options: nil)
797 | guard let cg = tc.createCGImage(ci, from: CGRect(x: 0, y: 0, width: w, height: h)) else { return nil}
798 | let temp = UIImage(cgImage: cg)
799 | return temp
800 | }
801 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVisionImageView.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | @objc(RHDVisionImageView)
3 | class RHDVisionImageView: UIView {
4 | var _id:String = ""
5 | var imageView = UIImageView()
6 | var manager:RHDVisionImageViewManager?
7 | var isAdded = false
8 | @objc var isMirrored:Bool = false
9 | @objc var resizeMode:String {
10 | get { return "fuck you" }
11 | set(c) { imageView.contentMode = resizeModeToContentMode(c)
12 | if(!isAdded) {
13 | addSubview(imageView);
14 | isAdded = true
15 | }
16 | }
17 | }
18 | var image:UIImage? {
19 | get {return imageView.image}
20 | set(i) { imageView.image = isMirrored ? i?.withHorizontallyFlippedOrientation() : i }
21 | }
22 | @objc var id: String {
23 | get { return _id }
24 | set(newID) {
25 | if let m = manager {
26 | _id = newID
27 | m.views[_id] = self
28 | }
29 | }
30 | }
31 | override func layoutSubviews() {
32 | super.layoutSubviews()
33 | imageView.frame = bounds
34 | }
35 | var _interval:Double = 0.25
36 | @objc var interval:Double {
37 | get { return _interval}
38 | set(d) { _interval = d }
39 | }
40 | var lastAddMS = Date(timeIntervalSinceNow: 0)
41 | func shouldUpdateImage() -> Bool {
42 | return lastAddMS.timeIntervalSinceNow < 0
43 | }
44 | func addImage(_ i:UIImage) {
45 | //SLOW DOWN!!!!
46 | if shouldUpdateImage() {
47 | image = i
48 | lastAddMS = Date(timeIntervalSinceNow: _interval)
49 | print("addImage: RESET")
50 | } else {
51 | print("addImage: SKIPPING")
52 | }
53 | }
54 | }
55 | func resizeModeToContentMode(_ resizeMode: String) -> UIViewContentMode {
56 | switch resizeMode {
57 | case "cover": return UIViewContentMode.scaleAspectFill
58 | case "stretch": return UIViewContentMode.scaleToFill
59 | case "contain": return UIViewContentMode.scaleAspectFit
60 | case "center": return UIViewContentMode.center
61 | case "repeat": return UIViewContentMode.redraw
62 | default: return UIViewContentMode.scaleAspectFill
63 | }
64 | }
65 | func contentModeToResizeMode(_ contentMode: UIViewContentMode) -> String {
66 | switch(contentMode) {
67 | case .scaleAspectFill: return "cover"
68 | case .scaleToFill: return "stretch"
69 | case .scaleAspectFit: return "contain"
70 | case .center: return "center"
71 | case .redraw: return "redraw"
72 | default: return ""
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVisionImageViewManager.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | @objc(RHDVisionImageViewManager)
3 | class RHDVisionImageViewManager: RCTViewManager {
4 | static var instance:RHDVisionImageViewManager?
5 | var views:[String: RHDVisionImageView] = [:]
6 | override init() {
7 | super.init()
8 | RHDVisionImageViewManager.instance = self
9 | }
10 | override func view() -> UIView! {
11 | let i = RHDVisionImageView()
12 | i.manager = self
13 | return i
14 | }
15 |
16 | override class func requiresMainQueueSetup() -> Bool {
17 | return false
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVisionView.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 | import AVKit
3 | @objc(RHDVisionCameraView)
4 | class RHDVisionCameraView: UIView {
5 | var pl:AVCaptureVideoPreviewLayer?
6 | var manager: RHDVisionCameraViewManager?
7 | var _gravity:String = AVLayerVideoGravityResizeAspectFill
8 | @objc var gravity:String {
9 | get {return _gravity == AVLayerVideoGravityResizeAspectFill ? "fill" : "resize"
10 | }
11 | set(newGravity) {
12 | _gravity = newGravity == "fill" ? AVLayerVideoGravityResizeAspectFill : AVLayerVideoGravityResizeAspect
13 | if let p = pl {
14 | p.videoGravity = _gravity
15 | }
16 | }
17 | }
18 | func attach(_ session: AVCaptureSession) {
19 | DispatchQueue.main.async(){
20 | guard let pl = AVCaptureVideoPreviewLayer(session: session) else { return }
21 | pl.frame = self.bounds
22 | pl.videoGravity = self._gravity
23 | self.layer.addSublayer(pl)
24 | self.pl = pl
25 | }
26 | }
27 | func detach() {
28 | if let pl = self.pl {
29 | pl.removeFromSuperlayer();
30 | }
31 | pl = nil
32 | if let manager = self.manager {
33 | manager.closedView(self)
34 | }
35 | }
36 | override func layoutSubviews() {
37 | super.layoutSubviews()
38 | if let c = pl?.connection {
39 | if c.isVideoOrientationSupported {
40 | c.videoOrientation = deviceOrientationtoAVOrientation(UIDevice.current.orientation)
41 | }
42 | }
43 | pl?.frame = self.bounds
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/ios/RHDVision/RHDVisionViewManager.swift:
--------------------------------------------------------------------------------
1 | import AVKit
2 | @objc(RHDVisionCameraViewManager)
3 | class RHDVisionCameraViewManager: RCTViewManager {
4 | static var currentView: RHDVisionCameraView?
5 | override func view() -> UIView {
6 | if let v = RHDVisionCameraViewManager.currentView {
7 | v.detach()
8 | RHDVisionCameraViewManager.currentView = nil
9 | }
10 | let v = RHDVisionCameraView()
11 | RHDVisionCameraViewManager.currentView = v
12 | v.manager = self
13 | return v
14 | }
15 | override class func requiresMainQueueSetup() -> Bool {
16 | return false
17 | }
18 | func closedView(_ v:RHDVisionCameraView) {}
19 | }
20 |
--------------------------------------------------------------------------------
/ios/rn-swift-bridge.m:
--------------------------------------------------------------------------------
1 | #import
2 | #import
3 | #import
4 | @interface RCT_EXTERN_MODULE(RHDVisionModule, RCTEventEmitter)
5 | RCT_EXTERN_METHOD(start:(BOOL)cameraFront resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
6 | RCT_EXTERN_METHOD(stop:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
7 | RCT_EXTERN_METHOD(getImageDimensions:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
8 | RCT_EXTERN_METHOD(attachCameraView:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
9 | RCT_EXTERN_METHOD(detachCameraView:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
10 | RCT_EXTERN_METHOD(cameraIsView:(BOOL)newIsCameraView resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
11 | RCT_EXTERN_METHOD(saveFrame:(NSString *)disposition region:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
12 | RCT_EXTERN_METHOD(removeSaveFrame:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
13 | RCT_EXTERN_METHOD(detectFaces:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
14 | RCT_EXTERN_METHOD(removeDetectFaces:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
15 | RCT_EXTERN_METHOD(trackObject:(NSString *)name region:(NSString *)region dict:(NSDictionary *)dict resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
16 | RCT_EXTERN_METHOD(removeTrackObject:(NSString *)name region:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
17 | RCT_EXTERN_METHOD(removeTrackObjects:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
18 | RCT_EXTERN_METHOD(applyMLClassifier:(NSString *)thisURL field:(NSString *)field resultMax:(NSInteger)resultMax resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
19 | RCT_EXTERN_METHOD(applyMLGenerator:(NSString *)thisURL field:(NSString *)field handler:(NSString *)handler resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
20 | RCT_EXTERN_METHOD(applyMLBottleneck:(NSString *)thisURL field:(NSString *)field resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
21 | RCT_EXTERN_METHOD(applyMLGeneric:(NSString *)thisURL field:(NSString *)field resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
22 | RCT_EXTERN_METHOD(removeML:(NSString *)thisURL field:(NSString *)field resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
23 | RCT_EXTERN_METHOD(applyBottleneckClassifier:(NSString *)thisURL toField:(NSString *)toField toModel:(NSString *)toModel maxResults:(NSInteger)maxResults resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
24 | RCT_EXTERN_METHOD(applyBottleneckGenerator:(NSString *)thisURL handler:(NSString *)handler toField:(NSString *)toField toModel:(NSString *)toModel resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
25 | RCT_EXTERN_METHOD(applyBottleneckBottleneck:(NSString *)thisURL toField:(NSString *)toField toModel:(NSString *)toModel resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
26 | RCT_EXTERN_METHOD(applyBottleneckGeneric:(NSString *)thisURL toField:(NSString *)toField toModel:(NSString *)toModel resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
27 | RCT_EXTERN_METHOD(removeBottleneck:(NSString *)modelURL fromField:(NSString *)fromField fromModel:(NSString *)fromModel resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
28 | RCT_EXTERN_METHOD(removeBottlenecks:(NSString *)fromField fromModel:(NSString *)fromModel resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
29 | RCT_EXTERN_METHOD(setRegion:(NSString *)region rectDic:(NSDictionary *)rectDic resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
30 | RCT_EXTERN_METHOD(removeRegion:(NSString *)region resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
31 | RCT_EXTERN_METHOD(saveMultiArraykey:(NSString *)key resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject);
32 | @end
33 | @interface RCT_EXTERN_MODULE(RHDVisionImageViewManager, RCTViewManager)
34 | RCT_EXPORT_VIEW_PROPERTY(isMirrored, BOOL);
35 | RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString *);
36 | RCT_EXPORT_VIEW_PROPERTY(id, NSString *);
37 | RCT_EXPORT_VIEW_PROPERTY(interval, double);
38 | @end
39 | @interface RCT_EXTERN_MODULE(RHDVisionCameraViewManager, RCTViewManager)
40 | RCT_EXPORT_VIEW_PROPERTY(gravity, NSString *);
41 | @end
--------------------------------------------------------------------------------
/module.js:
--------------------------------------------------------------------------------
1 | import { NativeModules, NativeEventEmitter } from "react-native";
2 | const RNVNative = NativeModules.RHDVisionModule;
3 | //#region Event Management
4 | var cachedEmitter = null;
5 | const getEmitter = () => {
6 | if (!cachedEmitter) {
7 | cachedEmitter = new NativeEventEmitter(RNVNative);
8 | }
9 | return cachedEmitter;
10 | };
11 | var cachedListener = null;
12 | const addListener = (region, key, cb) => {
13 | const newKey = region + "_" + key;
14 | if (!cachedListener) {
15 | cachedListener = getEmitter().addListener("RNVision", masterHandler);
16 | }
17 | cachedHandlers[newKey] = cb;
18 | };
19 | const masterHandler = body => {
20 | const region = String(body["region"]);
21 | const thisKey = String(body["key"]);
22 | const key = region + "_" + thisKey;
23 | if (typeof cachedHandlers[key] == "function") {
24 | cachedHandlers[key](body);
25 | } else {
26 | console.log("NO handler for ", key, region, thisKey);
27 | }
28 | };
29 | var cachedHandlers = {};
30 | const removeListener = (region, key) => {
31 | const newKey = String(region) + "_" + String(key);
32 | delete cachedHandlers[newKey];
33 | };
34 | //#endregion
35 | //#region Lifecycle management
36 | const start = async cameraFront => {
37 | return await RNVNative.start(cameraFront);
38 | };
39 | const stop = async () => {
40 | cachedHandlers = {};
41 | cachedListener.remove();
42 | cachedEmitter = null;
43 | return await RNVNative.stop();
44 | };
45 | const attachCameraView = async () => {
46 | return await RNVNative.attachCameraView();
47 | };
48 | const isCameraFrame = async isTrue => {
49 | return await RNVNative.isCameraView(isTrue);
50 | };
51 | const getImageDimensions = async () => {
52 | return await RNVNative.getImageDimensions();
53 | };
54 | var ImageDimensionListener = null;
55 | const setImageDimensionListener = cb => {
56 | if (!cb) return removeImageDimensionListener();
57 | if (typeof cb != "function")
58 | throw new Error("Argument must be a function in setImageDimensionListener");
59 | if (ImageDimensionListener) removeImageDimensionListener;
60 | ImageDimensionListener = getEmitter().addListener("RNVisionImageDim", cb);
61 | return true;
62 | };
63 | const removeImageDimensionListener = () => {
64 | if (ImageDimensionListener) ImageDimensionListener.remove();
65 | ImageDimensionListener = null;
66 | return true;
67 | };
68 | //#endregion
69 | //#region Save Frame
70 | const saveFrame = async (region, disposition, callback) => {
71 | //Add a listener
72 | addListener(region, "saveFrame", callback);
73 | return await RNVNative.saveFrame(disposition, region);
74 | };
75 | const removeSaveFrame = async region => {
76 | removeListener(region, "saveFrame");
77 | return await RNVNative.removeSaveFrame(region);
78 | };
79 | const saveFrameOnce = (region, disposition) => {
80 | return new Promise((resolve, reject) => {
81 | saveFrame(region, disposition, async body => {
82 | await removeSaveFrame(region);
83 |
84 | return body;
85 | });
86 | });
87 | };
88 | //#endregion
89 | //#region Face Detection
90 | const detectFaces = async (region, handler) => {
91 | const key = await RNVNative.detectFaces(region); // Key should be "detectFaces"
92 | addListener(region, key, body => {
93 | return handler(body.data);
94 | });
95 | return key;
96 | };
97 | const removeDetectFaces = async region => {
98 | removeListener(region, "detectFaces");
99 | return await RNVNative.removeDetectFaces(region);
100 | };
101 | const detectFacesOnce = region => {
102 | return new Promise((resolve, reject) => {
103 | detectFaces(region, body => {
104 | removeDetectFaces(region);
105 | resolve(body);
106 | });
107 | });
108 | };
109 | //#endregion
110 | //#region Object Tracking
111 | var boxHandlers = {};
112 | var boxListener = null;
113 | const trackObject = async (region, name, boxDictionary, callback) => {
114 | if ((await RNVNative.trackObject(name, region, boxDictionary)) !== null) {
115 | addListener(region, name, callback);
116 | return true;
117 | } else return false;
118 | };
119 | const removeTrackObject = async (region, name) => {
120 | removeListener(region, name);
121 | return await RNVNative.removeTrackObject(name, region);
122 | };
123 | const removeTrackObjects = async region => {
124 | const data = await RNVNative.removeTrackObjects(region);
125 | if (data.removedKeys) {
126 | data.removedKeys.forEach(removedKey => {
127 | removeListener(removedKey);
128 | });
129 | }
130 | return true;
131 | };
132 | //#endregion
133 | //#region Region Management
134 | const setRegion = async (region, rectangle) => {
135 | return await RNVNative.setRegion(region, rectangle);
136 | };
137 | const removeRegion = async region => {
138 | return await RNVNative.removeRegion(region);
139 | };
140 | //#endregion
141 | //#region Machine Learning Models
142 | const applyMLClassifier = async (
143 | region,
144 | modelURL,
145 | maxResults,
146 | callback = null
147 | ) => {
148 | if (typeof maxResults == "function") {
149 | callback = maxResults;
150 | maxResults = 5;
151 | }
152 | const key = await RNVNative.applyMLClassifier(modelURL, region, maxResults);
153 | if (key) {
154 | addListener(region, key, body => {
155 | callback(body.data);
156 | });
157 | }
158 | return key;
159 | };
160 | const applyMLClassifierOnce = (region, modelURL, maxResults) => {
161 | return new Promise((resolve, reject) => {
162 | applyMLClassifier(region, modelURL, maxResults, body => {
163 | removeML(region, modelURL);
164 | resolve(body);
165 | });
166 | });
167 | };
168 | const applyMLGenerator = async (region, modelURL, handler, callback) => {
169 | const key = await RNVNative.applyMLGenerator(modelURL, region, handler);
170 | if (handler != "view" && typeof callback == "function") {
171 | addListener(region, key, data => {
172 | callback(data.data);
173 | });
174 | }
175 | return key;
176 | };
177 | const applyMLBottleneck = async modelURL => {
178 | return await RNVNative.applyMLBottleneck(modelURL);
179 | };
180 | const applyMLGeneric = async (region, modelURL, callback) => {
181 | const key = await RNVNative.applyMLGeneric(modelURL, region);
182 | if (key) {
183 | addListener(region, key, body => {
184 | callback(body.data);
185 | });
186 | }
187 | return key;
188 | };
189 | const applyMLGenericOnce = (region, modelURL) => {
190 | return new Promise((resolve, reject) => {
191 | applyMLGeneric(region, modelURL, body => {
192 | removeML(region, modelURL);
193 | resolve(body);
194 | });
195 | });
196 | };
197 | const removeML = async (region, modelURL) => {
198 | removeListener(region, modelURL);
199 | return await RNVNative.removeML(modelURL, region);
200 | };
201 | //#endregion
202 | //#region ML Bottlenecks
203 | const REGION_ALL = "";
204 | const applyBottleneckClassifier = async (
205 | modelURL,
206 | region,
207 | toModelURL,
208 | maxResults,
209 | callback = null
210 | ) => {
211 | if (typeof maxResults == "function") {
212 | callback = maxResults;
213 | maxResults = 5;
214 | }
215 | const key = await RNVNative.applyBottleneckClassifier(
216 | modelURL,
217 | region,
218 | toModelURL,
219 | maxResults
220 | );
221 | if (key) {
222 | addListener(key, body => {
223 | callback(body.data);
224 | });
225 | }
226 | };
227 | const applyBottleneckGenerator = async (
228 | modelURL,
229 | region,
230 | toModelURL,
231 | handlerOrCallback
232 | ) => {
233 | const handler =
234 | typeof handlerOrCallback == "function" ? "sendEvent" : handlerOrCallback;
235 | const key = await RNVNative.applyBottleneckGenerator(
236 | modelURL,
237 | handler,
238 | region,
239 | toModelURL
240 | );
241 | if (key && handler == "sendEvent") addListener(key, handlerOrCallback);
242 | };
243 | const applyBottleneckBottleneck = async (modelURL, region, toModelURL) => {
244 | return await RNVNative.applyBottleneckBottleneck(modelURL, toModelURL);
245 | };
246 | const applyBottleneckGeneric = async (
247 | modelURL,
248 | region,
249 | toModelURL,
250 | callback
251 | ) => {
252 | const key = await RNVNative.applyBottleneckGeneric(
253 | modelURL,
254 | region,
255 | toModelURL
256 | );
257 | if (key) {
258 | addListener(key, body => {
259 | callback(body.data);
260 | });
261 | }
262 | };
263 | const removeBottleneck = async (modelURL, region, fromModelURL) => {
264 | removeListener(modelURL);
265 | return await RNVNative.removeBottleneck(modelURL, region, fromModelURL);
266 | };
267 | const removeBottlenecks = async (region, fromModelURL) => {
268 | const out = await RNVNative.removeBottlenecks(region, fromModelURL);
269 | if (out) {
270 | if (out.removedBottlenecks) {
271 | out.removedBottlenecks.forEach(key => {
272 | removeListener(key);
273 | });
274 | }
275 | }
276 | };
277 | //#endregion
278 | //#region MultiArray access
279 | //Returns URL of saved file
280 | const saveMultiArray = async name => {
281 | return await RNVNative.saveMultiArray(name);
282 | };
283 | //#endregion
284 | //#region Metadata Capture
285 | var MDListener = null;
286 | const handleMetadata = async callback => {
287 | removeMetadataListener();
288 | MDListener = getEmitter().addListener("RNVisionMetaData", callback);
289 | };
290 | const removeMetadataListener = {
291 | if(MDListener) {
292 | MDListener.remove();
293 | }
294 | };
295 | //#endregion
296 | //#region Exports
297 | export {
298 | REGION_ALL,
299 | start,
300 | stop,
301 | attachCameraView,
302 | isCameraFrame,
303 | getImageDimensions,
304 | setImageDimensionListener,
305 | removeImageDimensionListener,
306 | saveFrame,
307 | saveFrameOnce,
308 | removeSaveFrame,
309 | detectFaces,
310 | detectFacesOnce,
311 | removeDetectFaces,
312 | trackObject,
313 | removeTrackObject,
314 | setRegion,
315 | removeRegion,
316 | applyMLClassifier,
317 | applyMLClassifierOnce,
318 | applyMLGenerator,
319 | applyMLBottleneck,
320 | applyMLGeneric,
321 | applyMLGenericOnce,
322 | applyBottleneckClassifier,
323 | applyBottleneckGenerator,
324 | applyBottleneckBottleneck,
325 | applyBottleneckGeneric,
326 | removeML,
327 | removeBottleneck,
328 | removeBottlenecks,
329 | handleMetadata,
330 | removeMetadataListener
331 | };
332 | //#endregion
333 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "react-native-vision",
3 | "version": "2.0.1",
4 | "description": "React Native module for enhanced camera and vision in iOS",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1",
8 | "bridge": "react-native-swift-bridge",
9 | "watch": "react-native-swift-bridge --watch"
10 | },
11 | "repository": {
12 | "type": "git",
13 | "url": "git+https://github.com/rhdeck/react-native-vision.git"
14 | },
15 | "author": "Ray Deck",
16 | "license": "MIT",
17 | "bugs": {
18 | "url": "https://github.com/rhdeck/react-native-vision/issues"
19 | },
20 | "homepage": "https://github.com/rhdeck/react-native-vision#readme",
21 | "dependencies": {
22 | "react": "^16.3.2",
23 | "react-native-swift": "^1.2.0",
24 | "request": "^2.88.0",
25 | "rimraf": "^2.6.2"
26 | },
27 | "isSwift": true,
28 | "devDependencies": {
29 | "glob": "^7.1.3",
30 | "react-native": "*",
31 | "react-native-swift-bridge": "^2.1.0",
32 | "@raydeck/xcode": "^2.2.0"
33 | },
34 | "peerDependencies": {
35 | "react-native-camera-ios-enable": "^1.0.1"
36 | },
37 | "rnpm": {
38 | "plugin": "plugin.js"
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/plugin.js:
--------------------------------------------------------------------------------
1 | const { spawnSync } = require("child_process");
2 | const xcode = require("@raydeck/xcode");
3 | const { join, basename, isAbsolute } = require("path");
4 | const {
5 | existsSync,
6 | renameSync,
7 | writeFileSync,
8 | createWriteStream
9 | } = require("fs");
10 | const { sync } = require("glob");
11 | const request = require("request");
12 | const rimraf = require("rimraf");
13 | module.exports = [
14 | {
15 | name: "add-mlmodel ",
16 | description: "Add and compile mlmodel into your IOS project assets",
17 | func: async (argv, _, options) => {
18 | let tempPath;
19 | const outPath = options.outPath ? options.outPath : ".";
20 | const finalLocation = join(outPath, basename(argv[0]) + "c");
21 | const projectPath = sync(
22 | join(process.cwd(), "ios", "**", "project.pbxproj")
23 | )[0];
24 | if (!projectPath) {
25 | console.error("Cannot find an XCode project to modify: aborting");
26 | return;
27 | }
28 | if (existsSync(finalLocation)) {
29 | console.log(
30 | "Aborting compile: The mlmodelc directory already exists at ",
31 | finalLocation
32 | );
33 | addToProject(finalLocation, projectPath);
34 | const base = basename(finalLocation);
35 | const parts = base.split(".");
36 | parts.pop();
37 | const newBase = parts.join("."); //.replace("-", "_");
38 | console.log(
39 | `Model added. You may refer to it as ${newBase} in your code.`
40 | );
41 | return;
42 | }
43 | const finish = tempPath => {
44 | if (tempPath) {
45 | renameSync(tempPath, finalLocation);
46 | }
47 | addToProject(finalLocation, projectPath);
48 | const base = basename(finalLocation);
49 | const parts = base.split(".");
50 | parts.pop();
51 | const newBase = parts.join("."); //.replace("-", "_");
52 | console.log(
53 | `Model added. You may refer to it as ${newBase} in your code.`
54 | );
55 | };
56 |
57 | if (argv[0].includes("://")) {
58 | //URL!
59 | //transfer URL to our temp Path
60 | console.log(
61 | "I was passed a URL - attempting download. Big models can take a little time"
62 | );
63 | const outFile = join(process.env.TMPDIR, basename(argv[0]));
64 | request(argv[0])
65 | .pipe(createWriteStream(outFile))
66 | .on("finish", () => {
67 | tempPath = compileMLModel(outFile);
68 | finish(tempPath);
69 | });
70 | } else {
71 | tempPath = compileMLModel(argv[0]);
72 | finish(tempPath);
73 | }
74 | }
75 | }
76 | ];
77 | const compileMLModel = (originPath, destinationPath = process.env.TMPDIR) => {
78 | const result = join(destinationPath, basename(originPath) + "c");
79 | try {
80 | if (existsSync(result)) rimraf.sync(result);
81 | } catch (e) {}
82 | spawnSync(
83 | "xcrun",
84 | ["coremlcompiler", "compile", originPath, destinationPath],
85 | {
86 | stdio: "inherit"
87 | }
88 | );
89 | //We know what the tail result will be
90 | return existsSync(result) ? result : false;
91 | };
92 | const addToProject = (fileToAdd, projectPath) => {
93 | if (!isAbsolute(fileToAdd)) fileToAdd = join(process.cwd(), fileToAdd);
94 | const project = xcode.project(projectPath);
95 | project.parseSync();
96 | console.log("Adding file ", fileToAdd);
97 | try {
98 | project.removeResourceFile(fileToAdd);
99 | } catch (e) {}
100 | project.addResourceFile(fileToAdd, {
101 | target: project.getFirstTarget().uuid,
102 | lastKnownFileType: "folder"
103 | });
104 | const outText = project.writeSync();
105 | writeFileSync(projectPath, outText);
106 | };
107 |
--------------------------------------------------------------------------------
/region.js:
--------------------------------------------------------------------------------
1 | import React, { Component } from "react";
2 | import PropTypes from "prop-types";
3 | import { RNVisionConsumer } from "./wrapper";
4 | import * as Module from "./module";
5 | import { bundleURL } from "./RNSwiftBridge";
6 | class Region extends Component {
7 | state = {
8 | classifiers: null,
9 | generators: null,
10 | generics: null,
11 | bottlenecks: null,
12 | classifications: {},
13 | genericResults: {},
14 | frameListener: null
15 | };
16 | static getDerivedStateFromProps(nextProps, prevState) {
17 | var ret = prevState;
18 | if (!ret.todos) ret.todos = {};
19 | if (prevState.frameListener != nextProps.onFrameCaptured) {
20 | if (nextProps.onFrameCaptured) {
21 | //Turn it on!
22 | Module.saveFrame(
23 | nextProps.region,
24 | nextProps.frameDisposition ? nextProps.frameDisposition : "file",
25 | body => {
26 | nextProps.onFrameCaptured(body);
27 | }
28 | );
29 | } else {
30 | Module.removeSaveFrame(nextProps.region);
31 | }
32 | ret.frameListener = nextProps.onFrameCaptured;
33 | }
34 | if (nextProps.classifiers != prevState.classifiers) {
35 | if (nextProps.classifiers)
36 | if (Array.isArray(nextProps.classifiers))
37 | nextProps.classifiers
38 | .filter(({ url }) => Boolean)
39 | .map(o => ({ ...o, url: fixURL(o.url) }))
40 | .forEach(obj => {
41 | const { url, max: maxCount } = obj;
42 | if (
43 | prevState.classifiers &&
44 | prevState.classifiers.filter(o => {
45 | return o.url == url && o.max == maxCount;
46 | }).length
47 | )
48 | return;
49 |
50 | if (!ret.todos.addMLClassifiers) ret.todos.addMLClassifiers = [];
51 | ret.todos.addMLClassifiers.push(obj);
52 | });
53 | else
54 | Object.keys(nextProps.classifiers).forEach(k => {
55 | const maxCount = nextProps.classifiers[k];
56 | if (
57 | prevState.classifiers[k] &&
58 | prevState.classifiers[k] == maxCount
59 | )
60 | return;
61 | if (!ret.todos.addMLClassifiers) ret.todos.addMLClassifiers = [];
62 | if (k)
63 | ret.todos.addMLClassifiers.push({
64 | url: fixURL(k),
65 | max: maxCount
66 | });
67 | });
68 | if (prevState.classifiers)
69 | if (Array.isArray(prevState.classifiers))
70 | prevState.classifiers.forEach(obj => {
71 | if (
72 | !nextProps.classifiers ||
73 | !nextProps.classifiers.filter(nobj => {
74 | return nobj.url == obj.url;
75 | }).length
76 | ) {
77 | Module.removeML(nextProps.region, obj.url);
78 | delete ret.classifications[obj.url];
79 | }
80 | });
81 | else
82 | Object.keys(prevState.classifiers).forEach(k => {
83 | if (
84 | !nextProps.classifiers ||
85 | typeof nextProps.classifiers[k] == "undefined"
86 | ) {
87 | Module.removeML(nextProps.region, k);
88 | delete ret.classifications[k];
89 | }
90 | });
91 | ret.classifiers = nextProps.classifiers;
92 | }
93 | if (nextProps.generators != prevState.generators) {
94 | if (nextProps.generators)
95 | if (Array.isArray(nextProps.generators))
96 | nextProps.generators
97 | .map(o => ({ ...o, url: fixURL(o.url) }))
98 | .forEach(obj => {
99 | const { url, type } = obj;
100 | if (
101 | prevState.generators &&
102 | prevState.generators.filter(o => {
103 | const ret = o.url == url && o.type == type;
104 | return ret;
105 | }).length
106 | )
107 | return;
108 | if (!ret.todos.addMLGenerators) ret.todos.addMLGenerators = [];
109 | console.log("Adding a generator!!!!", obj);
110 | ret.todos.addMLGenerators.push(obj);
111 | });
112 | if (prevState.generators)
113 | if (Array.isArray(prevState.generators))
114 | prevState.generators.forEach(obj => {
115 | if (
116 | !nextProps.generators ||
117 | !nextProps.generators.filter(nobj => {
118 | return nobj.url == obj.url;
119 | }).length
120 | ) {
121 | Module.removeML(nextProps.region, obj.url);
122 | delete ret.generators;
123 | }
124 | });
125 | ret.generators = nextProps.generators;
126 | }
127 | if (nextProps.bottlenecks != prevState.bottlenecks) {
128 | //Update bottlenecks - keys are the bottleneck URLS, values are an object of keys generators, generics, classifiers - examine them all
129 | if (nextProps.bottlenecks)
130 | Object.keys(nextProps.bottlenecks).forEach(k => {
131 | //@TODO MUST ACTUALLY CREATE BOTTLENECK SUPPORT
132 | const v = nextProps.bottlenecks[k];
133 | });
134 | Object.keys(prevState.bottlenecks).forEach(k => {
135 | //Look for no-longer operative bottlenecks and remove them
136 | if (typeof nextProps.classifiers[k] == "undefined") {
137 | Module.removeML(nextProps.region, k);
138 | }
139 | });
140 | ret.bottlenecks = nextProps.bottlenecks;
141 | }
142 | return ret;
143 | }
144 | componentDidUpdate() {
145 | this.manageTodo();
146 | }
147 | componentDidMount() {
148 | this.manageTodo();
149 | }
150 | componentWillUnmount() {
151 | try {
152 | Module.removeSaveFrame(this.props.region);
153 | } catch (e) {}
154 | this.state.classifiers &&
155 | this.state.classifiers.map(u =>
156 | Module.removeML(this.props.region, u.url)
157 | );
158 | this.state.generators &&
159 | this.state.generators.map(u => Module.removeML(this.props.region, u));
160 | this.state.generics &&
161 | this.state.generics.map(u => Module.removeML(this.props.region, url));
162 | this.state.bottlenecks &&
163 | Object.keys(this.state.bottlenecks).map(u =>
164 | Module.removeML(this.props.region, u)
165 | );
166 | }
167 | manageTodo() {
168 | if (this.state.todos) {
169 | Object.keys(this.state.todos).forEach(k => {
170 | if (typeof this[k] == "function") this[k](this.state.todos[k]);
171 | else console.log("No todo function for key ", k);
172 | });
173 | // console .log("Running setstate from region todo");
174 | //this.setState({ todos: null });
175 | }
176 | }
177 | cachedDate = {};
178 | addMLClassifiers(classifiers) {
179 | classifiers.forEach(obj => {
180 | const url = obj.url;
181 | const maxCount = obj.max;
182 | Module.applyMLClassifier(
183 | this.props.region,
184 | url,
185 | maxCount,
186 | newClassifications => {
187 | const now = Date.now();
188 | if (now < parseInt(this.cachedDate[this.props.region + url]) + 50)
189 | return;
190 | this.setState(({ classifications }) => {
191 | return {
192 | classifications: {
193 | ...classifications,
194 | [url]: newClassifications
195 | }
196 | };
197 | });
198 | this.cachedDate[this.props.region + url] = now;
199 | }
200 | );
201 | });
202 | }
203 | addMLGenerators(generators) {
204 | generators.forEach(o => {
205 | const url = o.url;
206 | Module.applyMLGenerator(this.props.region, url, o.type, data => {
207 | if (Array.isArray(this.state.generators))
208 | this.state.generators
209 | .filter(obj => {
210 | return obj.url == url;
211 | })
212 | .forEach(obj => {
213 | obj.callback(data);
214 | });
215 | else this.state.generators[url].callback(data);
216 | });
217 | });
218 | }
219 | render() {
220 | return typeof this.props.children == "function" ? (
221 |
222 | {value => {
223 | if (!value) return;
224 | const region = value.regions[this.props.region];
225 | const regionInfo = {
226 | ...region,
227 | imageDimensions: value.imageDimensions,
228 | isCameraFront: value.isCameraFront,
229 | classifications: this.state.classifications,
230 | classification:
231 | this.state.classifications &&
232 | Object.values(this.state.classifications)[0],
233 | label:
234 | this.state.classifications &&
235 | Object.values(this.state.classifications)[0] &&
236 | Object.values(this.state.classifications)[0][0].label,
237 | confidence:
238 | this.state.classifications &&
239 | Object.values(this.state.classifications)[0] &&
240 | Object.values(this.state.classifications)[0][0].confidence,
241 | genericResults: this.state.genericResults
242 | };
243 | return this.props.children(regionInfo);
244 | }}
245 |
246 | ) : (
247 | this.props.children && this.props.children
248 | );
249 | }
250 | }
251 | Region.propTypes = {
252 | region: PropTypes.string.isRequired,
253 | classifiers: PropTypes.array,
254 | generators: PropTypes.array,
255 | generics: PropTypes.array,
256 | bottlenecks: PropTypes.object,
257 | // children: PropTypes.func,
258 | onFrameCaptured: PropTypes.func,
259 | frameDisposition: PropTypes.string
260 | };
261 | const fixURL = url => {
262 | //ask if this is a URL
263 | if (!url) {
264 | throw "Bad URL PASSED! I AM SAD";
265 | }
266 | if (url.includes("://")) return url;
267 | if (url.endsWith(".mlmodel")) url = url + "c";
268 | if (!url.endsWith(".mlmodelc")) url = url + ".mlmodelc";
269 | const final = bundleURL + url;
270 | return final;
271 | };
272 |
273 | const RNVDefaultRegion = props => ;
274 | export default Region;
275 | export { Region as RNVRegion, RNVDefaultRegion, fixURL };
276 |
--------------------------------------------------------------------------------
/styleview.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import { RNVisionProvider } from "./wrapper";
3 | import { RNVDefaultRegion, fixURL } from "./region";
4 | import { RNVImageView } from ".";
5 |
6 | const StyleView = props => {
7 | const generatorURL = fixURL(props.generator);
8 | return (
9 |
10 |
14 |
15 |
16 | );
17 | };
18 |
19 | export { StyleView };
20 |
--------------------------------------------------------------------------------
/view.js:
--------------------------------------------------------------------------------
1 | import { requireNativeComponent, NativeModules, View } from "react-native";
2 | import { attachCameraView } from "./module";
3 | import { RNVisionConsumer } from "./wrapper";
4 | import PropTypes from "prop-types";
5 | import React, { Component } from "react";
6 | import Region from "./region";
7 | import { RHDVisionCameraView } from "./RNSwiftBridge";
8 | const { Provider, Consumer: CameraConsumer } = React.createContext();
9 | class RNVision extends Component {
10 | state = {
11 | viewPortRectangle: null,
12 | height: null,
13 | width: null
14 | };
15 | stringified = "";
16 | constructor(props) {
17 | super(props);
18 | }
19 | componentDidMount() {
20 | attachCameraView();
21 | }
22 | onLayout(e) {
23 | const layout = e.nativeEvent.layout;
24 | this.setState({ height: layout.height, width: layout.width });
25 | }
26 | render() {
27 | return (
28 | {
31 | this.onLayout(e);
32 | }}
33 | >
34 |
35 |
44 | {typeof this.props.children == "function" ? (
45 |
46 | {value => {
47 | const newValue = {
48 | ...value,
49 | viewPortDimensions: {
50 | height: this.state.height,
51 | width: this.state.width
52 | },
53 | viewPortGravity: this.props.gravity
54 | };
55 | return this.props.children(newValue);
56 | }}
57 |
58 | ) : (
59 | this.props.children
60 | )}
61 |
62 |
63 | );
64 | }
65 | }
66 | RNVision.defaultProps = {
67 | gravity: "fill"
68 | };
69 | RNVision.propTypes = {
70 | gravity: PropTypes.string.isRequired
71 | };
72 | export { RNVision as RNVCameraView, CameraConsumer as RNVCameraConsumer };
73 | export default RNVision;
74 |
--------------------------------------------------------------------------------
/visioncamera.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 | import { RNVisionProvider } from "./wrapper";
3 | import { RNVCameraView } from "./view";
4 | import { CameraRegion } from "./cameraregion";
5 | const VisionCamera = props => (
6 |
7 |
8 | {typeof props.children == "function" ? (
9 |
14 | ) : (
15 | props.children
16 | )}
17 |
18 |
19 | );
20 | export { VisionCamera };
21 |
--------------------------------------------------------------------------------
/wrapper.js:
--------------------------------------------------------------------------------
1 | import React, { createContext, Component } from "react";
2 | import PropTypes from "prop-types";
3 | import {
4 | start,
5 | stop,
6 | detectFacesOnce,
7 | removeDetectFaces,
8 | detectFaces,
9 | removeRegion,
10 | setRegion,
11 | trackObject,
12 | removeTrackObject,
13 | getImageDimensions,
14 | setImageDimensionListener,
15 | removeImageDimensionListener
16 | } from "./module";
17 | const { Provider, Consumer: RNVisionConsumer } = createContext({
18 | regions: { "": null }
19 | });
20 | class RNVisionProvider extends Component {
21 | state = {
22 | isStarted: false,
23 | isCameraFront: false,
24 | onDetectedFaces: null,
25 | fixedRegions: {},
26 | trackedObjects: {},
27 | calculatedRegions: {},
28 | imageDimensions: { height: 0, width: 0 },
29 | providerValue: null
30 | };
31 | componentDidMount() {
32 | this.setProviderValue();
33 | }
34 | componentWillUnmount() {
35 | if (this.state.connected) {
36 | stop();
37 | }
38 | }
39 | static getDerivedStateFromProps(nextProps, prevState) {
40 | var ret = prevState;
41 | if (!ret.todos) ret.todos = [];
42 | if (nextProps.isCameraFront != prevState.isCameraFront) {
43 | ret.isCameraFront = nextProps.isCameraFront;
44 | if (prevState.isStarted) {
45 | start(ret.isCameraFront);
46 | }
47 | }
48 | if (nextProps.isStarted != prevState.isStarted) {
49 | ret.isStarted = nextProps.isStarted;
50 | if (nextProps.isStarted) {
51 | start(ret.isCameraFront);
52 | ret.todos.push({ key: "getImageDimensions" });
53 | } else {
54 | stop();
55 | removeImageDimensionListener();
56 | }
57 | }
58 | if (nextProps.onDetectedFaces != prevState.onDetectedFaces) {
59 | if (nextProps.onDetectedFaces) {
60 | //Turn it on
61 | detectFaces("", nextProps.onDetectedFaces);
62 | } else {
63 | //Turn it off
64 | removeDetectFaces("");
65 | }
66 | ret.onDetectedFaces = nextProps.onDetectedFaces;
67 | }
68 | if (nextProps.trackedObjects != prevState.trackedObjects) {
69 | if (nextProps.trackedObjects)
70 | Object.keys(nextProps.trackedObjects).forEach(k => {
71 | const v = nextProps.trackedObjects[k];
72 | if (!prevState.trackedObjects[k]) {
73 | if (!ret.todoObjects) ret.todoObjects = {};
74 | ret.todoObjects[k] = nextProps.trackedObjects[k];
75 | }
76 | if (
77 | JSON.stringify(prevState.trackedObjects[k]) != JSON.stringify(v)
78 | ) {
79 | if (!ret.todoObjects) ret.todoObjects = {};
80 | ret.calculatedRegions[k] = v;
81 | ret.todoObjects[k] = v;
82 | }
83 | });
84 | Object.keys(prevState.trackedObjects).forEach(k => {
85 | const v = prevState.trackedObjects[k];
86 | if (!nextProps.trackedObjects || !nextProps.trackedObjects[k]) {
87 | removeTrackObject("", k);
88 | delete ret.calculatedRegions[k];
89 | ret.todos.push({ key: "setProviderValue" });
90 | }
91 | });
92 | ret.trackedObjects = nextProps.trackedObjects;
93 | }
94 | if (nextProps.regions != prevState.fixedRegions) {
95 | var shouldChange = false;
96 | Object.keys(prevState.fixedRegions).forEach(k => {
97 | if (!nextProps.regions[k]) {
98 | shouldChange = true;
99 | removeRegion(k);
100 | }
101 | });
102 | if (shouldChange) {
103 | ret.fixedRegions = nextProps.regions;
104 | Object.keys(ret.fixedRegions).forEach(k => {
105 | setRegion(k, ret.fixedRegions[k]);
106 | });
107 | }
108 | }
109 | if (ret && ret.todos && ret.todos.length == 0) delete ret.todos;
110 | return ret;
111 | }
112 | getImageDimensions() {
113 | setImageDimensionListener(dims => {
114 | this.setState(({ imageDimensions }) => {
115 | if (
116 | dims &&
117 | (dims.height != imageDimensions.height ||
118 | dims.width != imageDimensions.width)
119 | )
120 | return { imageDimensions: dims };
121 | });
122 | });
123 | }
124 | manageTodo() {
125 | const todos = this.state.todos;
126 | if (todos) {
127 | todos.forEach(v => {
128 | const k = v.key;
129 | if (this[k]) return this[k](v);
130 | });
131 | this.setState({ todos: null }, () => {
132 | this.setProviderValue();
133 | });
134 | }
135 | const todo = this.state.todoObjects;
136 | if (todo) {
137 | Object.keys(todo).forEach(k => {
138 | (async () => {
139 | try {
140 | await removeTrackObject("", k);
141 | await trackObject("", k, todo[k], data => {
142 | const newRect = data.frame;
143 | this.setState(
144 | ({ calculatedRegions, trackedObjects }) => {
145 | if (!trackedObjects || !trackedObjects[k]) {
146 | return {
147 | calculatedRegions: { calculatedRegions, [k]: null }
148 | };
149 | } else {
150 | return {
151 | calculatedRegions: {
152 | ...calculatedRegions,
153 | [k]: newRect
154 | }
155 | };
156 | }
157 | },
158 | () => {
159 | this.setProviderValue();
160 | }
161 | );
162 | });
163 | } catch (e) {
164 | console.log("TRACKOBJECT FAIL", k, todo[k], e);
165 | }
166 | })();
167 | });
168 | this.setState({ todoObjects: null }, () => {
169 | this.setProviderValue();
170 | });
171 | }
172 | }
173 | componentDidUpdate() {
174 | this.manageTodo();
175 | }
176 | setProviderValue() {
177 | this.setState(
178 | {
179 | providerValue: {
180 | imageDimensions: this.state.imageDimensions,
181 | isCameraFront: this.state.isCameraFront,
182 | regions: {
183 | "": null,
184 | ...this.state.fixedRegions,
185 | ...this.state.calculatedRegions
186 | }
187 | }
188 | },
189 | () => {
190 | if (typeof this.props.onRegionsChanged == "function") {
191 | this.props.onRegionsChanged(this.state.providerValue.regions);
192 | }
193 | }
194 | );
195 | }
196 | render() {
197 | return (
198 |
199 | {this.props.children}
200 |
201 | );
202 | }
203 | }
204 | RNVisionProvider.propTypes = {
205 | isStarted: PropTypes.bool.isRequired,
206 | isCameraFront: PropTypes.bool.isRequired,
207 | onDetectedFaces: PropTypes.func,
208 | trackedObjects: PropTypes.object,
209 | regions: PropTypes.object,
210 | onRegionsChanged: PropTypes.func
211 | };
212 | RNVisionProvider.detectFaces = async () => {
213 | return await detectFacesOnce("");
214 | };
215 | export { RNVisionProvider, RNVisionConsumer };
216 |
--------------------------------------------------------------------------------