├── .gitattributes
├── .gitignore
├── LICENSE
├── README.md
├── android
│   ├── build.gradle
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           └── java
│               └── com
│                   └── reactlibrary
│                       ├── RNTensorIOModule.java
│                       └── RNTensorIOPackage.java
├── cloudbuild.yaml
├── index.js
├── ios
│   ├── RNPixelBufferUtilities.h
│   ├── RNPixelBufferUtilities.mm
│   ├── RNTensorIO.h
│   ├── RNTensorIO.mm
│   ├── RNTensorIO.podspec
│   └── RNTensorIO.xcodeproj
│       └── project.pbxproj
├── package.json
└── windows
    ├── .gitignore
    ├── .npmignore
    ├── RNTensorIO.sln
    └── RNTensorIO
        ├── Properties
        │   ├── AssemblyInfo.cs
        │   └── RNTensorIO.rd.xml
        ├── RNTensorIO.csproj
        ├── RNTensorIOModule.cs
        ├── RNTensorIOPackage.cs
        └── project.json
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.pbxproj -text
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # OSX
3 | #
4 | .DS_Store
5 |
6 | # node.js
7 | #
8 | node_modules/
9 | npm-debug.log
10 | yarn-error.log
11 |
12 |
13 | # Xcode
14 | #
15 | build/
16 | *.pbxuser
17 | !default.pbxuser
18 | *.mode1v3
19 | !default.mode1v3
20 | *.mode2v3
21 | !default.mode2v3
22 | *.perspectivev3
23 | !default.perspectivev3
24 | xcuserdata
25 | *.xccheckout
26 | *.moved-aside
27 | DerivedData
28 | *.hmap
29 | *.ipa
30 | *.xcuserstate
31 | project.xcworkspace
32 |
33 |
34 | # Android/IntelliJ
35 | #
36 | build/
37 | .idea
38 | .gradle
39 | local.properties
40 | *.iml
41 |
42 | # BUCK
43 | buck-out/
44 | \.buckd/
45 | *.keystore
46 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 doc.ai
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # react-native-tensorio
2 |
3 | Machine learning for your React Native projects, using TensorIO and TensorFlow Lite. See our example project, [TensorIOExample](https://github.com/doc-ai/react-native-tensorio-example).
4 |
5 | **DEPRECATED**
6 |
7 | See instead:
8 |
9 | https://github.com/doc-ai/react-native-tensorio-tflite
10 |
11 | https://github.com/doc-ai/react-native-tensorio-tensorflow
12 |
13 |
14 | ## Support
15 |
16 | - iOS
17 |
18 |
19 | ## Getting started
20 |
21 | ### Mostly automatic installation
22 |
23 | Install the package into your project:
24 |
25 | ```bash
26 | $ npm install react-native-tensorio --save
27 | ```
28 |
29 | And link the package to your project:
30 |
31 | ```bash
32 | $ react-native link react-native-tensorio
33 | ```
34 |
35 | #### iOS
36 |
37 | The react-native-tensorio package depends on the TensorIO library, which can most easily be added to your project using CocoaPods ([learn more](https://cocoapods.org/)).
38 |
39 | ##### Initialize CocoaPods
40 |
41 | If you have not already initialized CocoaPods for your project, cd into your project's *ios* directory and initialize it:
42 |
43 | ```
44 | $ pod init
45 | ```
46 |
47 | This command creates a *Podfile* in your project's *ios* directory.
48 |
49 | ##### Fix a CocoaPods / React Native Bug
50 |
51 | Edit the *Podfile* and, because of a bug in how CocoaPods and React Native interact, remove the first block for `target 'MyProject-tvOSTests' do`: everything from that line to its closing `end` statement. It is a duplicate entry that will cause problems when you try to install the pod dependencies.
52 |
53 | You may also completely remove the entire `target 'MyProject-tvOS' do` block if you are not building for tvOS.
54 |
55 | ##### Uncomment Lines
56 |
57 | Uncomment the `platform :ios` line and make sure it is no lower than `9.3`. Uncomment the `use_frameworks!` line.
58 |
59 | ##### Add TensorIO
60 |
61 | Add TensorIO as a dependency under the `# Pods for MyProject ` comment:
62 |
63 | ```rb
64 | pod 'TensorIO'
65 | pod 'TensorIO/TFLite'
66 | ```
67 |
68 | A new Podfile should look something like this:
69 |
70 | ```rb
71 | # Uncomment the next line to define a global platform for your project
72 | platform :ios, '9.3'
73 |
74 | target 'MyProject' do
75 | # Uncomment the next line if you're using Swift or would like to use dynamic frameworks
76 | use_frameworks!
77 |
78 | # Pods for MyProject
79 |
80 | pod 'TensorIO'
81 | pod 'TensorIO/TFLite'
82 |
83 | target 'MyProjectTests' do
84 | inherit! :search_paths
85 | # Pods for testing
86 | end
87 |
88 | end
89 |
90 | ```
91 |
92 | ##### Install Pods
93 |
94 | At the command line type:
95 |
96 | ```
97 | $ pod install
98 | ```
99 |
100 | This should install the TensorIO dependency as well as TensorFlow Lite and link them into your project.
101 |
102 | ##### Use the .xcworkspace file
103 |
104 | You should now use the *MyProject.xcworkspace* file to make changes to and build your project instead of the *MyProject.xcodeproj* file.
105 |
106 |
115 |
116 | #### Android
117 |
118 | Android support is forthcoming.
119 |
120 |
134 |
135 |
136 | ## Usage
137 |
138 |
139 | ### About TensorIO
140 |
141 | TensorIO uses model bundles to wrap an underlying model and a description of its inputs and outputs along with any assets the model requires, such as text labels for image classification outputs. They are simply folders with the *.tfbundle* extension ([learn more](https://github.com/doc-ai/TensorIO)). You will need to add these bundles to your React Native application in order to perform inference with the underlying models.
142 |
143 | Every TensorIO bundle includes a description of the underlying model. Model inputs and outputs are named and indicate what kind of data they expect or produce. You must know these names in order to pass data to the model and extract results from it. From the perspective of a React Native application, you will pass an object to the model whose name-value pairs match the model's input names, and you will receive an object back from the model whose name-value pairs match the model's output names.
144 |
145 | All this information appears in a bundle's *model.json* file. Let's have a look at the JSON description of a simple test model that takes a single input and produces a single output. Notice specifically the *inputs* and *outputs* fields:
146 |
147 | ```json
148 | {
149 | "name": "1 in 1 out numeric test",
150 | "details": "Simple model with a single valued input and single valued output",
151 | "id": "1_in_1_out_number_test",
152 | "version": "1",
153 | "author": "doc.ai",
154 | "license": "Apache 2.0",
155 | "model": {
156 | "file": "model.tflite",
157 | "backend": "tflite",
158 | "quantized": false
159 | },
160 | "inputs": [
161 | {
162 | "name": "x",
163 | "type": "array",
164 | "shape": [1],
165 | }
166 | ],
167 | "outputs": [
168 | {
169 | "name": "y",
170 | "type": "array",
171 | "shape": [1]
172 | }
173 | ]
174 | }
175 | ```
176 |
177 | The *inputs* and *outputs* fields tell us that this model takes a single input named *"x"* whose value is a single number and produces a single output named *"y"* whose value is also a single number. We know that each value is a single number from the `[1]` shape. Let's see how to use a model like this in your own application.
178 |
179 |
180 | ### Basic Usage
181 |
182 | Add a TensorIO bundle to your application in Xcode. Simply drag the bundle into the project under the project's primary folder (it will be the folder with the same name as your project). Make sure to check *Copy items if needed*, select *Create folder references*, and confirm that your build target is selected.
183 |
184 | Then in javascript, import the module with `import RNTensorIO from 'react-native-tensorio'`. Load the model by providing its name or a fully qualified path, run inference with it, and unload the model when you are done to free the underlying resources.
185 |
186 | Again, imagine we have a model that takes a single input named *"x"* with a single value and produces a single output named *"y"* with a single value:
187 |
188 | ```json
189 | "inputs": [
190 | {
191 | "name": "x",
192 | "type": "array",
193 | "shape": [1]
194 | }
195 | ],
196 | "outputs": [
197 | {
198 | "name": "y",
199 | "type": "array",
200 | "shape": [1]
201 | }
202 | ]
203 | ```
204 |
205 | We would use this model as follows. Notice that we pass an object to the run function whose name-value pairs match those of the model's inputs and we extract name-value pairs from the results that match those of the model's outputs:
206 |
207 | ```javascript
208 | import RNTensorIO from 'react-native-tensorio';
209 |
210 | RNTensorIO.load('model.tfbundle');
211 |
212 | RNTensorIO.run({
213 | 'x': [42]
214 | }, (error, results) => {
215 | const y = results['y']
216 | console.log(y);
217 | });
218 |
219 | RNTensorIO.unload();
220 | ```
221 |
222 | You can use any model that doesn't take image inputs like this. Computer vision models, however, require a little more work to use. Let's have a look.
223 |
224 |
225 | ### Image Models
226 |
227 | React Native represents image data as a base64 encoded string. When you pass that data to a model that has image inputs you must include some additional metadata that describes the encoded image. For example, is it JPEG or PNG data, raw pixel buffer data, or a path to an image on the filesystem?
228 |
229 |
230 | #### About Image Data
231 |
232 | Models that take image inputs must receive those inputs in a pixel buffer format. A pixel buffer is an unrolled vector of bytes corresponding to the red-green-blue (RGB) values that define the pixel representation of an image. Image models are trained on these kinds of representations and expect them for inputs.
233 |
234 | React Native represents image data in javascript as a base64 encoded string. In order to perform inference with an image model you must provide this base64 encoded string to the run function as well as a description of those bytes that may include metadata such as the width, height, and format of the image they represent. To run an image model you'll pack this information into a javascript object and use that object in one of the name-value pairs you provide to the run function.
235 |
236 |
237 | #### An Image Classification Example
238 |
239 | Let's look at a basic image classification model and see how to use it in React Native. The JSON description for the ImageNet MobileNet classification model is as follows. Again, pay special attention to the *inputs* and *outputs* fields:
240 |
241 | ```json
242 | {
243 | "name": "MobileNet V2 1.0 224",
244 | "details": "MobileNet V2 with a width multiplier of 1.0 and an input resolution of 224x224. \n\nMobileNets are based on a streamlined architecture that have depth-wise separable convolutions to build light weight deep neural networks. Trained on ImageNet with categories such as trees, animals, food, vehicles, person etc. MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications.",
245 | "id": "mobilenet-v2-100-224-unquantized",
246 | "version": "1",
247 | "author": "Andrew G. Howard, Menglong Zhu, Bo Chen, Dmitry Kalenichenko, Weijun Wang, Tobias Weyand, Marco Andreetto, Hartwig Adam",
248 | "license": "Apache License. Version 2.0 http://www.apache.org/licenses/LICENSE-2.0",
249 | "model": {
250 | "file": "model.tflite",
251 | "backend": "tflite",
252 | "quantized": false
253 | },
254 | "inputs": [
255 | {
256 | "name": "image",
257 | "type": "image",
258 | "shape": [224,224,3],
259 | "format": "RGB",
260 | "normalize": {
261 | "standard": "[-1,1]"
262 | }
263 | }
264 | ],
265 | "outputs": [
266 | {
267 | "name": "classification",
268 | "type": "array",
269 | "shape": [1,1000],
270 | "labels": "labels.txt"
271 | }
272 | ]
273 | }
274 | ```
275 |
276 | The *inputs* and *outputs* fields tell us that this model expects a single image input whose name is *"image"* and produces a single output whose name is *"classification"*. You don't need to worry about the image input details. TensorIO will take care of preparing an image input for the model using this information. But the output field tells you that the classification output will be a labeled list of 1000 values (1 x 1000 from the shape).
277 |
278 | Let's see how to use this model in React Native. Assuming we have some base64 encoded JPEG data:
279 |
280 |
281 | ```js
282 | var data = 'data:image/jpeg;base64,' + some.data;
283 | var orientation = RNTensorIO.imageOrientationUp;
284 | var format = RNTensorIO.imageTypeJPEG;
285 |
286 | RNTensorIO.run({
287 | 'image': {
288 | [RNTensorIO.imageKeyData]: data,
289 | [RNTensorIO.imageKeyFormat]: format,
290 | [RNTensorIO.imageKeyOrientation]: orientation
291 | }
292 | }, (error, results) => {
293 | classifications = results['classification'];
294 | console.log(classifications);
295 | });
296 | ```
297 |
298 | This time we provide an object for the *"image"* name-value pair, and this object contains three pieces of information: the base64 encoded string, the format of the underlying data (in this case, JPEG data), and the image's orientation. The names used in this object are exported by the RNTensorIO module along with the supported image orientations and image data types. These are all described in more detail below.
299 |
300 | RNTensorIO supports image data in a number of formats. Imagine instead that we have the path to an image on the filesystem. We would run the model as follows, and this time we'll omit the image orientation, which is assumed to be 'Up' by default:
301 |
302 | ```js
303 | var data = '/path/to/image.png';
304 | var format = RNTensorIO.imageTypeFile;
305 |
306 | RNTensorIO.run({
307 | 'image': {
308 | [RNTensorIO.imageKeyData]: data,
309 | [RNTensorIO.imageKeyFormat]: format
310 | }
311 | }, (error, results) => {
312 | classifications = results['classification'];
313 | console.log(classifications);
314 | });
315 | ```
316 |
317 | Another use case might be real time pixel buffer data coming from a device camera. In this case, and on iOS, the bytes will represent raw pixel data in the BGRA format. This representation tells us nothing else about the image, so we'll also need to specify its width, height, and orientation. On iOS, pixel buffer data coming from the camera is often 640x480 and will be right oriented. We'd run the model as follows:
318 |
319 | ```js
320 | var data; // pixel buffer data as a base64 encoded string
321 | var format = RNTensorIO.imageTypeBGRA;
322 | var orientation = RNTensorIO.imageOrientationRight;
323 | var width = 640;
324 | var height = 480;
325 |
326 | RNTensorIO.run({
327 | 'image': {
328 | [RNTensorIO.imageKeyData]: data,
329 | [RNTensorIO.imageKeyFormat]: format,
330 | [RNTensorIO.imageKeyOrientation]: orientation,
331 | [RNTensorIO.imageKeyWidth]: width,
332 | [RNTensorIO.imageKeyHeight]: height
333 | }
334 | }, (error, results) => {
335 | classifications = results['classification'];
336 | console.log(classifications);
337 | });
338 | ```
339 |
340 | All models that take image inputs are run in this manner.
341 |
342 |
343 | #### Image Outputs
344 |
345 | Some models will produce image outputs. In this case the value for that output will be provided to javascript as base64 encoded JPEG data. You'll likely need to prefix it as follows before being able to display it:
346 |
347 | ```js
348 | RNTensorIO.run({
349 | 'image': {
350 | // ...
351 | }
352 | }, (error, results) => {
353 | var image = results['image'];
354 | var data = 'data:image/jpeg;base64,' + image;
355 | });
356 | ```
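
Once prefixed, the data URI can be handed to a standard React Native `Image` component. A minimal sketch (the component name and dimensions here are illustrative and not part of this module):

```js
import React from 'react';
import { Image } from 'react-native';

// Hypothetical presentational component: `uri` is the prefixed base64 JPEG
// string produced in the run() callback above. Data URIs require explicit
// dimensions on the Image component.
const ResultImage = ({uri}) => (
  <Image style={{width: 224, height: 224}} source={{uri}} />
);

export default ResultImage;
```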
357 |
358 | ## The RNTensorIO Module
359 |
360 | Listed below are the functions and constants exported by this module.
361 |
362 | ### Functions
363 |
364 | #### load(path)
365 |
366 | Loads the model at the given path. If the path is a relative path the model will be loaded from the application bundle.
367 |
368 | Usage:
369 |
370 | ```js
371 | RNTensorIO.load('model.tfbundle');
372 | ```
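
A fully qualified path also works, for example when the bundle is not packaged with the application. A hedged sketch (the path is hypothetical):

```js
// Load a bundle from an absolute filesystem path instead of the application bundle
RNTensorIO.load('/path/to/model.tfbundle');
```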
373 |
374 | #### run(input, callback)
375 |
376 | Perform inference with the loaded model on the input.
377 |
378 | The input must be a javascript object whose name-value pairs match the names expected by the underlying model's inputs and which are described in the model bundle's *model.json* file.
379 |
380 | The callback has the signature `(error, results) => { ... }`. If there was a problem performing inference, error will be set to a string value describing the problem. It will be null otherwise. Results will be a javascript object whose name-value pairs match the names of the model's outputs and which are described in the model bundle's *model.json* file. If there was an error, results will be null.
381 |
382 | Usage:
383 |
384 | ```js
385 | RNTensorIO.run({
386 | 'input': [1]
387 | }, (error, results) => {
388 | if (error) {
389 | // handle error
390 | } else {
391 | console.log(results);
392 | }
393 | });
394 | ```
395 |
396 |
397 | #### isTrainable(callback)
398 |
399 | Indicates via the callback whether the loaded model is trainable, returning true or false. Useful if you want to call the `train` function to train your model.
400 |
401 | Usage:
402 |
403 | ```js
404 | RNTensorIO.isTrainable((error, isTrainable) => {
405 | if (error) {
406 | // handle error
407 | } else {
408 | console.log(`Is this model trainable? ${isTrainable}`);
409 | }
410 | });
411 | ```
412 |
413 |
414 | #### train(input, callback)
415 |
416 | Perform model training on the inputs provided.
417 |
418 | **Important:** Please ensure that the training model is loaded before this method is called. You may programmatically verify this by using the `isTrainable` function. For the moment, the model used
419 | for inference is different from the one used for training.
420 |
421 | The input must be a javascript object whose name-value pairs match the names expected by the underlying model's inputs and which are described in the model bundle's *model.json* file.
422 |
423 | The callback has the signature `(error, results) => { ... }`. If there was a problem performing training, error will be set to a string value describing the problem. It will be null otherwise.
424 |
425 | Usage:
426 |
427 | ```js
428 | RNTensorIO.train([{
429 | 'input': [1],
430 | 'label': [0]
431 | }], (error, results) => {
432 | if (error) {
433 | // handle error
434 | } else {
435 | console.log(results);
436 | }
437 | });
438 | ```
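
Because the training bundle must already be loaded, a typical flow guards the call with `isTrainable`. A minimal sketch, assuming a hypothetical training bundle name:

```js
RNTensorIO.load('training-model.tfbundle');

RNTensorIO.isTrainable((error, isTrainable) => {
  if (error || !isTrainable) {
    // handle the error or fall back to an inference-only model
    return;
  }

  RNTensorIO.train([{
    'input': [1],
    'label': [0]
  }], (error, results) => {
    console.log(results);
  });
});
```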
439 |
440 | #### unload()
441 |
442 | Unloads the loaded model and frees the underlying resources. Explicitly unload models when you are done with them to aggressively manage the application's memory footprint.
443 |
444 | Usage:
445 |
446 | ```js
447 | RNTensorIO.unload()
448 | ```
449 |
450 | #### topN(count, threshold, classifications, callback)
451 |
452 | A utility function for image classification models that filters for the results with the highest probabilities above a given threshold.
453 |
454 | Image classification models are often capable of recognizing hundreds or thousands of items and return what is called a softmax probability distribution that describes the likelihood that a recognizable item appears in the image. Often we do not want to know the entire probability distribution but only want to know which items have the highest probability of being in the image. Use this function to filter for those items.
455 |
456 | Count is the number of items you would like to be returned.
457 |
458 | Threshold is the minimum probability value an item should have in order to be returned. If there are fewer than count items above this probability, only that many items will be returned.
459 |
460 | Classifications is the output of a classification model.
461 |
462 | The callback has the signature `(error, results) => { ... }`. Error will always be null. Results will contain the filtered items.
463 |
464 | Usage:
465 |
466 | ```js
467 | // Given the results from a model whose output has the name 'classification',
468 | // filter for the top five probabilities above a threshold of 0.1
469 |
470 | RNTensorIO.run({
471 | 'image': {}
472 | }, (error, results) => {
473 | classifications = results['classification'];
474 |
475 | RNTensorIO.topN(5, 0.1, classifications, (error, top5) => {
476 | console.log("TOP 5", top5);
477 | });
478 | });
479 | ```
480 |
481 | ### Constants
482 |
483 | #### Image Input Keys
484 |
485 | ```js
486 | RNTensorIO.imageKeyData
487 | RNTensorIO.imageKeyFormat
488 | RNTensorIO.imageKeyWidth
489 | RNTensorIO.imageKeyHeight
490 | RNTensorIO.imageKeyOrientation
491 | ```
492 |
493 | ##### RNTensorIO.imageKeyData
494 |
495 | The data for the image. Must be a base64 encoded string or the fully qualified path to an image on the filesystem.
496 |
497 | ##### RNTensorIO.imageKeyFormat
498 |
499 | The image format. See supported types below. Pixel buffer data coming directly from an iOS camera will usually have the format `RNTensorIO.imageTypeBGRA`.
500 |
501 | ##### RNTensorIO.imageKeyWidth
502 |
503 | The width of the underlying image. Only required if the format is `RNTensorIO.imageTypeARGB` or `RNTensorIO.imageTypeBGRA`. Pixel buffer data coming directly from an iOS device camera will often have a width of 640.
504 |
505 | ##### RNTensorIO.imageKeyHeight
506 |
507 | The height of the underlying image. Only required if the format is `RNTensorIO.imageTypeARGB` or `RNTensorIO.imageTypeBGRA`. Pixel buffer data coming directly from an iOS device camera will often have a height of 480.
508 |
509 | ##### RNTensorIO.imageKeyOrientation
510 |
511 | The orientation of the image. See supported formats below. Most images will be `RNTensorIO.imageOrientationUp`, and this is the default value that is used if this field is not specified. However, pixel buffer data coming directly from an iOS device camera will be `RNTensorIO.imageOrientationRight`.
512 |
513 | #### Image Data Types
514 |
515 | ```js
516 | RNTensorIO.imageTypeUnknown
517 | RNTensorIO.imageTypeARGB
518 | RNTensorIO.imageTypeBGRA
519 | RNTensorIO.imageTypeJPEG
520 | RNTensorIO.imageTypePNG
521 | RNTensorIO.imageTypeFile
522 | ```
523 |
524 | ##### RNTensorIO.imageTypeUnknown
525 |
526 | A placeholder for an unknown image type. RNTensorIO will return an error if you specify this format.
527 |
528 | ##### RNTensorIO.imageTypeARGB
529 |
530 | Pixel buffer data whose pixels are unrolled into an alpha-red-green-blue byte representation.
531 |
532 | ##### RNTensorIO.imageTypeBGRA
533 |
534 | Pixel buffer data whose pixels are unrolled into a blue-green-red-alpha byte representation. Pixel data coming directly from an iOS device camera will usually be in this format.
535 |
536 | ##### RNTensorIO.imageTypeJPEG
537 |
538 | JPEG image data. The base64 encoded string must be prefixed with `data:image/jpeg;base64,`.
539 |
540 | ##### RNTensorIO.imageTypePNG
541 |
542 | PNG image data. The base64 encoded string must be prefixed with `data:image/png;base64,`.
543 |
544 | ##### RNTensorIO.imageTypeFile
545 |
546 | Indicates that the image data will contain the fully qualified path to an image on the filesystem.
547 |
548 | #### Image Orientations
549 |
550 | ```js
551 | RNTensorIO.imageOrientationUp
552 | RNTensorIO.imageOrientationUpMirrored
553 | RNTensorIO.imageOrientationDown
554 | RNTensorIO.imageOrientationDownMirrored
555 | RNTensorIO.imageOrientationLeftMirrored
556 | RNTensorIO.imageOrientationRight
557 | RNTensorIO.imageOrientationRightMirrored
558 | RNTensorIO.imageOrientationLeft
559 | ```
560 |
561 | ##### RNTensorIO.imageOrientationUp
562 |
563 | 0th row at top, 0th column on left. Default orientation.
564 |
565 | ##### RNTensorIO.imageOrientationUpMirrored
566 |
567 | 0th row at top, 0th column on right. Horizontal flip.
568 |
569 | ##### RNTensorIO.imageOrientationDown
570 |
571 | 0th row at bottom, 0th column on right. 180 degree rotation.
572 |
573 | ##### RNTensorIO.imageOrientationDownMirrored
574 |
575 | 0th row at bottom, 0th column on left. Vertical flip.
576 |
577 | ##### RNTensorIO.imageOrientationLeftMirrored
578 |
579 | 0th row on left, 0th column at top.
580 |
581 | ##### RNTensorIO.imageOrientationRight
582 |
583 | 0th row on right, 0th column at top. 90 degree clockwise rotation. Pixel buffer data coming from an iOS device camera will usually have this orientation.
584 |
585 | ##### RNTensorIO.imageOrientationRightMirrored
586 |
587 | 0th row on right, 0th column on bottom.
588 |
589 | ##### RNTensorIO.imageOrientationLeft
590 |
591 | 0th row on left, 0th column at bottom. 90 degree counter-clockwise rotation.
592 |
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
1 |
2 | buildscript {
3 | repositories {
4 | jcenter()
5 | }
6 |
7 | dependencies {
8 | classpath 'com.android.tools.build:gradle:1.3.1'
9 | }
10 | }
11 |
12 | apply plugin: 'com.android.library'
13 |
14 | android {
15 | compileSdkVersion 23
16 | buildToolsVersion "23.0.1"
17 |
18 | defaultConfig {
19 | minSdkVersion 16
20 | targetSdkVersion 22
21 | versionCode 1
22 | versionName "1.0"
23 | }
24 | lintOptions {
25 | abortOnError false
26 | }
27 | }
28 |
29 | repositories {
30 | mavenCentral()
31 | }
32 |
33 | dependencies {
34 | compile 'com.facebook.react:react-native:+'
35 | }
36 |
--------------------------------------------------------------------------------
/android/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
2 |           package="com.reactlibrary">
3 | </manifest>
4 |
--------------------------------------------------------------------------------
/android/src/main/java/com/reactlibrary/RNTensorIOModule.java:
--------------------------------------------------------------------------------
1 |
2 | package com.reactlibrary;
3 |
4 | import com.facebook.react.bridge.ReactApplicationContext;
5 | import com.facebook.react.bridge.ReactContextBaseJavaModule;
6 | import com.facebook.react.bridge.ReactMethod;
7 | import com.facebook.react.bridge.Callback;
8 |
9 | public class RNTensorIOModule extends ReactContextBaseJavaModule {
10 |
11 | private final ReactApplicationContext reactContext;
12 |
13 | public RNTensorIOModule(ReactApplicationContext reactContext) {
14 | super(reactContext);
15 | this.reactContext = reactContext;
16 | }
17 |
18 | @Override
19 | public String getName() {
20 | return "RNTensorIO";
21 | }
22 | }
--------------------------------------------------------------------------------
/android/src/main/java/com/reactlibrary/RNTensorIOPackage.java:
--------------------------------------------------------------------------------
1 |
2 | package com.reactlibrary;
3 |
4 | import java.util.Arrays;
5 | import java.util.Collections;
6 | import java.util.List;
7 |
8 | import com.facebook.react.ReactPackage;
9 | import com.facebook.react.bridge.NativeModule;
10 | import com.facebook.react.bridge.ReactApplicationContext;
11 | import com.facebook.react.uimanager.ViewManager;
12 | import com.facebook.react.bridge.JavaScriptModule;
13 | public class RNTensorIOPackage implements ReactPackage {
14 | @Override
15 | public List<NativeModule> createNativeModules(ReactApplicationContext reactContext) {
16 | return Arrays.asList(new RNTensorIOModule(reactContext));
17 | }
18 |
19 | // Deprecated from RN 0.47
20 | public List<Class<? extends JavaScriptModule>> createJSModules() {
21 | return Collections.emptyList();
22 | }
23 |
24 | @Override
25 | public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
26 | return Collections.emptyList();
27 | }
28 | }
--------------------------------------------------------------------------------
/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - name: gcr.io/cloud-builders/curl
3 | args: ['http://www.google.com']
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 |
2 | import { NativeModules } from 'react-native';
3 |
4 | export default NativeModules.RNTensorIO;
5 |
--------------------------------------------------------------------------------
/ios/RNPixelBufferUtilities.h:
--------------------------------------------------------------------------------
1 | //
2 | // RNPixelBufferUtilities.h
3 | // RNTensorIO
4 | //
5 | // Created by Phil Dow on 2/1/19.
6 | // Copyright © 2019 doc.ai. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <CoreGraphics/CoreGraphics.h>
11 | #import <CoreVideo/CoreVideo.h>
12 |
13 | NS_ASSUME_NONNULL_BEGIN
14 |
15 | _Nullable CVPixelBufferRef CreatePixelBufferWithBytes(unsigned char *bytes, size_t width, size_t height, OSType format);
16 |
17 | NS_ASSUME_NONNULL_END
18 |
--------------------------------------------------------------------------------
/ios/RNPixelBufferUtilities.mm:
--------------------------------------------------------------------------------
1 | //
2 | // RNPixelBufferUtilities.mm
3 | // RNTensorIO
4 | //
5 | // Created by Phil Dow on 2/1/19.
6 | // Copyright © 2019 doc.ai. All rights reserved.
7 | //
8 |
9 | #import "RNPixelBufferUtilities.h"
10 |
11 | // Format must be kCVPixelFormatType_32ARGB or kCVPixelFormatType_32BGRA
12 | // You must call CFRelease on the pixel buffer
13 |
14 | _Nullable CVPixelBufferRef CreatePixelBufferWithBytes(unsigned char *bytes, size_t width, size_t height, OSType format) {
15 | size_t bytes_per_row = width * 4; // ARGB and BGRA are four channel formats
16 | size_t byte_count = height * bytes_per_row;
17 |
18 | CVPixelBufferRef pixelBuffer;
19 |
20 | CVReturn status = CVPixelBufferCreate(
21 | kCFAllocatorDefault,
22 | width,
23 | height,
24 | format,
25 | NULL,
26 | &pixelBuffer);
27 |
28 | if ( status != kCVReturnSuccess ) {
29 | return NULL;
30 | }
31 |
32 | CVPixelBufferLockBaseAddress(pixelBuffer, kNilOptions);
33 | unsigned char *base_address = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
34 |
35 | memcpy(base_address, bytes, byte_count);
36 |
37 | CVPixelBufferUnlockBaseAddress(pixelBuffer, kNilOptions);
38 |
39 | return pixelBuffer;
40 | }
41 |
--------------------------------------------------------------------------------
/ios/RNTensorIO.h:
--------------------------------------------------------------------------------
1 | //
2 | // RNTensorIO.h
3 | // RNTensorIO
4 | //
5 | // Created by Phil Dow on 2/1/19.
6 | // Copyright © 2019 doc.ai. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 | #import <CoreVideo/CoreVideo.h>
12 |
13 | #if __has_include("RCTBridgeModule.h")
14 | #import "RCTBridgeModule.h"
15 | #else
16 | #import <React/RCTBridgeModule.h>
17 | #endif
18 |
19 | #if __has_include("RCTConvert.h")
20 | #import "RCTConvert.h"
21 | #else
22 | #import <React/RCTConvert.h>
23 | #endif
24 |
25 | @interface RNTensorIO : NSObject <RCTBridgeModule>
26 |
27 | @end
28 |
29 |
--------------------------------------------------------------------------------
/ios/RNTensorIO.mm:
--------------------------------------------------------------------------------
1 | //
2 | // RNTensorIO.mm
3 | // RNTensorIO
4 | //
5 | // Created by Phil Dow on 2/1/19.
6 | // Copyright © 2019 doc.ai. All rights reserved.
7 | //
8 |
9 | #import "RNTensorIO.h"
10 | #import "RNPixelBufferUtilities.h"
11 |
12 | // Unsure why the library import statements do not work:
13 | // #import <TensorIO/TensorIO.h>
14 | // @import TensorIO
15 |
16 | #import "TIOModelBundle.h"
17 | #import "TIOModel.h"
18 | #import "TIOLayerInterface.h"
19 | #import "TIOPixelBuffer.h"
20 | #import "TIOData.h"
21 | #import "NSDictionary+TIOExtensions.h"
22 | #import "UIImage+TIOCVPixelBufferExtensions.h"
23 | #import "TIOBatch.h"
24 | #import "TIOTrainableModel.h"
25 | #import "TIOModelModes.h"
26 | #import "TIOModelIO.h"
27 |
28 |
29 | /**
30 | * Image input keys.
31 | */
32 |
33 | static NSString * const RNTIOImageKeyData = @"RNTIOImageKeyData";
34 | static NSString * const RNTIOImageKeyFormat = @"RNTIOImageKeyFormat";
35 | static NSString * const RNTIOImageKeyWidth = @"RNTIOImageKeyWidth";
36 | static NSString * const RNTIOImageKeyHeight = @"RNTIOImageKeyHeight";
37 | static NSString * const RNTIOImageKeyOrientation = @"RNTIOImageKeyOrientation";
38 |
39 | /**
40 | * Supported image encodings.
41 | */
42 |
43 | typedef NS_ENUM(NSInteger, RNTIOImageDataType) {
44 | RNTIOImageDataTypeUnknown,
45 | RNTIOImageDataTypeARGB,
46 | RNTIOImageDataTypeBGRA,
47 | RNTIOImageDataTypeJPEG,
48 | RNTIOImageDataTypePNG,
49 | RNTIOImageDataTypeFile
50 | };
51 |
52 | // MARK: -
53 |
54 | @implementation RCTConvert (RNTensorIOEnumerations)
55 |
56 | /**
57 | * Bridged constants for supported image encodings. React Native images are
58 | * encoded as base64 strings and their format must be specified for image
59 | * inputs.
60 | */
61 |
62 | RCT_ENUM_CONVERTER(RNTIOImageDataType, (@{
63 | @"imageTypeUnknown": @(RNTIOImageDataTypeUnknown),
64 | @"imageTypeARGB": @(RNTIOImageDataTypeARGB),
65 | @"imageTypeBGRA": @(RNTIOImageDataTypeBGRA),
66 | @"imageTypeJPEG": @(RNTIOImageDataTypeJPEG),
67 | @"imageTypePNG": @(RNTIOImageDataTypePNG),
68 | @"imageTypeFile": @(RNTIOImageDataTypeFile)
69 | }), RNTIOImageDataTypeUnknown, integerValue);
70 |
71 | /**
72 | * Bridged constants for suppoted image orientations. Most images will be
73 | * oriented 'Up', and that is the default value, but images coming directly
74 | * from a camera pixel buffer will be oriented 'Right'.
75 | */
76 |
77 | RCT_ENUM_CONVERTER(CGImagePropertyOrientation, (@{
78 | @"imageOrientationUp": @(kCGImagePropertyOrientationUp),
79 | @"imageOrientationUpMirrored": @(kCGImagePropertyOrientationUpMirrored),
80 | @"imageOrientationDown": @(kCGImagePropertyOrientationDown),
81 | @"imageOrientationDownMirrored": @(kCGImagePropertyOrientationDownMirrored),
82 | @"imageOrientationLeftMirrored": @(kCGImagePropertyOrientationLeftMirrored),
83 | @"imageOrientationRight": @(kCGImagePropertyOrientationRight),
84 | @"imageOrientationRightMirrored": @(kCGImagePropertyOrientationRightMirrored),
85 | @"imageOrientationLeft": @(kCGImagePropertyOrientationLeft)
86 | }), kCGImagePropertyOrientationUp, integerValue);
87 |
88 | @end
89 |
90 | // MARK: -
91 |
92 | @interface RNTensorIO()
93 |
94 | @property id<TIOModel> model;
95 |
96 | @end
97 |
98 | @implementation RNTensorIO
99 |
100 | RCT_EXPORT_MODULE();
101 |
102 | // MARK: - Exported Methods
103 |
104 | /**
105 | * Bridged method that loads a model given a model path. Relative paths will be
106 | * loaded from the application bundle.
107 | */
108 |
109 | RCT_EXPORT_METHOD(load:(NSString*)path) {
110 | [self unload];
111 |
112 | TIOModelBundle *bundle = [[TIOModelBundle alloc] initWithPath:[self absolutePath:path]];
113 | self.model = bundle.newModel;
114 | }
115 |
116 | /**
117 | * Bridged method that unloads a model, freeing the underlying resources.
118 | */
119 |
120 | RCT_EXPORT_METHOD(unload) {
121 | [self.model unload];
122 | self.model = nil;
123 | }
124 |
125 | /**
126 | * Bridged method that indicates via the callback whether the loaded model is trainable.
127 | */
128 |
129 | RCT_EXPORT_METHOD(isTrainable:(RCTResponseSenderBlock)callback) {
130 | callback(@[NSNull.null, @(self.model.modes.trains)]);
131 | }
132 |
133 | /**
134 | * Bridged methods that performs inference with a loaded model and returns the
135 | * results.
136 | */
137 |
138 | RCT_EXPORT_METHOD(run:(NSDictionary*)inputs callback:(RCTResponseSenderBlock)callback) {
139 |
140 | // Ensure that a model has been loaded
141 |
142 | if (self.model == nil) {
143 | NSString *error = @"No model has been loaded. Call load() with the name of a model before calling run().";
144 | callback(@[error, NSNull.null]);
145 | return;
146 | }
147 |
148 | // Ensure that the provided keys match the model's expected keys, or return an error
149 |
150 | NSSet *expectedKeys = [NSSet setWithArray:[self inputKeysForModel:self.model]];
151 | NSSet *providedKeys = [NSSet setWithArray:inputs.allKeys];
152 |
153 | if (![expectedKeys isEqualToSet:providedKeys]) {
154 | NSString *error = [NSString stringWithFormat:@"Provided inputs %@ don't match model's expected inputs %@", providedKeys, expectedKeys];
155 | callback(@[error, NSNull.null]);
156 | return;
157 | }
158 |
159 | // Prepare inputs, converting base64 encoded image data or reading image data from the filesystem
160 |
161 | NSDictionary *preparedInputs = [self preparedInputs:inputs];
162 |
163 | if (preparedInputs == nil) {
164 | NSString *error = @"There was a problem preparing the inputs. Ensure that your image inputs are properly encoded.";
165 | callback(@[error, NSNull.null]);
166 | return;
167 | }
168 |
169 | // Perform inference
170 |
171 | NSDictionary *results = (NSDictionary*)[self.model runOn:preparedInputs];
172 |
173 | // Prepare outputs, converting pixel buffer outputs to base64 encoded jpeg string data
174 |
175 | NSDictionary *preparedResults = [self preparedOutputs:results];
176 |
177 | if (preparedResults == nil) {
178 | NSString *error = @"There was a problem preparing the outputs. Pixel buffer outputs could not be converted to base64 JPEG string data.";
179 | callback(@[error, NSNull.null]);
180 | return;
181 | }
182 |
183 | // Return results
184 |
185 | callback(@[NSNull.null, preparedResults]);
186 | }
187 |
188 | /**
189 | * Bridged method that trains a loaded model on an array of training
190 | * samples and returns the results.
191 | */
192 | RCT_EXPORT_METHOD(train:(NSArray *)inputSet callback:(RCTResponseSenderBlock)callback) {
193 |
194 | // Ensure that a model has been loaded
195 |
196 | if (self.model == nil) {
197 | NSString *error = @"No model has been loaded. Call load() with the name of a model before calling train().";
198 | callback(@[error, NSNull.null]);
199 | return;
200 | }
201 |
202 | // Initialize a TIOBatch to store the training samples
203 |
204 | TIOBatch *batch = [[TIOBatch alloc] initWithKeys:[self inputKeysForModel:self.model]];
205 |
206 | // Ensure that the provided keys match the model's expected keys, or return an error
207 |
208 | NSSet *expectedKeys = [NSSet setWithArray:[self inputKeysForModel:self.model]];
209 |
210 | for (NSDictionary *input in inputSet) {
211 | NSSet *providedKeys = [NSSet setWithArray:input.allKeys];
212 |
213 | if (![expectedKeys isEqualToSet:providedKeys]) {
214 | NSString *error = [NSString stringWithFormat:@"Provided inputs %@ don't match model's expected inputs %@", providedKeys, expectedKeys];
215 | callback(@[error, NSNull.null]);
216 | return;
217 | }
218 |
219 | // Prepare inputs, converting base64 encoded image data or reading image data from the filesystem
220 |
221 | NSDictionary *preparedInputs = [self preparedInputs:input];
222 |
223 | if (preparedInputs == nil) {
224 | NSString *error = @"There was a problem preparing the inputs. Ensure that your image inputs are properly encoded.";
225 | callback(@[error, NSNull.null]);
226 | return;
227 | }
228 |
229 | [batch addItem:preparedInputs];
230 | }
231 |
232 | // Perform training
233 |
234 | NSDictionary *results = (NSDictionary*)[(id<TIOTrainableModel>)self.model train:batch];
235 | callback(@[NSNull.null, results]);
236 | }
237 |
238 | /**
239 | * Bridged utility method for image classification models that returns the top N
240 | * probability label-scores.
241 | */
242 |
243 | RCT_EXPORT_METHOD(topN:(NSUInteger)count threshold:(float)threshold classifications:(NSDictionary*)classifications callback:(RCTResponseSenderBlock)callback) {
244 | NSDictionary *topN = [classifications topN:count threshold:threshold];
245 | callback(@[NSNull.null, topN]);
246 | }
247 |
248 | // MARK: - Load Utilities
249 |
250 | /**
251 | * Returns the absolute path to a model. If an absolute path is provided it is
252 | * returned. Otherwise the path will be treated as relative to the application
253 | * bundle.
254 | */
255 |
256 | - (NSString*)absolutePath:(NSString*)path {
257 | NSString *absolutePath;
258 |
259 | if ([self isAbsoluteFilepath:path]) {
260 | absolutePath = path;
261 | } else {
262 | if ([path.pathExtension isEqualToString:TIOModelBundleExtension]) {
263 | path = [path stringByDeletingPathExtension];
264 | }
265 | absolutePath = [NSBundle.mainBundle pathForResource:path ofType:TIOModelBundleExtension];
266 | }
267 |
268 | return absolutePath;
269 | }
270 |
271 | /**
272 | * Returns YES if the path describes an absolute path rather than a relative one.
273 | */
274 |
275 | - (BOOL)isAbsoluteFilepath:(NSString*)path {
276 | NSString *fullpath = [path stringByExpandingTildeInPath];
277 | return [fullpath hasPrefix:@"/"] || [fullpath hasPrefix:@"file:/"];
278 | }
279 |
280 | // MARK: - Input Key Checking
281 |
282 | /**
283 | * Returns the names of the model inputs, derived from a model bundle's
284 | * model.json file.
285 | */
286 |
287 | - (NSArray<NSString*>*)inputKeysForModel:(id<TIOModel>)model {
288 | NSMutableArray *keys = [[NSMutableArray alloc] init];
289 | for (TIOLayerInterface *input in model.io.inputs.all) {
290 | [keys addObject:input.name];
291 | }
292 | return keys.copy;
293 | }
294 |
295 | // MARK: - Input Conversion
296 |
297 | /**
298 | * Prepares the model inputs sent from javascript for inference. Image inputs
299 | * are encoded as a base64 string and must be decoded and converted to pixel
300 | * buffers. Other inputs are taken as is.
301 | */
302 |
303 | - (nullable NSDictionary*)preparedInputs:(NSDictionary*)inputs {
304 |
305 | NSMutableDictionary<NSString*, id<TIOData>> *preparedInputs = [[NSMutableDictionary alloc] init];
306 | __block BOOL error = NO;
307 |
308 | for (TIOLayerInterface *layer in self.model.io.inputs.all) {
309 | [layer matchCasePixelBuffer:^(TIOPixelBufferLayerDescription * _Nonnull pixelBufferDescription) {
310 | TIOPixelBuffer *pixelBuffer = [self pixelBufferForInput:inputs[layer.name]];
311 | if (pixelBuffer == nil) {
312 | error = YES;
313 | } else {
314 | preparedInputs[layer.name] = pixelBuffer;
315 | }
316 | } caseVector:^(TIOVectorLayerDescription * _Nonnull vectorDescription) {
317 | preparedInputs[layer.name] = inputs[layer.name];
318 | } caseString:^(TIOStringLayerDescription * _Nonnull stringDescription) {
319 | preparedInputs[layer.name] = inputs[layer.name];
320 | }];
321 | }
322 |
323 | if (error) {
324 | return nil;
325 | }
326 |
327 | return preparedInputs.copy;
328 | }
329 |
330 | /**
331 | * Prepares a pixel buffer input given an image encoding dictionary sent from
332 | * javascript, converting a base64 encoded string or reading data from the file
333 | * system.
334 | */
335 |
336 | - (nullable TIOPixelBuffer*)pixelBufferForInput:(NSDictionary*)input {
337 |
338 | RNTIOImageDataType format = (RNTIOImageDataType)[input[RNTIOImageKeyFormat] integerValue];
339 | CVPixelBufferRef pixelBuffer;
340 |
341 | switch (format) {
342 | case RNTIOImageDataTypeUnknown: {
343 | pixelBuffer = NULL;
344 | }
345 | break;
346 |
347 | case RNTIOImageDataTypeARGB: {
348 | OSType imageFormat = kCVPixelFormatType_32ARGB;
349 | NSUInteger width = [input[RNTIOImageKeyWidth] unsignedIntegerValue];
350 | NSUInteger height = [input[RNTIOImageKeyHeight] unsignedIntegerValue];
351 |
352 | NSString *base64 = input[RNTIOImageKeyData];
353 | NSData *data = [RCTConvert NSData:base64];
354 | unsigned char *bytes = (unsigned char *)data.bytes;
355 |
356 | pixelBuffer = CreatePixelBufferWithBytes(bytes, width, height, imageFormat);
357 | CFAutorelease(pixelBuffer);
358 |
359 | }
360 | break;
361 |
362 | case RNTIOImageDataTypeBGRA: {
363 | OSType imageFormat = kCVPixelFormatType_32BGRA;
364 | NSUInteger width = [input[RNTIOImageKeyWidth] unsignedIntegerValue];
365 | NSUInteger height = [input[RNTIOImageKeyHeight] unsignedIntegerValue];
366 |
367 | NSString *base64 = input[RNTIOImageKeyData];
368 | NSData *data = [RCTConvert NSData:base64];
369 | unsigned char *bytes = (unsigned char *)data.bytes;
370 |
371 | pixelBuffer = CreatePixelBufferWithBytes(bytes, width, height, imageFormat);
372 | CFAutorelease(pixelBuffer);
373 |
374 | }
375 | break;
376 |
377 | case RNTIOImageDataTypeJPEG: {
378 | NSString *base64 = input[RNTIOImageKeyData];
379 | UIImage *image = [RCTConvert UIImage:base64];
380 |
381 | pixelBuffer = image.pixelBuffer;
382 |
383 | }
384 | break;
385 |
386 | case RNTIOImageDataTypePNG: {
387 | NSString *base64 = input[RNTIOImageKeyData];
388 | UIImage *image = [RCTConvert UIImage:base64];
389 |
390 | pixelBuffer = image.pixelBuffer;
391 |
392 | }
393 | break;
394 |
395 | case RNTIOImageDataTypeFile: {
396 | NSString *path = input[RNTIOImageKeyData];
397 | NSURL *URL = [NSURL fileURLWithPath:path];
398 | UIImage *image = [[UIImage alloc] initWithContentsOfFile:URL.path];
399 |
400 | pixelBuffer = image.pixelBuffer;
401 |
402 | }
403 | break;
404 | }
405 |
406 | // Bail if the pixel buffer could not be created
407 |
408 | if (pixelBuffer == NULL) {
409 | return nil;
410 | }
411 |
412 | // Derive the image orientation
413 |
414 | CGImagePropertyOrientation orientation;
415 |
416 | if ([input objectForKey:RNTIOImageKeyOrientation] == nil) {
417 | orientation = kCGImagePropertyOrientationUp;
418 | } else {
419 | orientation = (CGImagePropertyOrientation)[input[RNTIOImageKeyOrientation] integerValue];
420 | }
421 |
422 | // Return the results
423 |
424 | return [[TIOPixelBuffer alloc] initWithPixelBuffer:pixelBuffer orientation:orientation];
425 | }
426 |
427 | // MARK: - Output Conversion
428 |
429 | /**
430 | * Prepares the model outputs for consumption by javascript. Pixel buffer outputs
431 | * are converted to base64 strings. Other outputs are taken as is.
432 | */
433 |
434 | - (NSDictionary*)preparedOutputs:(NSDictionary*)outputs {
435 | NSMutableDictionary *preparedOutputs = [[NSMutableDictionary alloc] init];
436 | __block BOOL error = NO;
437 |
438 | for (TIOLayerInterface *layer in self.model.io.outputs.all) {
439 | [layer matchCasePixelBuffer:^(TIOPixelBufferLayerDescription * _Nonnull pixelBufferDescription) {
440 | NSString *base64 = [self base64JPEGDataForPixelBuffer:outputs[layer.name]];
441 | if (base64 == nil) {
442 | error = YES;
443 | } else {
444 | preparedOutputs[layer.name] = base64;
445 | }
446 | } caseVector:^(TIOVectorLayerDescription * _Nonnull vectorDescription) {
447 | preparedOutputs[layer.name] = outputs[layer.name];
448 | } caseString:^(TIOStringLayerDescription * _Nonnull stringDescription) {
449 | preparedOutputs[layer.name] = outputs[layer.name];
450 | }];
451 | }
452 |
453 | if (error) {
454 | return nil;
455 | }
456 |
457 | return preparedOutputs.copy;
458 | }
459 |
460 | /**
461 | * Converts a pixel buffer output to a base64 encoded string that can be
462 | * consumed by React Native.
463 | */
464 |
465 | - (nullable NSString*)base64JPEGDataForPixelBuffer:(TIOPixelBuffer*)pixelBuffer {
466 | UIImage *image = [[UIImage alloc] initWithPixelBuffer:pixelBuffer.pixelBuffer];
467 | NSData *data = UIImageJPEGRepresentation(image, 1.0);
468 | NSString *base64 = [data base64EncodedStringWithOptions:0];
469 |
470 | return base64;
471 | }
472 |
473 | // MARK: - React Native Overrides
474 |
475 | - (NSDictionary *)constantsToExport {
476 | return @{
477 | @"imageKeyData": RNTIOImageKeyData,
478 | @"imageKeyFormat": RNTIOImageKeyFormat,
479 | @"imageKeyWidth": RNTIOImageKeyWidth,
480 | @"imageKeyHeight": RNTIOImageKeyHeight,
481 | @"imageKeyOrientation": RNTIOImageKeyOrientation,
482 |
483 | @"imageTypeUnknown": @(RNTIOImageDataTypeUnknown),
484 | @"imageTypeARGB": @(RNTIOImageDataTypeARGB),
485 | @"imageTypeBGRA": @(RNTIOImageDataTypeBGRA),
486 | @"imageTypeJPEG": @(RNTIOImageDataTypeJPEG),
487 | @"imageTypePNG": @(RNTIOImageDataTypePNG),
488 | @"imageTypeFile": @(RNTIOImageDataTypeFile),
489 |
490 | @"imageOrientationUp": @(kCGImagePropertyOrientationUp),
491 | @"imageOrientationUpMirrored": @(kCGImagePropertyOrientationUpMirrored),
492 | @"imageOrientationDown": @(kCGImagePropertyOrientationDown),
493 | @"imageOrientationDownMirrored": @(kCGImagePropertyOrientationDownMirrored),
494 | @"imageOrientationLeftMirrored": @(kCGImagePropertyOrientationLeftMirrored),
495 | @"imageOrientationRight": @(kCGImagePropertyOrientationRight),
496 | @"imageOrientationRightMirrored": @(kCGImagePropertyOrientationRightMirrored),
497 | @"imageOrientationLeft": @(kCGImagePropertyOrientationLeft)
498 | };
499 | }
500 |
501 | - (dispatch_queue_t)methodQueue {
502 | return dispatch_get_main_queue();
503 | }
504 |
505 | + (BOOL)requiresMainQueueSetup {
506 | return YES;
507 | }
508 |
509 | @end
510 |
--------------------------------------------------------------------------------
/ios/RNTensorIO.podspec:
--------------------------------------------------------------------------------
1 |
2 | Pod::Spec.new do |s|
3 | s.name = 'RNTensorIO'
4 | s.version = '0.4.0'
5 | s.summary = 'Machine Learning for React Native with TensorIO'
6 | s.description = 'Perform inference with TensorFlow Lite models in React Native'
7 | s.homepage = 'https://github.com/doc-ai/react-native-tensorio'
8 | s.license = { :type => 'Apache 2', :file => 'LICENSE' }
9 | s.authors = { 'doc.ai' => 'philip@doc.ai' }
10 | s.source = { :git => 'https://github.com/doc-ai/react-native-tensorio.git', :tag => s.version.to_s }
11 |
12 | s.ios.deployment_target = '12.0'
13 | s.source_files = '*.{h,m,mm}'
14 | s.requires_arc = true
15 | s.static_framework = true
16 |
17 | s.dependency 'React'
18 | s.dependency 'TensorIO'
19 | s.dependency 'TensorIO/TensorFlow'
20 | end
21 |
--------------------------------------------------------------------------------
/ios/RNTensorIO.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | A50530F8229E2285003CD27B /* RNModelRepository.mm in Sources */ = {isa = PBXBuildFile; fileRef = A50530F7229E2285003CD27B /* RNModelRepository.mm */; };
11 | A54FF36422A1E003004CE6BF /* RNFleaClient.mm in Sources */ = {isa = PBXBuildFile; fileRef = A54FF36322A1E003004CE6BF /* RNFleaClient.mm */; };
12 | B3E7B58A1CC2AC0600A0062D /* RNTensorIO.mm in Sources */ = {isa = PBXBuildFile; fileRef = B3E7B5891CC2AC0600A0062D /* RNTensorIO.mm */; };
13 | E30C3B8722091F05006D99F3 /* RNPixelBufferUtilities.mm in Sources */ = {isa = PBXBuildFile; fileRef = E30C3B8622091F05006D99F3 /* RNPixelBufferUtilities.mm */; };
14 | /* End PBXBuildFile section */
15 |
16 | /* Begin PBXCopyFilesBuildPhase section */
17 | 58B511D91A9E6C8500147676 /* CopyFiles */ = {
18 | isa = PBXCopyFilesBuildPhase;
19 | buildActionMask = 2147483647;
20 | dstPath = "include/$(PRODUCT_NAME)";
21 | dstSubfolderSpec = 16;
22 | files = (
23 | );
24 | runOnlyForDeploymentPostprocessing = 0;
25 | };
26 | /* End PBXCopyFilesBuildPhase section */
27 |
28 | /* Begin PBXFileReference section */
29 | 134814201AA4EA6300B7C361 /* libRNTensorIO.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNTensorIO.a; sourceTree = BUILT_PRODUCTS_DIR; };
30 | A50530F6229E223C003CD27B /* RNModelRepository.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNModelRepository.h; sourceTree = "<group>"; };
31 | A50530F7229E2285003CD27B /* RNModelRepository.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = RNModelRepository.mm; sourceTree = "<group>"; };
32 | A54FF36222A1DFDD004CE6BF /* RNFleaClient.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFleaClient.h; sourceTree = "<group>"; };
33 | A54FF36322A1E003004CE6BF /* RNFleaClient.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = RNFleaClient.mm; sourceTree = "<group>"; };
34 | B3E7B5881CC2AC0600A0062D /* RNTensorIO.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RNTensorIO.h; sourceTree = "<group>"; };
35 | B3E7B5891CC2AC0600A0062D /* RNTensorIO.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RNTensorIO.mm; sourceTree = "<group>"; };
36 | E30C3B8522091F05006D99F3 /* RNPixelBufferUtilities.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNPixelBufferUtilities.h; sourceTree = "<group>"; };
37 | E30C3B8622091F05006D99F3 /* RNPixelBufferUtilities.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = RNPixelBufferUtilities.mm; sourceTree = "<group>"; };
38 | /* End PBXFileReference section */
39 |
40 | /* Begin PBXFrameworksBuildPhase section */
41 | 58B511D81A9E6C8500147676 /* Frameworks */ = {
42 | isa = PBXFrameworksBuildPhase;
43 | buildActionMask = 2147483647;
44 | files = (
45 | );
46 | runOnlyForDeploymentPostprocessing = 0;
47 | };
48 | /* End PBXFrameworksBuildPhase section */
49 |
50 | /* Begin PBXGroup section */
51 | 134814211AA4EA7D00B7C361 /* Products */ = {
52 | isa = PBXGroup;
53 | children = (
54 | 134814201AA4EA6300B7C361 /* libRNTensorIO.a */,
55 | );
56 | name = Products;
57 | sourceTree = "";
58 | };
59 | 58B511D21A9E6C8500147676 = {
60 | isa = PBXGroup;
61 | children = (
62 | A54FF36322A1E003004CE6BF /* RNFleaClient.mm */,
63 | A54FF36222A1DFDD004CE6BF /* RNFleaClient.h */,
64 | A50530F7229E2285003CD27B /* RNModelRepository.mm */,
65 | A50530F6229E223C003CD27B /* RNModelRepository.h */,
66 | B3E7B5881CC2AC0600A0062D /* RNTensorIO.h */,
67 | B3E7B5891CC2AC0600A0062D /* RNTensorIO.mm */,
68 | E30C3B8522091F05006D99F3 /* RNPixelBufferUtilities.h */,
69 | E30C3B8622091F05006D99F3 /* RNPixelBufferUtilities.mm */,
70 | 134814211AA4EA7D00B7C361 /* Products */,
71 | E39B872B22028BF9008DDE9B /* Frameworks */,
72 | );
73 | sourceTree = "<group>";
74 | };
75 | E39B872B22028BF9008DDE9B /* Frameworks */ = {
76 | isa = PBXGroup;
77 | children = (
78 | );
79 | name = Frameworks;
80 | sourceTree = "<group>";
81 | };
82 | /* End PBXGroup section */
83 |
84 | /* Begin PBXNativeTarget section */
85 | 58B511DA1A9E6C8500147676 /* RNTensorIO */ = {
86 | isa = PBXNativeTarget;
87 | buildConfigurationList = 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "RNTensorIO" */;
88 | buildPhases = (
89 | 58B511D71A9E6C8500147676 /* Sources */,
90 | 58B511D81A9E6C8500147676 /* Frameworks */,
91 | 58B511D91A9E6C8500147676 /* CopyFiles */,
92 | );
93 | buildRules = (
94 | );
95 | dependencies = (
96 | );
97 | name = RNTensorIO;
98 | productName = RCTDataManager;
99 | productReference = 134814201AA4EA6300B7C361 /* libRNTensorIO.a */;
100 | productType = "com.apple.product-type.library.static";
101 | };
102 | /* End PBXNativeTarget section */
103 |
104 | /* Begin PBXProject section */
105 | 58B511D31A9E6C8500147676 /* Project object */ = {
106 | isa = PBXProject;
107 | attributes = {
108 | LastUpgradeCheck = 0830;
109 | ORGANIZATIONNAME = doc.ai;
110 | TargetAttributes = {
111 | 58B511DA1A9E6C8500147676 = {
112 | CreatedOnToolsVersion = 6.1.1;
113 | };
114 | };
115 | };
116 | buildConfigurationList = 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "RNTensorIO" */;
117 | compatibilityVersion = "Xcode 3.2";
118 | developmentRegion = English;
119 | hasScannedForEncodings = 0;
120 | knownRegions = (
121 | en,
122 | );
123 | mainGroup = 58B511D21A9E6C8500147676;
124 | productRefGroup = 58B511D21A9E6C8500147676;
125 | projectDirPath = "";
126 | projectRoot = "";
127 | targets = (
128 | 58B511DA1A9E6C8500147676 /* RNTensorIO */,
129 | );
130 | };
131 | /* End PBXProject section */
132 |
133 | /* Begin PBXSourcesBuildPhase section */
134 | 58B511D71A9E6C8500147676 /* Sources */ = {
135 | isa = PBXSourcesBuildPhase;
136 | buildActionMask = 2147483647;
137 | files = (
138 | A54FF36422A1E003004CE6BF /* RNFleaClient.mm in Sources */,
139 | A50530F8229E2285003CD27B /* RNModelRepository.mm in Sources */,
140 | B3E7B58A1CC2AC0600A0062D /* RNTensorIO.mm in Sources */,
141 | E30C3B8722091F05006D99F3 /* RNPixelBufferUtilities.mm in Sources */,
142 | );
143 | runOnlyForDeploymentPostprocessing = 0;
144 | };
145 | /* End PBXSourcesBuildPhase section */
146 |
147 | /* Begin XCBuildConfiguration section */
148 | 58B511ED1A9E6C8500147676 /* Debug */ = {
149 | isa = XCBuildConfiguration;
150 | buildSettings = {
151 | ALWAYS_SEARCH_USER_PATHS = NO;
152 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
153 | CLANG_CXX_LIBRARY = "libc++";
154 | CLANG_ENABLE_MODULES = YES;
155 | CLANG_ENABLE_OBJC_ARC = YES;
156 | CLANG_WARN_BOOL_CONVERSION = YES;
157 | CLANG_WARN_CONSTANT_CONVERSION = YES;
158 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
159 | CLANG_WARN_EMPTY_BODY = YES;
160 | CLANG_WARN_ENUM_CONVERSION = YES;
161 | CLANG_WARN_INFINITE_RECURSION = YES;
162 | CLANG_WARN_INT_CONVERSION = YES;
163 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
164 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
165 | CLANG_WARN_UNREACHABLE_CODE = YES;
166 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
167 | COPY_PHASE_STRIP = NO;
168 | ENABLE_STRICT_OBJC_MSGSEND = YES;
169 | ENABLE_TESTABILITY = YES;
170 | GCC_C_LANGUAGE_STANDARD = gnu99;
171 | GCC_DYNAMIC_NO_PIC = NO;
172 | GCC_NO_COMMON_BLOCKS = YES;
173 | GCC_OPTIMIZATION_LEVEL = 0;
174 | GCC_PREPROCESSOR_DEFINITIONS = (
175 | "DEBUG=1",
176 | "$(inherited)",
177 | );
178 | GCC_SYMBOLS_PRIVATE_EXTERN = NO;
179 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
180 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
181 | GCC_WARN_UNDECLARED_SELECTOR = YES;
182 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
183 | GCC_WARN_UNUSED_FUNCTION = YES;
184 | GCC_WARN_UNUSED_VARIABLE = YES;
185 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
186 | MTL_ENABLE_DEBUG_INFO = YES;
187 | ONLY_ACTIVE_ARCH = YES;
188 | SDKROOT = iphoneos;
189 | };
190 | name = Debug;
191 | };
192 | 58B511EE1A9E6C8500147676 /* Release */ = {
193 | isa = XCBuildConfiguration;
194 | buildSettings = {
195 | ALWAYS_SEARCH_USER_PATHS = NO;
196 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
197 | CLANG_CXX_LIBRARY = "libc++";
198 | CLANG_ENABLE_MODULES = YES;
199 | CLANG_ENABLE_OBJC_ARC = YES;
200 | CLANG_WARN_BOOL_CONVERSION = YES;
201 | CLANG_WARN_CONSTANT_CONVERSION = YES;
202 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
203 | CLANG_WARN_EMPTY_BODY = YES;
204 | CLANG_WARN_ENUM_CONVERSION = YES;
205 | CLANG_WARN_INFINITE_RECURSION = YES;
206 | CLANG_WARN_INT_CONVERSION = YES;
207 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
208 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
209 | CLANG_WARN_UNREACHABLE_CODE = YES;
210 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
211 | COPY_PHASE_STRIP = YES;
212 | ENABLE_NS_ASSERTIONS = NO;
213 | ENABLE_STRICT_OBJC_MSGSEND = YES;
214 | GCC_C_LANGUAGE_STANDARD = gnu99;
215 | GCC_NO_COMMON_BLOCKS = YES;
216 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
217 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
218 | GCC_WARN_UNDECLARED_SELECTOR = YES;
219 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
220 | GCC_WARN_UNUSED_FUNCTION = YES;
221 | GCC_WARN_UNUSED_VARIABLE = YES;
222 | IPHONEOS_DEPLOYMENT_TARGET = 8.0;
223 | MTL_ENABLE_DEBUG_INFO = NO;
224 | SDKROOT = iphoneos;
225 | VALIDATE_PRODUCT = YES;
226 | };
227 | name = Release;
228 | };
229 | 58B511F01A9E6C8500147676 /* Debug */ = {
230 | isa = XCBuildConfiguration;
231 | buildSettings = {
232 | FRAMEWORK_SEARCH_PATHS = (
233 | "$(inherited)",
234 | "$(PROJECT_DIR)",
235 | "$(PROJECT_DIR)/../../../ios/Frameworks/**",
236 | "$(PROJECT_DIR)/../../../ios/Pods/**",
237 | );
238 | HEADER_SEARCH_PATHS = (
239 | "$(inherited)",
240 | /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
241 | "$(SRCROOT)/../../../React/**",
242 | "$(SRCROOT)/../../react-native/React/**",
243 | "${SRCROOT}/../../../ios/Pods/**",
244 | );
245 | LIBRARY_SEARCH_PATHS = "$(inherited)";
246 | OTHER_CFLAGS = (
247 | "-fmodules",
248 | "-fcxx-modules",
249 | );
250 | OTHER_LDFLAGS = "-ObjC";
251 | PRODUCT_NAME = RNTensorIO;
252 | SKIP_INSTALL = YES;
253 | };
254 | name = Debug;
255 | };
256 | 58B511F11A9E6C8500147676 /* Release */ = {
257 | isa = XCBuildConfiguration;
258 | buildSettings = {
259 | FRAMEWORK_SEARCH_PATHS = (
260 | "$(inherited)",
261 | "$(PROJECT_DIR)",
262 | "$(PROJECT_DIR)/../../../ios/Frameworks/**",
263 | "$(PROJECT_DIR)/../../../ios/Pods/**",
264 | );
265 | HEADER_SEARCH_PATHS = (
266 | "$(inherited)",
267 | /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
268 | "$(SRCROOT)/../../../React/**",
269 | "$(SRCROOT)/../../react-native/React/**",
270 | "${SRCROOT}/../../../ios/Pods/**",
271 | );
272 | LIBRARY_SEARCH_PATHS = "$(inherited)";
273 | OTHER_CFLAGS = (
274 | "-fmodules",
275 | "-fcxx-modules",
276 | );
277 | OTHER_LDFLAGS = "-ObjC";
278 | PRODUCT_NAME = RNTensorIO;
279 | SKIP_INSTALL = YES;
280 | };
281 | name = Release;
282 | };
283 | /* End XCBuildConfiguration section */
284 |
285 | /* Begin XCConfigurationList section */
286 | 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "RNTensorIO" */ = {
287 | isa = XCConfigurationList;
288 | buildConfigurations = (
289 | 58B511ED1A9E6C8500147676 /* Debug */,
290 | 58B511EE1A9E6C8500147676 /* Release */,
291 | );
292 | defaultConfigurationIsVisible = 0;
293 | defaultConfigurationName = Release;
294 | };
295 | 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "RNTensorIO" */ = {
296 | isa = XCConfigurationList;
297 | buildConfigurations = (
298 | 58B511F01A9E6C8500147676 /* Debug */,
299 | 58B511F11A9E6C8500147676 /* Release */,
300 | );
301 | defaultConfigurationIsVisible = 0;
302 | defaultConfigurationName = Release;
303 | };
304 | /* End XCConfigurationList section */
305 | };
306 | rootObject = 58B511D31A9E6C8500147676 /* Project object */;
307 | }
308 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 |
2 | {
3 | "name": "react-native-tensorio",
4 | "version": "0.7.0",
5 | "description": "Machine Learning for React Native with TensorIO and TensorFlow Lite",
6 | "main": "index.js",
7 | "scripts": {
8 | "test": "echo \"Error: no test specified\" && exit 1"
9 | },
10 | "repository": {
11 | "type": "git",
12 | "url": "https://github.com/doc-ai/react-native-tensorio.git"
13 | },
14 | "keywords": [
15 | "react-native",
16 | "react-component",
17 | "ios",
18 | "machine learning",
19 | "tensorflow",
20 | "tensorflow lite",
21 | "computer vision",
22 | "tensorio"
23 | ],
24 | "author": "doc.ai (https://doc.ai)",
25 | "license": "Apache 2.0",
26 | "peerDependencies": {
27 | "react-native": "^0.41.2",
28 | "react-native-windows": "0.41.0-rc.1"
29 | },
30 | "homepage": "https://doc.ai"
31 | }
32 |
--------------------------------------------------------------------------------
/windows/.gitignore:
--------------------------------------------------------------------------------
1 | *AppPackages*
2 | *BundleArtifacts*
3 | *ReactAssets*
4 |
5 | #OS junk files
6 | [Tt]humbs.db
7 | *.DS_Store
8 |
9 | #Visual Studio files
10 | *.[Oo]bj
11 | *.user
12 | *.aps
13 | *.pch
14 | *.vspscc
15 | *.vssscc
16 | *_i.c
17 | *_p.c
18 | *.ncb
19 | *.suo
20 | *.tlb
21 | *.tlh
22 | *.bak
23 | *.[Cc]ache
24 | *.ilk
25 | *.log
26 | *.lib
27 | *.sbr
28 | *.sdf
29 | *.opensdf
30 | *.opendb
31 | *.unsuccessfulbuild
32 | ipch/
33 | [Oo]bj/
34 | [Bb]in
35 | [Dd]ebug*/
36 | [Rr]elease*/
37 | Ankh.NoLoad
38 |
39 | #MonoDevelop
40 | *.pidb
41 | *.userprefs
42 |
43 | #Tooling
44 | _ReSharper*/
45 | *.resharper
46 | [Tt]est[Rr]esult*
47 | *.sass-cache
48 |
49 | #Project files
50 | [Bb]uild/
51 |
52 | #Subversion files
53 | .svn
54 |
55 | # Office Temp Files
56 | ~$*
57 |
58 | # vim Temp Files
59 | *~
60 |
61 | #NuGet
62 | packages/
63 | *.nupkg
64 |
65 | #ncrunch
66 | *ncrunch*
67 | *crunch*.local.xml
68 |
69 | # visual studio database projects
70 | *.dbmdl
71 |
72 | #Test files
73 | *.testsettings
74 |
75 | #Other files
76 | *.DotSettings
77 | .vs/
78 | *project.lock.json
79 |
--------------------------------------------------------------------------------
/windows/.npmignore:
--------------------------------------------------------------------------------
1 |
2 | # Make sure we don't publish build artifacts to NPM
3 | ARM/
4 | Debug/
5 | x64/
6 | x86/
7 | bin/
8 | obj/
9 | .vs/
10 |
--------------------------------------------------------------------------------
/windows/RNTensorIO.sln:
--------------------------------------------------------------------------------
1 | Microsoft Visual Studio Solution File, Format Version 12.00
2 | # Visual Studio 14
3 | VisualStudioVersion = 14.0.25123.0
4 | MinimumVisualStudioVersion = 10.0.40219.1
5 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RNTensorIO", "RNTensorIO\RNTensorIO.csproj", "{A13ABEE0-240C-11E9-8F53-4F743DB53201}"
6 | EndProject
7 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ReactNative", "..\node_modules\react-native-windows\ReactWindows\ReactNative\ReactNative.csproj", "{C7673AD5-E3AA-468C-A5FD-FA38154E205C}"
8 | EndProject
9 | Project("{D954291E-2A0B-460D-934E-DC6B0785DB48}") = "ReactNative.Shared", "..\node_modules\react-native-windows\ReactWindows\ReactNative.Shared\ReactNative.Shared.shproj", "{EEA8B852-4D07-48E1-8294-A21AB5909FE5}"
10 | EndProject
11 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ChakraBridge", "..\node_modules\react-native-windows\ReactWindows\ChakraBridge\ChakraBridge.vcxproj", "{4B72C796-16D5-4E3A-81C0-3E36F531E578}"
12 | EndProject
13 | Global
14 | GlobalSection(SharedMSBuildProjectFiles) = preSolution
15 | ..\node_modules\react-native-windows\ReactWindows\ReactNative.Shared\ReactNative.Shared.projitems*{c7673ad5-e3aa-468c-a5fd-fa38154e205c}*SharedItemsImports = 4
16 | ..\node_modules\react-native-windows\ReactWindows\ReactNative.Shared\ReactNative.Shared.projitems*{eea8b852-4d07-48e1-8294-a21ab5909fe5}*SharedItemsImports = 13
17 | EndGlobalSection
18 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
19 | Debug|ARM = Debug|ARM
20 | Debug|x64 = Debug|x64
21 | Debug|x86 = Debug|x86
22 | Development|ARM = Development|ARM
23 | Development|x64 = Development|x64
24 | Development|x86 = Development|x86
25 | Release|ARM = Release|ARM
26 | Release|x64 = Release|x64
27 | Release|x86 = Release|x86
28 | EndGlobalSection
29 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
30 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Debug|ARM.ActiveCfg = Debug|ARM
31 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Debug|ARM.Build.0 = Debug|ARM
32 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Debug|x64.ActiveCfg = Debug|x64
33 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Debug|x64.Build.0 = Debug|x64
34 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Debug|x86.ActiveCfg = Debug|x86
35 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Debug|x86.Build.0 = Debug|x86
36 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Development|ARM.ActiveCfg = Development|ARM
37 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Development|ARM.Build.0 = Development|ARM
38 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Development|x64.ActiveCfg = Development|x64
39 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Development|x64.Build.0 = Development|x64
40 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Development|x86.ActiveCfg = Development|x86
41 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Development|x86.Build.0 = Development|x86
42 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Release|ARM.ActiveCfg = Release|ARM
43 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Release|ARM.Build.0 = Release|ARM
44 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Release|x64.ActiveCfg = Release|x64
45 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Release|x64.Build.0 = Release|x64
46 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Release|x86.ActiveCfg = Release|x86
47 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}.Release|x86.Build.0 = Release|x86
48 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Debug|ARM.ActiveCfg = Debug|ARM
49 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Debug|ARM.Build.0 = Debug|ARM
50 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Debug|x64.ActiveCfg = Debug|x64
51 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Debug|x64.Build.0 = Debug|x64
52 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Debug|x86.ActiveCfg = Debug|x86
53 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Debug|x86.Build.0 = Debug|x86
54 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Development|ARM.ActiveCfg = Debug|ARM
55 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Development|ARM.Build.0 = Debug|ARM
56 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Development|x64.ActiveCfg = Debug|x64
57 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Development|x64.Build.0 = Debug|x64
58 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Development|x86.ActiveCfg = Debug|x86
59 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Development|x86.Build.0 = Debug|x86
60 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Release|ARM.ActiveCfg = Release|ARM
61 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Release|ARM.Build.0 = Release|ARM
62 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Release|x64.ActiveCfg = Release|x64
63 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Release|x64.Build.0 = Release|x64
64 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Release|x86.ActiveCfg = Release|x86
65 | {C7673AD5-E3AA-468C-A5FD-FA38154E205C}.Release|x86.Build.0 = Release|x86
66 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Debug|ARM.ActiveCfg = Debug|ARM
67 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Debug|ARM.Build.0 = Debug|ARM
68 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Debug|x64.ActiveCfg = Debug|x64
69 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Debug|x64.Build.0 = Debug|x64
70 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Debug|x86.ActiveCfg = Debug|Win32
71 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Debug|x86.Build.0 = Debug|Win32
72 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Development|ARM.ActiveCfg = Debug|ARM
73 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Development|ARM.Build.0 = Debug|ARM
74 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Development|x64.ActiveCfg = Debug|x64
75 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Development|x64.Build.0 = Debug|x64
76 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Development|x86.ActiveCfg = Debug|Win32
77 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Development|x86.Build.0 = Debug|Win32
78 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Release|ARM.ActiveCfg = Release|ARM
79 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Release|ARM.Build.0 = Release|ARM
80 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Release|x64.ActiveCfg = Release|x64
81 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Release|x64.Build.0 = Release|x64
82 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Release|x86.ActiveCfg = Release|Win32
83 | {4B72C796-16D5-4E3A-81C0-3E36F531E578}.Release|x86.Build.0 = Release|Win32
84 | EndGlobalSection
85 | GlobalSection(SolutionProperties) = preSolution
86 | HideSolutionNode = FALSE
87 | EndGlobalSection
88 | EndGlobal
89 |
--------------------------------------------------------------------------------
/windows/RNTensorIO/Properties/AssemblyInfo.cs:
--------------------------------------------------------------------------------
1 | using System.Reflection;
2 | using System.Runtime.CompilerServices;
3 | using System.Runtime.InteropServices;
4 |
5 | // General Information about an assembly is controlled through the following
6 | // set of attributes. Change these attribute values to modify the information
7 | // associated with an assembly.
8 | [assembly: AssemblyTitle("RNTensorIO")]
9 | [assembly: AssemblyDescription("")]
10 | [assembly: AssemblyConfiguration("")]
11 | [assembly: AssemblyCompany("")]
12 | [assembly: AssemblyProduct("RNTensorIO")]
13 | [assembly: AssemblyCopyright("Copyright © 2016")]
14 | [assembly: AssemblyTrademark("")]
15 | [assembly: AssemblyCulture("")]
16 |
17 | // Version information for an assembly consists of the following four values:
18 | //
19 | // Major Version
20 | // Minor Version
21 | // Build Number
22 | // Revision
23 | //
24 | // You can specify all the values or you can default the Build and Revision Numbers
25 | // by using the '*' as shown below:
26 | // [assembly: AssemblyVersion("1.0.*")]
27 | [assembly: AssemblyVersion("1.0.0.0")]
28 | [assembly: AssemblyFileVersion("1.0.0.0")]
29 | [assembly: ComVisible(false)]
30 |
--------------------------------------------------------------------------------
/windows/RNTensorIO/Properties/RNTensorIO.rd.xml:
--------------------------------------------------------------------------------
1 | <!--
2 | This file contains Runtime Directives used by .NET Native. The defaults here are suitable for most
3 | developers. However, you can modify these parameters to modify the behavior of the .NET Native
4 | optimizer.
5 |
6 | Runtime Directives are documented at http://go.microsoft.com/fwlink/?LinkID=391919
7 |
8 | To fully enable reflection for App1.MyClass and all of its public/private members
9 | <Type Name="App1.MyClass" Dynamic="Required All"/>
10 |
11 | To enable dynamic creation of the specific instantiation of AppClass<T> over System.Int32
12 | <TypeInstantiation Name="App1.AppClass" Arguments="System.Int32" Dynamic="Required Public" />
13 |
14 | Using the Namespace directive to apply reflection policy to all the types in a particular namespace
15 | <Namespace Name="DataClasses.ViewModels" Serialize="All" />
16 | -->
17 |
18 | <Directives xmlns="http://schemas.microsoft.com/netfx/2013/01/metadata">
19 | <Application>
20 | <!--
21 | An Assembly element with Name="*Application*" applies to all assemblies in
22 | the application package. The asterisks are not wildcards.
23 | -->
24 | <Assembly Name="*Application*" Dynamic="Required All" />
25 |
26 | <!-- Add your application specific runtime directives here. -->
27 |
28 | </Application>
29 | </Directives>
--------------------------------------------------------------------------------
/windows/RNTensorIO/RNTensorIO.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Debug
6 | x86
7 | {A13ABEE0-240C-11E9-8F53-4F743DB53201}
8 | Library
9 | Properties
10 | Tensor.IO
11 | Tensor.IO
12 | en-US
13 | UAP
14 | 10.0.10586.0
15 | 10.0.10240.0
16 | 14
17 | 512
18 | {A5A43C5B-DE2A-4C0C-9213-0A381AF9435A};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}
19 | ..\..\node_modules
20 |
21 |
22 | ..\..
23 |
24 |
25 | x86
26 | true
27 | bin\x86\Debug\
28 | DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
29 | ;2008
30 | full
31 | x86
32 | false
33 | prompt
34 |
35 |
36 | x86
37 | bin\x86\Release\
38 | TRACE;NETFX_CORE;WINDOWS_UWP
39 | true
40 | ;2008
41 | pdbonly
42 | x86
43 | false
44 | prompt
45 |
46 |
47 | ARM
48 | true
49 | bin\ARM\Debug\
50 | DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
51 | ;2008
52 | full
53 | ARM
54 | false
55 | prompt
56 |
57 |
58 | ARM
59 | bin\ARM\Release\
60 | TRACE;NETFX_CORE;WINDOWS_UWP
61 | true
62 | ;2008
63 | pdbonly
64 | ARM
65 | false
66 | prompt
67 |
68 |
69 | x64
70 | true
71 | bin\x64\Debug\
72 | DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
73 | ;2008
74 | full
75 | x64
76 | false
77 | prompt
78 |
79 |
80 | x64
81 | bin\x64\Release\
82 | TRACE;NETFX_CORE;WINDOWS_UWP
83 | true
84 | ;2008
85 | pdbonly
86 | x64
87 | false
88 | prompt
89 |
90 |
91 | true
92 | bin\x86\Development\
93 | DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
94 | ;2008
95 | true
96 | full
97 | x86
98 | false
99 | prompt
100 | MinimumRecommendedRules.ruleset
101 |
102 |
103 | true
104 | bin\ARM\Development\
105 | DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
106 | ;2008
107 | true
108 | full
109 | ARM
110 | false
111 | prompt
112 | MinimumRecommendedRules.ruleset
113 |
114 |
115 | true
116 | bin\x64\Development\
117 | DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
118 | ;2008
119 | true
120 | full
121 | x64
122 | false
123 | prompt
124 | MinimumRecommendedRules.ruleset
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 | {c7673ad5-e3aa-468c-a5fd-fa38154e205c}
139 | ReactNative
140 |
141 |
142 |
143 | 14.0
144 |
145 |
146 |
153 |
154 |
--------------------------------------------------------------------------------
/windows/RNTensorIO/RNTensorIOModule.cs:
--------------------------------------------------------------------------------
1 | using ReactNative.Bridge;
2 | using System;
3 | using System.Collections.Generic;
4 | using Windows.ApplicationModel.Core;
5 | using Windows.UI.Core;
6 |
7 | namespace Tensor.IO.RNTensorIO
8 | {
9 | /// <summary>
10 | /// A module that allows JS to share data.
11 | /// </summary>
12 | class RNTensorIOModule : NativeModuleBase
13 | {
14 | /// <summary>
15 | /// Instantiates the <see cref="RNTensorIOModule"/>.
16 | /// </summary>
17 | internal RNTensorIOModule()
18 | {
19 |
20 | }
21 |
22 | /// <summary>
23 | /// The name of the native module.
24 | /// </summary>
25 | public override string Name
26 | {
27 | get
28 | {
29 | return "RNTensorIO";
30 | }
31 | }
32 | }
33 | }
34 |
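
The Windows module above is a stub: it exposes its name to the bridge but no methods. As a rough sketch of how a call could be surfaced to JavaScript, assuming the legacy C# react-native-windows bridge types (`NativeModuleBase`, `[ReactMethod]`, and `ICallback` from `ReactNative.Bridge`), a model-loading method might look like the following; the `load` name and its signature are illustrative, not part of this package:

```csharp
using ReactNative.Bridge;

namespace Tensor.IO.RNTensorIO
{
    // Sketch only: a bridge method in the style of the iOS module.
    class RNTensorIOModuleSketch : NativeModuleBase
    {
        public override string Name
        {
            get { return "RNTensorIO"; }
        }

        // [ReactMethod] marks the method as callable from JavaScript in the
        // legacy C# bridge; ICallback returns a result to the JS caller.
        [ReactMethod]
        public void load(string name, ICallback callback)
        {
            // A real implementation would locate and load a TensorIO model
            // bundle here before invoking the callback.
            callback.Invoke();
        }
    }
}
```
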
--------------------------------------------------------------------------------
/windows/RNTensorIO/RNTensorIOPackage.cs:
--------------------------------------------------------------------------------
1 | using ReactNative.Bridge;
2 | using ReactNative.Modules.Core;
3 | using ReactNative.UIManager;
4 | using System;
5 | using System.Collections.Generic;
6 |
7 | namespace Tensor.IO.RNTensorIO
8 | {
9 | /// <summary>
10 | /// Package defining core framework modules (e.g., <see cref="UIManagerModule"/>).
11 | /// It should be used for modules that require special integration with
12 | /// other framework parts (e.g., with the list of packages to load view
13 | /// managers from).
14 | /// </summary>
15 | public class RNTensorIOPackage : IReactPackage
16 | {
17 | /// <summary>
18 | /// Creates the list of native modules to register with the react
19 | /// instance.
20 | /// </summary>
21 | /// <param name="reactContext">The react application context.</param>
22 | /// <returns>The list of native modules.</returns>
23 | public IReadOnlyList<INativeModule> CreateNativeModules(ReactContext reactContext)
24 | {
25 | return new List<INativeModule>
26 | {
27 | new RNTensorIOModule(),
28 | };
29 | }
30 |
31 | /// <summary>
32 | /// Creates the list of JavaScript modules to register with the
33 | /// react instance.
34 | /// </summary>
35 | /// <returns>The list of JavaScript modules.</returns>
36 | public IReadOnlyList<Type> CreateJavaScriptModulesConfig()
37 | {
38 | return new List<Type>(0);
39 | }
40 |
41 | /// <summary>
42 | /// Creates the list of view managers that should be registered with
43 | /// the <see cref="UIManagerModule"/>.
44 | /// </summary>
45 | /// <param name="reactContext">The react application context.</param>
46 | /// <returns>The list of view managers.</returns>
47 | public IReadOnlyList<IViewManager> CreateViewManagers(
48 | ReactContext reactContext)
49 | {
50 | return new List<IViewManager>(0);
51 | }
52 | }
53 | }
54 |
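
Registering the package in a UWP host then follows the usual legacy react-native-windows pattern of appending it to the page's package list. The host class below is hypothetical, and the member names assume the legacy C# `ReactPage` template rather than anything defined in this package:

```csharp
using System.Collections.Generic;
using ReactNative;
using ReactNative.Modules.Core;
using ReactNative.Shell;
using Tensor.IO.RNTensorIO;

// Hypothetical host page: the point is only that RNTensorIOPackage is
// added alongside MainReactPackage.
class MainPage : ReactPage
{
    public override string MainComponentName
    {
        get { return "ExampleApp"; } // hypothetical JS app registration name
    }

    public override List<IReactPackage> Packages
    {
        get
        {
            return new List<IReactPackage>
            {
                new MainReactPackage(),
                new RNTensorIOPackage(),
            };
        }
    }
}
```
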
--------------------------------------------------------------------------------
/windows/RNTensorIO/project.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "Microsoft.NETCore.UniversalWindowsPlatform": "5.2.2"
4 | },
5 | "frameworks": {
6 | "uap10.0": {}
7 | },
8 | "runtimes": {
9 | "win10-arm": {},
10 | "win10-arm-aot": {},
11 | "win10-x86": {},
12 | "win10-x86-aot": {},
13 | "win10-x64": {},
14 | "win10-x64-aot": {}
15 | }
16 | }
17 |
--------------------------------------------------------------------------------