├── .gitignore ├── .npmignore ├── .travis.yml ├── LICENSE ├── README.md ├── android ├── build.gradle ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew └── src │ └── main │ ├── AndroidManifest.xml │ └── java │ └── com │ └── rntensorflow │ ├── RNTensorFlowGraphModule.java │ ├── RNTensorFlowGraphOperationsModule.java │ ├── RNTensorFlowInferenceModule.java │ ├── RNTensorFlowPackage.java │ ├── RNTensorflowInference.java │ ├── ResourceManager.java │ ├── converter │ ├── ArrayConverter.java │ └── OutputConverter.java │ └── imagerecognition │ ├── ImageRecognizer.java │ └── RNImageRecognizerModule.java ├── examples └── ImageRecognitionExample │ ├── .babelrc │ ├── .buckconfig │ ├── .flowconfig │ ├── .gitattributes │ ├── .gitignore │ ├── .watchmanconfig │ ├── App.js │ ├── __tests__ │ └── App.js │ ├── android │ ├── app │ │ ├── BUCK │ │ ├── build.gradle │ │ ├── proguard-rules.pro │ │ └── src │ │ │ └── main │ │ │ ├── AndroidManifest.xml │ │ │ ├── java │ │ │ └── com │ │ │ │ └── imagerecognitionexample │ │ │ │ ├── MainActivity.java │ │ │ │ └── MainApplication.java │ │ │ └── res │ │ │ ├── mipmap-hdpi │ │ │ └── ic_launcher.png │ │ │ ├── mipmap-mdpi │ │ │ └── ic_launcher.png │ │ │ ├── mipmap-xhdpi │ │ │ └── ic_launcher.png │ │ │ ├── mipmap-xxhdpi │ │ │ └── ic_launcher.png │ │ │ └── values │ │ │ ├── strings.xml │ │ │ └── styles.xml │ ├── build.gradle │ ├── gradle.properties │ ├── gradle │ │ └── wrapper │ │ │ ├── gradle-wrapper.jar │ │ │ └── gradle-wrapper.properties │ ├── gradlew │ ├── keystores │ │ ├── BUCK │ │ └── debug.keystore.properties │ └── settings.gradle │ ├── app.json │ ├── assets │ ├── dumbbell.jpg │ ├── tensorflow_inception_graph.pb │ └── tensorflow_labels.txt │ ├── index.js │ ├── ios │ ├── ImageRecognitionExample-tvOS │ │ └── Info.plist │ ├── ImageRecognitionExample-tvOSTests │ │ └── Info.plist │ ├── ImageRecognitionExample.xcodeproj │ │ ├── project.pbxproj │ │ └── xcshareddata │ │ │ └── xcschemes │ │ │ ├── 
ImageRecognitionExample-tvOS.xcscheme │ │ │ └── ImageRecognitionExample.xcscheme │ ├── ImageRecognitionExample.xcworkspace │ │ └── contents.xcworkspacedata │ ├── ImageRecognitionExample │ │ ├── AppDelegate.h │ │ ├── AppDelegate.m │ │ ├── Base.lproj │ │ │ └── LaunchScreen.xib │ │ ├── Images.xcassets │ │ │ ├── AppIcon.appiconset │ │ │ │ └── Contents.json │ │ │ └── Contents.json │ │ ├── Info.plist │ │ └── main.m │ ├── ImageRecognitionExampleTests │ │ ├── ImageRecognitionExampleTests.m │ │ └── Info.plist │ └── Podfile │ ├── package.json │ └── rn-cli.config.js ├── index.js ├── ios ├── ImageRecognizer.h ├── ImageRecognizer.mm ├── RNImageRecognition.h ├── RNImageRecognition.mm ├── RNTensorFlowGraph.h ├── RNTensorFlowGraph.mm ├── RNTensorFlowGraphOperations.h ├── RNTensorFlowGraphOperations.mm ├── RNTensorFlowInference.h ├── RNTensorFlowInference.mm ├── RNTensorflow.xcodeproj │ ├── project.pbxproj │ └── project.xcworkspace │ │ └── contents.xcworkspacedata ├── RNTensorflow.xcworkspace │ └── contents.xcworkspacedata ├── TensorFlowInference.h └── TensorFlowInference.mm └── package.json /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | *.iws 3 | out/ 4 | 5 | logs 6 | *.log 7 | npm-debug.log* 8 | yarn-debug.log* 9 | yarn-error.log* 10 | pids 11 | *.pid 12 | *.seed 13 | *.pid.lock 14 | build/Release 15 | node_modules/ 16 | .eslintcache 17 | package-lock.json 18 | yarn.lock 19 | 20 | build/ 21 | DerivedData/ 22 | *.pbxuser 23 | !default.pbxuser 24 | *.mode1v3 25 | !default.mode1v3 26 | *.mode2v3 27 | !default.mode2v3 28 | *.perspectivev3 29 | !default.perspectivev3 30 | xcuserdata/ 31 | *.moved-aside 32 | *.xccheckout 33 | *.xcscmblueprint 34 | *.hmap 35 | *.ipa 36 | *.dSYM.zip 37 | *.dSYM 38 | Pods 39 | Podfile.lock 40 | .DS_Store 41 | 42 | *.apk 43 | *.dex 44 | *.class 45 | bin/ 46 | gen/ 47 | .gradle/ 48 | local.properties 49 | proguard/ 50 | .navigation/ 51 | captures/ 52 | *.iml 53 | gradlew.bat 54 | 
-------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | examples/ 2 | 3 | .idea 4 | *.iws 5 | out/ 6 | 7 | logs 8 | *.log 9 | npm-debug.log* 10 | yarn-debug.log* 11 | yarn-error.log* 12 | pids 13 | *.pid 14 | *.seed 15 | *.pid.lock 16 | build/Release 17 | node_modules/ 18 | .eslintcache 19 | package-lock.json 20 | yarn.lock 21 | 22 | build/ 23 | DerivedData/ 24 | *.pbxuser 25 | !default.pbxuser 26 | *.mode1v3 27 | !default.mode1v3 28 | *.mode2v3 29 | !default.mode2v3 30 | *.perspectivev3 31 | !default.perspectivev3 32 | xcuserdata/ 33 | *.moved-aside 34 | *.xccheckout 35 | *.xcscmblueprint 36 | *.hmap 37 | *.ipa 38 | *.dSYM.zip 39 | *.dSYM 40 | Pods 41 | Podfile.lock 42 | .DS_Store 43 | 44 | *.apk 45 | *.dex 46 | *.class 47 | bin/ 48 | gen/ 49 | .gradle/ 50 | local.properties 51 | proguard/ 52 | .navigation/ 53 | captures/ 54 | *.iml 55 | gradlew.bat 56 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: android 2 | sudo: required 3 | jdk: oraclejdk8 4 | os: linux 5 | 6 | before_cache: 7 | - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock 8 | - rm -fr $HOME/.gradle/caches/*/plugin-resolution/ 9 | 10 | cache: 11 | directories: 12 | - $HOME/.yarn-cache 13 | - $HOME/.gradle/caches/ 14 | - $HOME/.gradle/wrapper/ 15 | 16 | before_install: 17 | - nvm install 8 18 | - node --version 19 | - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - 20 | - echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list 21 | - sudo apt-get update -qq 22 | - sudo apt-get install -y -qq yarn 23 | install: 24 | - yarn 25 | - yarn add react-native 26 | android: 27 | components: 28 | - tools 29 | - platform-tools 30 | - build-tools-26.0.1 31 | - android-23 32 | - 
sys-img-armeabi-v7a-android-22 33 | script: 34 | - cd android && ./gradlew assembleDebug 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # react-native-tensorflow 3 | 4 | **Note: This project is not maintained anymore** 5 | 6 | A TensorFlow inference library for react native. 7 | It follows the android inference api from TensorFlow: https://github.com/tensorflow/tensorflow/tree/master/tensorflow/contrib/android 8 | 9 | ## Getting started 10 | 11 | `$ npm install react-native-tensorflow --save` 12 | 13 | ### Linking 14 | 15 | `$ react-native link react-native-tensorflow` 16 | 17 | #### Additional steps for iOS 18 | 19 | For the iOS setup you will need CocoaPods. 20 | 21 | Create a Podfile in the iOS directory with the following content: 22 | ``` 23 | target '' 24 | pod 'TensorFlow-experimental' 25 | ``` 26 | 27 | Then run `pod install`. 28 | 29 | ## Usage 30 | 31 | This library provides a api to directly interact with TensorFlow and a simple image recognition api. 32 | For most use cases for image recognition the image recognition api should suffice. 
33 | 34 | ### Image recognition 35 | 36 | First you need to add the TensorFlow model as well as the label file to the project. There are a few ways to do that as described [here](#fetching-files) 37 | 38 | Next you need to initialize the TfImageRecognition class using the model and label files and then call the `recognize` function of the class with the image to recognize: 39 | 40 | ```javascript 41 | import { TfImageRecognition } from 'react-native-tensorflow'; 42 | 43 | const tfImageRecognition = new TfImageRecognition({ 44 | model: require('./assets/tensorflow_inception_graph.pb'), 45 | labels: require('./assets/tensorflow_labels.txt'), 46 | imageMean: 117, // Optional, defaults to 117 47 | imageStd: 1 // Optional, defaults to 1 48 | }) 49 | 50 | const results = await tfImageRecognition.recognize({ 51 | image: require('./assets/apple.jpg'), 52 | inputName: "input", //Optional, defaults to "input" 53 | inputSize: 224, //Optional, defaults to 224 54 | outputName: "output", //Optional, defaults to "output" 55 | maxResults: 3, //Optional, defaults to 3 56 | threshold: 0.1, //Optional, defaults to 0.1 57 | }) 58 | 59 | results.forEach(result => 60 | console.log( 61 | result.id, // Id of the result 62 | result.name, // Name of the result 63 | result.confidence // Confidence value between 0 - 1 64 | ) 65 | ) 66 | 67 | await tfImageRecognition.close() // Necessary in order to release objects on native side 68 | ``` 69 | 70 | ### Direct API 71 | *Note: It is not recommended to use this API as it has some major problem described in [the second point in the known issues](https://github.com/reneweb/react-native-tensorflow/blob/master/README.md#known-issues) and is quite difficult to use in its current state.* 72 | 73 | First you need to add the TensorFlow model to the project. 
There are a few ways to do that as described [here](#fetching-files) 74 | 75 | After adding the model and creating a TensorFlow instance using the model you will need to feed your data as a array providing the input name, shape and data type. 76 | Then run the inference and lastly fetch the result. 77 | 78 | ```javascript 79 | import { TensorFlow } from 'react-native-tensorflow'; 80 | 81 | const tf = new TensorFlow('tensorflow_inception_graph.pb') 82 | await tf.feed({name: "inputName", data: [1,2,3], shape:[1,2,4], dtype: "int64"}) 83 | await tf.run(['outputNames']) 84 | const output = await tf.fetch('outputName') 85 | console.log(output) 86 | 87 | ``` 88 | 89 | Check the android TensorFlow example for more information on the API: https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/android/src/org/tensorflow/demo/TensorFlowImageClassifier.java 90 | 91 | ### Fetching files 92 | 93 | - Add as react native asset 94 | 95 | Create the file `rn-cli.config.js` in the root of the project and add the following code where the array contains all the file endings you want to bundle (in this case we bundle pb and txt files next to the defaults). 96 | ``` 97 | module.exports = { 98 | getAssetExts() { 99 | return ['pb', 'txt'] 100 | } 101 | } 102 | ``` 103 | Then you can require the asset in the code, for example: `require('assets/tensorflow_inception_graph.pb')` 104 | 105 | - Add as iOS / Android asset 106 | 107 | Put the file in the android/src/main/assets folder for Android and for iOS put the file, using XCode, in the root of the project. In the code you can just reference the file path for the asset. 108 | 109 | - Load from file system 110 | 111 | Put the file into the file system and reference using the file path. 112 | 113 | - Fetch via url 114 | 115 | Pass a url to fetch the file from a url. This won't store it locally, thus the next time the code is executed it will fetch it again. 
116 | 117 | ## Supported data types 118 | - DOUBLE 119 | - FLOAT 120 | - INT32 121 | - INT64 122 | - UINT8 123 | - BOOL - On Android will be converted into a byte array 124 | - STRING - On Android will be converted into a byte array 125 | 126 | ## Known issues 127 | - When using the image recognition api the results don't match exactly between Android and iOS. Most of the time they seem reasonable close though. 128 | - When using the direct api the data to feed to TensorFlow needs to be provided on the JS side and is then passed to the native side. Transferring large payloads this way is very inefficient and will likely have a negative performance impact. The same problem exists when loading large data, like images, from the native side into JS side for processing. 129 | - The TensorFlow library itself as well as the TensorFlow models are quite large in size resulting in large builds. 130 | -------------------------------------------------------------------------------- /android/build.gradle: -------------------------------------------------------------------------------- 1 | 2 | buildscript { 3 | repositories { 4 | jcenter() 5 | } 6 | 7 | dependencies { 8 | classpath 'com.android.tools.build:gradle:2.3.0' 9 | } 10 | } 11 | 12 | apply plugin: 'com.android.library' 13 | 14 | android { 15 | compileSdkVersion 23 16 | buildToolsVersion "26.0.1" 17 | 18 | defaultConfig { 19 | minSdkVersion 16 20 | targetSdkVersion 22 21 | versionCode 1 22 | versionName "1.0" 23 | } 24 | lintOptions { 25 | abortOnError false 26 | } 27 | } 28 | 29 | allprojects { 30 | repositories { 31 | maven { 32 | // All of React Native (JS, Android binaries) is installed from npm 33 | url "$rootDir/../node_modules/react-native/android" 34 | } 35 | jcenter() 36 | } 37 | } 38 | 39 | dependencies { 40 | compile 'com.facebook.react:react-native:+' 41 | compile 'org.tensorflow:tensorflow-android:1.3.0' 42 | } 43 | -------------------------------------------------------------------------------- 
/android/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/android/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /android/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | zipStoreBase=GRADLE_USER_HOME 4 | zipStorePath=wrapper/dists 5 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.2.1-bin.zip 6 | -------------------------------------------------------------------------------- /android/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS="" 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 
34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? 
-ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /android/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/RNTensorFlowGraphModule.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow; 2 | 3 | import android.util.Base64; 4 | import com.facebook.react.bridge.Promise; 5 | import com.facebook.react.bridge.ReactApplicationContext; 6 | import com.facebook.react.bridge.ReactContextBaseJavaModule; 7 | import com.facebook.react.bridge.ReactMethod; 8 | import org.tensorflow.Graph; 9 | import org.tensorflow.Operation; 10 | 11 | import java.util.HashMap; 12 | import java.util.Map; 13 | 14 | public class RNTensorFlowGraphModule extends ReactContextBaseJavaModule { 15 | 16 | private ReactApplicationContext reactContext; 17 | 
private Map graphs = new HashMap<>(); 18 | 19 | public RNTensorFlowGraphModule(ReactApplicationContext reactContext) { 20 | super(reactContext); 21 | this.reactContext = reactContext; 22 | } 23 | 24 | @Override 25 | public String getName() { 26 | return "RNTensorFlowGraph"; 27 | } 28 | 29 | @Override 30 | public void onCatalystInstanceDestroy() { 31 | for (String key : graphs.keySet()) { 32 | graphs.get(key).close(); 33 | } 34 | } 35 | 36 | public void init(String id, Graph graph) { 37 | graphs.put(id, graph); 38 | } 39 | 40 | public Operation getOperation(String id, String name) { 41 | Graph graph = graphs.get(id); 42 | if(graph != null && name != null) { 43 | return graph.operation(name); 44 | } else { 45 | return null; 46 | } 47 | } 48 | 49 | @ReactMethod 50 | public void importGraphDef(String id, String graphDef, Promise promise) { 51 | importGraphDefWithPrefix(id, graphDef, "", promise); 52 | } 53 | 54 | @ReactMethod 55 | public void importGraphDefWithPrefix(String id, String graphDef, String prefix, Promise promise) { 56 | try { 57 | Graph graph = graphs.get(id); 58 | graph.importGraphDef(Base64.decode(graphDef, Base64.DEFAULT), prefix); 59 | promise.resolve(true); 60 | } catch (Exception e) { 61 | promise.reject(e); 62 | } 63 | } 64 | 65 | @ReactMethod 66 | public void toGraphDef(String id, Promise promise) { 67 | try { 68 | Graph graph = graphs.get(id); 69 | promise.resolve(Base64.encodeToString(graph.toGraphDef(), Base64.DEFAULT)); 70 | } catch (Exception e) { 71 | promise.reject(e); 72 | } 73 | } 74 | 75 | @ReactMethod 76 | public void close(String id, Promise promise) { 77 | try { 78 | Graph graph = graphs.get(id); 79 | graph.close(); 80 | promise.resolve(true); 81 | } catch (Exception e) { 82 | promise.reject(e); 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/RNTensorFlowGraphOperationsModule.java: 
-------------------------------------------------------------------------------- 1 | package com.rntensorflow; 2 | 3 | import com.facebook.react.bridge.*; 4 | import com.rntensorflow.converter.OutputConverter; 5 | import org.tensorflow.Operation; 6 | import org.tensorflow.Output; 7 | 8 | public class RNTensorFlowGraphOperationsModule extends ReactContextBaseJavaModule { 9 | 10 | public RNTensorFlowGraphOperationsModule(ReactApplicationContext reactContext) { 11 | super(reactContext); 12 | } 13 | 14 | @Override 15 | public String getName() { 16 | return "RNTensorFlowGraphOperations"; 17 | } 18 | 19 | @ReactMethod 20 | public void inputListLength(String id, String opName, String name, Promise promise) { 21 | try { 22 | Operation graphOperation = getGraphOperation(id, opName); 23 | promise.resolve(graphOperation.inputListLength(name)); 24 | } catch (Exception e) { 25 | promise.reject(e); 26 | } 27 | } 28 | 29 | @ReactMethod 30 | public void name(String id, String opName, Promise promise) { 31 | try { 32 | Operation graphOperation = getGraphOperation(id, opName); 33 | promise.resolve(graphOperation.name()); 34 | } catch (Exception e) { 35 | promise.reject(e); 36 | } 37 | } 38 | 39 | @ReactMethod 40 | public void numOutputs(String id, String opName, Promise promise) { 41 | try { 42 | Operation graphOperation = getGraphOperation(id, opName); 43 | promise.resolve(graphOperation.numOutputs()); 44 | } catch (Exception e) { 45 | promise.reject(e); 46 | } 47 | } 48 | 49 | @ReactMethod 50 | public void output(String id, String opName, int index, Promise promise) { 51 | try { 52 | Operation graphOperation = getGraphOperation(id, opName); 53 | promise.resolve(OutputConverter.convert(graphOperation.output(index))); 54 | } catch (Exception e) { 55 | promise.reject(e); 56 | } 57 | } 58 | 59 | @ReactMethod 60 | public void outputList(String id, String opName, int index, int length, Promise promise) { 61 | try { 62 | Operation graphOperation = getGraphOperation(id, opName); 63 | 
Output[] outputs = graphOperation.outputList(index, length); 64 | WritableArray outputsConverted = new WritableNativeArray(); 65 | for (Output output : outputs) { 66 | outputsConverted.pushMap(OutputConverter.convert(output)); 67 | } 68 | promise.resolve(outputsConverted); 69 | } catch (Exception e) { 70 | promise.reject(e); 71 | } 72 | } 73 | 74 | @ReactMethod 75 | public void outputListLength(String id, String opName, String name, Promise promise) { 76 | try { 77 | Operation graphOperation = getGraphOperation(id, opName); 78 | promise.resolve(graphOperation.outputListLength(name)); 79 | } catch (Exception e) { 80 | promise.reject(e); 81 | } 82 | } 83 | 84 | @ReactMethod 85 | public void type(String id, String opName, Promise promise) { 86 | try { 87 | Operation graphOperation = getGraphOperation(id, opName); 88 | promise.resolve(graphOperation.type()); 89 | } catch (Exception e) { 90 | promise.reject(e); 91 | } 92 | } 93 | 94 | private Operation getGraphOperation(String id, String name) { 95 | return getReactApplicationContext().getNativeModule(RNTensorFlowGraphModule.class).getOperation(id, name); 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/RNTensorFlowInferenceModule.java: -------------------------------------------------------------------------------- 1 | 2 | package com.rntensorflow; 3 | 4 | import com.facebook.react.bridge.*; 5 | import org.tensorflow.DataType; 6 | import org.tensorflow.Graph; 7 | import org.tensorflow.Session; 8 | import org.tensorflow.Tensor; 9 | import org.tensorflow.contrib.android.RunStats; 10 | 11 | import java.io.IOException; 12 | import java.io.InputStream; 13 | import java.nio.ByteBuffer; 14 | import java.nio.DoubleBuffer; 15 | import java.nio.FloatBuffer; 16 | import java.nio.IntBuffer; 17 | import java.util.HashMap; 18 | import java.util.List; 19 | import java.util.Map; 20 | 21 | import static 
com.rntensorflow.converter.ArrayConverter.*; 22 | 23 | public class RNTensorFlowInferenceModule extends ReactContextBaseJavaModule { 24 | 25 | private final ReactApplicationContext reactContext; 26 | private Map inferenceMap = new HashMap<>(); 27 | 28 | public RNTensorFlowInferenceModule(ReactApplicationContext reactContext) { 29 | super(reactContext); 30 | this.reactContext = reactContext; 31 | } 32 | 33 | @Override 34 | public String getName() { 35 | return "RNTensorFlowInference"; 36 | } 37 | 38 | @Override 39 | public void onCatalystInstanceDestroy() { 40 | for (String id : inferenceMap.keySet()) { 41 | RNTensorflowInference inference = this.inferenceMap.remove(id); 42 | if(inference != null) { 43 | inference.close(); 44 | } 45 | } 46 | } 47 | 48 | @ReactMethod 49 | public void initTensorFlowInference(String id, String model, Promise promise) { 50 | try { 51 | RNTensorflowInference inference = RNTensorflowInference.init(reactContext, model); 52 | inferenceMap.put(id, inference); 53 | 54 | RNTensorFlowGraphModule graphModule = reactContext.getNativeModule(RNTensorFlowGraphModule.class); 55 | graphModule.init(id, inference.getTfContext().graph); 56 | 57 | promise.resolve(true); 58 | } catch (Exception e) { 59 | promise.reject(e); 60 | } 61 | } 62 | 63 | @ReactMethod 64 | public void feed(String id, ReadableMap data, Promise promise) { 65 | try { 66 | RNTensorflowInference inference = inferenceMap.get(id); 67 | 68 | String inputName = data.getString("name"); 69 | long[] shape = data.hasKey("shape") ? readableArrayToLongArray(data.getArray("shape")) : new long[0]; 70 | 71 | DataType dtype = data.hasKey("dtype") 72 | ? 
DataType.valueOf(data.getString("dtype").toUpperCase()) 73 | : DataType.DOUBLE; 74 | 75 | if(dtype == DataType.DOUBLE) { 76 | double[] srcData = readableArrayToDoubleArray(data.getArray("data")); 77 | inference.feed(inputName, Tensor.create(shape, DoubleBuffer.wrap(srcData))); 78 | } else if(dtype == DataType.FLOAT) { 79 | float[] srcData = readableArrayToFloatArray(data.getArray("data")); 80 | inference.feed(inputName, Tensor.create(shape, FloatBuffer.wrap(srcData))); 81 | } else if(dtype == DataType.INT32) { 82 | int[] srcData = readableArrayToIntArray(data.getArray("data")); 83 | inference.feed(inputName, Tensor.create(shape, IntBuffer.wrap(srcData))); 84 | } else if(dtype == DataType.INT64) { 85 | double[] srcData = readableArrayToDoubleArray(data.getArray("data")); 86 | inference.feed(inputName, Tensor.create(shape, DoubleBuffer.wrap(srcData))); 87 | } else if(dtype == DataType.UINT8) { 88 | int[] srcData = readableArrayToIntArray(data.getArray("data")); 89 | inference.feed(inputName, Tensor.create(shape, IntBuffer.wrap(srcData))); 90 | } else if(dtype == DataType.BOOL) { 91 | byte[] srcData = readableArrayToByteBoolArray(data.getArray("data")); 92 | inference.feed(inputName, Tensor.create(dtype, shape, ByteBuffer.wrap(srcData))); 93 | } else if(dtype == DataType.STRING) { 94 | byte[] srcData = readableArrayToByteStringArray(data.getArray("data")); 95 | inference.feed(inputName, Tensor.create(dtype, shape, ByteBuffer.wrap(srcData))); 96 | } else { 97 | promise.reject(new IllegalArgumentException("Data type is not supported")); 98 | return; 99 | } 100 | promise.resolve(true); 101 | } catch (Exception e) { 102 | promise.reject(e); 103 | } 104 | } 105 | 106 | @ReactMethod 107 | public void run(String id, ReadableArray outputNames, boolean enableStats, Promise promise) { 108 | try { 109 | RNTensorflowInference inference = inferenceMap.get(id); 110 | inference.run(readableArrayToStringArray(outputNames), enableStats); 111 | promise.resolve(true); 112 | } catch 
(Exception e) { 113 | promise.reject(e); 114 | } 115 | } 116 | 117 | @ReactMethod 118 | public void fetch(String id, String outputName, Promise promise) { 119 | try { 120 | RNTensorflowInference inference = inferenceMap.get(id); 121 | promise.resolve(inference.fetch(outputName)); 122 | } catch (Exception e) { 123 | promise.reject(e); 124 | } 125 | } 126 | 127 | @ReactMethod 128 | public void reset(String id, Promise promise) { 129 | try { 130 | RNTensorflowInference inference = inferenceMap.get(id); 131 | inference.getTfContext().reset(); 132 | promise.resolve(true); 133 | } catch (Exception e) { 134 | promise.reject(e); 135 | } 136 | } 137 | 138 | @ReactMethod 139 | public void close(String id, Promise promise) { 140 | try { 141 | RNTensorflowInference inference = inferenceMap.get(id); 142 | inference.close(); 143 | promise.resolve(true); 144 | } catch (Exception e) { 145 | promise.reject(e); 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/RNTensorFlowPackage.java: -------------------------------------------------------------------------------- 1 | 2 | package com.rntensorflow; 3 | 4 | import java.util.Arrays; 5 | import java.util.Collections; 6 | import java.util.List; 7 | 8 | import com.facebook.react.ReactPackage; 9 | import com.facebook.react.bridge.NativeModule; 10 | import com.facebook.react.bridge.ReactApplicationContext; 11 | import com.facebook.react.uimanager.ViewManager; 12 | import com.facebook.react.bridge.JavaScriptModule; 13 | import com.rntensorflow.imagerecognition.RNImageRecognizerModule; 14 | 15 | public class RNTensorFlowPackage implements ReactPackage { 16 | @Override 17 | public List createNativeModules(ReactApplicationContext reactContext) { 18 | return Arrays.asList( 19 | new RNImageRecognizerModule(reactContext), 20 | new RNTensorFlowInferenceModule(reactContext), 21 | new RNTensorFlowGraphModule(reactContext), 22 | new 
RNTensorFlowGraphOperationsModule(reactContext)); 23 | } 24 | 25 | // Deprecated from RN 0.47 26 | public List> createJSModules() { 27 | return Collections.emptyList(); 28 | } 29 | 30 | @Override 31 | public List createViewManagers(ReactApplicationContext reactContext) { 32 | return Collections.emptyList(); 33 | } 34 | } -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/RNTensorflowInference.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow; 2 | 3 | import com.facebook.react.bridge.*; 4 | import org.tensorflow.DataType; 5 | import org.tensorflow.Graph; 6 | import org.tensorflow.Session; 7 | import org.tensorflow.Tensor; 8 | import org.tensorflow.contrib.android.RunStats; 9 | 10 | import java.io.IOException; 11 | import java.nio.ByteBuffer; 12 | import java.nio.DoubleBuffer; 13 | import java.nio.FloatBuffer; 14 | import java.nio.IntBuffer; 15 | import java.util.HashMap; 16 | import java.util.List; 17 | import java.util.Map; 18 | 19 | import static com.rntensorflow.converter.ArrayConverter.*; 20 | import static com.rntensorflow.converter.ArrayConverter.byteArrayToBoolReadableArray; 21 | import static com.rntensorflow.converter.ArrayConverter.intArrayToReadableArray; 22 | 23 | public class RNTensorflowInference { 24 | 25 | private final ReactContext reactContext; 26 | private final TfContext tfContext; 27 | 28 | public RNTensorflowInference(ReactContext reactContext, TfContext tfContext) { 29 | this.reactContext = reactContext; 30 | this.tfContext = tfContext; 31 | } 32 | 33 | public static RNTensorflowInference init(ReactContext reactContext, String model) throws IOException { 34 | loadNativeTf(); 35 | TfContext context = createContext(reactContext, model); 36 | return new RNTensorflowInference(reactContext, context); 37 | } 38 | 39 | private static void loadNativeTf() { 40 | try { 41 | new RunStats(); 42 | } catch 
(UnsatisfiedLinkError ule) { 43 | System.loadLibrary("tensorflow_inference"); 44 | } 45 | } 46 | 47 | private static TfContext createContext(ReactContext reactContext, String model) throws IOException { 48 | byte[] b = new ResourceManager(reactContext).loadResource(model); 49 | 50 | Graph graph = new Graph(); 51 | graph.importGraphDef(b); 52 | Session session = new Session(graph); 53 | Session.Runner runner = session.runner(); 54 | 55 | return new TfContext(session, runner, graph); 56 | } 57 | 58 | public void feed(String inputName, Tensor tensor) { 59 | tfContext.runner.feed(inputName, tensor); 60 | } 61 | 62 | public void run(String[] outputNames, boolean enableStats) { 63 | if(tfContext != null) { 64 | for (String outputName : outputNames) { 65 | tfContext.runner.fetch(outputName); 66 | } 67 | List tensors = tfContext.runner.run(); 68 | 69 | tfContext.outputTensors.clear(); 70 | for (int i = 0; i < outputNames.length; i++) { 71 | tfContext.outputTensors.put(outputNames[i], tensors.get(i)); 72 | } 73 | 74 | } else { 75 | throw new IllegalStateException("Could not find inference for id"); 76 | } 77 | } 78 | 79 | public ReadableArray fetch(String outputName) { 80 | Tensor tensor = tfContext.outputTensors.get(outputName); 81 | int numElements = tensor.numElements(); 82 | 83 | if(tensor.dataType() == DataType.DOUBLE) { 84 | DoubleBuffer dst = DoubleBuffer.allocate(numElements); 85 | tensor.writeTo(dst); 86 | return doubleArrayToReadableArray(dst.array()); 87 | } else if(tensor.dataType() == DataType.FLOAT) { 88 | FloatBuffer dst = FloatBuffer.allocate(numElements); 89 | tensor.writeTo(dst); 90 | return floatArrayToReadableArray(dst.array()); 91 | } else if(tensor.dataType() == DataType.INT32) { 92 | IntBuffer dst = IntBuffer.allocate(numElements); 93 | tensor.writeTo(dst); 94 | return intArrayToReadableArray(dst.array()); 95 | } else if(tensor.dataType() == DataType.INT64) { 96 | DoubleBuffer dst = DoubleBuffer.allocate(numElements); 97 | tensor.writeTo(dst); 98 | 
return doubleArrayToReadableArray(dst.array()); 99 | } else if(tensor.dataType() == DataType.UINT8) { 100 | IntBuffer dst = IntBuffer.allocate(numElements); 101 | tensor.writeTo(dst); 102 | return intArrayToReadableArray(dst.array()); 103 | } else if(tensor.dataType() == DataType.BOOL) { 104 | ByteBuffer dst = ByteBuffer.allocate(numElements); 105 | tensor.writeTo(dst); 106 | return byteArrayToBoolReadableArray(dst.array()); 107 | } else { 108 | throw new IllegalArgumentException("Data type is not supported"); 109 | } 110 | } 111 | 112 | public void close() { 113 | if(tfContext != null) { 114 | tfContext.session.close(); 115 | tfContext.outputTensors.clear(); 116 | } else { 117 | throw new IllegalStateException("Could not find inference for id"); 118 | } 119 | } 120 | 121 | public TfContext getTfContext() { 122 | return tfContext; 123 | } 124 | 125 | public static class TfContext { 126 | final Session session; 127 | Session.Runner runner; 128 | final Graph graph; 129 | private final Map outputTensors; 130 | 131 | TfContext(Session session, Session.Runner runner, Graph graph) { 132 | this.session = session; 133 | this.runner = runner; 134 | this.graph = graph; 135 | outputTensors = new HashMap<>(); 136 | } 137 | 138 | public void reset() { 139 | runner = session.runner(); 140 | outputTensors.clear(); 141 | } 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/ResourceManager.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow; 2 | 3 | import android.content.res.Resources; 4 | import android.webkit.URLUtil; 5 | import com.facebook.react.bridge.ReactContext; 6 | import com.facebook.react.modules.network.OkHttpClientProvider; 7 | import okhttp3.Request; 8 | import okhttp3.Response; 9 | 10 | import java.io.FileInputStream; 11 | import java.io.IOException; 12 | import java.io.InputStream; 13 | 14 | public class 
ResourceManager { 15 | 16 | private ReactContext reactContext; 17 | 18 | public ResourceManager(ReactContext reactContext) { 19 | this.reactContext = reactContext; 20 | } 21 | 22 | public String loadResourceAsString(String resource) { 23 | return new String(loadResource(resource)); 24 | } 25 | 26 | public byte[] loadResource(String resource) { 27 | if(resource.startsWith("file://")) { 28 | return loadFromLocal(resource.substring(7)); 29 | } else if(URLUtil.isValidUrl(resource)) { 30 | return loadFromUrl(resource); 31 | } else { 32 | return loadFromLocal(resource); 33 | } 34 | } 35 | 36 | private byte[] loadFromLocal(String resource) { 37 | try { 38 | int identifier = reactContext.getResources().getIdentifier(resource, "drawable", reactContext.getPackageName()); 39 | InputStream inputStream = reactContext.getResources().openRawResource(identifier); 40 | return inputStreamToByteArray(inputStream); 41 | } catch (IOException | Resources.NotFoundException e) { 42 | try { 43 | InputStream inputStream = reactContext.getAssets().open(resource); 44 | return inputStreamToByteArray(inputStream); 45 | } catch (IOException e1) { 46 | try { 47 | InputStream inputStream = new FileInputStream(resource); 48 | return inputStreamToByteArray(inputStream); 49 | } catch (IOException e2) { 50 | throw new IllegalArgumentException("Could not load resource"); 51 | } 52 | } 53 | } 54 | } 55 | 56 | private byte[] inputStreamToByteArray(InputStream inputStream) throws IOException { 57 | byte[] b = new byte[inputStream.available()]; 58 | inputStream.read(b); 59 | return b; 60 | } 61 | 62 | private byte[] loadFromUrl(String url) { 63 | try { 64 | Request request = new Request.Builder().url(url).get().build(); 65 | Response response = OkHttpClientProvider.createClient().newCall(request).execute(); 66 | return response.body().bytes(); 67 | } catch (IOException e) { 68 | throw new IllegalStateException("Could not fetch data from url " + url); 69 | } 70 | } 71 | } 72 | 
-------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/converter/ArrayConverter.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow.converter; 2 | 3 | import com.facebook.react.bridge.ReadableArray; 4 | import com.facebook.react.bridge.WritableArray; 5 | import com.facebook.react.bridge.WritableNativeArray; 6 | 7 | import java.util.ArrayList; 8 | import java.util.List; 9 | 10 | public class ArrayConverter { 11 | 12 | public static String[] readableArrayToStringArray(ReadableArray readableArray) { 13 | String[] arr = new String[readableArray.size()]; 14 | for (int i = 0; i < readableArray.size(); i++) { 15 | arr[i] = readableArray.getString(i); 16 | } 17 | 18 | return arr; 19 | } 20 | 21 | public static double[] readableArrayToDoubleArray(ReadableArray readableArray) { 22 | double[] arr = new double[readableArray.size()]; 23 | for (int i = 0; i < readableArray.size(); i++) { 24 | arr[i] = readableArray.getDouble(i); 25 | } 26 | 27 | return arr; 28 | } 29 | 30 | public static long[] readableArrayToLongArray(ReadableArray readableArray) { 31 | long[] arr = new long[readableArray.size()]; 32 | for (int i = 0; i < readableArray.size(); i++) { 33 | arr[i] = (long)readableArray.getDouble(i); 34 | } 35 | 36 | return arr; 37 | } 38 | 39 | public static float[] readableArrayToFloatArray(ReadableArray readableArray) { 40 | float[] arr = new float[readableArray.size()]; 41 | for (int i = 0; i < readableArray.size(); i++) { 42 | arr[i] = (float)readableArray.getDouble(i); 43 | } 44 | 45 | return arr; 46 | } 47 | 48 | public static int[] readableArrayToIntArray(ReadableArray readableArray) { 49 | int[] arr = new int[readableArray.size()]; 50 | for (int i = 0; i < readableArray.size(); i++) { 51 | arr[i] = readableArray.getInt(i); 52 | } 53 | 54 | return arr; 55 | } 56 | 57 | public static byte[] readableArrayToByteBoolArray(ReadableArray 
readableArray) { 58 | byte[] bytesArr = new byte[readableArray.size() / 8 + 1]; 59 | for (int entry = 0; entry < bytesArr.length; entry++) { 60 | for (int bit = 0; bit < 8; bit++) { 61 | if (readableArray.getBoolean(entry * 8 + bit)) { 62 | bytesArr[entry] |= (128 >> bit); 63 | } 64 | } 65 | } 66 | 67 | return bytesArr; 68 | } 69 | 70 | public static byte[] readableArrayToByteStringArray(ReadableArray readableArray) { 71 | List bytes = new ArrayList<>(readableArray.size() * 5); 72 | for (int i = 0; i < readableArray.size(); i++) { 73 | for (byte b :readableArray.getString(i).getBytes()) { 74 | bytes.add(b); 75 | } 76 | } 77 | 78 | byte[] bytesArr = new byte[bytes.size()]; 79 | for (int i = 0; i < bytes.size(); i++) { 80 | bytesArr[i] = bytes.get(i); 81 | } 82 | 83 | return bytesArr; 84 | } 85 | 86 | public static ReadableArray doubleArrayToReadableArray(double[] arr) { 87 | WritableArray writableArray = new WritableNativeArray(); 88 | for (double d : arr) { 89 | writableArray.pushDouble(d); 90 | } 91 | 92 | return writableArray; 93 | } 94 | 95 | public static ReadableArray floatArrayToReadableArray(float[] arr) { 96 | WritableArray writableArray = new WritableNativeArray(); 97 | for (float f : arr) { 98 | writableArray.pushDouble(f); 99 | } 100 | 101 | return writableArray; 102 | } 103 | 104 | public static ReadableArray intArrayToReadableArray(int[] arr) { 105 | WritableArray writableArray = new WritableNativeArray(); 106 | for (int i : arr) { 107 | writableArray.pushInt(i); 108 | } 109 | 110 | return writableArray; 111 | } 112 | 113 | public static ReadableArray byteArrayToBoolReadableArray(byte[] arr) { 114 | WritableArray writableArray = new WritableNativeArray(); 115 | byte[] pos = new byte[]{(byte)0x80, 0x40, 0x20, 0x10, 0x8, 0x4, 0x2, 0x1}; 116 | 117 | for(int i = 0; i < arr.length; i++){ 118 | for(int k = 0; k < 8; k++){ 119 | boolean res = (arr[i] & pos[k]) != 0; 120 | writableArray.pushBoolean(res); 121 | } 122 | } 123 | 124 | return writableArray; 125 | 
} 126 | } 127 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/converter/OutputConverter.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow.converter; 2 | 3 | import com.facebook.react.bridge.WritableMap; 4 | import com.facebook.react.bridge.WritableNativeMap; 5 | import org.tensorflow.Output; 6 | 7 | public class OutputConverter { 8 | 9 | public static WritableMap convert(Output output) { 10 | WritableNativeMap shapeMap = new WritableNativeMap(); 11 | shapeMap.putInt("numDimensions", output.shape().numDimensions()); 12 | 13 | WritableNativeMap map = new WritableNativeMap(); 14 | map.putInt("index", output.index()); 15 | map.putString("dataType", output.dataType().name()); 16 | map.putMap("shape", shapeMap); 17 | 18 | return map; 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/imagerecognition/ImageRecognizer.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow.imagerecognition; 2 | 3 | import android.graphics.Bitmap; 4 | import android.graphics.BitmapFactory; 5 | import android.graphics.Canvas; 6 | import android.graphics.Matrix; 7 | import com.facebook.react.bridge.*; 8 | import com.rntensorflow.RNTensorflowInference; 9 | import com.rntensorflow.ResourceManager; 10 | import org.tensorflow.Tensor; 11 | 12 | import java.io.IOException; 13 | import java.nio.FloatBuffer; 14 | import java.util.*; 15 | 16 | public class ImageRecognizer { 17 | 18 | 19 | private static final int IMAGE_MEAN = 117; 20 | private static final float IMAGE_STD = 1; 21 | 22 | private static final int MAX_RESULTS = 3; 23 | private static final float THRESHOLD = 0.1f; 24 | 25 | private RNTensorflowInference inference; 26 | private ResourceManager resourceManager; 27 | 28 | private int imageMean; 29 | private 
float imageStd; 30 | 31 | private String[] labels; 32 | 33 | public ImageRecognizer(RNTensorflowInference inference, ResourceManager resourceManager, 34 | int imageMean, float imageStd, String[] labels) { 35 | this.inference = inference; 36 | this.resourceManager = resourceManager; 37 | this.imageMean = imageMean; 38 | this.imageStd = imageStd; 39 | this.labels = labels; 40 | } 41 | 42 | public static ImageRecognizer init( 43 | ReactContext reactContext, 44 | String modelFilename, 45 | String labelFilename, 46 | Integer imageMean, 47 | Double imageStd) throws IOException { 48 | Integer imageMeanResolved = imageMean != null ? imageMean : IMAGE_MEAN; 49 | Float imageStdResolved = imageStd != null ? imageStd.floatValue() : IMAGE_STD; 50 | 51 | RNTensorflowInference inference = RNTensorflowInference.init(reactContext, modelFilename); 52 | ResourceManager resourceManager = new ResourceManager(reactContext); 53 | String[] labels = resourceManager.loadResourceAsString(labelFilename).split("\\r?\\n"); 54 | return new ImageRecognizer(inference, resourceManager, imageMeanResolved, imageStdResolved, labels); 55 | } 56 | 57 | public WritableArray recognizeImage(final String image, 58 | final String inputName, 59 | final Integer inputSize, 60 | final String outputName, 61 | final Integer maxResults, 62 | final Double threshold) { 63 | 64 | String inputNameResolved = inputName != null ? inputName : "input"; 65 | String outputNameResolved = outputName != null ? outputName : "output"; 66 | Integer maxResultsResolved = maxResults != null ? maxResults : MAX_RESULTS; 67 | Float thresholdResolved = threshold != null ? threshold.floatValue() : THRESHOLD; 68 | 69 | Bitmap bitmapRaw = loadImage(resourceManager.loadResource(image)); 70 | 71 | int inputSizeResolved = inputSize != null ? 
inputSize : 224; 72 | int[] intValues = new int[inputSizeResolved * inputSizeResolved]; 73 | float[] floatValues = new float[inputSizeResolved * inputSizeResolved * 3]; 74 | 75 | Bitmap bitmap = Bitmap.createBitmap(inputSizeResolved, inputSizeResolved, Bitmap.Config.ARGB_8888); 76 | Matrix matrix = createMatrix(bitmapRaw.getWidth(), bitmapRaw.getHeight(), inputSizeResolved, inputSizeResolved); 77 | final Canvas canvas = new Canvas(bitmap); 78 | canvas.drawBitmap(bitmapRaw, matrix, null); 79 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight()); 80 | for (int i = 0; i < intValues.length; ++i) { 81 | final int val = intValues[i]; 82 | floatValues[i * 3 + 0] = (((val >> 16) & 0xFF) - imageMean) / imageStd; 83 | floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd; 84 | floatValues[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd; 85 | } 86 | Tensor tensor = Tensor.create(new long[]{1, inputSizeResolved, inputSizeResolved, 3}, FloatBuffer.wrap(floatValues)); 87 | inference.feed(inputNameResolved, tensor); 88 | inference.run(new String[] {outputNameResolved}, false); 89 | ReadableArray outputs = inference.fetch(outputNameResolved); 90 | 91 | List results = new ArrayList<>(); 92 | for (int i = 0; i < outputs.size(); ++i) { 93 | if (outputs.getDouble(i) > thresholdResolved) { 94 | WritableMap entry = new WritableNativeMap(); 95 | entry.putString("id", String.valueOf(i)); 96 | entry.putString("name", labels.length > i ? 
labels[i] : "unknown"); 97 | entry.putDouble("confidence", outputs.getDouble(i)); 98 | results.add(entry); 99 | } 100 | } 101 | 102 | Collections.sort(results, new Comparator() { 103 | @Override 104 | public int compare(ReadableMap first, ReadableMap second) { 105 | return Double.compare(second.getDouble("confidence"), first.getDouble("confidence")); 106 | } 107 | }); 108 | int finalSize = Math.min(results.size(), maxResultsResolved); 109 | WritableArray array = new WritableNativeArray(); 110 | for (int i = 0; i < finalSize; i++) { 111 | array.pushMap(results.get(i)); 112 | } 113 | 114 | inference.getTfContext().reset(); 115 | return array; 116 | } 117 | 118 | private Bitmap loadImage(byte[] image) { 119 | BitmapFactory.Options options = new BitmapFactory.Options(); 120 | options.inPreferredConfig = Bitmap.Config.ARGB_8888; 121 | return BitmapFactory.decodeByteArray(image, 0, image.length); 122 | } 123 | 124 | private Matrix createMatrix(int srcWidth, int srcHeight, int dstWidth, int dstHeight) { 125 | Matrix matrix = new Matrix(); 126 | 127 | if (srcWidth != dstWidth || srcHeight != dstHeight) { 128 | float scaleFactorX = dstWidth / (float) srcWidth; 129 | float scaleFactorY = dstHeight / (float) srcHeight; 130 | float scaleFactor = Math.max(scaleFactorX, scaleFactorY); 131 | matrix.postScale(scaleFactor, scaleFactor); 132 | } 133 | 134 | matrix.invert(new Matrix()); 135 | return matrix; 136 | } 137 | 138 | } 139 | -------------------------------------------------------------------------------- /android/src/main/java/com/rntensorflow/imagerecognition/RNImageRecognizerModule.java: -------------------------------------------------------------------------------- 1 | package com.rntensorflow.imagerecognition; 2 | 3 | import com.facebook.react.bridge.*; 4 | 5 | import java.util.HashMap; 6 | import java.util.Map; 7 | 8 | public class RNImageRecognizerModule extends ReactContextBaseJavaModule { 9 | 10 | private Map imageRecognizers = new HashMap<>(); 11 | private 
ReactApplicationContext reactContext; 12 | 13 | public RNImageRecognizerModule(ReactApplicationContext reactContext) { 14 | super(reactContext); 15 | this.reactContext = reactContext; 16 | } 17 | 18 | @Override 19 | public String getName() { 20 | return "RNImageRecognition"; 21 | } 22 | 23 | @Override 24 | public void onCatalystInstanceDestroy() { 25 | for (String id : imageRecognizers.keySet()) { 26 | this.imageRecognizers.remove(id); 27 | } 28 | } 29 | 30 | @ReactMethod 31 | public void initImageRecognizer(String id, ReadableMap data, Promise promise) { 32 | try { 33 | String model = data.getString("model"); 34 | String labels = data.getString("labels"); 35 | Integer imageMean = data.hasKey("imageMean") ? data.getInt("imageMean") : null; 36 | Double imageStd = data.hasKey("imageStd") ? data.getDouble("imageStd") : null; 37 | 38 | ImageRecognizer imageRecognizer = ImageRecognizer.init(reactContext, model, labels, imageMean, imageStd); 39 | imageRecognizers.put(id, imageRecognizer); 40 | promise.resolve(true); 41 | } catch (Exception e) { 42 | promise.reject(e); 43 | } 44 | } 45 | 46 | @ReactMethod 47 | public void recognize(String id, ReadableMap data, Promise promise) { 48 | try { 49 | String image = data.getString("image"); 50 | String inputName = data.hasKey("inputName") ? data.getString("inputName") : null; 51 | Integer inputSize = data.hasKey("inputSize") ? data.getInt("inputSize") : null; 52 | String outputName = data.hasKey("outputName") ? data.getString("outputName") : null; 53 | Integer maxResults = data.hasKey("maxResults") ? data.getInt("maxResults") : null; 54 | Double threshold = data.hasKey("threshold") ? 
data.getDouble("threshold") : null; 55 | 56 | ImageRecognizer imageRecognizer = imageRecognizers.get(id); 57 | WritableArray result = imageRecognizer.recognizeImage(image, inputName, inputSize, outputName, maxResults, threshold); 58 | promise.resolve(result); 59 | } catch (Exception e) { 60 | promise.reject(e); 61 | } 62 | } 63 | 64 | @ReactMethod 65 | public void close(String id, Promise promise) { 66 | try { 67 | this.imageRecognizers.remove(id); 68 | promise.resolve(true); 69 | } catch (Exception e) { 70 | promise.reject(e); 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["react-native"] 3 | } 4 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/.buckconfig: -------------------------------------------------------------------------------- 1 | 2 | [android] 3 | target = Google Inc.:Google APIs:23 4 | 5 | [maven_repositories] 6 | central = https://repo1.maven.org/maven2 7 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/.flowconfig: -------------------------------------------------------------------------------- 1 | [ignore] 2 | ; We fork some components by platform 3 | .*/*[.]android.js 4 | 5 | ; Ignore "BUCK" generated dirs 6 | /\.buckd/ 7 | 8 | ; Ignore unexpected extra "@providesModule" 9 | .*/node_modules/.*/node_modules/fbjs/.* 10 | 11 | ; Ignore duplicate module providers 12 | ; For RN Apps installed via npm, "Libraries" folder is inside 13 | ; "node_modules/react-native" but in the source repo it is in the root 14 | .*/Libraries/react-native/React.js 15 | 16 | ; Ignore polyfills 17 | .*/Libraries/polyfills/.* 18 | 19 | [include] 20 | 21 | [libs] 22 | node_modules/react-native/Libraries/react-native/react-native-interface.js 
23 | node_modules/react-native/flow/ 24 | 25 | [options] 26 | emoji=true 27 | 28 | module.system=haste 29 | 30 | munge_underscores=true 31 | 32 | module.name_mapper='^[./a-zA-Z0-9$_-]+\.\(bmp\|gif\|jpg\|jpeg\|png\|psd\|svg\|webp\|m4v\|mov\|mp4\|mpeg\|mpg\|webm\|aac\|aiff\|caf\|m4a\|mp3\|wav\|html\|pdf\)$' -> 'RelativeImageStub' 33 | 34 | suppress_type=$FlowIssue 35 | suppress_type=$FlowFixMe 36 | suppress_type=$FlowFixMeProps 37 | suppress_type=$FlowFixMeState 38 | suppress_type=$FixMe 39 | 40 | suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe\\($\\|[^(]\\|(\\(>=0\\.\\(5[0-7]\\|[1-4][0-9]\\|[0-9]\\).[0-9]\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\) 41 | suppress_comment=\\(.\\|\n\\)*\\$FlowIssue\\((\\(>=0\\.\\(5[0-7]\\|[1-4][0-9]\\|[0-9]\\).[0-9]\\)? *\\(site=[a-z,_]*react_native[a-z,_]*\\)?)\\)?:? #[0-9]+ 42 | suppress_comment=\\(.\\|\n\\)*\\$FlowFixedInNextDeploy 43 | suppress_comment=\\(.\\|\n\\)*\\$FlowExpectedError 44 | 45 | unsafe.enable_getters_and_setters=true 46 | 47 | [version] 48 | ^0.57.0 49 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/.gitattributes: -------------------------------------------------------------------------------- 1 | *.pbxproj -text 2 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/.gitignore: -------------------------------------------------------------------------------- 1 | # OSX 2 | # 3 | .DS_Store 4 | 5 | # Xcode 6 | # 7 | build/ 8 | *.pbxuser 9 | !default.pbxuser 10 | *.mode1v3 11 | !default.mode1v3 12 | *.mode2v3 13 | !default.mode2v3 14 | *.perspectivev3 15 | !default.perspectivev3 16 | xcuserdata 17 | *.xccheckout 18 | *.moved-aside 19 | DerivedData 20 | *.hmap 21 | *.ipa 22 | *.xcuserstate 23 | project.xcworkspace 24 | 25 | # Android/IntelliJ 26 | # 27 | build/ 28 | .idea 29 | .gradle 30 | local.properties 31 | *.iml 32 | 33 | # node.js 34 | # 35 | node_modules/ 36 | npm-debug.log 37 | 
yarn-error.log 38 | 39 | # BUCK 40 | buck-out/ 41 | \.buckd/ 42 | *.keystore 43 | 44 | # fastlane 45 | # 46 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the 47 | # screenshots whenever they are needed. 48 | # For more information about the recommended setup visit: 49 | # https://docs.fastlane.tools/best-practices/source-control/ 50 | 51 | */fastlane/report.xml 52 | */fastlane/Preview.html 53 | */fastlane/screenshots 54 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/.watchmanconfig: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/App.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Sample React Native App 3 | * https://github.com/facebook/react-native 4 | * @flow 5 | */ 6 | 7 | import React, { Component } from 'react'; 8 | import { 9 | Platform, 10 | StyleSheet, 11 | Text, 12 | View, 13 | Image 14 | } from 'react-native'; 15 | import { TfImageRecognition } from 'react-native-tensorflow'; 16 | 17 | export default class App extends Component<{}> { 18 | 19 | constructor() { 20 | super() 21 | this.image = require('./assets/dumbbell.jpg'); 22 | this.state = {result: ""} 23 | } 24 | 25 | componentDidMount() { 26 | this.recognizeImage() 27 | } 28 | 29 | async recognizeImage() { 30 | 31 | try { 32 | const tfImageRecognition = new TfImageRecognition({ 33 | model:require('./assets/tensorflow_inception_graph.pb'), 34 | labels: require('./assets/tensorflow_labels.txt') 35 | }) 36 | 37 | const results = await tfImageRecognition.recognize({ 38 | image: this.image 39 | }) 40 | 41 | const resultText = `Name: ${results[0].name} - Confidence: ${results[0].confidence}` 42 | this.setState({result: resultText}) 43 | 44 | await tfImageRecognition.close() 45 | 
} catch(err) { 46 | alert(err) 47 | } 48 | } 49 | 50 | render() { 51 | return ( 52 | 53 | 54 | Welcome to React Native! 55 | 56 | 57 | 58 | {this.state.result} 59 | 60 | 61 | ); 62 | } 63 | } 64 | 65 | const styles = StyleSheet.create({ 66 | container: { 67 | flex: 1, 68 | justifyContent: 'center', 69 | alignItems: 'center', 70 | backgroundColor: '#F5FCFF', 71 | }, 72 | results: { 73 | textAlign: 'center', 74 | color: '#333333', 75 | marginBottom: 5, 76 | }, 77 | image: { 78 | width: 150, 79 | height: 100 80 | }, 81 | }); 82 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/__tests__/App.js: -------------------------------------------------------------------------------- 1 | import 'react-native'; 2 | import React from 'react'; 3 | import App from '../App'; 4 | 5 | // Note: test renderer must be required after react-native. 6 | import renderer from 'react-test-renderer'; 7 | 8 | it('renders correctly', () => { 9 | const tree = renderer.create( 10 | 11 | ); 12 | }); 13 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/BUCK: -------------------------------------------------------------------------------- 1 | # To learn about Buck see [Docs](https://buckbuild.com/). 
2 | # To run your application with Buck: 3 | # - install Buck 4 | # - `npm start` - to start the packager 5 | # - `cd android` 6 | # - `keytool -genkey -v -keystore keystores/debug.keystore -storepass android -alias androiddebugkey -keypass android -dname "CN=Android Debug,O=Android,C=US"` 7 | # - `./gradlew :app:copyDownloadableDepsToLibs` - make all Gradle compile dependencies available to Buck 8 | # - `buck install -r android/app` - compile, install and run application 9 | # 10 | 11 | lib_deps = [] 12 | 13 | for jarfile in glob(['libs/*.jar']): 14 | name = 'jars__' + jarfile[jarfile.rindex('/') + 1: jarfile.rindex('.jar')] 15 | lib_deps.append(':' + name) 16 | prebuilt_jar( 17 | name = name, 18 | binary_jar = jarfile, 19 | ) 20 | 21 | for aarfile in glob(['libs/*.aar']): 22 | name = 'aars__' + aarfile[aarfile.rindex('/') + 1: aarfile.rindex('.aar')] 23 | lib_deps.append(':' + name) 24 | android_prebuilt_aar( 25 | name = name, 26 | aar = aarfile, 27 | ) 28 | 29 | android_library( 30 | name = "all-libs", 31 | exported_deps = lib_deps, 32 | ) 33 | 34 | android_library( 35 | name = "app-code", 36 | srcs = glob([ 37 | "src/main/java/**/*.java", 38 | ]), 39 | deps = [ 40 | ":all-libs", 41 | ":build_config", 42 | ":res", 43 | ], 44 | ) 45 | 46 | android_build_config( 47 | name = "build_config", 48 | package = "com.imagerecognitionexample", 49 | ) 50 | 51 | android_resource( 52 | name = "res", 53 | package = "com.imagerecognitionexample", 54 | res = "src/main/res", 55 | ) 56 | 57 | android_binary( 58 | name = "app", 59 | keystore = "//android/keystores:debug", 60 | manifest = "src/main/AndroidManifest.xml", 61 | package_type = "debug", 62 | deps = [ 63 | ":app-code", 64 | ], 65 | ) 66 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: "com.android.application" 2 | 3 | import 
com.android.build.OutputFile 4 | 5 | /** 6 | * The react.gradle file registers a task for each build variant (e.g. bundleDebugJsAndAssets 7 | * and bundleReleaseJsAndAssets). 8 | * These basically call `react-native bundle` with the correct arguments during the Android build 9 | * cycle. By default, bundleDebugJsAndAssets is skipped, as in debug/dev mode we prefer to load the 10 | * bundle directly from the development server. Below you can see all the possible configurations 11 | * and their defaults. If you decide to add a configuration block, make sure to add it before the 12 | * `apply from: "../../node_modules/react-native/react.gradle"` line. 13 | * 14 | * project.ext.react = [ 15 | * // the name of the generated asset file containing your JS bundle 16 | * bundleAssetName: "index.android.bundle", 17 | * 18 | * // the entry file for bundle generation 19 | * entryFile: "index.android.js", 20 | * 21 | * // whether to bundle JS and assets in debug mode 22 | * bundleInDebug: false, 23 | * 24 | * // whether to bundle JS and assets in release mode 25 | * bundleInRelease: true, 26 | * 27 | * // whether to bundle JS and assets in another build variant (if configured). 28 | * // See http://tools.android.com/tech-docs/new-build-system/user-guide#TOC-Build-Variants 29 | * // The configuration property can be in the following formats 30 | * // 'bundleIn${productFlavor}${buildType}' 31 | * // 'bundleIn${buildType}' 32 | * // bundleInFreeDebug: true, 33 | * // bundleInPaidRelease: true, 34 | * // bundleInBeta: true, 35 | * 36 | * // whether to disable dev mode in custom build variants (by default only disabled in release) 37 | * // for example: to disable dev mode in the staging build type (if configured) 38 | * devDisabledInStaging: true, 39 | * // The configuration property can be in the following formats 40 | * // 'devDisabledIn${productFlavor}${buildType}' 41 | * // 'devDisabledIn${buildType}' 42 | * 43 | * // the root of your project, i.e. 
where "package.json" lives 44 | * root: "../../", 45 | * 46 | * // where to put the JS bundle asset in debug mode 47 | * jsBundleDirDebug: "$buildDir/intermediates/assets/debug", 48 | * 49 | * // where to put the JS bundle asset in release mode 50 | * jsBundleDirRelease: "$buildDir/intermediates/assets/release", 51 | * 52 | * // where to put drawable resources / React Native assets, e.g. the ones you use via 53 | * // require('./image.png')), in debug mode 54 | * resourcesDirDebug: "$buildDir/intermediates/res/merged/debug", 55 | * 56 | * // where to put drawable resources / React Native assets, e.g. the ones you use via 57 | * // require('./image.png')), in release mode 58 | * resourcesDirRelease: "$buildDir/intermediates/res/merged/release", 59 | * 60 | * // by default the gradle tasks are skipped if none of the JS files or assets change; this means 61 | * // that we don't look at files in android/ or ios/ to determine whether the tasks are up to 62 | * // date; if you have any other folders that you want to ignore for performance reasons (gradle 63 | * // indexes the entire tree), add them here. Alternatively, if you have JS files in android/ 64 | * // for example, you might want to remove it from here. 65 | * inputExcludes: ["android/**", "ios/**"], 66 | * 67 | * // override which node gets called and with what additional arguments 68 | * nodeExecutableAndArgs: ["node"], 69 | * 70 | * // supply additional arguments to the packager 71 | * extraPackagerArgs: [] 72 | * ] 73 | */ 74 | 75 | project.ext.react = [ 76 | entryFile: "index.js" 77 | ] 78 | 79 | apply from: "../../node_modules/react-native/react.gradle" 80 | 81 | /** 82 | * Set this to true to create two separate APKs instead of one: 83 | * - An APK that only works on ARM devices 84 | * - An APK that only works on x86 devices 85 | * The advantage is the size of the APK is reduced by about 4MB. 
86 | * Upload all the APKs to the Play Store and people will download 87 | * the correct one based on the CPU architecture of their device. 88 | */ 89 | def enableSeparateBuildPerCPUArchitecture = false 90 | 91 | /** 92 | * Run Proguard to shrink the Java bytecode in release builds. 93 | */ 94 | def enableProguardInReleaseBuilds = false 95 | 96 | android { 97 | compileSdkVersion 23 98 | buildToolsVersion "25.0.0" 99 | 100 | defaultConfig { 101 | applicationId "com.imagerecognitionexample" 102 | minSdkVersion 16 103 | targetSdkVersion 22 104 | versionCode 1 105 | versionName "1.0" 106 | ndk { 107 | abiFilters "armeabi-v7a", "x86" 108 | } 109 | } 110 | splits { 111 | abi { 112 | reset() 113 | enable enableSeparateBuildPerCPUArchitecture 114 | universalApk false // If true, also generate a universal APK 115 | include "armeabi-v7a", "x86" 116 | } 117 | } 118 | buildTypes { 119 | release { 120 | minifyEnabled enableProguardInReleaseBuilds 121 | proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro" 122 | } 123 | } 124 | // applicationVariants are e.g. 
debug, release 125 | applicationVariants.all { variant -> 126 | variant.outputs.each { output -> 127 | // For each separate APK per architecture, set a unique version code as described here: 128 | // http://tools.android.com/tech-docs/new-build-system/user-guide/apk-splits 129 | def versionCodes = ["armeabi-v7a":1, "x86":2] 130 | def abi = output.getFilter(OutputFile.ABI) 131 | if (abi != null) { // null for the universal-debug, universal-release variants 132 | output.versionCodeOverride = 133 | versionCodes.get(abi) * 1048576 + defaultConfig.versionCode 134 | } 135 | } 136 | } 137 | } 138 | 139 | dependencies { 140 | compile project(':react-native-tensorflow') 141 | compile fileTree(dir: "libs", include: ["*.jar"]) 142 | compile "com.android.support:appcompat-v7:23.0.1" 143 | compile "com.facebook.react:react-native:+" // From node_modules 144 | } 145 | 146 | // Run this once to be able to run the application with BUCK 147 | // puts all compile dependencies into folder libs for BUCK to use 148 | task copyDownloadableDepsToLibs(type: Copy) { 149 | from configurations.compile 150 | into 'libs' 151 | } 152 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/proguard-rules.pro: -------------------------------------------------------------------------------- 1 | # Add project specific ProGuard rules here. 2 | # By default, the flags in this file are appended to flags specified 3 | # in /usr/local/Cellar/android-sdk/24.3.3/tools/proguard/proguard-android.txt 4 | # You can edit the include path and order by changing the proguardFiles 5 | # directive in build.gradle. 
6 | # 7 | # For more details, see 8 | # http://developer.android.com/guide/developing/tools/proguard.html 9 | 10 | # Add any project specific keep options here: 11 | 12 | # If your project uses WebView with JS, uncomment the following 13 | # and specify the fully qualified class name to the JavaScript interface 14 | # class: 15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview { 16 | # public *; 17 | #} 18 | 19 | # Disabling obfuscation is useful if you collect stack traces from production crashes 20 | # (unless you are using a system that supports de-obfuscate the stack traces). 21 | -dontobfuscate 22 | 23 | # React Native 24 | 25 | # Keep our interfaces so they can be used by other ProGuard rules. 26 | # See http://sourceforge.net/p/proguard/bugs/466/ 27 | -keep,allowobfuscation @interface com.facebook.proguard.annotations.DoNotStrip 28 | -keep,allowobfuscation @interface com.facebook.proguard.annotations.KeepGettersAndSetters 29 | -keep,allowobfuscation @interface com.facebook.common.internal.DoNotStrip 30 | 31 | # Do not strip any method/class that is annotated with @DoNotStrip 32 | -keep @com.facebook.proguard.annotations.DoNotStrip class * 33 | -keep @com.facebook.common.internal.DoNotStrip class * 34 | -keepclassmembers class * { 35 | @com.facebook.proguard.annotations.DoNotStrip *; 36 | @com.facebook.common.internal.DoNotStrip *; 37 | } 38 | 39 | -keepclassmembers @com.facebook.proguard.annotations.KeepGettersAndSetters class * { 40 | void set*(***); 41 | *** get*(); 42 | } 43 | 44 | -keep class * extends com.facebook.react.bridge.JavaScriptModule { *; } 45 | -keep class * extends com.facebook.react.bridge.NativeModule { *; } 46 | -keepclassmembers,includedescriptorclasses class * { native ; } 47 | -keepclassmembers class * { @com.facebook.react.uimanager.UIProp ; } 48 | -keepclassmembers class * { @com.facebook.react.uimanager.annotations.ReactProp ; } 49 | -keepclassmembers class * { 
@com.facebook.react.uimanager.annotations.ReactPropGroup ; } 50 | 51 | -dontwarn com.facebook.react.** 52 | 53 | # TextLayoutBuilder uses a non-public Android constructor within StaticLayout. 54 | # See libs/proxy/src/main/java/com/facebook/fbui/textlayoutbuilder/proxy for details. 55 | -dontwarn android.text.StaticLayout 56 | 57 | # okhttp 58 | 59 | -keepattributes Signature 60 | -keepattributes *Annotation* 61 | -keep class okhttp3.** { *; } 62 | -keep interface okhttp3.** { *; } 63 | -dontwarn okhttp3.** 64 | 65 | # okio 66 | 67 | -keep class sun.misc.Unsafe { *; } 68 | -dontwarn java.nio.file.* 69 | -dontwarn org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement 70 | -dontwarn okio.** 71 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/AndroidManifest.xml: -------------------------------------------------------------------------------- 1 | 5 | 6 | 7 | 8 | 9 | 12 | 13 | 19 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/java/com/imagerecognitionexample/MainActivity.java: -------------------------------------------------------------------------------- 1 | package com.imagerecognitionexample; 2 | 3 | import com.facebook.react.ReactActivity; 4 | 5 | public class MainActivity extends ReactActivity { 6 | 7 | /** 8 | * Returns the name of the main component registered from JavaScript. 9 | * This is used to schedule rendering of the component. 
10 | */ 11 | @Override 12 | protected String getMainComponentName() { 13 | return "ImageRecognitionExample"; 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/java/com/imagerecognitionexample/MainApplication.java: -------------------------------------------------------------------------------- 1 | package com.imagerecognitionexample; 2 | 3 | import android.app.Application; 4 | 5 | import com.facebook.react.ReactApplication; 6 | import com.rntensorflow.RNTensorFlowPackage; 7 | import com.facebook.react.ReactNativeHost; 8 | import com.facebook.react.ReactPackage; 9 | import com.facebook.react.shell.MainReactPackage; 10 | import com.facebook.soloader.SoLoader; 11 | 12 | import java.util.Arrays; 13 | import java.util.List; 14 | 15 | public class MainApplication extends Application implements ReactApplication { 16 | 17 | private final ReactNativeHost mReactNativeHost = new ReactNativeHost(this) { 18 | @Override 19 | public boolean getUseDeveloperSupport() { 20 | return BuildConfig.DEBUG; 21 | } 22 | 23 | @Override 24 | protected List getPackages() { 25 | return Arrays.asList( 26 | new MainReactPackage(), 27 | new RNTensorFlowPackage() 28 | ); 29 | } 30 | 31 | @Override 32 | protected String getJSMainModuleName() { 33 | return "index"; 34 | } 35 | }; 36 | 37 | @Override 38 | public ReactNativeHost getReactNativeHost() { 39 | return mReactNativeHost; 40 | } 41 | 42 | @Override 43 | public void onCreate() { 44 | super.onCreate(); 45 | SoLoader.init(this, /* native exopackage */ false); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/res/mipmap-hdpi/ic_launcher.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/android/app/src/main/res/mipmap-hdpi/ic_launcher.png -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/res/mipmap-mdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/android/app/src/main/res/mipmap-mdpi/ic_launcher.png -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/res/values/strings.xml: -------------------------------------------------------------------------------- 1 | 2 | ImageRecognitionExample 3 | 4 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/app/src/main/res/values/styles.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/build.gradle: -------------------------------------------------------------------------------- 1 | // Top-level build file where you can add configuration options common to all sub-projects/modules. 2 | 3 | buildscript { 4 | repositories { 5 | jcenter() 6 | } 7 | dependencies { 8 | classpath 'com.android.tools.build:gradle:2.3.0' 9 | 10 | // NOTE: Do not place your application dependencies here; they belong 11 | // in the individual module build.gradle files 12 | } 13 | } 14 | 15 | allprojects { 16 | repositories { 17 | mavenLocal() 18 | jcenter() 19 | maven { 20 | // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm 21 | url "$rootDir/../node_modules/react-native/android" 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/gradle.properties: -------------------------------------------------------------------------------- 1 | # Project-wide Gradle settings. 2 | 3 | # IDE (e.g. Android Studio) users: 4 | # Gradle settings configured through the IDE *will override* 5 | # any settings specified in this file. 6 | 7 | # For more details on how to configure your build environment visit 8 | # http://www.gradle.org/docs/current/userguide/build_environment.html 9 | 10 | # Specifies the JVM arguments used for the daemon process. 11 | # The setting is particularly useful for tweaking memory settings. 12 | # Default value: -Xmx10248m -XX:MaxPermSize=256m 13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 14 | 15 | # When configured, Gradle will run in incubating parallel mode. 16 | # This option should only be used with decoupled projects. 
More details, visit 17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects 18 | # org.gradle.parallel=true 19 | 20 | android.useDeprecatedNdk=true 21 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/android/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | zipStoreBase=GRADLE_USER_HOME 4 | zipStorePath=wrapper/dists 5 | distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip 6 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 
16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # For Cygwin, ensure paths are in UNIX format before anything is touched. 46 | if $cygwin ; then 47 | [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` 48 | fi 49 | 50 | # Attempt to set APP_HOME 51 | # Resolve links: $0 may be a link 52 | PRG="$0" 53 | # Need this for relative symlinks. 54 | while [ -h "$PRG" ] ; do 55 | ls=`ls -ld "$PRG"` 56 | link=`expr "$ls" : '.*-> \(.*\)$'` 57 | if expr "$link" : '/.*' > /dev/null; then 58 | PRG="$link" 59 | else 60 | PRG=`dirname "$PRG"`"/$link" 61 | fi 62 | done 63 | SAVED="`pwd`" 64 | cd "`dirname \"$PRG\"`/" >&- 65 | APP_HOME="`pwd -P`" 66 | cd "$SAVED" >&- 67 | 68 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 69 | 70 | # Determine the Java command to use to start the JVM. 71 | if [ -n "$JAVA_HOME" ] ; then 72 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 73 | # IBM's JDK on AIX uses strange locations for the executables 74 | JAVACMD="$JAVA_HOME/jre/sh/java" 75 | else 76 | JAVACMD="$JAVA_HOME/bin/java" 77 | fi 78 | if [ ! -x "$JAVACMD" ] ; then 79 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 80 | 81 | Please set the JAVA_HOME variable in your environment to match the 82 | location of your Java installation." 83 | fi 84 | else 85 | JAVACMD="java" 86 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 87 | 88 | Please set the JAVA_HOME variable in your environment to match the 89 | location of your Java installation." 90 | fi 91 | 92 | # Increase the maximum file descriptors if we can. 
93 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 94 | MAX_FD_LIMIT=`ulimit -H -n` 95 | if [ $? -eq 0 ] ; then 96 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 97 | MAX_FD="$MAX_FD_LIMIT" 98 | fi 99 | ulimit -n $MAX_FD 100 | if [ $? -ne 0 ] ; then 101 | warn "Could not set maximum file descriptor limit: $MAX_FD" 102 | fi 103 | else 104 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 105 | fi 106 | fi 107 | 108 | # For Darwin, add options to specify how the application appears in the dock 109 | if $darwin; then 110 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 111 | fi 112 | 113 | # For Cygwin, switch paths to Windows format before running java 114 | if $cygwin ; then 115 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 116 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" 
"$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 158 | function splitJvmOpts() { 159 | JVM_OPTS=("$@") 160 | } 161 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 162 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 163 | 164 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 165 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/keystores/BUCK: -------------------------------------------------------------------------------- 1 | keystore( 2 | name = "debug", 3 | properties = "debug.keystore.properties", 4 | store = "debug.keystore", 5 | visibility = [ 6 | "PUBLIC", 7 | ], 8 | ) 9 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/keystores/debug.keystore.properties: -------------------------------------------------------------------------------- 1 | key.store=debug.keystore 2 | key.alias=androiddebugkey 3 | key.store.password=android 4 | key.alias.password=android 5 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/android/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'ImageRecognitionExample' 2 | include ':react-native-tensorflow' 3 | 
project(':react-native-tensorflow').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-tensorflow/android') 4 | 5 | include ':app' 6 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/app.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ImageRecognitionExample", 3 | "displayName": "ImageRecognitionExample" 4 | } -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/assets/dumbbell.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/assets/dumbbell.jpg -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/assets/tensorflow_inception_graph.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reneweb/react-native-tensorflow/fe47622d86ec11a3412bed54bb8ccbede324e8b1/examples/ImageRecognitionExample/assets/tensorflow_inception_graph.pb -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/assets/tensorflow_labels.txt: -------------------------------------------------------------------------------- 1 | dummy 2 | kit fox 3 | English setter 4 | Siberian husky 5 | Australian terrier 6 | English springer 7 | grey whale 8 | lesser panda 9 | Egyptian cat 10 | ibex 11 | Persian cat 12 | cougar 13 | gazelle 14 | porcupine 15 | sea lion 16 | malamute 17 | badger 18 | Great Dane 19 | Walker hound 20 | Welsh springer spaniel 21 | whippet 22 | Scottish deerhound 23 | killer whale 24 | mink 25 | African elephant 26 | Weimaraner 27 | soft-coated wheaten terrier 28 | Dandie Dinmont 29 | red wolf 30 | Old English sheepdog 31 | 
jaguar 32 | otterhound 33 | bloodhound 34 | Airedale 35 | hyena 36 | meerkat 37 | giant schnauzer 38 | titi 39 | three-toed sloth 40 | sorrel 41 | black-footed ferret 42 | dalmatian 43 | black-and-tan coonhound 44 | papillon 45 | skunk 46 | Staffordshire bullterrier 47 | Mexican hairless 48 | Bouvier des Flandres 49 | weasel 50 | miniature poodle 51 | Cardigan 52 | malinois 53 | bighorn 54 | fox squirrel 55 | colobus 56 | tiger cat 57 | Lhasa 58 | impala 59 | coyote 60 | Yorkshire terrier 61 | Newfoundland 62 | brown bear 63 | red fox 64 | Norwegian elkhound 65 | Rottweiler 66 | hartebeest 67 | Saluki 68 | grey fox 69 | schipperke 70 | Pekinese 71 | Brabancon griffon 72 | West Highland white terrier 73 | Sealyham terrier 74 | guenon 75 | mongoose 76 | indri 77 | tiger 78 | Irish wolfhound 79 | wild boar 80 | EntleBucher 81 | zebra 82 | ram 83 | French bulldog 84 | orangutan 85 | basenji 86 | leopard 87 | Bernese mountain dog 88 | Maltese dog 89 | Norfolk terrier 90 | toy terrier 91 | vizsla 92 | cairn 93 | squirrel monkey 94 | groenendael 95 | clumber 96 | Siamese cat 97 | chimpanzee 98 | komondor 99 | Afghan hound 100 | Japanese spaniel 101 | proboscis monkey 102 | guinea pig 103 | white wolf 104 | ice bear 105 | gorilla 106 | borzoi 107 | toy poodle 108 | Kerry blue terrier 109 | ox 110 | Scotch terrier 111 | Tibetan mastiff 112 | spider monkey 113 | Doberman 114 | Boston bull 115 | Greater Swiss Mountain dog 116 | Appenzeller 117 | Shih-Tzu 118 | Irish water spaniel 119 | Pomeranian 120 | Bedlington terrier 121 | warthog 122 | Arabian camel 123 | siamang 124 | miniature schnauzer 125 | collie 126 | golden retriever 127 | Irish terrier 128 | affenpinscher 129 | Border collie 130 | hare 131 | boxer 132 | silky terrier 133 | beagle 134 | Leonberg 135 | German short-haired pointer 136 | patas 137 | dhole 138 | baboon 139 | macaque 140 | Chesapeake Bay retriever 141 | bull mastiff 142 | kuvasz 143 | capuchin 144 | pug 145 | curly-coated retriever 146 | Norwich 
terrier 147 | flat-coated retriever 148 | hog 149 | keeshond 150 | Eskimo dog 151 | Brittany spaniel 152 | standard poodle 153 | Lakeland terrier 154 | snow leopard 155 | Gordon setter 156 | dingo 157 | standard schnauzer 158 | hamster 159 | Tibetan terrier 160 | Arctic fox 161 | wire-haired fox terrier 162 | basset 163 | water buffalo 164 | American black bear 165 | Angora 166 | bison 167 | howler monkey 168 | hippopotamus 169 | chow 170 | giant panda 171 | American Staffordshire terrier 172 | Shetland sheepdog 173 | Great Pyrenees 174 | Chihuahua 175 | tabby 176 | marmoset 177 | Labrador retriever 178 | Saint Bernard 179 | armadillo 180 | Samoyed 181 | bluetick 182 | redbone 183 | polecat 184 | marmot 185 | kelpie 186 | gibbon 187 | llama 188 | miniature pinscher 189 | wood rabbit 190 | Italian greyhound 191 | lion 192 | cocker spaniel 193 | Irish setter 194 | dugong 195 | Indian elephant 196 | beaver 197 | Sussex spaniel 198 | Pembroke 199 | Blenheim spaniel 200 | Madagascar cat 201 | Rhodesian ridgeback 202 | lynx 203 | African hunting dog 204 | langur 205 | Ibizan hound 206 | timber wolf 207 | cheetah 208 | English foxhound 209 | briard 210 | sloth bear 211 | Border terrier 212 | German shepherd 213 | otter 214 | koala 215 | tusker 216 | echidna 217 | wallaby 218 | platypus 219 | wombat 220 | revolver 221 | umbrella 222 | schooner 223 | soccer ball 224 | accordion 225 | ant 226 | starfish 227 | chambered nautilus 228 | grand piano 229 | laptop 230 | strawberry 231 | airliner 232 | warplane 233 | airship 234 | balloon 235 | space shuttle 236 | fireboat 237 | gondola 238 | speedboat 239 | lifeboat 240 | canoe 241 | yawl 242 | catamaran 243 | trimaran 244 | container ship 245 | liner 246 | pirate 247 | aircraft carrier 248 | submarine 249 | wreck 250 | half track 251 | tank 252 | missile 253 | bobsled 254 | dogsled 255 | bicycle-built-for-two 256 | mountain bike 257 | freight car 258 | passenger car 259 | barrow 260 | shopping cart 261 | motor scooter 262 | 
forklift 263 | electric locomotive 264 | steam locomotive 265 | amphibian 266 | ambulance 267 | beach wagon 268 | cab 269 | convertible 270 | jeep 271 | limousine 272 | minivan 273 | Model T 274 | racer 275 | sports car 276 | go-kart 277 | golfcart 278 | moped 279 | snowplow 280 | fire engine 281 | garbage truck 282 | pickup 283 | tow truck 284 | trailer truck 285 | moving van 286 | police van 287 | recreational vehicle 288 | streetcar 289 | snowmobile 290 | tractor 291 | mobile home 292 | tricycle 293 | unicycle 294 | horse cart 295 | jinrikisha 296 | oxcart 297 | bassinet 298 | cradle 299 | crib 300 | four-poster 301 | bookcase 302 | china cabinet 303 | medicine chest 304 | chiffonier 305 | table lamp 306 | file 307 | park bench 308 | barber chair 309 | throne 310 | folding chair 311 | rocking chair 312 | studio couch 313 | toilet seat 314 | desk 315 | pool table 316 | dining table 317 | entertainment center 318 | wardrobe 319 | Granny Smith 320 | orange 321 | lemon 322 | fig 323 | pineapple 324 | banana 325 | jackfruit 326 | custard apple 327 | pomegranate 328 | acorn 329 | hip 330 | ear 331 | rapeseed 332 | corn 333 | buckeye 334 | organ 335 | upright 336 | chime 337 | drum 338 | gong 339 | maraca 340 | marimba 341 | steel drum 342 | banjo 343 | cello 344 | violin 345 | harp 346 | acoustic guitar 347 | electric guitar 348 | cornet 349 | French horn 350 | trombone 351 | harmonica 352 | ocarina 353 | panpipe 354 | bassoon 355 | oboe 356 | sax 357 | flute 358 | daisy 359 | yellow lady's slipper 360 | cliff 361 | valley 362 | alp 363 | volcano 364 | promontory 365 | sandbar 366 | coral reef 367 | lakeside 368 | seashore 369 | geyser 370 | hatchet 371 | cleaver 372 | letter opener 373 | plane 374 | power drill 375 | lawn mower 376 | hammer 377 | corkscrew 378 | can opener 379 | plunger 380 | screwdriver 381 | shovel 382 | plow 383 | chain saw 384 | cock 385 | hen 386 | ostrich 387 | brambling 388 | goldfinch 389 | house finch 390 | junco 391 | indigo bunting 392 | 
robin 393 | bulbul 394 | jay 395 | magpie 396 | chickadee 397 | water ouzel 398 | kite 399 | bald eagle 400 | vulture 401 | great grey owl 402 | black grouse 403 | ptarmigan 404 | ruffed grouse 405 | prairie chicken 406 | peacock 407 | quail 408 | partridge 409 | African grey 410 | macaw 411 | sulphur-crested cockatoo 412 | lorikeet 413 | coucal 414 | bee eater 415 | hornbill 416 | hummingbird 417 | jacamar 418 | toucan 419 | drake 420 | red-breasted merganser 421 | goose 422 | black swan 423 | white stork 424 | black stork 425 | spoonbill 426 | flamingo 427 | American egret 428 | little blue heron 429 | bittern 430 | crane 431 | limpkin 432 | American coot 433 | bustard 434 | ruddy turnstone 435 | red-backed sandpiper 436 | redshank 437 | dowitcher 438 | oystercatcher 439 | European gallinule 440 | pelican 441 | king penguin 442 | albatross 443 | great white shark 444 | tiger shark 445 | hammerhead 446 | electric ray 447 | stingray 448 | barracouta 449 | coho 450 | tench 451 | goldfish 452 | eel 453 | rock beauty 454 | anemone fish 455 | lionfish 456 | puffer 457 | sturgeon 458 | gar 459 | loggerhead 460 | leatherback turtle 461 | mud turtle 462 | terrapin 463 | box turtle 464 | banded gecko 465 | common iguana 466 | American chameleon 467 | whiptail 468 | agama 469 | frilled lizard 470 | alligator lizard 471 | Gila monster 472 | green lizard 473 | African chameleon 474 | Komodo dragon 475 | triceratops 476 | African crocodile 477 | American alligator 478 | thunder snake 479 | ringneck snake 480 | hognose snake 481 | green snake 482 | king snake 483 | garter snake 484 | water snake 485 | vine snake 486 | night snake 487 | boa constrictor 488 | rock python 489 | Indian cobra 490 | green mamba 491 | sea snake 492 | horned viper 493 | diamondback 494 | sidewinder 495 | European fire salamander 496 | common newt 497 | eft 498 | spotted salamander 499 | axolotl 500 | bullfrog 501 | tree frog 502 | tailed frog 503 | whistle 504 | wing 505 | paintbrush 506 | hand blower 
507 | oxygen mask 508 | snorkel 509 | loudspeaker 510 | microphone 511 | screen 512 | mouse 513 | electric fan 514 | oil filter 515 | strainer 516 | space heater 517 | stove 518 | guillotine 519 | barometer 520 | rule 521 | odometer 522 | scale 523 | analog clock 524 | digital clock 525 | wall clock 526 | hourglass 527 | sundial 528 | parking meter 529 | stopwatch 530 | digital watch 531 | stethoscope 532 | syringe 533 | magnetic compass 534 | binoculars 535 | projector 536 | sunglasses 537 | loupe 538 | radio telescope 539 | bow 540 | cannon [ground] 541 | assault rifle 542 | rifle 543 | projectile 544 | computer keyboard 545 | typewriter keyboard 546 | crane 547 | lighter 548 | abacus 549 | cash machine 550 | slide rule 551 | desktop computer 552 | hand-held computer 553 | notebook 554 | web site 555 | harvester 556 | thresher 557 | printer 558 | slot 559 | vending machine 560 | sewing machine 561 | joystick 562 | switch 563 | hook 564 | car wheel 565 | paddlewheel 566 | pinwheel 567 | potter's wheel 568 | gas pump 569 | carousel 570 | swing 571 | reel 572 | radiator 573 | puck 574 | hard disc 575 | sunglass 576 | pick 577 | car mirror 578 | solar dish 579 | remote control 580 | disk brake 581 | buckle 582 | hair slide 583 | knot 584 | combination lock 585 | padlock 586 | nail 587 | safety pin 588 | screw 589 | muzzle 590 | seat belt 591 | ski 592 | candle 593 | jack-o'-lantern 594 | spotlight 595 | torch 596 | neck brace 597 | pier 598 | tripod 599 | maypole 600 | mousetrap 601 | spider web 602 | trilobite 603 | harvestman 604 | scorpion 605 | black and gold garden spider 606 | barn spider 607 | garden spider 608 | black widow 609 | tarantula 610 | wolf spider 611 | tick 612 | centipede 613 | isopod 614 | Dungeness crab 615 | rock crab 616 | fiddler crab 617 | king crab 618 | American lobster 619 | spiny lobster 620 | crayfish 621 | hermit crab 622 | tiger beetle 623 | ladybug 624 | ground beetle 625 | long-horned beetle 626 | leaf beetle 627 | dung beetle 628 | 
rhinoceros beetle 629 | weevil 630 | fly 631 | bee 632 | grasshopper 633 | cricket 634 | walking stick 635 | cockroach 636 | mantis 637 | cicada 638 | leafhopper 639 | lacewing 640 | dragonfly 641 | damselfly 642 | admiral 643 | ringlet 644 | monarch 645 | cabbage butterfly 646 | sulphur butterfly 647 | lycaenid 648 | jellyfish 649 | sea anemone 650 | brain coral 651 | flatworm 652 | nematode 653 | conch 654 | snail 655 | slug 656 | sea slug 657 | chiton 658 | sea urchin 659 | sea cucumber 660 | iron 661 | espresso maker 662 | microwave 663 | Dutch oven 664 | rotisserie 665 | toaster 666 | waffle iron 667 | vacuum 668 | dishwasher 669 | refrigerator 670 | washer 671 | Crock Pot 672 | frying pan 673 | wok 674 | caldron 675 | coffeepot 676 | teapot 677 | spatula 678 | altar 679 | triumphal arch 680 | patio 681 | steel arch bridge 682 | suspension bridge 683 | viaduct 684 | barn 685 | greenhouse 686 | palace 687 | monastery 688 | library 689 | apiary 690 | boathouse 691 | church 692 | mosque 693 | stupa 694 | planetarium 695 | restaurant 696 | cinema 697 | home theater 698 | lumbermill 699 | coil 700 | obelisk 701 | totem pole 702 | castle 703 | prison 704 | grocery store 705 | bakery 706 | barbershop 707 | bookshop 708 | butcher shop 709 | confectionery 710 | shoe shop 711 | tobacco shop 712 | toyshop 713 | fountain 714 | cliff dwelling 715 | yurt 716 | dock 717 | brass 718 | megalith 719 | bannister 720 | breakwater 721 | dam 722 | chainlink fence 723 | picket fence 724 | worm fence 725 | stone wall 726 | grille 727 | sliding door 728 | turnstile 729 | mountain tent 730 | scoreboard 731 | honeycomb 732 | plate rack 733 | pedestal 734 | beacon 735 | mashed potato 736 | bell pepper 737 | head cabbage 738 | broccoli 739 | cauliflower 740 | zucchini 741 | spaghetti squash 742 | acorn squash 743 | butternut squash 744 | cucumber 745 | artichoke 746 | cardoon 747 | mushroom 748 | shower curtain 749 | jean 750 | carton 751 | handkerchief 752 | sandal 753 | ashcan 754 | 
safe 755 | plate 756 | necklace 757 | croquet ball 758 | fur coat 759 | thimble 760 | pajama 761 | running shoe 762 | cocktail shaker 763 | chest 764 | manhole cover 765 | modem 766 | tub 767 | tray 768 | balance beam 769 | bagel 770 | prayer rug 771 | kimono 772 | hot pot 773 | whiskey jug 774 | knee pad 775 | book jacket 776 | spindle 777 | ski mask 778 | beer bottle 779 | crash helmet 780 | bottlecap 781 | tile roof 782 | mask 783 | maillot 784 | Petri dish 785 | football helmet 786 | bathing cap 787 | teddy bear 788 | holster 789 | pop bottle 790 | photocopier 791 | vestment 792 | crossword puzzle 793 | golf ball 794 | trifle 795 | suit 796 | water tower 797 | feather boa 798 | cloak 799 | red wine 800 | drumstick 801 | shield 802 | Christmas stocking 803 | hoopskirt 804 | menu 805 | stage 806 | bonnet 807 | meat loaf 808 | baseball 809 | face powder 810 | scabbard 811 | sunscreen 812 | beer glass 813 | hen-of-the-woods 814 | guacamole 815 | lampshade 816 | wool 817 | hay 818 | bow tie 819 | mailbag 820 | water jug 821 | bucket 822 | dishrag 823 | soup bowl 824 | eggnog 825 | mortar 826 | trench coat 827 | paddle 828 | chain 829 | swab 830 | mixing bowl 831 | potpie 832 | wine bottle 833 | shoji 834 | bulletproof vest 835 | drilling platform 836 | binder 837 | cardigan 838 | sweatshirt 839 | pot 840 | birdhouse 841 | hamper 842 | ping-pong ball 843 | pencil box 844 | pay-phone 845 | consomme 846 | apron 847 | punching bag 848 | backpack 849 | groom 850 | bearskin 851 | pencil sharpener 852 | broom 853 | mosquito net 854 | abaya 855 | mortarboard 856 | poncho 857 | crutch 858 | Polaroid camera 859 | space bar 860 | cup 861 | racket 862 | traffic light 863 | quill 864 | radio 865 | dough 866 | cuirass 867 | military uniform 868 | lipstick 869 | shower cap 870 | monitor 871 | oscilloscope 872 | mitten 873 | brassiere 874 | French loaf 875 | vase 876 | milk can 877 | rugby ball 878 | paper towel 879 | earthstar 880 | envelope 881 | miniskirt 882 | cowboy hat 883 | 
trolleybus 884 | perfume 885 | bathtub 886 | hotdog 887 | coral fungus 888 | bullet train 889 | pillow 890 | toilet tissue 891 | cassette 892 | carpenter's kit 893 | ladle 894 | stinkhorn 895 | lotion 896 | hair spray 897 | academic gown 898 | dome 899 | crate 900 | wig 901 | burrito 902 | pill bottle 903 | chain mail 904 | theater curtain 905 | window shade 906 | barrel 907 | washbasin 908 | ballpoint 909 | basketball 910 | bath towel 911 | cowboy boot 912 | gown 913 | window screen 914 | agaric 915 | cellular telephone 916 | nipple 917 | barbell 918 | mailbox 919 | lab coat 920 | fire screen 921 | minibus 922 | packet 923 | maze 924 | pole 925 | horizontal bar 926 | sombrero 927 | pickelhaube 928 | rain barrel 929 | wallet 930 | cassette player 931 | comic book 932 | piggy bank 933 | street sign 934 | bell cote 935 | fountain pen 936 | Windsor tie 937 | volleyball 938 | overskirt 939 | sarong 940 | purse 941 | bolo tie 942 | bib 943 | parachute 944 | sleeping bag 945 | television 946 | swimming trunks 947 | measuring cup 948 | espresso 949 | pizza 950 | breastplate 951 | shopping basket 952 | wooden spoon 953 | saltshaker 954 | chocolate sauce 955 | ballplayer 956 | goblet 957 | gyromitra 958 | stretcher 959 | water bottle 960 | dial telephone 961 | soap dispenser 962 | jersey 963 | school bus 964 | jigsaw puzzle 965 | plastic bag 966 | reflex camera 967 | diaper 968 | Band Aid 969 | ice lolly 970 | velvet 971 | tennis ball 972 | gasmask 973 | doormat 974 | Loafer 975 | ice cream 976 | pretzel 977 | quilt 978 | maillot 979 | tape player 980 | clog 981 | iPod 982 | bolete 983 | scuba diver 984 | pitcher 985 | matchstick 986 | bikini 987 | sock 988 | CD player 989 | lens cap 990 | thatch 991 | vault 992 | beaker 993 | bubble 994 | cheeseburger 995 | parallel bars 996 | flagpole 997 | coffee mug 998 | rubber eraser 999 | stole 1000 | carbonara 1001 | dumbbell -------------------------------------------------------------------------------- 
/examples/ImageRecognitionExample/index.js: -------------------------------------------------------------------------------- 1 | import { AppRegistry } from 'react-native'; 2 | import App from './App'; 3 | 4 | AppRegistry.registerComponent('ImageRecognitionExample', () => App); 5 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample-tvOS/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIRequiredDeviceCapabilities 28 | 29 | armv7 30 | 31 | UISupportedInterfaceOrientations 32 | 33 | UIInterfaceOrientationPortrait 34 | UIInterfaceOrientationLandscapeLeft 35 | UIInterfaceOrientationLandscapeRight 36 | 37 | UIViewControllerBasedStatusBarAppearance 38 | 39 | NSLocationWhenInUseUsageDescription 40 | 41 | NSAppTransportSecurity 42 | 43 | 44 | NSExceptionDomains 45 | 46 | localhost 47 | 48 | NSExceptionAllowsInsecureHTTPLoads 49 | 50 | 51 | 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample-tvOSTests/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | 
$(PRODUCT_NAME) 15 | CFBundlePackageType 16 | BNDL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | 24 | 25 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample.xcodeproj/xcshareddata/xcschemes/ImageRecognitionExample-tvOS.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 29 | 35 | 36 | 37 | 43 | 49 | 50 | 51 | 52 | 53 | 58 | 59 | 61 | 67 | 68 | 69 | 70 | 71 | 77 | 78 | 79 | 80 | 81 | 82 | 92 | 94 | 100 | 101 | 102 | 103 | 104 | 105 | 111 | 113 | 119 | 120 | 121 | 122 | 124 | 125 | 128 | 129 | 130 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample.xcodeproj/xcshareddata/xcschemes/ImageRecognitionExample.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 29 | 35 | 36 | 37 | 43 | 49 | 50 | 51 | 52 | 53 | 58 | 59 | 61 | 67 | 68 | 69 | 70 | 71 | 77 | 78 | 79 | 80 | 81 | 82 | 92 | 94 | 100 | 101 | 102 | 103 | 104 | 105 | 111 | 113 | 119 | 120 | 121 | 122 | 124 | 125 | 128 | 129 | 130 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/AppDelegate.h: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2015-present, Facebook, Inc. 3 | * All rights reserved. 
4 | * 5 | * This source code is licensed under the BSD-style license found in the 6 | * LICENSE file in the root directory of this source tree. An additional grant 7 | * of patent rights can be found in the PATENTS file in the same directory. 8 | */ 9 | 10 | #import 11 | 12 | @interface AppDelegate : UIResponder 13 | 14 | @property (nonatomic, strong) UIWindow *window; 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/AppDelegate.m: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2015-present, Facebook, Inc. 3 | * All rights reserved. 4 | * 5 | * This source code is licensed under the BSD-style license found in the 6 | * LICENSE file in the root directory of this source tree. An additional grant 7 | * of patent rights can be found in the PATENTS file in the same directory. 8 | */ 9 | 10 | #import "AppDelegate.h" 11 | 12 | #import 13 | #import 14 | 15 | @implementation AppDelegate 16 | 17 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 18 | { 19 | NSURL *jsCodeLocation; 20 | 21 | jsCodeLocation = [[RCTBundleURLProvider sharedSettings] jsBundleURLForBundleRoot:@"index" fallbackResource:nil]; 22 | 23 | RCTRootView *rootView = [[RCTRootView alloc] initWithBundleURL:jsCodeLocation 24 | moduleName:@"ImageRecognitionExample" 25 | initialProperties:nil 26 | launchOptions:launchOptions]; 27 | rootView.backgroundColor = [[UIColor alloc] initWithRed:1.0f green:1.0f blue:1.0f alpha:1]; 28 | 29 | self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds]; 30 | UIViewController *rootViewController = [UIViewController new]; 31 | rootViewController.view = rootView; 32 | self.window.rootViewController = rootViewController; 33 | [self.window makeKeyAndVisible]; 34 | return YES; 35 | } 36 | 37 | @end 38 | 
-------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/Base.lproj/LaunchScreen.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 21 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/Images.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "29x29", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "29x29", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "40x40", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "40x40", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "60x60", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "60x60", 31 | "scale" : "3x" 32 | } 33 | ], 34 | "info" : { 35 | "version" : 1, 36 | "author" : "xcode" 37 | } 38 | } -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/Images.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ImageRecognitionExample 9 | CFBundleExecutable 10 | $(EXECUTABLE_NAME) 
11 | CFBundleIdentifier 12 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleSignature 22 | ???? 23 | CFBundleVersion 24 | 1 25 | LSRequiresIPhoneOS 26 | 27 | UILaunchStoryboardName 28 | LaunchScreen 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UIViewControllerBasedStatusBarAppearance 40 | 41 | NSLocationWhenInUseUsageDescription 42 | 43 | NSAppTransportSecurity 44 | 45 | 46 | NSExceptionDomains 47 | 48 | localhost 49 | 50 | NSExceptionAllowsInsecureHTTPLoads 51 | 52 | 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExample/main.m: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2015-present, Facebook, Inc. 3 | * All rights reserved. 4 | * 5 | * This source code is licensed under the BSD-style license found in the 6 | * LICENSE file in the root directory of this source tree. An additional grant 7 | * of patent rights can be found in the PATENTS file in the same directory. 8 | */ 9 | 10 | #import 11 | 12 | #import "AppDelegate.h" 13 | 14 | int main(int argc, char * argv[]) { 15 | @autoreleasepool { 16 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExampleTests/ImageRecognitionExampleTests.m: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2015-present, Facebook, Inc. 
3 | * All rights reserved. 4 | * 5 | * This source code is licensed under the BSD-style license found in the 6 | * LICENSE file in the root directory of this source tree. An additional grant 7 | * of patent rights can be found in the PATENTS file in the same directory. 8 | */ 9 | 10 | #import 11 | #import 12 | 13 | #import 14 | #import 15 | 16 | #define TIMEOUT_SECONDS 600 17 | #define TEXT_TO_LOOK_FOR @"Welcome to React Native!" 18 | 19 | @interface ImageRecognitionExampleTests : XCTestCase 20 | 21 | @end 22 | 23 | @implementation ImageRecognitionExampleTests 24 | 25 | - (BOOL)findSubviewInView:(UIView *)view matching:(BOOL(^)(UIView *view))test 26 | { 27 | if (test(view)) { 28 | return YES; 29 | } 30 | for (UIView *subview in [view subviews]) { 31 | if ([self findSubviewInView:subview matching:test]) { 32 | return YES; 33 | } 34 | } 35 | return NO; 36 | } 37 | 38 | - (void)testRendersWelcomeScreen 39 | { 40 | UIViewController *vc = [[[RCTSharedApplication() delegate] window] rootViewController]; 41 | NSDate *date = [NSDate dateWithTimeIntervalSinceNow:TIMEOUT_SECONDS]; 42 | BOOL foundElement = NO; 43 | 44 | __block NSString *redboxError = nil; 45 | RCTSetLogFunction(^(RCTLogLevel level, RCTLogSource source, NSString *fileName, NSNumber *lineNumber, NSString *message) { 46 | if (level >= RCTLogLevelError) { 47 | redboxError = message; 48 | } 49 | }); 50 | 51 | while ([date timeIntervalSinceNow] > 0 && !foundElement && !redboxError) { 52 | [[NSRunLoop mainRunLoop] runMode:NSDefaultRunLoopMode beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]]; 53 | [[NSRunLoop mainRunLoop] runMode:NSRunLoopCommonModes beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.1]]; 54 | 55 | foundElement = [self findSubviewInView:vc.view matching:^BOOL(UIView *view) { 56 | if ([view.accessibilityLabel isEqualToString:TEXT_TO_LOOK_FOR]) { 57 | return YES; 58 | } 59 | return NO; 60 | }]; 61 | } 62 | 63 | RCTSetLogFunction(RCTDefaultLogFunction); 64 | 65 | XCTAssertNil(redboxError, @"RedBox 
error: %@", redboxError); 66 | XCTAssertTrue(foundElement, @"Couldn't find element with text '%@' in %d seconds", TEXT_TO_LOOK_FOR, TIMEOUT_SECONDS); 67 | } 68 | 69 | 70 | @end 71 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/ImageRecognitionExampleTests/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | org.reactjs.native.example.$(PRODUCT_NAME:rfc1034identifier) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | BNDL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | 24 | 25 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/ios/Podfile: -------------------------------------------------------------------------------- 1 | target 'ImageRecognitionExample' 2 | pod 'TensorFlow-experimental' 3 | -------------------------------------------------------------------------------- /examples/ImageRecognitionExample/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ImageRecognitionExample", 3 | "version": "0.0.1", 4 | "private": true, 5 | "scripts": { 6 | "start": "node node_modules/react-native/local-cli/cli.js start", 7 | "test": "jest" 8 | }, 9 | "dependencies": { 10 | "react": "16.0.0", 11 | "react-native": "0.51.0", 12 | "react-native-tensorflow": "git+https://github.com/reneweb/react-native-tensorflow.git" 13 | }, 14 | "devDependencies": { 15 | "babel-jest": "21.2.0", 16 | "babel-preset-react-native": "4.0.0", 17 | "jest": "21.2.1", 18 | "react-test-renderer": "16.0.0" 19 | }, 20 | "jest": { 21 | "preset": "react-native" 22 | } 23 | } 24 | 
-------------------------------------------------------------------------------- /examples/ImageRecognitionExample/rn-cli.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | getAssetExts() { 3 | return ['pb', 'txt'] 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 2 | import { NativeModules, Image } from 'react-native'; 3 | import uuid from 'uuid/v1'; 4 | 5 | const { RNImageRecognition, RNTensorFlowInference, RNTensorFlowGraph, RNTensorFlowGraphOperations } = NativeModules; 6 | 7 | class TensorFlowOperation { 8 | constructor(id, opName) { 9 | this.id = id 10 | this.opName = opName 11 | } 12 | 13 | inputListLength(name) { 14 | return RNTensorFlowGraphOperations.inputListLength(this.id, this.opName, name); 15 | } 16 | 17 | name() { 18 | return RNTensorFlowGraphOperations.name(this.id, this.opName) 19 | } 20 | 21 | numOutputs() { 22 | return RNTensorFlowGraphOperations.numOutputs(this.id, this.opName) 23 | } 24 | 25 | output(index) { 26 | return RNTensorFlowGraphOperations.output(this.id, this.opName, index) 27 | } 28 | 29 | outputList(index, length) { 30 | return RNTensorFlowGraphOperations.outputList(this.id, this.opName, index, length) 31 | } 32 | 33 | outputListLength(name) { 34 | return RNTensorFlowGraphOperations.outputListLength(this.id, this.opName, name) 35 | } 36 | 37 | type() { 38 | return RNTensorFlowGraphOperations.type(this.id, this.opName) 39 | } 40 | } 41 | 42 | class TensorFlowGraph { 43 | constructor(id) { 44 | this.id = id 45 | } 46 | 47 | importGraphDef(graphDef) { 48 | return RNTensorFlowGraph.importGraphDef(this.id, graphDef) 49 | } 50 | 51 | importGraphDefWithPrefix(graphDef, prefix) { 52 | return RNTensorFlowGraph.importGraphDefWithPrefix(this.id, graphDef, prefix) 53 | } 54 | 55 | toGraphDef() { 56 | return 
RNTensorFlowGraph.toGraphDef(this.id) 57 | } 58 | 59 | operation(name) { 60 | return new TensorFlowOperation(this.id, name) 61 | } 62 | 63 | close() { 64 | return RNTensorFlowGraph.close(this.id) 65 | } 66 | } 67 | 68 | class TensorFlow { 69 | 70 | constructor(modelLocation) { 71 | this.id = uuid() 72 | const resolvedModelLocation = Image.resolveAssetSource(modelLocation) != null 73 | ? Image.resolveAssetSource(modelLocation).uri 74 | : modelLocation 75 | this.init = RNTensorFlowInference.initTensorFlowInference(this.id, resolvedModelLocation) 76 | this.tfGraph = new TensorFlowGraph(this.id) 77 | } 78 | 79 | async feed(data) { 80 | await this.init 81 | return RNTensorFlowInference.feed(this.id, data) 82 | } 83 | 84 | async run(outputNames, withStats) { 85 | await this.init 86 | return RNTensorFlowInference.run(this.id, outputNames, withStats !== undefined ? withStats : false) 87 | } 88 | 89 | async fetch(outputName) { 90 | await this.init 91 | return RNTensorFlowInference.fetch(this.id, outputName) 92 | } 93 | 94 | async graph() { 95 | await this.init 96 | return this.tfGraph 97 | } 98 | 99 | async reset() { 100 | await this.init 101 | return RNTensorFlowInference.reset(this.id) 102 | } 103 | 104 | async close() { 105 | await this.init 106 | return RNTensorFlowInference.close(this.id) 107 | } 108 | } 109 | 110 | class TfImageRecognition { 111 | constructor(data) { 112 | this.id = uuid() 113 | data['model'] = Image.resolveAssetSource(data['model']) != null 114 | ? Image.resolveAssetSource(data['model']).uri 115 | : data['model'] 116 | 117 | data['labels'] = Image.resolveAssetSource(data['labels']) != null 118 | ? Image.resolveAssetSource(data['labels']).uri 119 | : data['labels'] 120 | 121 | this.init = RNImageRecognition.initImageRecognizer(this.id, data) 122 | } 123 | 124 | async recognize(data) { 125 | await this.init 126 | 127 | data['image'] = Image.resolveAssetSource(data['image']) != null 128 | ? 
Image.resolveAssetSource(data['image']).uri 129 | : data['image'] 130 | 131 | return RNImageRecognition.recognize(this.id, data) 132 | } 133 | 134 | async close() { 135 | await this.init 136 | return RNImageRecognition.close(this.id) 137 | } 138 | } 139 | 140 | export { TensorFlow, TfImageRecognition } 141 | -------------------------------------------------------------------------------- /ios/ImageRecognizer.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "tensorflow/core/framework/op_kernel.h" 4 | #include "tensorflow/core/public/session.h" 5 | 6 | @interface ImageRecognizer: NSObject 7 | - (id) initWithData:(NSString *)model labels:(NSString *)labels imageMean:(NSNumber *)imageMean imageStd:(NSNumber *)imageStd; 8 | - (NSArray *) recognizeImage:(NSString *)image inputName:(NSString *)inputName inputSize:(NSNumber *)inputSize outputName:(NSString *)outputName maxResults:(NSNumber *)maxResults threshold:(NSNumber *)threshold; 9 | @end 10 | -------------------------------------------------------------------------------- /ios/ImageRecognizer.mm: -------------------------------------------------------------------------------- 1 | #import "ImageRecognizer.h" 2 | 3 | #include "TensorFlowInference.h" 4 | 5 | #include 6 | #include 7 | 8 | @implementation ImageRecognizer 9 | { 10 | TensorFlowInference * inference; 11 | NSArray * labels; 12 | NSNumber * imageMean; 13 | NSNumber * imageStd; 14 | } 15 | 16 | - (id) initWithData:(NSString *)modelInput labels:(NSString *)labelsInput imageMean:(NSNumber *)imageMeanInput imageStd:(NSNumber *)imageStdInput 17 | { 18 | self = [super init]; 19 | if (self != nil) { 20 | imageMean = imageMeanInput != nil ? imageMeanInput : [NSNumber numberWithInt:117]; 21 | imageStd = imageStdInput != nil ? 
imageStdInput : [NSNumber numberWithFloat:1]; 22 | 23 | TensorFlowInference * tensorFlowInference = [[TensorFlowInference alloc] initWithModel:modelInput]; 24 | inference = tensorFlowInference; 25 | 26 | NSString *bundlePath = [[NSBundle mainBundle] pathForResource:[labelsInput substringToIndex:[labelsInput length] - 4] ofType:@"txt"]; 27 | if(bundlePath != NULL) { 28 | NSString *labelString = [NSString stringWithContentsOfFile:bundlePath encoding:NSUTF8StringEncoding error:nil]; 29 | labels = [labelString componentsSeparatedByString:@"\n"]; 30 | } else if ([[NSFileManager defaultManager] fileExistsAtPath:labelsInput]) { 31 | NSData * labelData = [[NSData alloc] initWithContentsOfFile:labelsInput]; 32 | NSString * labelString = [[NSString alloc] initWithData:labelData encoding:NSUTF8StringEncoding]; 33 | labels = [labelString componentsSeparatedByString:@"\n"]; 34 | } else { 35 | NSURL *labelsUrl = [NSURL URLWithString:labelsInput]; 36 | NSData * labelData = [[NSData alloc] initWithContentsOfURL: labelsUrl]; 37 | NSString * labelString = [[NSString alloc] initWithData:labelData encoding:NSUTF8StringEncoding]; 38 | labels = [labelString componentsSeparatedByString:@"\n"]; 39 | } 40 | } 41 | return self; 42 | } 43 | 44 | - (NSArray *) recognizeImage:(NSString *)image inputName:(NSString *)inputName inputSize:(NSNumber *)inputSize outputName:(NSString *)outputName maxResults:(NSNumber *)maxResults threshold:(NSNumber *)threshold 45 | { 46 | NSString * inputNameResolved = inputName != nil ? inputName : @"input"; 47 | NSString * outputNameResolved = outputName != nil ? outputName : @"output"; 48 | NSNumber * inputSizeResolved = inputSize != nil ? inputSize : [NSNumber numberWithInt:224]; 49 | NSNumber * maxResultsResolved = maxResults != nil ? maxResults : [NSNumber numberWithInt:3]; 50 | NSNumber * thresholdResolved = threshold != nil ? threshold : [NSNumber numberWithFloat:0.1]; 51 | 52 | NSData * imageData; 53 | NSString * imageType = [image hasSuffix:@"png"] ? 
@"png" : @"jpg"; 54 | NSString * imageSanitized = [image hasPrefix:@"file://"] ? [image substringFromIndex:7] : image; 55 | 56 | NSString *bundlePath = [[NSBundle mainBundle] pathForResource:[imageSanitized substringToIndex:[imageSanitized length] - 4] ofType:imageType]; 57 | if(bundlePath != NULL) { 58 | imageData = [NSData dataWithContentsOfFile:bundlePath]; 59 | } else if ([[NSFileManager defaultManager] fileExistsAtPath:imageSanitized]) { 60 | imageData = [[NSData alloc] initWithContentsOfFile:imageSanitized]; 61 | } else { 62 | NSURL *imageUrl = [NSURL URLWithString:imageSanitized]; 63 | imageData = [[NSData alloc] initWithContentsOfURL: imageUrl]; 64 | } 65 | 66 | tensorflow::Tensor tensor = createImageTensor(imageData, [imageType UTF8String], [inputSizeResolved floatValue], [imageMean floatValue], [imageStd floatValue]); 67 | [inference feed:inputNameResolved tensor:tensor]; 68 | [inference run:[[NSArray alloc] initWithObjects:outputNameResolved, nil] enableStats:false]; 69 | NSArray * outputs = [inference fetch:outputNameResolved]; 70 | 71 | NSMutableArray * results = [NSMutableArray new]; 72 | for (NSUInteger i = 0; i < [outputs count]; i++) { 73 | id output = [outputs objectAtIndex:i]; 74 | if([output floatValue] > [thresholdResolved floatValue]) { /* FIX: `output > thresholdResolved` compared NSNumber object POINTERS, so the confidence threshold was never actually applied; compare the numeric values instead */ 75 | NSDictionary * entry = @{@"id": @(i), @"name": [labels count] > i ?
labels[i] : @"unknown", @"confidence": output}; 76 | [results addObject:entry]; 77 | } 78 | } 79 | 80 | NSArray * resultsSorted = [results sortedArrayUsingComparator:^NSComparisonResult(id first, id second) { 81 | return [second[@"confidence"] compare:first[@"confidence"]]; 82 | }]; 83 | 84 | auto finalSize = MIN([resultsSorted count], [maxResultsResolved integerValue]); 85 | NSArray * finalResults = [resultsSorted subarrayWithRange:NSMakeRange(0, finalSize)]; 86 | 87 | [inference reset]; 88 | return finalResults; 89 | } 90 | 91 | tensorflow::Tensor createImageTensor(NSData * data, const char* image_type, float input_size, float input_mean, float input_std) { 92 | int image_width; 93 | int image_height; 94 | int image_channels; 95 | std::vector image_data = imageAsVector(data, image_type, &image_width, &image_height, &image_channels); 96 | 97 | const int wanted_width = input_size; 98 | const int wanted_height = input_size; 99 | const int wanted_channels = 3; 100 | 101 | tensorflow::Tensor image_tensor(tensorflow::DT_FLOAT, tensorflow::TensorShape({1, wanted_height, wanted_width, wanted_channels})); 102 | auto image_tensor_mapped = image_tensor.tensor(); 103 | tensorflow::uint8* in = image_data.data(); 104 | 105 | float* out = image_tensor_mapped.data(); 106 | for (int y = 0; y < wanted_height; ++y) { 107 | const int in_y = (y * image_height) / wanted_height; 108 | tensorflow::uint8* in_row = in + (in_y * image_width * image_channels); 109 | float* out_row = out + (y * wanted_width * wanted_channels); 110 | for (int x = 0; x < wanted_width; ++x) { 111 | const int in_x = (x * image_width) / wanted_width; 112 | tensorflow::uint8* in_pixel = in_row + (in_x * image_channels); 113 | float* out_pixel = out_row + (x * wanted_channels); 114 | for (int c = 0; c < wanted_channels; ++c) { 115 | out_pixel[c] = (in_pixel[c] - input_mean) / input_std; 116 | } 117 | } 118 | } 119 | 120 | return image_tensor; 121 | } 122 | 123 | std::vector imageAsVector(NSData * data, const char* 
image_type, int* out_width, int* out_height, int* out_channels) { 124 | 125 | CFDataRef file_data_ref = (__bridge CFDataRef)data; 126 | CGDataProviderRef image_provider = CGDataProviderCreateWithCFData(file_data_ref); 127 | 128 | CGImageRef image; 129 | if (strcasecmp(image_type, "png") == 0) { 130 | try { 131 | image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true, kCGRenderingIntentDefault); 132 | } catch( std::exception& e ) { 133 | CFRelease(image_provider); 134 | CFRelease(file_data_ref); 135 | throw; 136 | } 137 | } else { 138 | try { 139 | image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true, kCGRenderingIntentDefault); 140 | } catch( std::exception& e ) { 141 | CFRelease(image_provider); 142 | CFRelease(file_data_ref); 143 | throw; 144 | } 145 | } 146 | 147 | const int width = (int)CGImageGetWidth(image); 148 | const int height = (int)CGImageGetHeight(image); 149 | const int channels = 4; 150 | CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB(); 151 | const int bytes_per_row = (width * channels); 152 | const int bytes_in_image = (bytes_per_row * height); 153 | std::vector result(bytes_in_image); 154 | const int bits_per_component = 8; 155 | CGContextRef context = CGBitmapContextCreate(result.data(), width, height, 156 | bits_per_component, bytes_per_row, color_space, 157 | kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big); 158 | CGColorSpaceRelease(color_space); 159 | CGContextDrawImage(context, CGRectMake(0, 0, width, height), image); 160 | CGContextRelease(context); 161 | CFRelease(image_provider); 162 | CFRelease(file_data_ref); 163 | 164 | *out_width = width; 165 | *out_height = height; 166 | *out_channels = channels; 167 | return result; 168 | } 169 | 170 | @end 171 | -------------------------------------------------------------------------------- /ios/RNImageRecognition.h: -------------------------------------------------------------------------------- 1 | #if __has_include("RCTBridge.h") 2 | #import 
"RCTBridge.h" 3 | #else 4 | #import 5 | #endif 6 | 7 | @interface RNImageRecognition : NSObject 8 | 9 | @end 10 | -------------------------------------------------------------------------------- /ios/RNImageRecognition.mm: -------------------------------------------------------------------------------- 1 | #import "RNImageRecognition.h" 2 | 3 | #include "ImageRecognizer.h" 4 | 5 | #import "RCTUtils.h" 6 | 7 | #include 8 | #include 9 | 10 | #include "tensorflow/core/framework/op_kernel.h" 11 | #include "tensorflow/core/public/session.h" 12 | 13 | @implementation RNImageRecognition 14 | { 15 | std::unordered_map imageRecognizers; 16 | } 17 | 18 | @synthesize bridge = _bridge; 19 | 20 | - (dispatch_queue_t)methodQueue 21 | { 22 | return dispatch_get_main_queue(); 23 | } 24 | 25 | RCT_EXPORT_MODULE() 26 | 27 | RCT_EXPORT_METHOD(initImageRecognizer:(NSString *)tId data:(NSDictionary *)data resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 28 | { 29 | try { 30 | NSString * model = data[@"model"]; 31 | NSString * labels = data[@"labels"]; 32 | NSNumber * imageMean = [data objectForKey:@"imageMean"]; 33 | NSNumber * imageStd = [data objectForKey:@"imageStd"]; 34 | 35 | ImageRecognizer * imageRecognizer = [[ImageRecognizer alloc] initWithData:model labels:labels imageMean:imageMean imageStd:imageStd]; 36 | imageRecognizers[[tId UTF8String]] = imageRecognizer; 37 | 38 | resolve(@1); 39 | } catch( std::exception& e ) { 40 | reject(RCTErrorUnspecified, @(e.what()), nil); 41 | } 42 | } 43 | 44 | RCT_EXPORT_METHOD(recognize:(NSString *)tId data:(NSDictionary *)data resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 45 | { 46 | try { 47 | NSString * image = data[@"image"]; 48 | NSString * inputName = [data objectForKey:@"inputName"]; 49 | NSNumber * inputSize = [data objectForKey:@"inputSize"]; 50 | NSString * outputName = [data objectForKey:@"outputName"]; 51 | NSNumber * maxResults = [data objectForKey:@"maxResults"]; 
52 | NSNumber * threshold = [data objectForKey:@"threshold"]; 53 | 54 | ImageRecognizer * imageRecognizer = imageRecognizers[[tId UTF8String]]; 55 | NSArray * result = [imageRecognizer recognizeImage:image inputName:inputName inputSize:inputSize outputName:outputName maxResults:maxResults threshold:threshold]; 56 | resolve(result); 57 | } catch( std::exception& e ) { 58 | reject(RCTErrorUnspecified, @(e.what()), nil); 59 | } 60 | } 61 | 62 | RCT_EXPORT_METHOD(close:(NSString *)tId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 63 | { 64 | try { 65 | imageRecognizers.erase([tId UTF8String]); 66 | resolve(@1); 67 | } catch( std::exception& e ) { 68 | reject(RCTErrorUnspecified, @(e.what()), nil); 69 | } 70 | } 71 | 72 | @end 73 | -------------------------------------------------------------------------------- /ios/RNTensorFlowGraph.h: -------------------------------------------------------------------------------- 1 | #if __has_include("RCTBridge.h") 2 | #import "RCTBridge.h" 3 | #else 4 | #import 5 | #endif 6 | 7 | #include "tensorflow/core/framework/op_kernel.h" 8 | #include "tensorflow/core/public/session.h" 9 | 10 | @interface RNTensorFlowGraph : NSObject 11 | 12 | -(void)init:(NSString *)tId graph:(std::shared_ptr)graph; 13 | -(void)close:(NSString *)tId; 14 | -(const tensorflow::NodeDef&)operation:(NSString *)tId name:(NSString *)name; 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /ios/RNTensorFlowGraph.mm: -------------------------------------------------------------------------------- 1 | #import "RNTensorFlowGraph.h" 2 | 3 | #import "RCTUtils.h" 4 | 5 | #include "RNTensorFlowGraphOperations.h" 6 | 7 | @implementation RNTensorFlowGraph { 8 | std::unordered_map> graphs; 9 | } 10 | 11 | - (dispatch_queue_t)methodQueue 12 | { 13 | return dispatch_get_main_queue(); 14 | } 15 | 16 | - (void)init:(NSString *)tId graph:(std::shared_ptr)graph 17 | { 18 | graphs[[tId UTF8String]] 
= graph; 19 | } 20 | 21 | - (const tensorflow::NodeDef&)operation:(NSString *)tId name:(NSString *)name 22 | { 23 | auto graph = graphs.find([tId UTF8String]); 24 | if(graph != graphs.end()) { 25 | auto nodes = graph->second->node(); 26 | for(auto const& node: nodes) { 27 | if(node.name() == [name UTF8String]) { /* FIX: was node.op(), which is the op TYPE (e.g. "Conv2D"); the JS API and the "with name not found" error both expect lookup by the node's NAME */ 28 | return node; 29 | } 30 | } 31 | } 32 | 33 | throw std::invalid_argument("Node / Operation with name not found"); 34 | 35 | } 36 | 37 | RCT_EXPORT_MODULE() 38 | 39 | RCT_EXPORT_METHOD(importGraphDef:(NSString *)tId graphDef:(NSString *)graphDef resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 40 | { 41 | [self importGraphDefWithPrefix:tId graphDef:graphDef prefix:@"" resolver:resolve rejecter:reject]; 42 | } 43 | 44 | RCT_EXPORT_METHOD(importGraphDefWithPrefix:(NSString *)tId graphDef:(NSString *)graphDef prefix:(NSString *)prefix resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 45 | { 46 | try { 47 | NSData *graphDefDecodedData = [[NSData alloc] initWithBase64EncodedString:graphDef options:0]; 48 | NSString *graphDefDecodedString = [[NSString alloc] initWithData:graphDefDecodedData encoding:NSUTF8StringEncoding]; 49 | 50 | auto graph = graphs.find([tId UTF8String]); 51 | if(graph != graphs.end()) { 52 | graph->second->ParseFromString([graphDefDecodedString UTF8String]); 53 | resolve(@1); 54 | } else { 55 | reject(RCTErrorUnspecified, @"Could not find graph with given id", nil); 56 | } 57 | } catch( std::exception& e ) { 58 | reject(RCTErrorUnspecified, @(e.what()), nil); 59 | } 60 | } 61 | 62 | RCT_EXPORT_METHOD(toGraphDef:(NSString *)tId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 63 | { 64 | try { 65 | auto graph = graphs.find([tId UTF8String]); 66 | if(graph != graphs.end()) { 67 | resolve(@(graph->second->SerializeAsString().c_str())); 68 | } else { 69 | reject(RCTErrorUnspecified, @"Could not find graph with given id", nil); 70 | } 71 | } catch(
std::exception& e ) { 72 | reject(RCTErrorUnspecified, @(e.what()), nil); 73 | } 74 | } 75 | 76 | RCT_EXPORT_METHOD(close:(NSString *)tId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 77 | { 78 | try { 79 | auto graph = graphs.find([tId UTF8String]); 80 | if(graph != graphs.end()) { 81 | graph->second->Clear(); 82 | graphs.erase(graph); 83 | resolve(@1); 84 | } else { 85 | reject(RCTErrorUnspecified, @"Could not find graph with given id", nil); 86 | } 87 | } catch( std::exception& e ) { 88 | reject(RCTErrorUnspecified, @(e.what()), nil); 89 | } 90 | } 91 | 92 | @end 93 | -------------------------------------------------------------------------------- /ios/RNTensorFlowGraphOperations.h: -------------------------------------------------------------------------------- 1 | #if __has_include("RCTBridge.h") 2 | #import "RCTBridge.h" 3 | #else 4 | #import 5 | #endif 6 | 7 | #include "tensorflow/core/framework/op_kernel.h" 8 | #include "tensorflow/core/public/session.h" 9 | 10 | @interface RNTensorFlowGraphOperations : NSObject 11 | 12 | @end 13 | -------------------------------------------------------------------------------- /ios/RNTensorFlowGraphOperations.mm: -------------------------------------------------------------------------------- 1 | #import "RNTensorFlowGraphOperations.h" 2 | 3 | #import "RCTUtils.h" 4 | 5 | #include "RNTensorFlowGraph.h" 6 | 7 | @implementation RNTensorFlowGraphOperations 8 | 9 | - (dispatch_queue_t)methodQueue 10 | { 11 | return dispatch_get_main_queue(); 12 | } 13 | 14 | @synthesize bridge = _bridge; 15 | 16 | RCT_EXPORT_MODULE() 17 | 18 | RCT_EXPORT_METHOD(inputListLength:(NSString *)tId opName:(NSString *)opName name:(NSString *)name resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 19 | { 20 | try { 21 | int result = [self operation:tId opName:opName].input_size(); 22 | resolve(@(result)); 23 | } catch( std::exception& e ) { 24 | reject(RCTErrorUnspecified, @(e.what()), 
nil); 25 | } 26 | } 27 | 28 | RCT_EXPORT_METHOD(name:(NSString *)tId opName:(NSString *)opName resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 29 | { 30 | try { 31 | std::string result = [self operation:tId opName:opName].name(); 32 | resolve(@(result.c_str())); 33 | } catch( std::exception& e ) { 34 | reject(RCTErrorUnspecified, @(e.what()), nil); 35 | } 36 | } 37 | 38 | RCT_EXPORT_METHOD(numOutputs:(NSString *)tId opName:(NSString *)opName resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 39 | { 40 | reject(RCTErrorUnspecified, @"Unsupported operation", nil); 41 | } 42 | 43 | RCT_EXPORT_METHOD(output:(NSString *)tId opName:(NSString *)opName index:(NSInteger *)index resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 44 | { 45 | reject(RCTErrorUnspecified, @"Unsupported operation", nil); 46 | } 47 | 48 | RCT_EXPORT_METHOD(outputList:(NSString *)tId opName:(NSString *)opName index:(NSInteger *)index length:(NSInteger *)length resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 49 | { 50 | reject(RCTErrorUnspecified, @"Unsupported operation", nil); 51 | } 52 | 53 | RCT_EXPORT_METHOD(outputListLength:(NSString *)tId opName:(NSString *)opName name:(NSString *)name resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 54 | { 55 | reject(RCTErrorUnspecified, @"Unsupported operation", nil); 56 | } 57 | 58 | RCT_EXPORT_METHOD(type:(NSString *)tId opName:(NSString *)opName resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 59 | { 60 | try { 61 | std::string result = [self operation:tId opName:opName].GetTypeName(); 62 | resolve(@(result.c_str())); 63 | } catch( std::exception& e ) { 64 | reject(RCTErrorUnspecified, @(e.what()), nil); 65 | } 66 | } 67 | 68 | -(const tensorflow::NodeDef&) operation:(NSString *) tId opName:(NSString *) opName 69 | { 70 | RNTensorFlowGraph * graph = [_bridge 
moduleForClass:[RNTensorFlowGraph class]]; 71 | return [graph operation:tId name:opName]; 72 | } 73 | 74 | @end 75 | -------------------------------------------------------------------------------- /ios/RNTensorFlowInference.h: -------------------------------------------------------------------------------- 1 | #if __has_include("RCTBridge.h") 2 | #import "RCTBridge.h" 3 | #else 4 | #import 5 | #endif 6 | 7 | @interface RNTensorFlowInference : NSObject 8 | 9 | @end 10 | -------------------------------------------------------------------------------- /ios/RNTensorFlowInference.mm: -------------------------------------------------------------------------------- 1 | #import "RNTensorFlowInference.h" 2 | 3 | #include "RNTensorFlowGraph.h" 4 | #include "TensorFlowInference.h" 5 | 6 | #import "RCTUtils.h" 7 | 8 | #include 9 | #include 10 | 11 | #include "tensorflow/core/framework/op_kernel.h" 12 | #include "tensorflow/core/public/session.h" 13 | 14 | @implementation RNTensorFlowInference 15 | { 16 | std::unordered_map inferenceMap; 17 | } 18 | 19 | @synthesize bridge = _bridge; 20 | 21 | - (dispatch_queue_t)methodQueue 22 | { 23 | return dispatch_get_main_queue(); 24 | } 25 | 26 | RCT_EXPORT_MODULE() 27 | 28 | RCT_EXPORT_METHOD(initTensorFlowInference:(NSString *)tId modelLocation:(NSString *)modelLocation resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 29 | { 30 | try { 31 | TensorFlowInference * inference = [[TensorFlowInference alloc] initWithModel:modelLocation]; 32 | inferenceMap[[tId UTF8String]] = inference; 33 | 34 | RNTensorFlowGraph * graphModule = [_bridge moduleForClass:[RNTensorFlowGraph class]]; 35 | [graphModule init:tId graph:[inference graph]]; 36 | 37 | resolve(@1); 38 | } catch( std::exception& e ) { 39 | reject(RCTErrorUnspecified, @(e.what()), nil); 40 | } 41 | } 42 | 43 | RCT_EXPORT_METHOD(feed:(NSString *)tId data:(NSDictionary *)data resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 
44 | { 45 | try { 46 | 47 | NSString * inputName = data[@"name"]; 48 | NSArray * srcData = data[@"data"]; 49 | NSArray * shape = data[@"shape"] ? data[@"shape"] : [NSArray new]; 50 | 51 | tensorflow::DataType dtype; 52 | if(data[@"dtype"]) { 53 | tensorflow::DataType_Parse([[NSString stringWithFormat:@"%@%@", @"DT_", [data[@"dtype"] uppercaseString]] UTF8String] , &dtype); 54 | } else { 55 | dtype = tensorflow::DataType::DT_DOUBLE; 56 | } 57 | 58 | int shapeCount = [shape count]; 59 | std::vector shapeC(shapeCount); 60 | for (int i = 0; i < shapeCount; ++i) { 61 | shapeC[i] = [[shape objectAtIndex:i] intValue]; 62 | } 63 | 64 | tensorflow::Tensor tensor(dtype, tensorflow::TensorShape(shapeC)); 65 | 66 | if(dtype == tensorflow::DataType::DT_DOUBLE) { 67 | int srcDataCount = [srcData count]; 68 | std::vector srcDataC(srcDataCount); 69 | for (int i = 0; i < [srcData count]; ++i) { 70 | srcDataC[i] = [[srcData objectAtIndex:i] doubleValue]; 71 | } 72 | 73 | std::copy_n(srcDataC.begin(), srcDataC.size(), tensor.flat().data()); 74 | } else if(dtype == tensorflow::DataType::DT_FLOAT) { 75 | int srcDataCount = [srcData count]; 76 | std::vector srcDataC(srcDataCount); 77 | for (int i = 0; i < [srcData count]; ++i) { 78 | srcDataC[i] = [[srcData objectAtIndex:i] floatValue]; 79 | } 80 | 81 | std::copy_n(srcDataC.begin(), srcDataC.size(), tensor.flat().data()); 82 | } else if(dtype == tensorflow::DataType::DT_INT32) { 83 | int srcDataCount = [srcData count]; 84 | std::vector srcDataC(srcDataCount); 85 | for (int i = 0; i < [srcData count]; ++i) { 86 | srcDataC[i] = [[srcData objectAtIndex:i] intValue]; 87 | } 88 | 89 | std::copy_n(srcDataC.begin(), srcDataC.size(), tensor.flat().data()); 90 | } else if(dtype == tensorflow::DataType::DT_INT64) { 91 | int srcDataCount = [srcData count]; 92 | std::vector srcDataC(srcDataCount); 93 | for (int i = 0; i < [srcData count]; ++i) { 94 | srcDataC[i] = [[srcData objectAtIndex:i] longValue]; 95 | } 96 | 97 | std::copy_n(srcDataC.begin(), 
srcDataC.size(), tensor.flat().data()); 98 | } else if(dtype == tensorflow::DataType::DT_UINT8) { 99 | int srcDataCount = [srcData count]; 100 | std::vector srcDataC(srcDataCount); 101 | for (int i = 0; i < [srcData count]; ++i) { 102 | srcDataC[i] = [[srcData objectAtIndex:i] intValue]; 103 | } 104 | 105 | std::copy_n(srcDataC.begin(), srcDataC.size(), tensor.flat().data()); 106 | } else if(dtype == tensorflow::DataType::DT_BOOL) { 107 | int srcDataCount = [srcData count]; 108 | std::vector srcDataC(srcDataCount); 109 | for (int i = 0; i < [srcData count]; ++i) { 110 | srcDataC[i] = [[srcData objectAtIndex:i] boolValue]; 111 | } 112 | 113 | std::copy_n(srcDataC.begin(), srcDataC.size(), tensor.flat().data()); 114 | } else if(dtype == tensorflow::DataType::DT_STRING) { 115 | int srcDataCount = [srcData count]; 116 | std::vector srcDataC(srcDataCount); 117 | for (int i = 0; i < [srcData count]; ++i) { 118 | srcDataC[i] = [[srcData objectAtIndex:i] UTF8String]; 119 | } 120 | 121 | std::copy_n(srcDataC.begin(), srcDataC.size(), tensor.flat().data()); 122 | } else { 123 | throw std::invalid_argument("Invalid data type"); 124 | } 125 | 126 | TensorFlowInference * inference = inferenceMap[[tId UTF8String]]; 127 | [inference feed:inputName tensor:tensor]; 128 | resolve(@1); 129 | } catch( std::exception& e ) { 130 | reject(RCTErrorUnspecified, @(e.what()), nil); 131 | } 132 | } 133 | 134 | RCT_EXPORT_METHOD(run:(NSString *)tId outputNames:(NSArray *)outputNames enableStats:(BOOL)enableStats resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 135 | { 136 | try { 137 | TensorFlowInference * inference = inferenceMap[[tId UTF8String]]; 138 | [inference run:outputNames enableStats:enableStats]; 139 | resolve(@1); 140 | } catch( std::exception& e ) { 141 | reject(RCTErrorUnspecified, @(e.what()), nil); 142 | } 143 | } 144 | 145 | RCT_EXPORT_METHOD(fetch:(NSString *)tId outputName:(NSString *)outputName resolver:(RCTPromiseResolveBlock)resolve 
rejecter:(RCTPromiseRejectBlock)reject) 146 | { 147 | try { 148 | TensorFlowInference * inference = inferenceMap[[tId UTF8String]]; 149 | NSArray *result = [inference fetch:outputName]; 150 | 151 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{ 152 | resolve(result); 153 | }); 154 | } catch( std::exception& e ) { 155 | reject(RCTErrorUnspecified, @(e.what()), nil); 156 | } 157 | } 158 | 159 | RCT_EXPORT_METHOD(reset:(NSString *)tId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 160 | { 161 | try { 162 | TensorFlowInference * inference = inferenceMap[[tId UTF8String]]; 163 | [inference reset]; 164 | resolve(@1); 165 | } catch( std::exception& e ) { 166 | reject(RCTErrorUnspecified, @(e.what()), nil); 167 | } 168 | } 169 | 170 | RCT_EXPORT_METHOD(close:(NSString *)tId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject) 171 | { 172 | try { 173 | TensorFlowInference * inference = inferenceMap[[tId UTF8String]]; 174 | [inference close]; 175 | inferenceMap.erase([tId UTF8String]); 176 | 177 | RNTensorFlowGraph * graph = [_bridge moduleForClass:[RNTensorFlowGraph class]]; 178 | [graph close:tId]; 179 | 180 | resolve(@1); 181 | } catch( std::exception& e ) { 182 | reject(RCTErrorUnspecified, @(e.what()), nil); 183 | } 184 | } 185 | 186 | @end 187 | -------------------------------------------------------------------------------- /ios/RNTensorflow.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 975CD8451F79E183005D2BB8 /* RNTensorFlowInference.mm in Sources */ = {isa = PBXBuildFile; fileRef = 975CD8441F79E183005D2BB8 /* RNTensorFlowInference.mm */; }; 11 | 975CD8481F79E1B6005D2BB8 /* RNTensorFlowGraph.mm in Sources */ = {isa = PBXBuildFile; fileRef = 975CD8471F79E1B6005D2BB8 /* RNTensorFlowGraph.mm */; }; 12 | 975CD84A1F79E1C8005D2BB8 /* RNTensorFlowGraphOperations.mm in Sources */ = {isa = PBXBuildFile; fileRef = 975CD8491F79E1C8005D2BB8 /* RNTensorFlowGraphOperations.mm */; }; 13 | A727A0BB1FCB466500D3B2C5 /* TensorFlowInference.mm in Sources */ = {isa = PBXBuildFile; fileRef = A727A0BA1FCB466500D3B2C5 /* TensorFlowInference.mm */; }; 14 | A7F0DCB51FDCD8C200E79844 /* ImageRecognizer.mm in Sources */ = {isa = PBXBuildFile; fileRef = A7F0DCB41FDCD8C200E79844 /* ImageRecognizer.mm */; }; 15 | A7F0DCB81FDCD8EA00E79844 /* RNImageRecognition.mm in Sources */ = {isa = PBXBuildFile; fileRef = A7F0DCB71FDCD8EA00E79844 /* RNImageRecognition.mm */; }; 16 | /* End PBXBuildFile section */ 17 | 18 | /* Begin PBXCopyFilesBuildPhase section */ 19 | 58B511D91A9E6C8500147676 /* CopyFiles */ = { 20 | isa = PBXCopyFilesBuildPhase; 21 | buildActionMask = 2147483647; 22 | dstPath = "include/$(PRODUCT_NAME)"; 23 | dstSubfolderSpec = 16; 24 | files = ( 25 | ); 26 | runOnlyForDeploymentPostprocessing = 0; 27 | }; 28 | /* End PBXCopyFilesBuildPhase section */ 29 | 30 | /* Begin PBXFileReference section */ 31 | 134814201AA4EA6300B7C361 /* libRNTensorflow.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNTensorflow.a; sourceTree = BUILT_PRODUCTS_DIR; }; 32 | 975CD8441F79E183005D2BB8 /* RNTensorFlowInference.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RNTensorFlowInference.mm; sourceTree = ""; }; 33 | 975CD8461F79E199005D2BB8 /* 
RNTensorFlowInference.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNTensorFlowInference.h; sourceTree = ""; }; 34 | 975CD8471F79E1B6005D2BB8 /* RNTensorFlowGraph.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RNTensorFlowGraph.mm; sourceTree = ""; }; 35 | 975CD8491F79E1C8005D2BB8 /* RNTensorFlowGraphOperations.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RNTensorFlowGraphOperations.mm; sourceTree = ""; }; 36 | 975CD84B1F79E1D7005D2BB8 /* RNTensorFlowGraph.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNTensorFlowGraph.h; sourceTree = ""; }; 37 | 975CD84C1F79E1E1005D2BB8 /* RNTensorFlowGraphOperations.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNTensorFlowGraphOperations.h; sourceTree = ""; }; 38 | A727A0B91FCB465600D3B2C5 /* TensorFlowInference.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TensorFlowInference.h; sourceTree = ""; }; 39 | A727A0BA1FCB466500D3B2C5 /* TensorFlowInference.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorFlowInference.mm; sourceTree = ""; }; 40 | A7F0DCB41FDCD8C200E79844 /* ImageRecognizer.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ImageRecognizer.mm; sourceTree = ""; }; 41 | A7F0DCB61FDCD8D700E79844 /* ImageRecognizer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ImageRecognizer.h; sourceTree = ""; }; 42 | A7F0DCB71FDCD8EA00E79844 /* RNImageRecognition.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = RNImageRecognition.mm; sourceTree = ""; }; 43 | A7F0DCB91FDCD90200E79844 /* RNImageRecognition.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNImageRecognition.h; sourceTree = ""; }; 44 | /* 
End PBXFileReference section */ 45 | 46 | /* Begin PBXFrameworksBuildPhase section */ 47 | 58B511D81A9E6C8500147676 /* Frameworks */ = { 48 | isa = PBXFrameworksBuildPhase; 49 | buildActionMask = 2147483647; 50 | files = ( 51 | ); 52 | runOnlyForDeploymentPostprocessing = 0; 53 | }; 54 | /* End PBXFrameworksBuildPhase section */ 55 | 56 | /* Begin PBXGroup section */ 57 | 134814211AA4EA7D00B7C361 /* Products */ = { 58 | isa = PBXGroup; 59 | children = ( 60 | 134814201AA4EA6300B7C361 /* libRNTensorflow.a */, 61 | ); 62 | name = Products; 63 | sourceTree = ""; 64 | }; 65 | 58B511D21A9E6C8500147676 = { 66 | isa = PBXGroup; 67 | children = ( 68 | A7F0DCB91FDCD90200E79844 /* RNImageRecognition.h */, 69 | A7F0DCB71FDCD8EA00E79844 /* RNImageRecognition.mm */, 70 | A7F0DCB61FDCD8D700E79844 /* ImageRecognizer.h */, 71 | A7F0DCB41FDCD8C200E79844 /* ImageRecognizer.mm */, 72 | A727A0BA1FCB466500D3B2C5 /* TensorFlowInference.mm */, 73 | A727A0B91FCB465600D3B2C5 /* TensorFlowInference.h */, 74 | 975CD84C1F79E1E1005D2BB8 /* RNTensorFlowGraphOperations.h */, 75 | 975CD84B1F79E1D7005D2BB8 /* RNTensorFlowGraph.h */, 76 | 975CD8491F79E1C8005D2BB8 /* RNTensorFlowGraphOperations.mm */, 77 | 975CD8471F79E1B6005D2BB8 /* RNTensorFlowGraph.mm */, 78 | 975CD8461F79E199005D2BB8 /* RNTensorFlowInference.h */, 79 | 975CD8441F79E183005D2BB8 /* RNTensorFlowInference.mm */, 80 | 134814211AA4EA7D00B7C361 /* Products */, 81 | ); 82 | sourceTree = ""; 83 | }; 84 | /* End PBXGroup section */ 85 | 86 | /* Begin PBXNativeTarget section */ 87 | 58B511DA1A9E6C8500147676 /* RNTensorflow */ = { 88 | isa = PBXNativeTarget; 89 | buildConfigurationList = 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "RNTensorflow" */; 90 | buildPhases = ( 91 | 58B511D71A9E6C8500147676 /* Sources */, 92 | 58B511D81A9E6C8500147676 /* Frameworks */, 93 | 58B511D91A9E6C8500147676 /* CopyFiles */, 94 | ); 95 | buildRules = ( 96 | ); 97 | dependencies = ( 98 | ); 99 | name = RNTensorflow; 100 | 
productName = RCTDataManager; 101 | productReference = 134814201AA4EA6300B7C361 /* libRNTensorflow.a */; 102 | productType = "com.apple.product-type.library.static"; 103 | }; 104 | /* End PBXNativeTarget section */ 105 | 106 | /* Begin PBXProject section */ 107 | 58B511D31A9E6C8500147676 /* Project object */ = { 108 | isa = PBXProject; 109 | attributes = { 110 | LastUpgradeCheck = 0830; 111 | ORGANIZATIONNAME = ""; 112 | TargetAttributes = { 113 | 58B511DA1A9E6C8500147676 = { 114 | CreatedOnToolsVersion = 6.1.1; 115 | }; 116 | }; 117 | }; 118 | buildConfigurationList = 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "RNTensorflow" */; 119 | compatibilityVersion = "Xcode 3.2"; 120 | developmentRegion = English; 121 | hasScannedForEncodings = 0; 122 | knownRegions = ( 123 | en, 124 | ); 125 | mainGroup = 58B511D21A9E6C8500147676; 126 | productRefGroup = 58B511D21A9E6C8500147676; 127 | projectDirPath = ""; 128 | projectRoot = ""; 129 | targets = ( 130 | 58B511DA1A9E6C8500147676 /* RNTensorflow */, 131 | ); 132 | }; 133 | /* End PBXProject section */ 134 | 135 | /* Begin PBXSourcesBuildPhase section */ 136 | 58B511D71A9E6C8500147676 /* Sources */ = { 137 | isa = PBXSourcesBuildPhase; 138 | buildActionMask = 2147483647; 139 | files = ( 140 | 975CD8481F79E1B6005D2BB8 /* RNTensorFlowGraph.mm in Sources */, 141 | A7F0DCB51FDCD8C200E79844 /* ImageRecognizer.mm in Sources */, 142 | 975CD84A1F79E1C8005D2BB8 /* RNTensorFlowGraphOperations.mm in Sources */, 143 | A7F0DCB81FDCD8EA00E79844 /* RNImageRecognition.mm in Sources */, 144 | A727A0BB1FCB466500D3B2C5 /* TensorFlowInference.mm in Sources */, 145 | 975CD8451F79E183005D2BB8 /* RNTensorFlowInference.mm in Sources */, 146 | ); 147 | runOnlyForDeploymentPostprocessing = 0; 148 | }; 149 | /* End PBXSourcesBuildPhase section */ 150 | 151 | /* Begin XCBuildConfiguration section */ 152 | 58B511ED1A9E6C8500147676 /* Debug */ = { 153 | isa = XCBuildConfiguration; 154 | buildSettings = { 155 | 
ALWAYS_SEARCH_USER_PATHS = NO; 156 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 157 | CLANG_CXX_LIBRARY = "libc++"; 158 | CLANG_ENABLE_MODULES = YES; 159 | CLANG_ENABLE_OBJC_ARC = YES; 160 | CLANG_WARN_BOOL_CONVERSION = YES; 161 | CLANG_WARN_CONSTANT_CONVERSION = YES; 162 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 163 | CLANG_WARN_EMPTY_BODY = YES; 164 | CLANG_WARN_ENUM_CONVERSION = YES; 165 | CLANG_WARN_INFINITE_RECURSION = YES; 166 | CLANG_WARN_INT_CONVERSION = YES; 167 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 168 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 169 | CLANG_WARN_UNREACHABLE_CODE = YES; 170 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 171 | COPY_PHASE_STRIP = NO; 172 | ENABLE_STRICT_OBJC_MSGSEND = YES; 173 | ENABLE_TESTABILITY = YES; 174 | GCC_C_LANGUAGE_STANDARD = gnu99; 175 | GCC_DYNAMIC_NO_PIC = NO; 176 | GCC_NO_COMMON_BLOCKS = YES; 177 | GCC_OPTIMIZATION_LEVEL = 0; 178 | GCC_PREPROCESSOR_DEFINITIONS = ( 179 | "DEBUG=1", 180 | "$(inherited)", 181 | ); 182 | GCC_SYMBOLS_PRIVATE_EXTERN = NO; 183 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 184 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 185 | GCC_WARN_UNDECLARED_SELECTOR = YES; 186 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 187 | GCC_WARN_UNUSED_FUNCTION = YES; 188 | GCC_WARN_UNUSED_VARIABLE = YES; 189 | IPHONEOS_DEPLOYMENT_TARGET = 8.0; 190 | MTL_ENABLE_DEBUG_INFO = YES; 191 | ONLY_ACTIVE_ARCH = YES; 192 | SDKROOT = iphoneos; 193 | }; 194 | name = Debug; 195 | }; 196 | 58B511EE1A9E6C8500147676 /* Release */ = { 197 | isa = XCBuildConfiguration; 198 | buildSettings = { 199 | ALWAYS_SEARCH_USER_PATHS = NO; 200 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 201 | CLANG_CXX_LIBRARY = "libc++"; 202 | CLANG_ENABLE_MODULES = YES; 203 | CLANG_ENABLE_OBJC_ARC = YES; 204 | CLANG_WARN_BOOL_CONVERSION = YES; 205 | CLANG_WARN_CONSTANT_CONVERSION = YES; 206 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 207 | CLANG_WARN_EMPTY_BODY = YES; 208 | CLANG_WARN_ENUM_CONVERSION = YES; 209 | CLANG_WARN_INFINITE_RECURSION = YES; 
210 | CLANG_WARN_INT_CONVERSION = YES; 211 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 212 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 213 | CLANG_WARN_UNREACHABLE_CODE = YES; 214 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 215 | COPY_PHASE_STRIP = YES; 216 | ENABLE_NS_ASSERTIONS = NO; 217 | ENABLE_STRICT_OBJC_MSGSEND = YES; 218 | GCC_C_LANGUAGE_STANDARD = gnu99; 219 | GCC_NO_COMMON_BLOCKS = YES; 220 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 221 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 222 | GCC_WARN_UNDECLARED_SELECTOR = YES; 223 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 224 | GCC_WARN_UNUSED_FUNCTION = YES; 225 | GCC_WARN_UNUSED_VARIABLE = YES; 226 | IPHONEOS_DEPLOYMENT_TARGET = 8.0; 227 | MTL_ENABLE_DEBUG_INFO = NO; 228 | SDKROOT = iphoneos; 229 | VALIDATE_PRODUCT = YES; 230 | }; 231 | name = Release; 232 | }; 233 | 58B511F01A9E6C8500147676 /* Debug */ = { 234 | isa = XCBuildConfiguration; 235 | buildSettings = { 236 | HEADER_SEARCH_PATHS = ( 237 | "$(inherited)", 238 | /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include, 239 | "$(SRCROOT)/../../../React/**", 240 | "$(SRCROOT)/../../react-native/React/**", 241 | "$(SRCROOT)/../../../ios/Pods/TensorFlow-experimental/Frameworks/tensorflow_experimental.framework/Headers", 242 | "$(SRCROOT)/../../../ios/Pods/Headers/Public/TensorFlow-experimental/tensorflow_experimental/third_party/eigen3", 243 | "$(SRCROOT)/../../../ios/Pods/TensorFlow-experimental/Frameworks/tensorflow_experimental.framework/Headers/third_party/eigen3", 244 | ); 245 | LIBRARY_SEARCH_PATHS = "$(inherited)"; 246 | OTHER_LDFLAGS = "-ObjC"; 247 | PRODUCT_NAME = RNTensorflow; 248 | SKIP_INSTALL = YES; 249 | }; 250 | name = Debug; 251 | }; 252 | 58B511F11A9E6C8500147676 /* Release */ = { 253 | isa = XCBuildConfiguration; 254 | buildSettings = { 255 | HEADER_SEARCH_PATHS = ( 256 | "$(inherited)", 257 | /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include, 258 | 
"$(SRCROOT)/../../../React/**", 259 | "$(SRCROOT)/../../react-native/React/**", 260 | "$(SRCROOT)/../../../ios/Pods/TensorFlow-experimental/Frameworks/tensorflow_experimental.framework/Headers", 261 | "$(SRCROOT)/../../../ios/Pods/Headers/Public/TensorFlow-experimental/tensorflow_experimental/third_party/eigen3", 262 | "$(SRCROOT)/../../../ios/Pods/TensorFlow-experimental/Frameworks/tensorflow_experimental.framework/Headers/third_party/eigen3", 263 | ); 264 | LIBRARY_SEARCH_PATHS = "$(inherited)"; 265 | OTHER_LDFLAGS = "-ObjC"; 266 | PRODUCT_NAME = RNTensorflow; 267 | SKIP_INSTALL = YES; 268 | }; 269 | name = Release; 270 | }; 271 | /* End XCBuildConfiguration section */ 272 | 273 | /* Begin XCConfigurationList section */ 274 | 58B511D61A9E6C8500147676 /* Build configuration list for PBXProject "RNTensorflow" */ = { 275 | isa = XCConfigurationList; 276 | buildConfigurations = ( 277 | 58B511ED1A9E6C8500147676 /* Debug */, 278 | 58B511EE1A9E6C8500147676 /* Release */, 279 | ); 280 | defaultConfigurationIsVisible = 0; 281 | defaultConfigurationName = Release; 282 | }; 283 | 58B511EF1A9E6C8500147676 /* Build configuration list for PBXNativeTarget "RNTensorflow" */ = { 284 | isa = XCConfigurationList; 285 | buildConfigurations = ( 286 | 58B511F01A9E6C8500147676 /* Debug */, 287 | 58B511F11A9E6C8500147676 /* Release */, 288 | ); 289 | defaultConfigurationIsVisible = 0; 290 | defaultConfigurationName = Release; 291 | }; 292 | /* End XCConfigurationList section */ 293 | }; 294 | rootObject = 58B511D31A9E6C8500147676 /* Project object */; 295 | } 296 | -------------------------------------------------------------------------------- /ios/RNTensorflow.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /ios/RNTensorflow.xcworkspace/contents.xcworkspacedata: 
-------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | 3 | 5 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /ios/TensorFlowInference.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include "tensorflow/core/framework/op_kernel.h" 4 | #include "tensorflow/core/public/session.h" 5 | 6 | @interface TensorFlowInference: NSObject 7 | 8 | - (id) initWithModel:(NSString *)modelLocation; 9 | - (void) feed:(NSString *)inputName tensor:(tensorflow::Tensor)tensor; 10 | - (void) run:(NSArray *)outputNames enableStats:(BOOL)enableStats; 11 | - (NSArray *) fetch:(NSString *)outputName; 12 | - (std::shared_ptr) graph; 13 | - (void) reset; 14 | - (tensorflow::Status) close; 15 | 16 | @end 17 | -------------------------------------------------------------------------------- /ios/TensorFlowInference.mm: -------------------------------------------------------------------------------- 1 | #import "TensorFlowInference.h" 2 | 3 | #include 4 | #include 5 | 6 | 7 | namespace { 8 | class InputStream : public ::google::protobuf::io::CopyingInputStream { 9 | public: 10 | explicit InputStream(const std::string& file_name) : ifstream_(file_name.c_str(), std::ios::in | std::ios::binary) { 11 | } 12 | 13 | ~InputStream() { 14 | ifstream_.close(); 15 | } 16 | 17 | int Read(void* buffer, int size) { 18 | if (!ifstream_) { 19 | return -1; 20 | } 21 | ifstream_.read(static_cast(buffer), size); 22 | return ifstream_.gcount(); 23 | } 24 | 25 | private: 26 | std::ifstream ifstream_; 27 | }; 28 | } 29 | 30 | @implementation TensorFlowInference 31 | { 32 | std::shared_ptr session; 33 | std::shared_ptr tensorflowGraph; 34 | 35 | std::vector feedNames; 36 | std::vector feedTensors; 37 | 38 | std::vector fetchNames; 39 | std::vector fetchTensors; 40 | } 41 | 42 | - (id) initWithModel:(NSString *)modelLocation { 43 | self = [super init]; 44 | if (self != 
nil) { 45 | return [self initTensorFlow:modelLocation]; 46 | } 47 | return self; 48 | } 49 | 50 | - (id) initTensorFlow:(NSString *)modelLocation 51 | { 52 | tensorflow::GraphDef graph; 53 | LOG(INFO) << "Graph created."; 54 | 55 | NSString *bundlePath = [[NSBundle mainBundle] pathForResource:[modelLocation substringToIndex:[modelLocation length] - 3] ofType:@"pb"]; 56 | if(bundlePath != NULL) { 57 | fileToProto([bundlePath UTF8String], &graph); 58 | } else if ([[NSFileManager defaultManager] fileExistsAtPath:modelLocation]) { 59 | NSData *data = [[NSData alloc] initWithContentsOfFile:modelLocation]; 60 | 61 | const void *buf = [data bytes]; 62 | unsigned long numBytes = [data length]; 63 | 64 | graph.ParseFromArray(buf, numBytes); 65 | } else { 66 | NSURL *url = [NSURL URLWithString:modelLocation]; 67 | NSData *data = [NSData dataWithContentsOfURL:url]; 68 | 69 | const void *buf = [data bytes]; 70 | unsigned long numBytes = [data length]; 71 | 72 | graph.ParseFromArray(buf, numBytes); 73 | } 74 | 75 | tensorflow::SessionOptions options; 76 | 77 | tensorflow::Session* session_pointer = nullptr; 78 | tensorflow::Status session_status = tensorflow::NewSession(options, &session_pointer); 79 | if (!session_status.ok()) { 80 | std::string status_string = session_status.ToString(); 81 | std::stringstream str; 82 | str << "Session create failed - " << status_string.c_str(); 83 | throw std::runtime_error(str.str()); 84 | } 85 | std::shared_ptr sess(session_pointer); 86 | LOG(INFO) << "Session created."; 87 | 88 | LOG(INFO) << "Creating session."; 89 | tensorflow::Status s = sess->Create(graph); 90 | if (!s.ok()) { 91 | std::stringstream str; 92 | str << "Could not create TensorFlow Graph: " << s; 93 | throw std::runtime_error(str.str()); 94 | } 95 | 96 | session = sess; 97 | tensorflowGraph = std::make_shared(graph); 98 | 99 | return self; 100 | } 101 | 102 | - (void) feed:(NSString *)inputName tensor:(tensorflow::Tensor)tensor 103 | { 104 | feedNames.push_back([inputName 
UTF8String]); 105 | feedTensors.push_back(tensor); 106 | } 107 | 108 | - (void) run:(NSArray *)outputNames enableStats:(BOOL)enableStats 109 | { 110 | std::vector> feedC(feedNames.size()); 111 | for (int i = 0; i < feedNames.size(); ++i) { 112 | feedC[i] = {feedNames[i], feedTensors[i]}; 113 | } 114 | 115 | int outputNamesCount = [outputNames count]; 116 | std::vector outputNamesC(outputNamesCount); 117 | for (int i = 0; i < [outputNames count]; ++i) { 118 | outputNamesC[i] = [[outputNames objectAtIndex:i] UTF8String]; 119 | } 120 | 121 | std::vector outputs; 122 | tensorflow::Status run_status = session->Run(feedC, outputNamesC, {}, &outputs); 123 | 124 | if (!run_status.ok()) { 125 | tensorflow::LogAllRegisteredKernels(); 126 | std::stringstream str; 127 | str << "Running model failed: " << run_status; 128 | throw std::runtime_error(str.str()); 129 | } 130 | 131 | fetchNames = outputNamesC; 132 | fetchTensors = outputs; 133 | } 134 | 135 | - (NSArray *) fetch:(NSString *)outputName 136 | { 137 | int i = 0; 138 | tensorflow::Tensor *tensor = nullptr; 139 | for(auto n : fetchNames) { 140 | if (n == [outputName UTF8String]) { 141 | tensor = &fetchTensors[i]; 142 | } 143 | ++i; 144 | } 145 | 146 | return convertFetchResult(tensor); 147 | } 148 | 149 | NSArray* convertFetchResult(tensorflow::Tensor *tensor) { 150 | if(tensor->dtype() == tensorflow::DataType::DT_DOUBLE) { 151 | auto predictions = tensor->flat(); 152 | NSMutableArray * result = [NSMutableArray new]; 153 | for (int index = 0; index < predictions.size(); index += 1) { 154 | [result addObject:[NSNumber numberWithDouble:predictions(index)]]; 155 | } 156 | 157 | return result; 158 | } else if(tensor->dtype() == tensorflow::DataType::DT_FLOAT) { 159 | auto predictions = tensor->flat(); 160 | NSMutableArray * result = [NSMutableArray new]; 161 | for (int index = 0; index < predictions.size(); index += 1) { 162 | [result addObject:[NSNumber numberWithFloat:predictions(index)]]; 163 | } 164 | 165 | return 
result; 166 | } else if(tensor->dtype() == tensorflow::DataType::DT_INT32) { 167 | auto predictions = tensor->flat(); 168 | NSMutableArray * result = [NSMutableArray new]; 169 | for (int index = 0; index < predictions.size(); index += 1) { 170 | [result addObject:[NSNumber numberWithInt:predictions(index)]]; 171 | } 172 | 173 | return result; 174 | } else if(tensor->dtype() == tensorflow::DataType::DT_INT64) { 175 | auto predictions = tensor->flat(); 176 | NSMutableArray * result = [NSMutableArray new]; 177 | for (int index = 0; index < predictions.size(); index += 1) { 178 | [result addObject:[NSNumber numberWithLong:predictions(index)]]; 179 | } 180 | 181 | return result; 182 | } else if(tensor->dtype() == tensorflow::DataType::DT_UINT8) { 183 | auto predictions = tensor->flat(); 184 | NSMutableArray * result = [NSMutableArray new]; 185 | for (int index = 0; index < predictions.size(); index += 1) { 186 | [result addObject:[NSNumber numberWithInt:predictions(index)]]; 187 | } 188 | 189 | return result; 190 | } else if(tensor->dtype() == tensorflow::DataType::DT_BOOL) { 191 | auto predictions = tensor->flat(); 192 | NSMutableArray * result = [NSMutableArray new]; 193 | for (int index = 0; index < predictions.size(); index += 1) { 194 | [result addObject:predictions(index) == true ? 
[NSNumber numberWithBool:YES] : [NSNumber numberWithBool:NO]]; 195 | } 196 | 197 | return result; 198 | } else if(tensor->dtype() == tensorflow::DataType::DT_STRING) { 199 | auto predictions = tensor->flat(); 200 | NSMutableArray * result = [NSMutableArray new]; 201 | for (int index = 0; index < predictions.size(); index += 1) { 202 | [result addObject:[NSString stringWithUTF8String:predictions(index).c_str()]]; 203 | } 204 | 205 | return result; 206 | } else { 207 | throw std::invalid_argument("Invalid data type"); 208 | } 209 | 210 | } 211 | 212 | -(std::shared_ptr) graph 213 | { 214 | return tensorflowGraph; 215 | } 216 | 217 | -(void) reset 218 | { 219 | feedNames.clear(); 220 | feedTensors.clear(); 221 | fetchNames.clear(); 222 | fetchTensors.clear(); 223 | } 224 | 225 | -(tensorflow::Status) close 226 | { 227 | feedNames.clear(); 228 | feedTensors.clear(); 229 | fetchNames.clear(); 230 | fetchTensors.clear(); 231 | 232 | return session->Close(); 233 | } 234 | 235 | bool fileToProto(const std::string& file_name, ::google::protobuf::MessageLite* proto) { 236 | ::google::protobuf::io::CopyingInputStreamAdaptor stream(new InputStream(file_name)); 237 | stream.SetOwnsCopyingStream(true); 238 | ::google::protobuf::io::CodedInputStream coded_stream(&stream); 239 | coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20); 240 | return proto->ParseFromCodedStream(&coded_stream); 241 | } 242 | 243 | @end 244 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "react-native-tensorflow", 3 | "version": "0.1.8", 4 | "description": "A TensorFlow inference library for react native", 5 | "main": "index.js", 6 | "author": "reweber", 7 | "license": "Apache 2.0", 8 | "repository": { 9 | "type": "git", 10 | "url": "https://github.com/reneweb/react-native-tensorflow" 11 | }, 12 | "keywords": [ 13 | "react-native", 14 | "tensorflow", 15 
| "mobile" 16 | ], 17 | "peerDependencies": { 18 | "react-native": ">=0.48.3" 19 | }, 20 | "dependencies": { 21 | "uuid": "^3.1.0" 22 | } 23 | } 24 | --------------------------------------------------------------------------------