├── .gitignore
├── LICENSE
├── README.md
├── app
├── CMakeLists.txt
├── build.gradle
├── proguard-rules.pro
└── src
│ └── main
│ ├── AndroidManifest.xml
│ ├── assets
│ ├── imagenet_comp_graph_label_strings.txt
│ └── tensorflow_inception_graph.pb
│ ├── cpp
│ ├── imageutils_jni.cc
│ ├── rgb2yuv.cc
│ ├── rgb2yuv.h
│ ├── yuv2rgb.cc
│ └── yuv2rgb.h
│ ├── java
│ └── org
│ │ └── tensorflow
│ │ └── demo
│ │ ├── AutoFitTextureView.java
│ │ ├── CameraActivity.java
│ │ ├── CameraConnectionFragment.java
│ │ ├── Classifier.java
│ │ ├── ClassifierActivity.java
│ │ ├── LegacyCameraConnectionFragment.java
│ │ ├── OverlayView.java
│ │ ├── RecognitionScoreView.java
│ │ ├── ResultsView.java
│ │ ├── TensorFlowImageClassifier.java
│ │ └── env
│ │ ├── BorderedText.java
│ │ ├── ImageUtils.java
│ │ └── Logger.java
│ └── res
│ ├── drawable-hdpi
│ ├── ic_action_info.png
│ ├── ic_launcher.png
│ └── tile.9.png
│ ├── drawable-mdpi
│ ├── ic_action_info.png
│ └── ic_launcher.png
│ ├── drawable-xhdpi
│ ├── ic_action_info.png
│ └── ic_launcher.png
│ ├── drawable-xxhdpi
│ ├── ic_action_info.png
│ └── ic_launcher.png
│ ├── layout
│ ├── activity_camera.xml
│ ├── camera_connection_fragment.xml
│ ├── camera_connection_fragment_stylize.xml
│ └── camera_connection_fragment_tracking.xml
│ ├── values-sw600dp
│ ├── template-dimens.xml
│ └── template-styles.xml
│ ├── values-v11
│ ├── styles.xml
│ └── template-styles.xml
│ ├── values-v14
│ └── styles.xml
│ ├── values-v21
│ ├── base-colors.xml
│ └── base-template-styles.xml
│ └── values
│ ├── attrs.xml
│ ├── base-strings.xml
│ ├── colors.xml
│ ├── strings.xml
│ ├── styles.xml
│ ├── template-dimens.xml
│ └── template-styles.xml
├── build.gradle
├── gradle.properties
├── gradle
└── wrapper
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | # Temporary files
2 | *~
3 | \#*#
4 |
5 | # Android binary
6 | *.apk
7 |
8 | # hprof captures
9 | captures/
10 |
11 | # Gradle files
12 | .gradle/
13 | build/
14 | .externalNativeBuild
15 |
16 | # Local configuration file
17 | local.properties
18 |
19 | # IntelliJ
20 | *.iml
21 | .idea/
22 |
23 | # OSX files
24 | .DS_Store
25 |
26 | # Windows files
27 | Thumbs.db
28 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TensorFlow (1.4.0) Image Classifier Gradle Standalone Port
2 |
3 | - Clone the project, and checkout the tag `1.4.0`
4 | - Import it on Android Studio
5 | - Run it
6 | - That's all.
7 |
8 | This project is a way to get started with TensorFlow Image Classifier quickly.
9 |
10 | I am not planning to maintain it. If you need an updated version, build it yourself using hints from this [blog post][blog-post].
11 |
12 |
13 | ## Native libraries
14 |
15 | Native compiled libraries are embedded in the `1.4.0` tag, so you won't need to install the NDK.
16 | However, this means that you cannot change the `org.tensorflow.demo.env.ImageUtils` class.
17 | Here's what you need to do if you want, for example, to use a different package name:
18 |
19 | * Install the [NDK and build tools][ndk]
20 | * Checkout the `1.4.0-cmake` tag
21 | * Modify line 7 of the `app/src/main/cpp/imageutils_jni.cc` file to specify your new package name
22 |
23 | [blog-post]: http://nilhcem.com/android/custom-tensorflow-classifier
24 | [ndk]: https://developer.android.com/studio/projects/add-native-code.html
25 |
--------------------------------------------------------------------------------
/app/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.4.1)
2 |
3 | set(CMAKE_VERBOSE_MAKEFILE on)
4 |
5 | if (ANDROID_ABI MATCHES "^armeabi-v7a$")
6 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mfloat-abi=softfp -mfpu=neon")
7 | elseif(ANDROID_ABI MATCHES "^arm64-v8a")
8 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -ftree-vectorize")
9 | endif()
10 |
11 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DSTANDALONE_DEMO_LIB \
12 | -std=c++11 -fno-exceptions -fno-rtti -O2 -Wno-narrowing \
13 | -fPIE")
14 | set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} \
15 | -Wl,--allow-multiple-definition \
16 | -Wl,--whole-archive -fPIE -v")
17 |
18 | file(GLOB_RECURSE tensorflow_demo_sources src/main/cpp/*.*)
19 | add_library(tensorflow_demo SHARED ${tensorflow_demo_sources})
20 |
21 | target_link_libraries(tensorflow_demo)
22 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 28
5 | buildToolsVersion '28.0.3'
6 | defaultConfig {
7 | applicationId 'org.tensorflow.demo'
8 | minSdkVersion 21
9 | targetSdkVersion 28
10 | versionCode 1
11 | versionName '1.0'
12 |
13 | externalNativeBuild {
14 | cmake {
15 | cppFlags ""
16 | }
17 | }
18 | ndk {
19 | abiFilters 'x86', 'x86_64', 'armeabi-v7a', 'arm64-v8a'
20 | }
21 | }
22 |
23 | externalNativeBuild {
24 | cmake {
25 | path "CMakeLists.txt"
26 | }
27 | }
28 | }
29 |
30 | dependencies {
31 | implementation 'org.tensorflow:tensorflow-android:1.4.0'
32 | }
33 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /opt/android-sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
19 | # Uncomment this to preserve the line number information for
20 | # debugging stack traces.
21 | #-keepattributes SourceFile,LineNumberTable
22 |
23 | # If you keep the line number information, uncomment this to
24 | # hide the original source file name.
25 | #-renamesourcefileattribute SourceFile
26 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
31 |
32 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/app/src/main/assets/imagenet_comp_graph_label_strings.txt:
--------------------------------------------------------------------------------
1 | dummy
2 | kit fox
3 | English setter
4 | Siberian husky
5 | Australian terrier
6 | English springer
7 | grey whale
8 | lesser panda
9 | Egyptian cat
10 | ibex
11 | Persian cat
12 | cougar
13 | gazelle
14 | porcupine
15 | sea lion
16 | malamute
17 | badger
18 | Great Dane
19 | Walker hound
20 | Welsh springer spaniel
21 | whippet
22 | Scottish deerhound
23 | killer whale
24 | mink
25 | African elephant
26 | Weimaraner
27 | soft-coated wheaten terrier
28 | Dandie Dinmont
29 | red wolf
30 | Old English sheepdog
31 | jaguar
32 | otterhound
33 | bloodhound
34 | Airedale
35 | hyena
36 | meerkat
37 | giant schnauzer
38 | titi
39 | three-toed sloth
40 | sorrel
41 | black-footed ferret
42 | dalmatian
43 | black-and-tan coonhound
44 | papillon
45 | skunk
46 | Staffordshire bullterrier
47 | Mexican hairless
48 | Bouvier des Flandres
49 | weasel
50 | miniature poodle
51 | Cardigan
52 | malinois
53 | bighorn
54 | fox squirrel
55 | colobus
56 | tiger cat
57 | Lhasa
58 | impala
59 | coyote
60 | Yorkshire terrier
61 | Newfoundland
62 | brown bear
63 | red fox
64 | Norwegian elkhound
65 | Rottweiler
66 | hartebeest
67 | Saluki
68 | grey fox
69 | schipperke
70 | Pekinese
71 | Brabancon griffon
72 | West Highland white terrier
73 | Sealyham terrier
74 | guenon
75 | mongoose
76 | indri
77 | tiger
78 | Irish wolfhound
79 | wild boar
80 | EntleBucher
81 | zebra
82 | ram
83 | French bulldog
84 | orangutan
85 | basenji
86 | leopard
87 | Bernese mountain dog
88 | Maltese dog
89 | Norfolk terrier
90 | toy terrier
91 | vizsla
92 | cairn
93 | squirrel monkey
94 | groenendael
95 | clumber
96 | Siamese cat
97 | chimpanzee
98 | komondor
99 | Afghan hound
100 | Japanese spaniel
101 | proboscis monkey
102 | guinea pig
103 | white wolf
104 | ice bear
105 | gorilla
106 | borzoi
107 | toy poodle
108 | Kerry blue terrier
109 | ox
110 | Scotch terrier
111 | Tibetan mastiff
112 | spider monkey
113 | Doberman
114 | Boston bull
115 | Greater Swiss Mountain dog
116 | Appenzeller
117 | Shih-Tzu
118 | Irish water spaniel
119 | Pomeranian
120 | Bedlington terrier
121 | warthog
122 | Arabian camel
123 | siamang
124 | miniature schnauzer
125 | collie
126 | golden retriever
127 | Irish terrier
128 | affenpinscher
129 | Border collie
130 | hare
131 | boxer
132 | silky terrier
133 | beagle
134 | Leonberg
135 | German short-haired pointer
136 | patas
137 | dhole
138 | baboon
139 | macaque
140 | Chesapeake Bay retriever
141 | bull mastiff
142 | kuvasz
143 | capuchin
144 | pug
145 | curly-coated retriever
146 | Norwich terrier
147 | flat-coated retriever
148 | hog
149 | keeshond
150 | Eskimo dog
151 | Brittany spaniel
152 | standard poodle
153 | Lakeland terrier
154 | snow leopard
155 | Gordon setter
156 | dingo
157 | standard schnauzer
158 | hamster
159 | Tibetan terrier
160 | Arctic fox
161 | wire-haired fox terrier
162 | basset
163 | water buffalo
164 | American black bear
165 | Angora
166 | bison
167 | howler monkey
168 | hippopotamus
169 | chow
170 | giant panda
171 | American Staffordshire terrier
172 | Shetland sheepdog
173 | Great Pyrenees
174 | Chihuahua
175 | tabby
176 | marmoset
177 | Labrador retriever
178 | Saint Bernard
179 | armadillo
180 | Samoyed
181 | bluetick
182 | redbone
183 | polecat
184 | marmot
185 | kelpie
186 | gibbon
187 | llama
188 | miniature pinscher
189 | wood rabbit
190 | Italian greyhound
191 | lion
192 | cocker spaniel
193 | Irish setter
194 | dugong
195 | Indian elephant
196 | beaver
197 | Sussex spaniel
198 | Pembroke
199 | Blenheim spaniel
200 | Madagascar cat
201 | Rhodesian ridgeback
202 | lynx
203 | African hunting dog
204 | langur
205 | Ibizan hound
206 | timber wolf
207 | cheetah
208 | English foxhound
209 | briard
210 | sloth bear
211 | Border terrier
212 | German shepherd
213 | otter
214 | koala
215 | tusker
216 | echidna
217 | wallaby
218 | platypus
219 | wombat
220 | revolver
221 | umbrella
222 | schooner
223 | soccer ball
224 | accordion
225 | ant
226 | starfish
227 | chambered nautilus
228 | grand piano
229 | laptop
230 | strawberry
231 | airliner
232 | warplane
233 | airship
234 | balloon
235 | space shuttle
236 | fireboat
237 | gondola
238 | speedboat
239 | lifeboat
240 | canoe
241 | yawl
242 | catamaran
243 | trimaran
244 | container ship
245 | liner
246 | pirate
247 | aircraft carrier
248 | submarine
249 | wreck
250 | half track
251 | tank
252 | missile
253 | bobsled
254 | dogsled
255 | bicycle-built-for-two
256 | mountain bike
257 | freight car
258 | passenger car
259 | barrow
260 | shopping cart
261 | motor scooter
262 | forklift
263 | electric locomotive
264 | steam locomotive
265 | amphibian
266 | ambulance
267 | beach wagon
268 | cab
269 | convertible
270 | jeep
271 | limousine
272 | minivan
273 | Model T
274 | racer
275 | sports car
276 | go-kart
277 | golfcart
278 | moped
279 | snowplow
280 | fire engine
281 | garbage truck
282 | pickup
283 | tow truck
284 | trailer truck
285 | moving van
286 | police van
287 | recreational vehicle
288 | streetcar
289 | snowmobile
290 | tractor
291 | mobile home
292 | tricycle
293 | unicycle
294 | horse cart
295 | jinrikisha
296 | oxcart
297 | bassinet
298 | cradle
299 | crib
300 | four-poster
301 | bookcase
302 | china cabinet
303 | medicine chest
304 | chiffonier
305 | table lamp
306 | file
307 | park bench
308 | barber chair
309 | throne
310 | folding chair
311 | rocking chair
312 | studio couch
313 | toilet seat
314 | desk
315 | pool table
316 | dining table
317 | entertainment center
318 | wardrobe
319 | Granny Smith
320 | orange
321 | lemon
322 | fig
323 | pineapple
324 | banana
325 | jackfruit
326 | custard apple
327 | pomegranate
328 | acorn
329 | hip
330 | ear
331 | rapeseed
332 | corn
333 | buckeye
334 | organ
335 | upright
336 | chime
337 | drum
338 | gong
339 | maraca
340 | marimba
341 | steel drum
342 | banjo
343 | cello
344 | violin
345 | harp
346 | acoustic guitar
347 | electric guitar
348 | cornet
349 | French horn
350 | trombone
351 | harmonica
352 | ocarina
353 | panpipe
354 | bassoon
355 | oboe
356 | sax
357 | flute
358 | daisy
359 | yellow lady's slipper
360 | cliff
361 | valley
362 | alp
363 | volcano
364 | promontory
365 | sandbar
366 | coral reef
367 | lakeside
368 | seashore
369 | geyser
370 | hatchet
371 | cleaver
372 | letter opener
373 | plane
374 | power drill
375 | lawn mower
376 | hammer
377 | corkscrew
378 | can opener
379 | plunger
380 | screwdriver
381 | shovel
382 | plow
383 | chain saw
384 | cock
385 | hen
386 | ostrich
387 | brambling
388 | goldfinch
389 | house finch
390 | junco
391 | indigo bunting
392 | robin
393 | bulbul
394 | jay
395 | magpie
396 | chickadee
397 | water ouzel
398 | kite
399 | bald eagle
400 | vulture
401 | great grey owl
402 | black grouse
403 | ptarmigan
404 | ruffed grouse
405 | prairie chicken
406 | peacock
407 | quail
408 | partridge
409 | African grey
410 | macaw
411 | sulphur-crested cockatoo
412 | lorikeet
413 | coucal
414 | bee eater
415 | hornbill
416 | hummingbird
417 | jacamar
418 | toucan
419 | drake
420 | red-breasted merganser
421 | goose
422 | black swan
423 | white stork
424 | black stork
425 | spoonbill
426 | flamingo
427 | American egret
428 | little blue heron
429 | bittern
430 | crane
431 | limpkin
432 | American coot
433 | bustard
434 | ruddy turnstone
435 | red-backed sandpiper
436 | redshank
437 | dowitcher
438 | oystercatcher
439 | European gallinule
440 | pelican
441 | king penguin
442 | albatross
443 | great white shark
444 | tiger shark
445 | hammerhead
446 | electric ray
447 | stingray
448 | barracouta
449 | coho
450 | tench
451 | goldfish
452 | eel
453 | rock beauty
454 | anemone fish
455 | lionfish
456 | puffer
457 | sturgeon
458 | gar
459 | loggerhead
460 | leatherback turtle
461 | mud turtle
462 | terrapin
463 | box turtle
464 | banded gecko
465 | common iguana
466 | American chameleon
467 | whiptail
468 | agama
469 | frilled lizard
470 | alligator lizard
471 | Gila monster
472 | green lizard
473 | African chameleon
474 | Komodo dragon
475 | triceratops
476 | African crocodile
477 | American alligator
478 | thunder snake
479 | ringneck snake
480 | hognose snake
481 | green snake
482 | king snake
483 | garter snake
484 | water snake
485 | vine snake
486 | night snake
487 | boa constrictor
488 | rock python
489 | Indian cobra
490 | green mamba
491 | sea snake
492 | horned viper
493 | diamondback
494 | sidewinder
495 | European fire salamander
496 | common newt
497 | eft
498 | spotted salamander
499 | axolotl
500 | bullfrog
501 | tree frog
502 | tailed frog
503 | whistle
504 | wing
505 | paintbrush
506 | hand blower
507 | oxygen mask
508 | snorkel
509 | loudspeaker
510 | microphone
511 | screen
512 | mouse
513 | electric fan
514 | oil filter
515 | strainer
516 | space heater
517 | stove
518 | guillotine
519 | barometer
520 | rule
521 | odometer
522 | scale
523 | analog clock
524 | digital clock
525 | wall clock
526 | hourglass
527 | sundial
528 | parking meter
529 | stopwatch
530 | digital watch
531 | stethoscope
532 | syringe
533 | magnetic compass
534 | binoculars
535 | projector
536 | sunglasses
537 | loupe
538 | radio telescope
539 | bow
540 | cannon [ground]
541 | assault rifle
542 | rifle
543 | projectile
544 | computer keyboard
545 | typewriter keyboard
546 | crane
547 | lighter
548 | abacus
549 | cash machine
550 | slide rule
551 | desktop computer
552 | hand-held computer
553 | notebook
554 | web site
555 | harvester
556 | thresher
557 | printer
558 | slot
559 | vending machine
560 | sewing machine
561 | joystick
562 | switch
563 | hook
564 | car wheel
565 | paddlewheel
566 | pinwheel
567 | potter's wheel
568 | gas pump
569 | carousel
570 | swing
571 | reel
572 | radiator
573 | puck
574 | hard disc
575 | sunglass
576 | pick
577 | car mirror
578 | solar dish
579 | remote control
580 | disk brake
581 | buckle
582 | hair slide
583 | knot
584 | combination lock
585 | padlock
586 | nail
587 | safety pin
588 | screw
589 | muzzle
590 | seat belt
591 | ski
592 | candle
593 | jack-o'-lantern
594 | spotlight
595 | torch
596 | neck brace
597 | pier
598 | tripod
599 | maypole
600 | mousetrap
601 | spider web
602 | trilobite
603 | harvestman
604 | scorpion
605 | black and gold garden spider
606 | barn spider
607 | garden spider
608 | black widow
609 | tarantula
610 | wolf spider
611 | tick
612 | centipede
613 | isopod
614 | Dungeness crab
615 | rock crab
616 | fiddler crab
617 | king crab
618 | American lobster
619 | spiny lobster
620 | crayfish
621 | hermit crab
622 | tiger beetle
623 | ladybug
624 | ground beetle
625 | long-horned beetle
626 | leaf beetle
627 | dung beetle
628 | rhinoceros beetle
629 | weevil
630 | fly
631 | bee
632 | grasshopper
633 | cricket
634 | walking stick
635 | cockroach
636 | mantis
637 | cicada
638 | leafhopper
639 | lacewing
640 | dragonfly
641 | damselfly
642 | admiral
643 | ringlet
644 | monarch
645 | cabbage butterfly
646 | sulphur butterfly
647 | lycaenid
648 | jellyfish
649 | sea anemone
650 | brain coral
651 | flatworm
652 | nematode
653 | conch
654 | snail
655 | slug
656 | sea slug
657 | chiton
658 | sea urchin
659 | sea cucumber
660 | iron
661 | espresso maker
662 | microwave
663 | Dutch oven
664 | rotisserie
665 | toaster
666 | waffle iron
667 | vacuum
668 | dishwasher
669 | refrigerator
670 | washer
671 | Crock Pot
672 | frying pan
673 | wok
674 | caldron
675 | coffeepot
676 | teapot
677 | spatula
678 | altar
679 | triumphal arch
680 | patio
681 | steel arch bridge
682 | suspension bridge
683 | viaduct
684 | barn
685 | greenhouse
686 | palace
687 | monastery
688 | library
689 | apiary
690 | boathouse
691 | church
692 | mosque
693 | stupa
694 | planetarium
695 | restaurant
696 | cinema
697 | home theater
698 | lumbermill
699 | coil
700 | obelisk
701 | totem pole
702 | castle
703 | prison
704 | grocery store
705 | bakery
706 | barbershop
707 | bookshop
708 | butcher shop
709 | confectionery
710 | shoe shop
711 | tobacco shop
712 | toyshop
713 | fountain
714 | cliff dwelling
715 | yurt
716 | dock
717 | brass
718 | megalith
719 | bannister
720 | breakwater
721 | dam
722 | chainlink fence
723 | picket fence
724 | worm fence
725 | stone wall
726 | grille
727 | sliding door
728 | turnstile
729 | mountain tent
730 | scoreboard
731 | honeycomb
732 | plate rack
733 | pedestal
734 | beacon
735 | mashed potato
736 | bell pepper
737 | head cabbage
738 | broccoli
739 | cauliflower
740 | zucchini
741 | spaghetti squash
742 | acorn squash
743 | butternut squash
744 | cucumber
745 | artichoke
746 | cardoon
747 | mushroom
748 | shower curtain
749 | jean
750 | carton
751 | handkerchief
752 | sandal
753 | ashcan
754 | safe
755 | plate
756 | necklace
757 | croquet ball
758 | fur coat
759 | thimble
760 | pajama
761 | running shoe
762 | cocktail shaker
763 | chest
764 | manhole cover
765 | modem
766 | tub
767 | tray
768 | balance beam
769 | bagel
770 | prayer rug
771 | kimono
772 | hot pot
773 | whiskey jug
774 | knee pad
775 | book jacket
776 | spindle
777 | ski mask
778 | beer bottle
779 | crash helmet
780 | bottlecap
781 | tile roof
782 | mask
783 | maillot
784 | Petri dish
785 | football helmet
786 | bathing cap
787 | teddy bear
788 | holster
789 | pop bottle
790 | photocopier
791 | vestment
792 | crossword puzzle
793 | golf ball
794 | trifle
795 | suit
796 | water tower
797 | feather boa
798 | cloak
799 | red wine
800 | drumstick
801 | shield
802 | Christmas stocking
803 | hoopskirt
804 | menu
805 | stage
806 | bonnet
807 | meat loaf
808 | baseball
809 | face powder
810 | scabbard
811 | sunscreen
812 | beer glass
813 | hen-of-the-woods
814 | guacamole
815 | lampshade
816 | wool
817 | hay
818 | bow tie
819 | mailbag
820 | water jug
821 | bucket
822 | dishrag
823 | soup bowl
824 | eggnog
825 | mortar
826 | trench coat
827 | paddle
828 | chain
829 | swab
830 | mixing bowl
831 | potpie
832 | wine bottle
833 | shoji
834 | bulletproof vest
835 | drilling platform
836 | binder
837 | cardigan
838 | sweatshirt
839 | pot
840 | birdhouse
841 | hamper
842 | ping-pong ball
843 | pencil box
844 | pay-phone
845 | consomme
846 | apron
847 | punching bag
848 | backpack
849 | groom
850 | bearskin
851 | pencil sharpener
852 | broom
853 | mosquito net
854 | abaya
855 | mortarboard
856 | poncho
857 | crutch
858 | Polaroid camera
859 | space bar
860 | cup
861 | racket
862 | traffic light
863 | quill
864 | radio
865 | dough
866 | cuirass
867 | military uniform
868 | lipstick
869 | shower cap
870 | monitor
871 | oscilloscope
872 | mitten
873 | brassiere
874 | French loaf
875 | vase
876 | milk can
877 | rugby ball
878 | paper towel
879 | earthstar
880 | envelope
881 | miniskirt
882 | cowboy hat
883 | trolleybus
884 | perfume
885 | bathtub
886 | hotdog
887 | coral fungus
888 | bullet train
889 | pillow
890 | toilet tissue
891 | cassette
892 | carpenter's kit
893 | ladle
894 | stinkhorn
895 | lotion
896 | hair spray
897 | academic gown
898 | dome
899 | crate
900 | wig
901 | burrito
902 | pill bottle
903 | chain mail
904 | theater curtain
905 | window shade
906 | barrel
907 | washbasin
908 | ballpoint
909 | basketball
910 | bath towel
911 | cowboy boot
912 | gown
913 | window screen
914 | agaric
915 | cellular telephone
916 | nipple
917 | barbell
918 | mailbox
919 | lab coat
920 | fire screen
921 | minibus
922 | packet
923 | maze
924 | pole
925 | horizontal bar
926 | sombrero
927 | pickelhaube
928 | rain barrel
929 | wallet
930 | cassette player
931 | comic book
932 | piggy bank
933 | street sign
934 | bell cote
935 | fountain pen
936 | Windsor tie
937 | volleyball
938 | overskirt
939 | sarong
940 | purse
941 | bolo tie
942 | bib
943 | parachute
944 | sleeping bag
945 | television
946 | swimming trunks
947 | measuring cup
948 | espresso
949 | pizza
950 | breastplate
951 | shopping basket
952 | wooden spoon
953 | saltshaker
954 | chocolate sauce
955 | ballplayer
956 | goblet
957 | gyromitra
958 | stretcher
959 | water bottle
960 | dial telephone
961 | soap dispenser
962 | jersey
963 | school bus
964 | jigsaw puzzle
965 | plastic bag
966 | reflex camera
967 | diaper
968 | Band Aid
969 | ice lolly
970 | velvet
971 | tennis ball
972 | gasmask
973 | doormat
974 | Loafer
975 | ice cream
976 | pretzel
977 | quilt
978 | maillot
979 | tape player
980 | clog
981 | iPod
982 | bolete
983 | scuba diver
984 | pitcher
985 | matchstick
986 | bikini
987 | sock
988 | CD player
989 | lens cap
990 | thatch
991 | vault
992 | beaker
993 | bubble
994 | cheeseburger
995 | parallel bars
996 | flagpole
997 | coffee mug
998 | rubber eraser
999 | stole
1000 | carbonara
1001 | dumbbell
--------------------------------------------------------------------------------
/app/src/main/assets/tensorflow_inception_graph.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/assets/tensorflow_inception_graph.pb
--------------------------------------------------------------------------------
/app/src/main/cpp/imageutils_jni.cc:
--------------------------------------------------------------------------------
#include <jni.h>
#include <stdint.h>

#include "rgb2yuv.h"
#include "yuv2rgb.h"
5 |
6 | #define IMAGEUTILS_METHOD(METHOD_NAME) \
7 | Java_org_tensorflow_demo_env_ImageUtils_##METHOD_NAME // NOLINT
8 |
9 | #ifdef __cplusplus
10 | extern "C" {
11 | #endif
12 |
13 | JNIEXPORT void JNICALL
14 | IMAGEUTILS_METHOD(convertYUV420SPToARGB8888)(
15 | JNIEnv *env, jclass clazz, jbyteArray input, jintArray output,
16 | jint width, jint height, jboolean halfSize);
17 |
18 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420ToARGB8888)(
19 | JNIEnv *env, jclass clazz, jbyteArray y, jbyteArray u, jbyteArray v,
20 | jintArray output, jint width, jint height, jint y_row_stride,
21 | jint uv_row_stride, jint uv_pixel_stride, jboolean halfSize);
22 |
23 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420SPToRGB565)(
24 | JNIEnv *env, jclass clazz, jbyteArray input, jbyteArray output, jint width,
25 | jint height);
26 |
27 | JNIEXPORT void JNICALL
28 | IMAGEUTILS_METHOD(convertARGB8888ToYUV420SP)(
29 | JNIEnv *env, jclass clazz, jintArray input, jbyteArray output,
30 | jint width, jint height);
31 |
32 | JNIEXPORT void JNICALL
33 | IMAGEUTILS_METHOD(convertRGB565ToYUV420SP)(
34 | JNIEnv *env, jclass clazz, jbyteArray input, jbyteArray output,
35 | jint width, jint height);
36 |
37 | #ifdef __cplusplus
38 | }
39 | #endif
40 |
41 | JNIEXPORT void JNICALL
42 | IMAGEUTILS_METHOD(convertYUV420SPToARGB8888)(
43 | JNIEnv *env, jclass clazz, jbyteArray input, jintArray output,
44 | jint width, jint height, jboolean halfSize) {
45 | jboolean inputCopy = JNI_FALSE;
46 | jbyte *const i = env->GetByteArrayElements(input, &inputCopy);
47 |
48 | jboolean outputCopy = JNI_FALSE;
49 | jint *const o = env->GetIntArrayElements(output, &outputCopy);
50 |
51 | if (halfSize) {
52 | ConvertYUV420SPToARGB8888HalfSize(reinterpret_cast(i),
53 | reinterpret_cast(o), width,
54 | height);
55 | } else {
56 | ConvertYUV420SPToARGB8888(reinterpret_cast(i),
57 | reinterpret_cast(i) + width * height,
58 | reinterpret_cast(o), width, height);
59 | }
60 |
61 | env->ReleaseByteArrayElements(input, i, JNI_ABORT);
62 | env->ReleaseIntArrayElements(output, o, 0);
63 | }
64 |
65 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420ToARGB8888)(
66 | JNIEnv *env, jclass clazz, jbyteArray y, jbyteArray u, jbyteArray v,
67 | jintArray output, jint width, jint height, jint y_row_stride,
68 | jint uv_row_stride, jint uv_pixel_stride, jboolean halfSize) {
69 | jboolean inputCopy = JNI_FALSE;
70 | jbyte *const y_buff = env->GetByteArrayElements(y, &inputCopy);
71 | jboolean outputCopy = JNI_FALSE;
72 | jint *const o = env->GetIntArrayElements(output, &outputCopy);
73 |
74 | if (halfSize) {
75 | ConvertYUV420SPToARGB8888HalfSize(reinterpret_cast(y_buff),
76 | reinterpret_cast(o), width,
77 | height);
78 | } else {
79 | jbyte *const u_buff = env->GetByteArrayElements(u, &inputCopy);
80 | jbyte *const v_buff = env->GetByteArrayElements(v, &inputCopy);
81 |
82 | ConvertYUV420ToARGB8888(
83 | reinterpret_cast(y_buff), reinterpret_cast(u_buff),
84 | reinterpret_cast(v_buff), reinterpret_cast(o),
85 | width, height, y_row_stride, uv_row_stride, uv_pixel_stride);
86 |
87 | env->ReleaseByteArrayElements(u, u_buff, JNI_ABORT);
88 | env->ReleaseByteArrayElements(v, v_buff, JNI_ABORT);
89 | }
90 |
91 | env->ReleaseByteArrayElements(y, y_buff, JNI_ABORT);
92 | env->ReleaseIntArrayElements(output, o, 0);
93 | }
94 |
95 | JNIEXPORT void JNICALL IMAGEUTILS_METHOD(convertYUV420SPToRGB565)(
96 | JNIEnv *env, jclass clazz, jbyteArray input, jbyteArray output, jint width,
97 | jint height) {
98 | jboolean inputCopy = JNI_FALSE;
99 | jbyte *const i = env->GetByteArrayElements(input, &inputCopy);
100 |
101 | jboolean outputCopy = JNI_FALSE;
102 | jbyte *const o = env->GetByteArrayElements(output, &outputCopy);
103 |
104 | ConvertYUV420SPToRGB565(reinterpret_cast(i),
105 | reinterpret_cast(o), width, height);
106 |
107 | env->ReleaseByteArrayElements(input, i, JNI_ABORT);
108 | env->ReleaseByteArrayElements(output, o, 0);
109 | }
110 |
111 | JNIEXPORT void JNICALL
112 | IMAGEUTILS_METHOD(convertARGB8888ToYUV420SP)(
113 | JNIEnv *env, jclass clazz, jintArray input, jbyteArray output,
114 | jint width, jint height) {
115 | jboolean inputCopy = JNI_FALSE;
116 | jint *const i = env->GetIntArrayElements(input, &inputCopy);
117 |
118 | jboolean outputCopy = JNI_FALSE;
119 | jbyte *const o = env->GetByteArrayElements(output, &outputCopy);
120 |
121 | ConvertARGB8888ToYUV420SP(reinterpret_cast(i),
122 | reinterpret_cast(o), width, height);
123 |
124 | env->ReleaseIntArrayElements(input, i, JNI_ABORT);
125 | env->ReleaseByteArrayElements(output, o, 0);
126 | }
127 |
128 | JNIEXPORT void JNICALL
129 | IMAGEUTILS_METHOD(convertRGB565ToYUV420SP)(
130 | JNIEnv *env, jclass clazz, jbyteArray input, jbyteArray output,
131 | jint width, jint height) {
132 | jboolean inputCopy = JNI_FALSE;
133 | jbyte *const i = env->GetByteArrayElements(input, &inputCopy);
134 |
135 | jboolean outputCopy = JNI_FALSE;
136 | jbyte *const o = env->GetByteArrayElements(output, &outputCopy);
137 |
138 | ConvertRGB565ToYUV420SP(reinterpret_cast(i),
139 | reinterpret_cast(o), width, height);
140 |
141 | env->ReleaseByteArrayElements(input, i, JNI_ABORT);
142 | env->ReleaseByteArrayElements(output, o, 0);
143 | }
144 |
--------------------------------------------------------------------------------
/app/src/main/cpp/rgb2yuv.cc:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | // These utility functions allow for the conversion of RGB data to YUV data.
17 |
18 | #include "rgb2yuv.h"
19 |
// Emits one luminance sample and accumulates this pixel's chroma
// contribution into the interleaved UV plane. Conversion coefficients from
// http://msdn.microsoft.com/en-us/library/ms893078
static inline void WriteYUV(const int x, const int y, const int width,
                            const int r8, const int g8, const int b8,
                            uint8_t* const pY, uint8_t* const pUV) {
  // Luma for this pixel.
  *pY = ((66 * r8 + 129 * g8 + 25 * b8 + 128) >> 8) + 16;

  // Each 2x2 pixel block shares one interleaved U/V pair; odd widths round
  // up so the rightmost partial block still gets its own pair.
  const int uv_blocks_per_row = (width + 1) / 2;
  const int block_index = (y / 2) * uv_blocks_per_row + (x / 2);
  const int offset = 2 * block_index;  // 2 chroma bytes per block

  // The top-left pixel of each block clears it before accumulation starts.
  const bool first_in_block = ((x & 1) == 0) && ((y & 1) == 0);
  if (first_in_block) {
    pUV[offset] = 0;
    pUV[offset + 1] = 0;
  }

#ifdef __APPLE__
  const int u_offset = 0;
  const int v_offset = 1;
#else
  const int u_offset = 1;
  const int v_offset = 0;
#endif
  // Chroma is the average of the block's four pixels; the divide by 4 is
  // folded into the >> 10 (rather than >> 8) below.
  pUV[offset + v_offset] += ((112 * r8 - 94 * g8 - 18 * b8 + 128) >> 10) + 32;
  pUV[offset + u_offset] += ((-38 * r8 - 74 * g8 + 112 * b8 + 128) >> 10) + 32;
}
52 |
53 | void ConvertARGB8888ToYUV420SP(const uint32_t* const input,
54 | uint8_t* const output, int width, int height) {
55 | uint8_t* pY = output;
56 | uint8_t* pUV = output + (width * height);
57 | const uint32_t* in = input;
58 |
59 | for (int y = 0; y < height; y++) {
60 | for (int x = 0; x < width; x++) {
61 | const uint32_t rgb = *in++;
62 | #ifdef __APPLE__
63 | const int nB = (rgb >> 8) & 0xFF;
64 | const int nG = (rgb >> 16) & 0xFF;
65 | const int nR = (rgb >> 24) & 0xFF;
66 | #else
67 | const int nR = (rgb >> 16) & 0xFF;
68 | const int nG = (rgb >> 8) & 0xFF;
69 | const int nB = rgb & 0xFF;
70 | #endif
71 | WriteYUV(x, y, width, nR, nG, nB, pY++, pUV);
72 | }
73 | }
74 | }
75 |
76 | void ConvertRGB565ToYUV420SP(const uint16_t* const input, uint8_t* const output,
77 | const int width, const int height) {
78 | uint8_t* pY = output;
79 | uint8_t* pUV = output + (width * height);
80 | const uint16_t* in = input;
81 |
82 | for (int y = 0; y < height; y++) {
83 | for (int x = 0; x < width; x++) {
84 | const uint32_t rgb = *in++;
85 |
86 | const int r5 = ((rgb >> 11) & 0x1F);
87 | const int g6 = ((rgb >> 5) & 0x3F);
88 | const int b5 = (rgb & 0x1F);
89 |
90 | // Shift left, then fill in the empty low bits with a copy of the high
91 | // bits so we can stretch across the entire 0 - 255 range.
92 | const int r8 = r5 << 3 | r5 >> 2;
93 | const int g8 = g6 << 2 | g6 >> 4;
94 | const int b8 = b5 << 3 | b5 >> 2;
95 |
96 | WriteYUV(x, y, width, r8, g8, b8, pY++, pUV);
97 | }
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/app/src/main/cpp/rgb2yuv.h:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | #ifndef ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_
17 | #define ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_
18 |
#include <stdint.h>
20 |
21 | #ifdef __cplusplus
22 | extern "C" {
23 | #endif
24 |
25 | void ConvertARGB8888ToYUV420SP(const uint32_t* const input,
26 | uint8_t* const output, int width, int height);
27 |
28 | void ConvertRGB565ToYUV420SP(const uint16_t* const input, uint8_t* const output,
29 | const int width, const int height);
30 |
31 | #ifdef __cplusplus
32 | }
33 | #endif
34 |
35 | #endif // ORG_TENSORFLOW_JNI_IMAGEUTILS_RGB2YUV_H_
36 |
--------------------------------------------------------------------------------
/app/src/main/cpp/yuv2rgb.cc:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | // This is a collection of routines which converts various YUV image formats
17 | // to ARGB.
18 |
19 | #include "yuv2rgb.h"
20 |
21 | #ifndef MAX
22 | #define MAX(a, b) ({__typeof__(a) _a = (a); __typeof__(b) _b = (b); _a > _b ? _a : _b; })
23 | #define MIN(a, b) ({__typeof__(a) _a = (a); __typeof__(b) _b = (b); _a < _b ? _a : _b; })
24 | #endif
25 |
26 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
27 | // are normalized to eight bits.
28 | static const int kMaxChannelValue = 262143;
29 |
30 | static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
31 | nY -= 16;
32 | nU -= 128;
33 | nV -= 128;
34 | if (nY < 0) nY = 0;
35 |
36 | // This is the floating point equivalent. We do the conversion in integer
37 | // because some Android devices do not have floating point in hardware.
38 | // nR = (int)(1.164 * nY + 2.018 * nU);
39 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
40 | // nB = (int)(1.164 * nY + 1.596 * nV);
41 |
42 | int nR = 1192 * nY + 1634 * nV;
43 | int nG = 1192 * nY - 833 * nV - 400 * nU;
44 | int nB = 1192 * nY + 2066 * nU;
45 |
46 | nR = MIN(kMaxChannelValue, MAX(0, nR));
47 | nG = MIN(kMaxChannelValue, MAX(0, nG));
48 | nB = MIN(kMaxChannelValue, MAX(0, nB));
49 |
50 | nR = (nR >> 10) & 0xff;
51 | nG = (nG >> 10) & 0xff;
52 | nB = (nB >> 10) & 0xff;
53 |
54 | return 0xff000000 | (nR << 16) | (nG << 8) | nB;
55 | }
56 |
57 | // Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by
58 | // separate u and v planes with arbitrary row and column strides,
59 | // containing 8 bit 2x2 subsampled chroma samples.
60 | // Converts to a packed ARGB 32 bit output of the same pixel dimensions.
61 | void ConvertYUV420ToARGB8888(const uint8_t* const yData,
62 | const uint8_t* const uData,
63 | const uint8_t* const vData, uint32_t* const output,
64 | const int width, const int height,
65 | const int y_row_stride, const int uv_row_stride,
66 | const int uv_pixel_stride) {
67 | uint32_t* out = output;
68 |
69 | for (int y = 0; y < height; y++) {
70 | const uint8_t* pY = yData + y_row_stride * y;
71 |
72 | const int uv_row_start = uv_row_stride * (y >> 1);
73 | const uint8_t* pU = uData + uv_row_start;
74 | const uint8_t* pV = vData + uv_row_start;
75 |
76 | for (int x = 0; x < width; x++) {
77 | const int uv_offset = (x >> 1) * uv_pixel_stride;
78 | *out++ = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
79 | }
80 | }
81 | }
82 |
83 | // Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by an
84 | // interleaved U/V plane containing 8 bit 2x2 subsampled chroma samples,
85 | // except the interleave order of U and V is reversed. Converts to a packed
86 | // ARGB 32 bit output of the same pixel dimensions.
87 | void ConvertYUV420SPToARGB8888(const uint8_t* const yData,
88 | const uint8_t* const uvData,
89 | uint32_t* const output, const int width,
90 | const int height) {
91 | const uint8_t* pY = yData;
92 | const uint8_t* pUV = uvData;
93 | uint32_t* out = output;
94 |
95 | for (int y = 0; y < height; y++) {
96 | for (int x = 0; x < width; x++) {
97 | int nY = *pY++;
98 | int offset = (y >> 1) * width + 2 * (x >> 1);
99 | #ifdef __APPLE__
100 | int nU = pUV[offset];
101 | int nV = pUV[offset + 1];
102 | #else
103 | int nV = pUV[offset];
104 | int nU = pUV[offset + 1];
105 | #endif
106 |
107 | *out++ = YUV2RGB(nY, nU, nV);
108 | }
109 | }
110 | }
111 |
112 | // The same as above, but downsamples each dimension to half size.
113 | void ConvertYUV420SPToARGB8888HalfSize(const uint8_t* const input,
114 | uint32_t* const output, int width,
115 | int height) {
116 | const uint8_t* pY = input;
117 | const uint8_t* pUV = input + (width * height);
118 | uint32_t* out = output;
119 | int stride = width;
120 | width >>= 1;
121 | height >>= 1;
122 |
123 | for (int y = 0; y < height; y++) {
124 | for (int x = 0; x < width; x++) {
125 | int nY = (pY[0] + pY[1] + pY[stride] + pY[stride + 1]) >> 2;
126 | pY += 2;
127 | #ifdef __APPLE__
128 | int nU = *pUV++;
129 | int nV = *pUV++;
130 | #else
131 | int nV = *pUV++;
132 | int nU = *pUV++;
133 | #endif
134 |
135 | *out++ = YUV2RGB(nY, nU, nV);
136 | }
137 | pY += stride;
138 | }
139 | }
140 |
141 | // Accepts a YUV 4:2:0 image with a plane of 8 bit Y samples followed by an
142 | // interleaved U/V plane containing 8 bit 2x2 subsampled chroma samples,
143 | // except the interleave order of U and V is reversed. Converts to a packed
144 | // RGB 565 bit output of the same pixel dimensions.
145 | void ConvertYUV420SPToRGB565(const uint8_t* const input, uint16_t* const output,
146 | const int width, const int height) {
147 | const uint8_t* pY = input;
148 | const uint8_t* pUV = input + (width * height);
149 | uint16_t* out = output;
150 |
151 | for (int y = 0; y < height; y++) {
152 | for (int x = 0; x < width; x++) {
153 | int nY = *pY++;
154 | int offset = (y >> 1) * width + 2 * (x >> 1);
155 | #ifdef __APPLE__
156 | int nU = pUV[offset];
157 | int nV = pUV[offset + 1];
158 | #else
159 | int nV = pUV[offset];
160 | int nU = pUV[offset + 1];
161 | #endif
162 |
163 | nY -= 16;
164 | nU -= 128;
165 | nV -= 128;
166 | if (nY < 0) nY = 0;
167 |
168 | // This is the floating point equivalent. We do the conversion in integer
169 | // because some Android devices do not have floating point in hardware.
170 | // nR = (int)(1.164 * nY + 2.018 * nU);
171 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
172 | // nB = (int)(1.164 * nY + 1.596 * nV);
173 |
174 | int nR = 1192 * nY + 1634 * nV;
175 | int nG = 1192 * nY - 833 * nV - 400 * nU;
176 | int nB = 1192 * nY + 2066 * nU;
177 |
178 | nR = MIN(kMaxChannelValue, MAX(0, nR));
179 | nG = MIN(kMaxChannelValue, MAX(0, nG));
180 | nB = MIN(kMaxChannelValue, MAX(0, nB));
181 |
182 | // Shift more than for ARGB8888 and apply appropriate bitmask.
183 | nR = (nR >> 13) & 0x1f;
184 | nG = (nG >> 12) & 0x3f;
185 | nB = (nB >> 13) & 0x1f;
186 |
187 | // R is high 5 bits, G is middle 6 bits, and B is low 5 bits.
188 | *out++ = (nR << 11) | (nG << 5) | nB;
189 | }
190 | }
191 | }
192 |
--------------------------------------------------------------------------------
/app/src/main/cpp/yuv2rgb.h:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | // This is a collection of routines which converts various YUV image formats
17 | // to (A)RGB.
18 |
19 | #ifndef ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_
20 | #define ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_
21 |
#include <stdint.h>
23 |
24 | #ifdef __cplusplus
25 | extern "C" {
26 | #endif
27 |
28 | void ConvertYUV420ToARGB8888(const uint8_t* const yData,
29 | const uint8_t* const uData,
30 | const uint8_t* const vData, uint32_t* const output,
31 | const int width, const int height,
32 | const int y_row_stride, const int uv_row_stride,
33 | const int uv_pixel_stride);
34 |
35 | // Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width
36 | // and height. The input and output must already be allocated and non-null.
37 | // For efficiency, no error checking is performed.
38 | void ConvertYUV420SPToARGB8888(const uint8_t* const pY,
39 | const uint8_t* const pUV, uint32_t* const output,
40 | const int width, const int height);
41 |
42 | // The same as above, but downsamples each dimension to half size.
43 | void ConvertYUV420SPToARGB8888HalfSize(const uint8_t* const input,
44 | uint32_t* const output, int width,
45 | int height);
46 |
47 | // Converts YUV420 semi-planar data to RGB 565 data using the supplied width
48 | // and height. The input and output must already be allocated and non-null.
49 | // For efficiency, no error checking is performed.
50 | void ConvertYUV420SPToRGB565(const uint8_t* const input, uint16_t* const output,
51 | const int width, const int height);
52 |
53 | #ifdef __cplusplus
54 | }
55 | #endif
56 |
57 | #endif // ORG_TENSORFLOW_JNI_IMAGEUTILS_YUV2RGB_H_
58 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/AutoFitTextureView.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.content.Context;
20 | import android.util.AttributeSet;
21 | import android.view.TextureView;
22 |
23 | /**
24 | * A {@link TextureView} that can be adjusted to a specified aspect ratio.
25 | */
26 | public class AutoFitTextureView extends TextureView {
27 | private int ratioWidth = 0;
28 | private int ratioHeight = 0;
29 |
30 | public AutoFitTextureView(final Context context) {
31 | this(context, null);
32 | }
33 |
34 | public AutoFitTextureView(final Context context, final AttributeSet attrs) {
35 | this(context, attrs, 0);
36 | }
37 |
38 | public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
39 | super(context, attrs, defStyle);
40 | }
41 |
42 | /**
43 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
44 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
45 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
46 | *
47 | * @param width Relative horizontal size
48 | * @param height Relative vertical size
49 | */
50 | public void setAspectRatio(final int width, final int height) {
51 | if (width < 0 || height < 0) {
52 | throw new IllegalArgumentException("Size cannot be negative.");
53 | }
54 | ratioWidth = width;
55 | ratioHeight = height;
56 | requestLayout();
57 | }
58 |
59 | @Override
60 | protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
61 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
62 | final int width = MeasureSpec.getSize(widthMeasureSpec);
63 | final int height = MeasureSpec.getSize(heightMeasureSpec);
64 | if (0 == ratioWidth || 0 == ratioHeight) {
65 | setMeasuredDimension(width, height);
66 | } else {
67 | if (width < height * ratioWidth / ratioHeight) {
68 | setMeasuredDimension(width, width * ratioHeight / ratioWidth);
69 | } else {
70 | setMeasuredDimension(height * ratioWidth / ratioHeight, height);
71 | }
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/CameraActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.Manifest;
20 | import android.app.Activity;
21 | import android.app.Fragment;
22 | import android.content.Context;
23 | import android.content.pm.PackageManager;
24 | import android.hardware.Camera;
25 | import android.hardware.camera2.CameraAccessException;
26 | import android.hardware.camera2.CameraCharacteristics;
27 | import android.hardware.camera2.CameraManager;
28 | import android.hardware.camera2.params.StreamConfigurationMap;
29 | import android.media.Image;
30 | import android.media.Image.Plane;
31 | import android.media.ImageReader;
32 | import android.media.ImageReader.OnImageAvailableListener;
33 | import android.os.Build;
34 | import android.os.Bundle;
35 | import android.os.Handler;
36 | import android.os.HandlerThread;
37 | import android.os.Trace;
38 | import android.util.Size;
39 | import android.view.KeyEvent;
40 | import android.view.WindowManager;
41 | import android.widget.Toast;
42 | import java.nio.ByteBuffer;
43 | import org.tensorflow.demo.env.ImageUtils;
44 | import org.tensorflow.demo.env.Logger;
45 | import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
46 |
47 | public abstract class CameraActivity extends Activity
48 | implements OnImageAvailableListener, Camera.PreviewCallback {
49 | private static final Logger LOGGER = new Logger();
50 |
51 | private static final int PERMISSIONS_REQUEST = 1;
52 |
53 | private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
54 | private static final String PERMISSION_STORAGE = Manifest.permission.WRITE_EXTERNAL_STORAGE;
55 |
56 | private boolean debug = false;
57 |
58 | private Handler handler;
59 | private HandlerThread handlerThread;
60 | private boolean useCamera2API;
61 | private boolean isProcessingFrame = false;
62 | private byte[][] yuvBytes = new byte[3][];
63 | private int[] rgbBytes = null;
64 | private int yRowStride;
65 |
66 | protected int previewWidth = 0;
67 | protected int previewHeight = 0;
68 |
69 | private Runnable postInferenceCallback;
70 | private Runnable imageConverter;
71 |
  /** Sets up the camera screen and attaches the camera fragment once permissions allow. */
  @Override
  protected void onCreate(final Bundle savedInstanceState) {
    LOGGER.d("onCreate " + this);
    // NOTE(review): null is passed instead of savedInstanceState, presumably to
    // keep the framework from restoring previously attached fragments —
    // TODO confirm this is intentional.
    super.onCreate(null);
    // Keep the screen awake while the camera preview is visible.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.activity_camera);

    // Only attach the camera fragment once the camera permission is granted.
    if (hasPermission()) {
      setFragment();
    } else {
      requestPermission();
    }
  }
86 |
87 | private byte[] lastPreviewFrame;
88 |
  /** Runs the pending YUV-to-ARGB conversion, then returns the RGB pixel buffer. */
  protected int[] getRgbBytes() {
    imageConverter.run();
    return rgbBytes;
  }
93 |
  /** Returns the row stride of the luminance (Y) plane of the latest frame. */
  protected int getLuminanceStride() {
    return yRowStride;
  }
97 |
  /** Returns the raw luminance (Y) plane bytes of the latest frame. */
  protected byte[] getLuminance() {
    return yuvBytes[0];
  }
101 |
  /**
   * Callback for android.hardware.Camera API.
   *
   * Records the incoming semi-planar YUV preview frame, sets up its deferred
   * ARGB conversion, and drops frames that arrive while a previous frame is
   * still being processed.
   */
  @Override
  public void onPreviewFrame(final byte[] bytes, final Camera camera) {
    if (isProcessingFrame) {
      LOGGER.w("Dropping frame!");
      return;
    }

    try {
      // Initialize the storage bitmaps once when the resolution is known.
      if (rgbBytes == null) {
        Camera.Size previewSize = camera.getParameters().getPreviewSize();
        previewHeight = previewSize.height;
        previewWidth = previewSize.width;
        rgbBytes = new int[previewWidth * previewHeight];
        // NOTE(review): rotation is hard-coded to 90 — TODO confirm this holds
        // for all device orientations.
        onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
      }
    } catch (final Exception e) {
      LOGGER.e(e, "Exception!");
      return;
    }

    isProcessingFrame = true;
    lastPreviewFrame = bytes;
    yuvBytes[0] = bytes;
    // NOTE(review): assumes the preview buffer has no row padding
    // (stride == width) — TODO confirm for the legacy camera API.
    yRowStride = previewWidth;

    // Deferred: conversion only runs when a consumer calls getRgbBytes().
    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
          }
        };

    // Returns the buffer to the camera and re-opens the frame gate.
    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            camera.addCallbackBuffer(bytes);
            isProcessingFrame = false;
          }
        };
    processImage();
  }
149 |
150 | /**
151 | * Callback for Camera2 API
152 | */
153 | @Override
154 | public void onImageAvailable(final ImageReader reader) {
155 | //We need wait until we have some size from onPreviewSizeChosen
156 | if (previewWidth == 0 || previewHeight == 0) {
157 | return;
158 | }
159 | if (rgbBytes == null) {
160 | rgbBytes = new int[previewWidth * previewHeight];
161 | }
162 | try {
163 | final Image image = reader.acquireLatestImage();
164 |
165 | if (image == null) {
166 | return;
167 | }
168 |
169 | if (isProcessingFrame) {
170 | image.close();
171 | return;
172 | }
173 | isProcessingFrame = true;
174 | Trace.beginSection("imageAvailable");
175 | final Plane[] planes = image.getPlanes();
176 | fillBytes(planes, yuvBytes);
177 | yRowStride = planes[0].getRowStride();
178 | final int uvRowStride = planes[1].getRowStride();
179 | final int uvPixelStride = planes[1].getPixelStride();
180 |
181 | imageConverter =
182 | new Runnable() {
183 | @Override
184 | public void run() {
185 | ImageUtils.convertYUV420ToARGB8888(
186 | yuvBytes[0],
187 | yuvBytes[1],
188 | yuvBytes[2],
189 | previewWidth,
190 | previewHeight,
191 | yRowStride,
192 | uvRowStride,
193 | uvPixelStride,
194 | rgbBytes);
195 | }
196 | };
197 |
198 | postInferenceCallback =
199 | new Runnable() {
200 | @Override
201 | public void run() {
202 | image.close();
203 | isProcessingFrame = false;
204 | }
205 | };
206 |
207 | processImage();
208 | } catch (final Exception e) {
209 | LOGGER.e(e, "Exception!");
210 | Trace.endSection();
211 | return;
212 | }
213 | Trace.endSection();
214 | }
215 |
  /** Lifecycle logging hook; no additional work beyond the superclass. */
  @Override
  public synchronized void onStart() {
    LOGGER.d("onStart " + this);
    super.onStart();
  }
221 |
  /**
   * Starts the background inference thread and its handler each time the activity
   * becomes visible. The matching teardown happens in {@link #onPause()}.
   */
  @Override
  public synchronized void onResume() {
    LOGGER.d("onResume " + this);
    super.onResume();

    handlerThread = new HandlerThread("inference");
    handlerThread.start();
    handler = new Handler(handlerThread.getLooper());
  }
231 |
  /**
   * Finishes the activity when it is backgrounded and tears down the inference thread
   * started in {@link #onResume()}.
   */
  @Override
  public synchronized void onPause() {
    LOGGER.d("onPause " + this);

    if (!isFinishing()) {
      LOGGER.d("Requesting finish");
      finish();
    }

    // quitSafely() lets already-queued work drain before the looper stops.
    handlerThread.quitSafely();
    try {
      handlerThread.join();
      handlerThread = null;
      handler = null;
    } catch (final InterruptedException e) {
      LOGGER.e(e, "Exception!");
    }

    super.onPause();
  }
252 |
  /** Lifecycle logging hook; no additional work beyond the superclass. */
  @Override
  public synchronized void onStop() {
    LOGGER.d("onStop " + this);
    super.onStop();
  }
258 |
  /** Lifecycle logging hook; no additional work beyond the superclass. */
  @Override
  public synchronized void onDestroy() {
    LOGGER.d("onDestroy " + this);
    super.onDestroy();
  }
264 |
  /**
   * Posts {@code r} to the background inference thread, or silently drops it if the
   * thread is not running (e.g. after {@link #onPause()} has torn it down).
   */
  protected synchronized void runInBackground(final Runnable r) {
    if (handler != null) {
      handler.post(r);
    }
  }
270 |
271 | @Override
272 | public void onRequestPermissionsResult(
273 | final int requestCode, final String[] permissions, final int[] grantResults) {
274 | if (requestCode == PERMISSIONS_REQUEST) {
275 | if (grantResults.length > 0
276 | && grantResults[0] == PackageManager.PERMISSION_GRANTED
277 | && grantResults[1] == PackageManager.PERMISSION_GRANTED) {
278 | setFragment();
279 | } else {
280 | requestPermission();
281 | }
282 | }
283 | }
284 |
285 | private boolean hasPermission() {
286 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
287 | return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED &&
288 | checkSelfPermission(PERMISSION_STORAGE) == PackageManager.PERMISSION_GRANTED;
289 | } else {
290 | return true;
291 | }
292 | }
293 |
294 | private void requestPermission() {
295 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
296 | if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA) ||
297 | shouldShowRequestPermissionRationale(PERMISSION_STORAGE)) {
298 | Toast.makeText(CameraActivity.this,
299 | "Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
300 | }
301 | requestPermissions(new String[] {PERMISSION_CAMERA, PERMISSION_STORAGE}, PERMISSIONS_REQUEST);
302 | }
303 | }
304 |
305 | // Returns true if the device supports the required hardware level, or better.
306 | private boolean isHardwareLevelSupported(
307 | CameraCharacteristics characteristics, int requiredLevel) {
308 | int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
309 | if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
310 | return requiredLevel == deviceLevel;
311 | }
312 | // deviceLevel is not LEGACY, can use numerical sort
313 | return requiredLevel <= deviceLevel;
314 | }
315 |
  /**
   * Picks the first non-front-facing camera that exposes a stream configuration map,
   * recording in {@code useCamera2API} whether it supports at least FULL hardware level
   * (i.e. whether the Camera2 fragment should be used instead of the legacy one).
   *
   * @return the chosen camera id, or null if no usable camera was found or access was denied
   */
  private String chooseCamera() {
    final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
      for (final String cameraId : manager.getCameraIdList()) {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // We don't use a front facing camera in this sample.
        final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }

        final StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        if (map == null) {
          continue;
        }

        useCamera2API = isHardwareLevelSupported(characteristics,
            CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
        LOGGER.i("Camera API lv2?: %s", useCamera2API);
        return cameraId;
      }
    } catch (CameraAccessException e) {
      LOGGER.e(e, "Not allowed to access camera");
    }

    return null;
  }
346 |
  /**
   * Chooses a camera and installs the matching preview fragment (Camera2 or legacy)
   * into {@code R.id.container}.
   *
   * NOTE(review): chooseCamera() can return null (no usable camera / access denied) and
   * that null is passed straight to setCamera() — verify downstream handling.
   */
  protected void setFragment() {
    String cameraId = chooseCamera();

    Fragment fragment;
    if (useCamera2API) {
      CameraConnectionFragment camera2Fragment =
          CameraConnectionFragment.newInstance(
              new CameraConnectionFragment.ConnectionCallback() {
                @Override
                public void onPreviewSizeChosen(final Size size, final int rotation) {
                  // Record the preview dimensions before forwarding to the subclass.
                  previewHeight = size.getHeight();
                  previewWidth = size.getWidth();
                  CameraActivity.this.onPreviewSizeChosen(size, rotation);
                }
              },
              this,
              getLayoutId(),
              getDesiredPreviewFrameSize());

      camera2Fragment.setCamera(cameraId);
      fragment = camera2Fragment;
    } else {
      fragment =
          new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
    }

    getFragmentManager()
        .beginTransaction()
        .replace(R.id.container, fragment)
        .commit();
  }
378 |
  /**
   * Copies each image plane's buffer into the corresponding entry of {@code yuvBytes},
   * lazily allocating the destination arrays on first use (sized by buffer capacity,
   * since row strides vary per device).
   *
   * @param planes the planes of the current Image
   * @param yuvBytes destination arrays, reused across frames once allocated
   */
  protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
    // Because of the variable row stride it's not possible to know in
    // advance the actual necessary dimensions of the yuv planes.
    for (int i = 0; i < planes.length; ++i) {
      final ByteBuffer buffer = planes[i].getBuffer();
      if (yuvBytes[i] == null) {
        LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
        yuvBytes[i] = new byte[buffer.capacity()];
      }
      buffer.get(yuvBytes[i]);
    }
  }
391 |
  /** Returns whether the debug overlay is currently enabled (toggled via volume keys). */
  public boolean isDebug() {
    return debug;
  }
395 |
  /** Invalidates the debug overlay (if present) so it is redrawn on the next frame. */
  public void requestRender() {
    final OverlayView overlay = (OverlayView) findViewById(R.id.debug_overlay);
    if (overlay != null) {
      overlay.postInvalidate();
    }
  }
402 |
  /** Registers a draw callback on the debug overlay, if the layout provides one. */
  public void addCallback(final OverlayView.DrawCallback callback) {
    final OverlayView overlay = (OverlayView) findViewById(R.id.debug_overlay);
    if (overlay != null) {
      overlay.addCallback(callback);
    }
  }
409 |
  /** Hook for subclasses to react to debug-mode toggles; default is a no-op. */
  public void onSetDebug(final boolean debug) {}
411 |
412 | @Override
413 | public boolean onKeyDown(final int keyCode, final KeyEvent event) {
414 | if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN || keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
415 | debug = !debug;
416 | requestRender();
417 | onSetDebug(debug);
418 | return true;
419 | }
420 | return super.onKeyDown(keyCode, event);
421 | }
422 |
  /**
   * Signals that the subclass has finished with the current frame; releases the frame
   * buffer back to the camera and re-arms frame processing.
   */
  protected void readyForNextImage() {
    if (postInferenceCallback != null) {
      postInferenceCallback.run();
    }
  }
428 |
  /** Runs inference on the most recent frame; implemented by subclasses. */
  protected abstract void processImage();

  /** Notifies the subclass of the chosen preview size and camera rotation in degrees. */
  protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
  /** Layout resource to inflate for the camera fragment. */
  protected abstract int getLayoutId();
  /** Preview frame size desired by the subclass's model. */
  protected abstract Size getDesiredPreviewFrameSize();
434 | }
435 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/CameraConnectionFragment.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.app.Activity;
20 | import android.app.AlertDialog;
21 | import android.app.Dialog;
22 | import android.app.DialogFragment;
23 | import android.app.Fragment;
24 | import android.content.Context;
25 | import android.content.DialogInterface;
26 | import android.content.res.Configuration;
27 | import android.graphics.ImageFormat;
28 | import android.graphics.Matrix;
29 | import android.graphics.RectF;
30 | import android.graphics.SurfaceTexture;
31 | import android.hardware.camera2.CameraAccessException;
32 | import android.hardware.camera2.CameraCaptureSession;
33 | import android.hardware.camera2.CameraCharacteristics;
34 | import android.hardware.camera2.CameraDevice;
35 | import android.hardware.camera2.CameraManager;
36 | import android.hardware.camera2.CaptureRequest;
37 | import android.hardware.camera2.CaptureResult;
38 | import android.hardware.camera2.TotalCaptureResult;
39 | import android.hardware.camera2.params.StreamConfigurationMap;
40 | import android.media.ImageReader;
41 | import android.media.ImageReader.OnImageAvailableListener;
42 | import android.os.Bundle;
43 | import android.os.Handler;
44 | import android.os.HandlerThread;
45 | import android.text.TextUtils;
46 | import android.util.Size;
47 | import android.util.SparseIntArray;
48 | import android.view.LayoutInflater;
49 | import android.view.Surface;
50 | import android.view.TextureView;
51 | import android.view.View;
52 | import android.view.ViewGroup;
53 | import android.widget.Toast;
54 | import java.util.ArrayList;
55 | import java.util.Arrays;
56 | import java.util.Collections;
57 | import java.util.Comparator;
58 | import java.util.List;
59 | import java.util.concurrent.Semaphore;
60 | import java.util.concurrent.TimeUnit;
61 | import org.tensorflow.demo.env.Logger;
62 | import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
63 |
64 | public class CameraConnectionFragment extends Fragment {
  private static final Logger LOGGER = new Logger();

  /**
   * The camera preview size will be chosen to be the smallest frame by pixel size capable of
   * containing a DESIRED_SIZE x DESIRED_SIZE square.
   */
  private static final int MINIMUM_PREVIEW_SIZE = 320;

  /**
   * Conversion from screen rotation to JPEG orientation.
   */
  private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
  // Tag used when showing the ErrorDialog fragment.
  private static final String FRAGMENT_DIALOG = "dialog";

  static {
    // Maps display rotation -> degrees a capture must be rotated to appear upright.
    ORIENTATIONS.append(Surface.ROTATION_0, 90);
    ORIENTATIONS.append(Surface.ROTATION_90, 0);
    ORIENTATIONS.append(Surface.ROTATION_180, 270);
    ORIENTATIONS.append(Surface.ROTATION_270, 180);
  }
85 |
  /**
   * {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
   * {@link TextureView}: the camera is opened as soon as the surface exists, and the
   * preview transform is re-applied whenever the surface is resized.
   */
  private final TextureView.SurfaceTextureListener surfaceTextureListener =
      new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(
            final SurfaceTexture texture, final int width, final int height) {
          openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(
            final SurfaceTexture texture, final int width, final int height) {
          configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
          // Returning true lets the TextureView release the SurfaceTexture itself.
          return true;
        }

        @Override
        public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
      };
112 |
  /**
   * Callback for Activities to use to initialize their data once the
   * selected preview size is known.
   */
  public interface ConnectionCallback {
    // cameraRotation is the sensor orientation in degrees, as reported by camera2.
    void onPreviewSizeChosen(Size size, int cameraRotation);
  }
120 |
  /**
   * ID of the current {@link CameraDevice}.
   */
  private String cameraId;

  /**
   * An {@link AutoFitTextureView} for camera preview.
   */
  private AutoFitTextureView textureView;

  /**
   * A {@link CameraCaptureSession } for camera preview.
   */
  private CameraCaptureSession captureSession;

  /**
   * A reference to the opened {@link CameraDevice}.
   */
  private CameraDevice cameraDevice;

  /**
   * The rotation in degrees of the camera sensor from the display.
   */
  private Integer sensorOrientation;

  /**
   * The {@link android.util.Size} of camera preview.
   */
  private Size previewSize;

  /**
   * {@link android.hardware.camera2.CameraDevice.StateCallback}
   * is called when {@link CameraDevice} changes its state.
   */
  private final CameraDevice.StateCallback stateCallback =
      new CameraDevice.StateCallback() {
        @Override
        public void onOpened(final CameraDevice cd) {
          // This method is called when the camera is opened. We start camera preview here.
          cameraOpenCloseLock.release();
          cameraDevice = cd;
          createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(final CameraDevice cd) {
          cameraOpenCloseLock.release();
          cd.close();
          cameraDevice = null;
        }

        @Override
        public void onError(final CameraDevice cd, final int error) {
          // Unrecoverable: release the lock, drop the device, and finish the activity.
          cameraOpenCloseLock.release();
          cd.close();
          cameraDevice = null;
          final Activity activity = getActivity();
          if (null != activity) {
            activity.finish();
          }
        }
      };

  /**
   * An additional thread for running tasks that shouldn't block the UI.
   */
  private HandlerThread backgroundThread;

  /**
   * A {@link Handler} for running tasks in the background.
   */
  private Handler backgroundHandler;

  /**
   * An {@link ImageReader} that handles preview frame capture.
   */
  private ImageReader previewReader;

  /**
   * {@link android.hardware.camera2.CaptureRequest.Builder} for the camera preview
   */
  private CaptureRequest.Builder previewRequestBuilder;

  /**
   * {@link CaptureRequest} generated by {@link #previewRequestBuilder}
   */
  private CaptureRequest previewRequest;

  /**
   * A {@link Semaphore} to prevent the app from exiting before closing the camera.
   */
  private final Semaphore cameraOpenCloseLock = new Semaphore(1);

  /**
   * A {@link OnImageAvailableListener} to receive frames as they are available.
   */
  private final OnImageAvailableListener imageListener;

  /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
  private final Size inputSize;

  /**
   * The layout identifier to inflate for this Fragment.
   */
  private final int layout;

  /** Notified once the preview size has been chosen in setUpCameraOutputs(). */
  private final ConnectionCallback cameraConnectionCallback;
229 |
  /**
   * Private constructor; use {@link #newInstance}.
   *
   * @param connectionCallback notified once the preview size is chosen
   * @param imageListener receives preview frames as they become available
   * @param layout layout resource to inflate for this fragment
   * @param inputSize input size in pixels desired by the model
   */
  private CameraConnectionFragment(
      final ConnectionCallback connectionCallback,
      final OnImageAvailableListener imageListener,
      final int layout,
      final Size inputSize) {
    this.cameraConnectionCallback = connectionCallback;
    this.imageListener = imageListener;
    this.layout = layout;
    this.inputSize = inputSize;
  }
240 |
241 | /**
242 | * Shows a {@link Toast} on the UI thread.
243 | *
244 | * @param text The message to show
245 | */
246 | private void showToast(final String text) {
247 | final Activity activity = getActivity();
248 | if (activity != null) {
249 | activity.runOnUiThread(
250 | new Runnable() {
251 | @Override
252 | public void run() {
253 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
254 | }
255 | });
256 | }
257 | }
258 |
259 | /**
260 | * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
261 | * width and height are at least as large as the minimum of both, or an exact match if possible.
262 | *
263 | * @param choices The list of sizes that the camera supports for the intended output class
264 | * @param width The minimum desired width
265 | * @param height The minimum desired height
266 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough
267 | */
268 | protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
269 | final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
270 | final Size desiredSize = new Size(width, height);
271 |
272 | // Collect the supported resolutions that are at least as big as the preview Surface
273 | boolean exactSizeFound = false;
274 | final List bigEnough = new ArrayList();
275 | final List tooSmall = new ArrayList();
276 | for (final Size option : choices) {
277 | if (option.equals(desiredSize)) {
278 | // Set the size but don't return yet so that remaining sizes will still be logged.
279 | exactSizeFound = true;
280 | }
281 |
282 | if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
283 | bigEnough.add(option);
284 | } else {
285 | tooSmall.add(option);
286 | }
287 | }
288 |
289 | LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
290 | LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
291 | LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
292 |
293 | if (exactSizeFound) {
294 | LOGGER.i("Exact size match found.");
295 | return desiredSize;
296 | }
297 |
298 | // Pick the smallest of those, assuming we found any
299 | if (bigEnough.size() > 0) {
300 | final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
301 | LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
302 | return chosenSize;
303 | } else {
304 | LOGGER.e("Couldn't find any suitable preview size");
305 | return choices[0];
306 | }
307 | }
308 |
  /** Factory method wrapping the private constructor. */
  public static CameraConnectionFragment newInstance(
      final ConnectionCallback callback,
      final OnImageAvailableListener imageListener,
      final int layout,
      final Size inputSize) {
    return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
  }
316 |
  /** Inflates the layout resource supplied at construction time. */
  @Override
  public View onCreateView(
      final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
    return inflater.inflate(layout, container, false);
  }
322 |
  /** Captures the preview TextureView from the inflated layout. */
  @Override
  public void onViewCreated(final View view, final Bundle savedInstanceState) {
    textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
  }
327 |
  /** No extra work beyond the superclass; kept as an explicit lifecycle anchor. */
  @Override
  public void onActivityCreated(final Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
  }
332 |
  /** Starts the background capture thread and opens the camera once the surface exists. */
  @Override
  public void onResume() {
    super.onResume();
    startBackgroundThread();

    // When the screen is turned off and turned back on, the SurfaceTexture is already
    // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
    // a camera and start preview from here (otherwise, we wait until the surface is ready in
    // the SurfaceTextureListener).
    if (textureView.isAvailable()) {
      openCamera(textureView.getWidth(), textureView.getHeight());
    } else {
      textureView.setSurfaceTextureListener(surfaceTextureListener);
    }
  }
348 |
  /** Closes the camera and stops the background thread before the fragment pauses. */
  @Override
  public void onPause() {
    closeCamera();
    stopBackgroundThread();
    super.onPause();
  }
355 |
  /** Sets the id of the camera to open; must be called before the fragment resumes. */
  public void setCamera(String cameraId) {
    this.cameraId = cameraId;
  }
359 |
360 | /**
361 | * Sets up member variables related to camera.
362 | */
363 | private void setUpCameraOutputs() {
364 | final Activity activity = getActivity();
365 | final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
366 | try {
367 | final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
368 |
369 | final StreamConfigurationMap map =
370 | characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
371 |
372 | // For still image captures, we use the largest available size.
373 | final Size largest =
374 | Collections.max(
375 | Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
376 | new CompareSizesByArea());
377 |
378 | sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
379 |
380 | // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
381 | // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
382 | // garbage capture data.
383 | previewSize =
384 | chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
385 | inputSize.getWidth(),
386 | inputSize.getHeight());
387 |
388 | // We fit the aspect ratio of TextureView to the size of preview we picked.
389 | final int orientation = getResources().getConfiguration().orientation;
390 | if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
391 | textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
392 | } else {
393 | textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
394 | }
395 | } catch (final CameraAccessException e) {
396 | LOGGER.e(e, "Exception!");
397 | } catch (final NullPointerException e) {
398 | // Currently an NPE is thrown when the Camera2API is used but not supported on the
399 | // device this code runs.
400 | // TODO(andrewharp): abstract ErrorDialog/RuntimeException handling out into new method and
401 | // reuse throughout app.
402 | ErrorDialog.newInstance(getString(R.string.camera_error))
403 | .show(getChildFragmentManager(), FRAGMENT_DIALOG);
404 | throw new RuntimeException(getString(R.string.camera_error));
405 | }
406 |
407 | cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
408 | }
409 |
  /**
   * Opens the camera specified by {@link CameraConnectionFragment#cameraId}.
   *
   * Acquires {@link #cameraOpenCloseLock} (released again by the device state callbacks)
   * so teardown cannot race an in-flight open.
   *
   * NOTE(review): openCamera can also throw SecurityException if the camera permission
   * was revoked after the activity's check — confirm the activity-level flow covers this.
   */
  private void openCamera(final int width, final int height) {
    setUpCameraOutputs();
    configureTransform(width, height);
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
        throw new RuntimeException("Time out waiting to lock camera opening.");
      }
      manager.openCamera(cameraId, stateCallback, backgroundHandler);
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    } catch (final InterruptedException e) {
      throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
  }
429 |
  /**
   * Closes the current {@link CameraDevice}, capture session, and preview reader.
   *
   * Blocks on {@link #cameraOpenCloseLock} so an in-flight open completes before
   * resources are torn down; the lock is always released in the finally block.
   */
  private void closeCamera() {
    try {
      cameraOpenCloseLock.acquire();
      if (null != captureSession) {
        captureSession.close();
        captureSession = null;
      }
      if (null != cameraDevice) {
        cameraDevice.close();
        cameraDevice = null;
      }
      if (null != previewReader) {
        previewReader.close();
        previewReader = null;
      }
    } catch (final InterruptedException e) {
      throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    } finally {
      cameraOpenCloseLock.release();
    }
  }
454 |
  /**
   * Starts a background thread and its {@link Handler}; preview frames and camera
   * callbacks are delivered on this thread.
   */
  private void startBackgroundThread() {
    backgroundThread = new HandlerThread("ImageListener");
    backgroundThread.start();
    backgroundHandler = new Handler(backgroundThread.getLooper());
  }
463 |
  /**
   * Stops the background thread and its {@link Handler}, waiting for queued work to
   * drain (quitSafely) before joining.
   */
  private void stopBackgroundThread() {
    backgroundThread.quitSafely();
    try {
      backgroundThread.join();
      backgroundThread = null;
      backgroundHandler = null;
    } catch (final InterruptedException e) {
      LOGGER.e(e, "Exception!");
    }
  }
477 |
  // Capture progress callback for the repeating preview request; intentionally a no-op —
  // frames are consumed via the ImageReader listener instead.
  private final CameraCaptureSession.CaptureCallback captureCallback =
      new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureProgressed(
            final CameraCaptureSession session,
            final CaptureRequest request,
            final CaptureResult partialResult) {}

        @Override
        public void onCaptureCompleted(
            final CameraCaptureSession session,
            final CaptureRequest request,
            final TotalCaptureResult result) {}
      };
492 |
  /**
   * Creates a new {@link CameraCaptureSession} for camera preview.
   *
   * Two output targets are configured: the on-screen TextureView surface, and an
   * ImageReader whose frames feed {@link #imageListener}. Once the session is
   * configured, a repeating request with continuous autofocus and auto-flash starts
   * the preview.
   */
  private void createCameraPreviewSession() {
    try {
      final SurfaceTexture texture = textureView.getSurfaceTexture();
      assert texture != null;

      // We configure the size of default buffer to be the size of camera preview we want.
      texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());

      // This is the output Surface we need to start preview.
      final Surface surface = new Surface(texture);

      // We set up a CaptureRequest.Builder with the output Surface.
      previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      previewRequestBuilder.addTarget(surface);

      LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());

      // Create the reader for the preview frames.
      previewReader =
          ImageReader.newInstance(
              previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);

      previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
      previewRequestBuilder.addTarget(previewReader.getSurface());

      // Here, we create a CameraCaptureSession for camera preview.
      cameraDevice.createCaptureSession(
          Arrays.asList(surface, previewReader.getSurface()),
          new CameraCaptureSession.StateCallback() {

            @Override
            public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
              // The camera is already closed
              if (null == cameraDevice) {
                return;
              }

              // When the session is ready, we start displaying the preview.
              captureSession = cameraCaptureSession;
              try {
                // Auto focus should be continuous for camera preview.
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                // Flash is automatically enabled when necessary.
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                // Finally, we start displaying the camera preview.
                previewRequest = previewRequestBuilder.build();
                captureSession.setRepeatingRequest(
                    previewRequest, captureCallback, backgroundHandler);
              } catch (final CameraAccessException e) {
                LOGGER.e(e, "Exception!");
              }
            }

            @Override
            public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
              showToast("Failed");
            }
          },
          null);
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    }
  }
563 |
  /**
   * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
   * This method should be called after the camera preview size is determined in
   * setUpCameraOutputs and also the size of `mTextureView` is fixed.
   *
   * @param viewWidth The width of `mTextureView`
   * @param viewHeight The height of `mTextureView`
   */
  private void configureTransform(final int viewWidth, final int viewHeight) {
    final Activity activity = getActivity();
    if (null == textureView || null == previewSize || null == activity) {
      return;
    }
    final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    final Matrix matrix = new Matrix();
    final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    // Width/height are swapped here to mirror the portrait/landscape handling in
    // setUpCameraOutputs — presumably the buffer is sensor-oriented; confirm per device.
    final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
    final float centerX = viewRect.centerX();
    final float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
      bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
      matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
      // Scale to fill the view while preserving the preview aspect ratio.
      final float scale =
          Math.max(
              (float) viewHeight / previewSize.getHeight(),
              (float) viewWidth / previewSize.getWidth());
      matrix.postScale(scale, scale, centerX, centerY);
      // ROTATION_90 -> -90 degrees, ROTATION_270 -> +90 degrees.
      matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
      matrix.postRotate(180, centerX, centerY);
    }
    textureView.setTransform(matrix);
  }
597 |
598 | /**
599 | * Compares two {@code Size}s based on their areas.
600 | */
601 | static class CompareSizesByArea implements Comparator {
602 | @Override
603 | public int compare(final Size lhs, final Size rhs) {
604 | // We cast here to ensure the multiplications won't overflow
605 | return Long.signum(
606 | (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
607 | }
608 | }
609 |
  /**
   * Shows an error message dialog; tapping OK finishes the hosting activity.
   */
  public static class ErrorDialog extends DialogFragment {
    // Argument key under which the message text is stored.
    private static final String ARG_MESSAGE = "message";

    /** Creates a dialog instance carrying {@code message} in its arguments bundle. */
    public static ErrorDialog newInstance(final String message) {
      final ErrorDialog dialog = new ErrorDialog();
      final Bundle args = new Bundle();
      args.putString(ARG_MESSAGE, message);
      dialog.setArguments(args);
      return dialog;
    }

    @Override
    public Dialog onCreateDialog(final Bundle savedInstanceState) {
      final Activity activity = getActivity();
      return new AlertDialog.Builder(activity)
          .setMessage(getArguments().getString(ARG_MESSAGE))
          .setPositiveButton(
              android.R.string.ok,
              new DialogInterface.OnClickListener() {
                @Override
                public void onClick(final DialogInterface dialogInterface, final int i) {
                  // The error is unrecoverable, so dismiss the whole activity.
                  activity.finish();
                }
              })
          .create();
    }
  }
640 | }
641 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/Classifier.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import android.graphics.Bitmap;
19 | import android.graphics.RectF;
20 | import java.util.List;
21 |
22 | /**
23 | * Generic interface for interacting with different recognition engines.
24 | */
25 | public interface Classifier {
26 | /**
27 | * An immutable result returned by a Classifier describing what was recognized.
28 | */
29 | public class Recognition {
30 | /**
31 | * A unique identifier for what has been recognized. Specific to the class, not the instance of
32 | * the object.
33 | */
34 | private final String id;
35 |
36 | /**
37 | * Display name for the recognition.
38 | */
39 | private final String title;
40 |
41 | /**
42 | * A sortable score for how good the recognition is relative to others. Higher should be better.
43 | */
44 | private final Float confidence;
45 |
46 | /** Optional location within the source image for the location of the recognized object. */
47 | private RectF location;
48 |
49 | public Recognition(
50 | final String id, final String title, final Float confidence, final RectF location) {
51 | this.id = id;
52 | this.title = title;
53 | this.confidence = confidence;
54 | this.location = location;
55 | }
56 |
57 | public String getId() {
58 | return id;
59 | }
60 |
61 | public String getTitle() {
62 | return title;
63 | }
64 |
65 | public Float getConfidence() {
66 | return confidence;
67 | }
68 |
69 | public RectF getLocation() {
70 | return new RectF(location);
71 | }
72 |
73 | public void setLocation(RectF location) {
74 | this.location = location;
75 | }
76 |
77 | @Override
78 | public String toString() {
79 | String resultString = "";
80 | if (id != null) {
81 | resultString += "[" + id + "] ";
82 | }
83 |
84 | if (title != null) {
85 | resultString += title + " ";
86 | }
87 |
88 | if (confidence != null) {
89 | resultString += String.format("(%.1f%%) ", confidence * 100.0f);
90 | }
91 |
92 | if (location != null) {
93 | resultString += location + " ";
94 | }
95 |
96 | return resultString.trim();
97 | }
98 | }
99 |
100 | List recognizeImage(Bitmap bitmap);
101 |
102 | void enableStatLogging(final boolean debug);
103 |
104 | String getStatString();
105 |
106 | void close();
107 | }
108 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/ClassifierActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.graphics.Bitmap;
20 | import android.graphics.Bitmap.Config;
21 | import android.graphics.Canvas;
22 | import android.graphics.Matrix;
23 | import android.graphics.Paint;
24 | import android.graphics.Typeface;
25 | import android.media.ImageReader.OnImageAvailableListener;
26 | import android.os.SystemClock;
27 | import android.util.Size;
28 | import android.util.TypedValue;
29 | import android.view.Display;
30 | import java.util.List;
31 | import java.util.Vector;
32 | import org.tensorflow.demo.OverlayView.DrawCallback;
33 | import org.tensorflow.demo.env.BorderedText;
34 | import org.tensorflow.demo.env.ImageUtils;
35 | import org.tensorflow.demo.env.Logger;
36 | import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
37 |
/**
 * Camera activity that feeds each preview frame through a TensorFlow Inception
 * classifier and shows the recognized labels in a {@link ResultsView}, with an
 * optional on-screen debug overlay.
 */
public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
  private static final Logger LOGGER = new Logger();

  // When true, every cropped network-input bitmap is also written to disk for inspection.
  protected static final boolean SAVE_PREVIEW_BITMAP = false;

  // Looked up lazily in processImage(); renders the classification results.
  private ResultsView resultsView;

  // Full preview frame in ARGB_8888.
  private Bitmap rgbFrameBitmap = null;
  // INPUT_SIZE x INPUT_SIZE crop actually fed to the network.
  private Bitmap croppedBitmap = null;
  // Copy of the most recent crop, drawn in the debug overlay.
  private Bitmap cropCopyBitmap = null;

  // Duration of the most recent inference, displayed in the debug overlay.
  private long lastProcessingTimeMs;

  // These are the settings for the original v1 Inception model. If you want to
  // use a model that's been produced from the TensorFlow for Poets codelab,
  // you'll need to set IMAGE_SIZE = 299, IMAGE_MEAN = 128, IMAGE_STD = 128,
  // INPUT_NAME = "Mul", and OUTPUT_NAME = "final_result".
  // You'll also need to update the MODEL_FILE and LABEL_FILE paths to point to
  // the ones you produced.
  //
  // To use v3 Inception model, strip the DecodeJpeg Op from your retrained
  // model first:
  //
  // python strip_unused.py \
  // --input_graph= \
  // --output_graph= \
  // --input_node_names="Mul" \
  // --output_node_names="final_result" \
  // --input_binary=true
  private static final int INPUT_SIZE = 224;
  private static final int IMAGE_MEAN = 117;
  private static final float IMAGE_STD = 1;
  private static final String INPUT_NAME = "input";
  private static final String OUTPUT_NAME = "output";


  private static final String MODEL_FILE = "file:///android_asset/tensorflow_inception_graph.pb";
  private static final String LABEL_FILE =
      "file:///android_asset/imagenet_comp_graph_label_strings.txt";


  // Preserve aspect ratio when mapping the preview frame onto the square crop.
  private static final boolean MAINTAIN_ASPECT = true;

  private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);


  private Integer sensorOrientation;
  private Classifier classifier;
  // Maps preview-frame coordinates into crop coordinates; inverse kept alongside.
  private Matrix frameToCropTransform;
  private Matrix cropToFrameTransform;


  private BorderedText borderedText;


  @Override
  protected int getLayoutId() {
    return R.layout.camera_connection_fragment;
  }

  @Override
  protected Size getDesiredPreviewFrameSize() {
    return DESIRED_PREVIEW_SIZE;
  }

  private static final float TEXT_SIZE_DIP = 10;

  /**
   * Called once the camera preview size is known: builds the classifier, allocates
   * frame/crop bitmaps, precomputes the frame<->crop transforms, and registers the
   * debug-overlay draw callback.
   */
  @Override
  public void onPreviewSizeChosen(final Size size, final int rotation) {
    final float textSizePx = TypedValue.applyDimension(
        TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
    borderedText = new BorderedText(textSizePx);
    borderedText.setTypeface(Typeface.MONOSPACE);

    classifier =
        TensorFlowImageClassifier.create(
            getAssets(),
            MODEL_FILE,
            LABEL_FILE,
            INPUT_SIZE,
            IMAGE_MEAN,
            IMAGE_STD,
            INPUT_NAME,
            OUTPUT_NAME);

    previewWidth = size.getWidth();
    previewHeight = size.getHeight();

    final Display display = getWindowManager().getDefaultDisplay();
    final int screenOrientation = display.getRotation();

    LOGGER.i("Sensor orientation: %d, Screen orientation: %d", rotation, screenOrientation);

    // NOTE(review): Display.getRotation() returns a Surface.ROTATION_* constant (0..3),
    // not degrees. Summing it with 'rotation' looks suspect unless the caller passes a
    // compatible unit — confirm against how CameraActivity supplies 'rotation'.
    sensorOrientation = rotation + screenOrientation;

    LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
    rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
    croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);

    frameToCropTransform = ImageUtils.getTransformationMatrix(
        previewWidth, previewHeight,
        INPUT_SIZE, INPUT_SIZE,
        sensorOrientation, MAINTAIN_ASPECT);

    cropToFrameTransform = new Matrix();
    frameToCropTransform.invert(cropToFrameTransform);

    addCallback(
        new DrawCallback() {
          @Override
          public void drawCallback(final Canvas canvas) {
            renderDebug(canvas);
          }
        });
  }

  /**
   * Converts the latest preview frame into the cropped network input, then runs
   * inference on the background thread and publishes the results.
   */
  @Override
  protected void processImage() {
    rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
    final Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

    // For examining the actual TF input.
    if (SAVE_PREVIEW_BITMAP) {
      ImageUtils.saveBitmap(croppedBitmap);
    }
    runInBackground(
        new Runnable() {
          @Override
          public void run() {
            final long startTime = SystemClock.uptimeMillis();
            final List results = classifier.recognizeImage(croppedBitmap);
            lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
            LOGGER.i("Detect: %s", results);
            cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
            if (resultsView == null) {
              resultsView = (ResultsView) findViewById(R.id.results);
            }
            resultsView.setResults(results);
            requestRender();
            readyForNextImage();
          }
        });
  }

  // NOTE(review): 'classifier' is only assigned in onPreviewSizeChosen(); if debug is
  // toggled before that, this dereferences null — confirm the call order in CameraActivity.
  @Override
  public void onSetDebug(boolean debug) {
    classifier.enableStatLogging(debug);
  }

  /**
   * Draws the debug overlay: a scaled copy of the last network input in the corner
   * plus classifier statistics and frame/crop/rotation/timing info as text lines.
   */
  private void renderDebug(final Canvas canvas) {
    if (!isDebug()) {
      return;
    }
    final Bitmap copy = cropCopyBitmap;
    if (copy != null) {
      final Matrix matrix = new Matrix();
      final float scaleFactor = 2;
      matrix.postScale(scaleFactor, scaleFactor);
      // Anchor the scaled crop to the bottom-right corner of the canvas.
      matrix.postTranslate(
          canvas.getWidth() - copy.getWidth() * scaleFactor,
          canvas.getHeight() - copy.getHeight() * scaleFactor);
      canvas.drawBitmap(copy, matrix, new Paint());

      final Vector lines = new Vector();
      if (classifier != null) {
        String statString = classifier.getStatString();
        String[] statLines = statString.split("\n");
        for (String line : statLines) {
          lines.add(line);
        }
      }

      lines.add("Frame: " + previewWidth + "x" + previewHeight);
      lines.add("Crop: " + copy.getWidth() + "x" + copy.getHeight());
      lines.add("View: " + canvas.getWidth() + "x" + canvas.getHeight());
      lines.add("Rotation: " + sensorOrientation);
      lines.add("Inference time: " + lastProcessingTimeMs + "ms");

      borderedText.drawLines(canvas, 10, canvas.getHeight() - 10, lines);
    }
  }
}
221 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/LegacyCameraConnectionFragment.java:
--------------------------------------------------------------------------------
1 | package org.tensorflow.demo;
2 |
3 | /*
4 | * Copyright 2017 The TensorFlow Authors. All Rights Reserved.
5 | *
6 | * Licensed under the Apache License, Version 2.0 (the "License");
7 | * you may not use this file except in compliance with the License.
8 | * You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | import android.app.Fragment;
20 | import android.graphics.SurfaceTexture;
21 | import android.hardware.Camera;
22 | import android.hardware.Camera.CameraInfo;
23 | import android.os.Bundle;
24 | import android.os.Handler;
25 | import android.os.HandlerThread;
26 | import android.util.Size;
27 | import android.util.SparseIntArray;
28 | import android.view.LayoutInflater;
29 | import android.view.Surface;
30 | import android.view.TextureView;
31 | import android.view.View;
32 | import android.view.ViewGroup;
33 | import java.io.IOException;
34 | import java.util.List;
35 | import org.tensorflow.demo.env.ImageUtils;
36 | import org.tensorflow.demo.env.Logger;
37 | import org.tensorflow.demo.R; // Explicit import needed for internal Google builds.
38 |
39 | public class LegacyCameraConnectionFragment extends Fragment {
40 | private Camera camera;
41 | private static final Logger LOGGER = new Logger();
42 | private Camera.PreviewCallback imageListener;
43 | private Size desiredSize;
44 |
45 | /**
46 | * The layout identifier to inflate for this Fragment.
47 | */
48 | private int layout;
49 |
50 | public LegacyCameraConnectionFragment(
51 | final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
52 | this.imageListener = imageListener;
53 | this.layout = layout;
54 | this.desiredSize = desiredSize;
55 | }
56 |
57 | /**
58 | * Conversion from screen rotation to JPEG orientation.
59 | */
60 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
61 |
62 | static {
63 | ORIENTATIONS.append(Surface.ROTATION_0, 90);
64 | ORIENTATIONS.append(Surface.ROTATION_90, 0);
65 | ORIENTATIONS.append(Surface.ROTATION_180, 270);
66 | ORIENTATIONS.append(Surface.ROTATION_270, 180);
67 | }
68 |
69 | /**
70 | * {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
71 | * {@link TextureView}.
72 | */
73 | private final TextureView.SurfaceTextureListener surfaceTextureListener =
74 | new TextureView.SurfaceTextureListener() {
75 | @Override
76 | public void onSurfaceTextureAvailable(
77 | final SurfaceTexture texture, final int width, final int height) {
78 |
79 | int index = getCameraId();
80 | camera = Camera.open(index);
81 |
82 | try {
83 | Camera.Parameters parameters = camera.getParameters();
84 | parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
85 |
86 | List cameraSizes = parameters.getSupportedPreviewSizes();
87 | Size[] sizes = new Size[cameraSizes.size()];
88 | int i = 0;
89 | for (Camera.Size size : cameraSizes) {
90 | sizes[i++] = new Size(size.width, size.height);
91 | }
92 | Size previewSize =
93 | CameraConnectionFragment.chooseOptimalSize(
94 | sizes, desiredSize.getWidth(), desiredSize.getHeight());
95 | parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
96 | camera.setDisplayOrientation(90);
97 | camera.setParameters(parameters);
98 | camera.setPreviewTexture(texture);
99 | } catch (IOException exception) {
100 | camera.release();
101 | }
102 |
103 | camera.setPreviewCallbackWithBuffer(imageListener);
104 | Camera.Size s = camera.getParameters().getPreviewSize();
105 | camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
106 |
107 | textureView.setAspectRatio(s.height, s.width);
108 |
109 | camera.startPreview();
110 | }
111 |
112 | @Override
113 | public void onSurfaceTextureSizeChanged(
114 | final SurfaceTexture texture, final int width, final int height) {}
115 |
116 | @Override
117 | public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
118 | return true;
119 | }
120 |
121 | @Override
122 | public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
123 | };
124 |
125 | /**
126 | * An {@link AutoFitTextureView} for camera preview.
127 | */
128 | private AutoFitTextureView textureView;
129 |
130 | /**
131 | * An additional thread for running tasks that shouldn't block the UI.
132 | */
133 | private HandlerThread backgroundThread;
134 |
135 | @Override
136 | public View onCreateView(
137 | final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
138 | return inflater.inflate(layout, container, false);
139 | }
140 |
141 | @Override
142 | public void onViewCreated(final View view, final Bundle savedInstanceState) {
143 | textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
144 | }
145 |
146 | @Override
147 | public void onActivityCreated(final Bundle savedInstanceState) {
148 | super.onActivityCreated(savedInstanceState);
149 | }
150 |
151 | @Override
152 | public void onResume() {
153 | super.onResume();
154 | startBackgroundThread();
155 | // When the screen is turned off and turned back on, the SurfaceTexture is already
156 | // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
157 | // a camera and start preview from here (otherwise, we wait until the surface is ready in
158 | // the SurfaceTextureListener).
159 |
160 | if (textureView.isAvailable()) {
161 | camera.startPreview();
162 | } else {
163 | textureView.setSurfaceTextureListener(surfaceTextureListener);
164 | }
165 | }
166 |
167 | @Override
168 | public void onPause() {
169 | stopCamera();
170 | stopBackgroundThread();
171 | super.onPause();
172 | }
173 |
174 | /**
175 | * Starts a background thread and its {@link Handler}.
176 | */
177 | private void startBackgroundThread() {
178 | backgroundThread = new HandlerThread("CameraBackground");
179 | backgroundThread.start();
180 | }
181 |
182 | /**
183 | * Stops the background thread and its {@link Handler}.
184 | */
185 | private void stopBackgroundThread() {
186 | backgroundThread.quitSafely();
187 | try {
188 | backgroundThread.join();
189 | backgroundThread = null;
190 | } catch (final InterruptedException e) {
191 | LOGGER.e(e, "Exception!");
192 | }
193 | }
194 |
195 | protected void stopCamera() {
196 | if (camera != null) {
197 | camera.stopPreview();
198 | camera.setPreviewCallback(null);
199 | camera.release();
200 | camera = null;
201 | }
202 | }
203 |
204 | private int getCameraId() {
205 | CameraInfo ci = new CameraInfo();
206 | for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
207 | Camera.getCameraInfo(i, ci);
208 | if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
209 | return i;
210 | }
211 | return -1; // No camera found
212 | }
213 | }
214 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/OverlayView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.util.AttributeSet;
21 | import android.view.View;
22 | import java.util.LinkedList;
23 | import java.util.List;
24 |
25 | /**
26 | * A simple View providing a render callback to other classes.
27 | */
28 | public class OverlayView extends View {
29 | private final List callbacks = new LinkedList();
30 |
31 | public OverlayView(final Context context, final AttributeSet attrs) {
32 | super(context, attrs);
33 | }
34 |
35 | /**
36 | * Interface defining the callback for client classes.
37 | */
38 | public interface DrawCallback {
39 | public void drawCallback(final Canvas canvas);
40 | }
41 |
42 | public void addCallback(final DrawCallback callback) {
43 | callbacks.add(callback);
44 | }
45 |
46 | @Override
47 | public synchronized void draw(final Canvas canvas) {
48 | for (final DrawCallback callback : callbacks) {
49 | callback.drawCallback(canvas);
50 | }
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/RecognitionScoreView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.graphics.Paint;
21 | import android.util.AttributeSet;
22 | import android.util.TypedValue;
23 | import android.view.View;
24 |
25 | import org.tensorflow.demo.Classifier.Recognition;
26 |
27 | import java.util.List;
28 |
29 | public class RecognitionScoreView extends View implements ResultsView {
30 | private static final float TEXT_SIZE_DIP = 24;
31 | private List results;
32 | private final float textSizePx;
33 | private final Paint fgPaint;
34 | private final Paint bgPaint;
35 |
36 | public RecognitionScoreView(final Context context, final AttributeSet set) {
37 | super(context, set);
38 |
39 | textSizePx =
40 | TypedValue.applyDimension(
41 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
42 | fgPaint = new Paint();
43 | fgPaint.setTextSize(textSizePx);
44 |
45 | bgPaint = new Paint();
46 | bgPaint.setColor(0xcc4285f4);
47 | }
48 |
49 | @Override
50 | public void setResults(final List results) {
51 | this.results = results;
52 | postInvalidate();
53 | }
54 |
55 | @Override
56 | public void onDraw(final Canvas canvas) {
57 | final int x = 10;
58 | int y = (int) (fgPaint.getTextSize() * 1.5f);
59 |
60 | canvas.drawPaint(bgPaint);
61 |
62 | if (results != null) {
63 | for (final Recognition recog : results) {
64 | canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint);
65 | y += fgPaint.getTextSize() * 1.5f;
66 | }
67 | }
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/ResultsView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import org.tensorflow.demo.Classifier.Recognition;
19 |
20 | import java.util.List;
21 |
/** Implemented by views that can display a list of {@link Classifier.Recognition} results. */
public interface ResultsView {
  // NOTE: raw List — elements are expected to be Classifier.Recognition instances.
  public void setResults(final List results);
}
25 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/TensorFlowImageClassifier.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import android.content.res.AssetManager;
19 | import android.graphics.Bitmap;
20 | import android.os.Trace;
21 | import android.util.Log;
22 | import java.io.BufferedReader;
23 | import java.io.IOException;
24 | import java.io.InputStreamReader;
25 | import java.util.ArrayList;
26 | import java.util.Comparator;
27 | import java.util.List;
28 | import java.util.PriorityQueue;
29 | import java.util.Vector;
30 | import org.tensorflow.Operation;
31 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
32 |
33 | /** A classifier specialized to label images using TensorFlow. */
34 | public class TensorFlowImageClassifier implements Classifier {
35 | private static final String TAG = "TensorFlowImageClassifier";
36 |
37 | // Only return this many results with at least this confidence.
38 | private static final int MAX_RESULTS = 3;
39 | private static final float THRESHOLD = 0.1f;
40 |
41 | // Config values.
42 | private String inputName;
43 | private String outputName;
44 | private int inputSize;
45 | private int imageMean;
46 | private float imageStd;
47 |
48 | // Pre-allocated buffers.
49 | private Vector labels = new Vector();
50 | private int[] intValues;
51 | private float[] floatValues;
52 | private float[] outputs;
53 | private String[] outputNames;
54 |
55 | private boolean logStats = false;
56 |
57 | private TensorFlowInferenceInterface inferenceInterface;
58 |
59 | private TensorFlowImageClassifier() {}
60 |
61 | /**
62 | * Initializes a native TensorFlow session for classifying images.
63 | *
64 | * @param assetManager The asset manager to be used to load assets.
65 | * @param modelFilename The filepath of the model GraphDef protocol buffer.
66 | * @param labelFilename The filepath of label file for classes.
67 | * @param inputSize The input size. A square image of inputSize x inputSize is assumed.
68 | * @param imageMean The assumed mean of the image values.
69 | * @param imageStd The assumed std of the image values.
70 | * @param inputName The label of the image input node.
71 | * @param outputName The label of the output node.
72 | * @throws IOException
73 | */
74 | public static Classifier create(
75 | AssetManager assetManager,
76 | String modelFilename,
77 | String labelFilename,
78 | int inputSize,
79 | int imageMean,
80 | float imageStd,
81 | String inputName,
82 | String outputName) {
83 | TensorFlowImageClassifier c = new TensorFlowImageClassifier();
84 | c.inputName = inputName;
85 | c.outputName = outputName;
86 |
87 | // Read the label names into memory.
88 | // TODO(andrewharp): make this handle non-assets.
89 | String actualFilename = labelFilename.split("file:///android_asset/")[1];
90 | Log.i(TAG, "Reading labels from: " + actualFilename);
91 | BufferedReader br = null;
92 | try {
93 | br = new BufferedReader(new InputStreamReader(assetManager.open(actualFilename)));
94 | String line;
95 | while ((line = br.readLine()) != null) {
96 | c.labels.add(line);
97 | }
98 | br.close();
99 | } catch (IOException e) {
100 | throw new RuntimeException("Problem reading label file!" , e);
101 | }
102 |
103 | c.inferenceInterface = new TensorFlowInferenceInterface(assetManager, modelFilename);
104 |
105 | // The shape of the output is [N, NUM_CLASSES], where N is the batch size.
106 | final Operation operation = c.inferenceInterface.graphOperation(outputName);
107 | final int numClasses = (int) operation.output(0).shape().size(1);
108 | Log.i(TAG, "Read " + c.labels.size() + " labels, output layer size is " + numClasses);
109 |
110 | // Ideally, inputSize could have been retrieved from the shape of the input operation. Alas,
111 | // the placeholder node for input in the graphdef typically used does not specify a shape, so it
112 | // must be passed in as a parameter.
113 | c.inputSize = inputSize;
114 | c.imageMean = imageMean;
115 | c.imageStd = imageStd;
116 |
117 | // Pre-allocate buffers.
118 | c.outputNames = new String[] {outputName};
119 | c.intValues = new int[inputSize * inputSize];
120 | c.floatValues = new float[inputSize * inputSize * 3];
121 | c.outputs = new float[numClasses];
122 |
123 | return c;
124 | }
125 |
126 | @Override
127 | public List recognizeImage(final Bitmap bitmap) {
128 | // Log this method so that it can be analyzed with systrace.
129 | Trace.beginSection("recognizeImage");
130 |
131 | Trace.beginSection("preprocessBitmap");
132 | // Preprocess the image data from 0-255 int to normalized float based
133 | // on the provided parameters.
134 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
135 | for (int i = 0; i < intValues.length; ++i) {
136 | final int val = intValues[i];
137 | floatValues[i * 3 + 0] = (((val >> 16) & 0xFF) - imageMean) / imageStd;
138 | floatValues[i * 3 + 1] = (((val >> 8) & 0xFF) - imageMean) / imageStd;
139 | floatValues[i * 3 + 2] = ((val & 0xFF) - imageMean) / imageStd;
140 | }
141 | Trace.endSection();
142 |
143 | // Copy the input data into TensorFlow.
144 | Trace.beginSection("feed");
145 | inferenceInterface.feed(inputName, floatValues, 1, inputSize, inputSize, 3);
146 | Trace.endSection();
147 |
148 | // Run the inference call.
149 | Trace.beginSection("run");
150 | inferenceInterface.run(outputNames, logStats);
151 | Trace.endSection();
152 |
153 | // Copy the output Tensor back into the output array.
154 | Trace.beginSection("fetch");
155 | inferenceInterface.fetch(outputName, outputs);
156 | Trace.endSection();
157 |
158 | // Find the best classifications.
159 | PriorityQueue pq =
160 | new PriorityQueue(
161 | 3,
162 | new Comparator() {
163 | @Override
164 | public int compare(Recognition lhs, Recognition rhs) {
165 | // Intentionally reversed to put high confidence at the head of the queue.
166 | return Float.compare(rhs.getConfidence(), lhs.getConfidence());
167 | }
168 | });
169 | for (int i = 0; i < outputs.length; ++i) {
170 | if (outputs[i] > THRESHOLD) {
171 | pq.add(
172 | new Recognition(
173 | "" + i, labels.size() > i ? labels.get(i) : "unknown", outputs[i], null));
174 | }
175 | }
176 | final ArrayList recognitions = new ArrayList();
177 | int recognitionsSize = Math.min(pq.size(), MAX_RESULTS);
178 | for (int i = 0; i < recognitionsSize; ++i) {
179 | recognitions.add(pq.poll());
180 | }
181 | Trace.endSection(); // "recognizeImage"
182 | return recognitions;
183 | }
184 |
185 | @Override
186 | public void enableStatLogging(boolean logStats) {
187 | this.logStats = logStats;
188 | }
189 |
190 | @Override
191 | public String getStatString() {
192 | return inferenceInterface.getStatString();
193 | }
194 |
195 | @Override
196 | public void close() {
197 | inferenceInterface.close();
198 | }
199 | }
200 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/env/BorderedText.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo.env;
17 |
18 | import android.graphics.Canvas;
19 | import android.graphics.Color;
20 | import android.graphics.Paint;
21 | import android.graphics.Paint.Align;
22 | import android.graphics.Paint.Style;
23 | import android.graphics.Rect;
24 | import android.graphics.Typeface;
25 | import java.util.Vector;
26 |
27 | /**
28 | * A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas.
29 | */
30 | public class BorderedText {
31 | private final Paint interiorPaint;
32 | private final Paint exteriorPaint;
33 |
34 | private final float textSize;
35 |
36 | /**
37 | * Creates a left-aligned bordered text object with a white interior, and a black exterior with
38 | * the specified text size.
39 | *
40 | * @param textSize text size in pixels
41 | */
42 | public BorderedText(final float textSize) {
43 | this(Color.WHITE, Color.BLACK, textSize);
44 | }
45 |
46 | /**
47 | * Create a bordered text object with the specified interior and exterior colors, text size and
48 | * alignment.
49 | *
50 | * @param interiorColor the interior text color
51 | * @param exteriorColor the exterior text color
52 | * @param textSize text size in pixels
53 | */
54 | public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
55 | interiorPaint = new Paint();
56 | interiorPaint.setTextSize(textSize);
57 | interiorPaint.setColor(interiorColor);
58 | interiorPaint.setStyle(Style.FILL);
59 | interiorPaint.setAntiAlias(false);
60 | interiorPaint.setAlpha(255);
61 |
62 | exteriorPaint = new Paint();
63 | exteriorPaint.setTextSize(textSize);
64 | exteriorPaint.setColor(exteriorColor);
65 | exteriorPaint.setStyle(Style.FILL_AND_STROKE);
66 | exteriorPaint.setStrokeWidth(textSize / 8);
67 | exteriorPaint.setAntiAlias(false);
68 | exteriorPaint.setAlpha(255);
69 |
70 | this.textSize = textSize;
71 | }
72 |
73 | public void setTypeface(Typeface typeface) {
74 | interiorPaint.setTypeface(typeface);
75 | exteriorPaint.setTypeface(typeface);
76 | }
77 |
78 | public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
79 | canvas.drawText(text, posX, posY, exteriorPaint);
80 | canvas.drawText(text, posX, posY, interiorPaint);
81 | }
82 |
83 | public void drawLines(Canvas canvas, final float posX, final float posY, Vector lines) {
84 | int lineNum = 0;
85 | for (final String line : lines) {
86 | drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
87 | ++lineNum;
88 | }
89 | }
90 |
91 | public void setInteriorColor(final int color) {
92 | interiorPaint.setColor(color);
93 | }
94 |
95 | public void setExteriorColor(final int color) {
96 | exteriorPaint.setColor(color);
97 | }
98 |
99 | public float getTextSize() {
100 | return textSize;
101 | }
102 |
103 | public void setAlpha(final int alpha) {
104 | interiorPaint.setAlpha(alpha);
105 | exteriorPaint.setAlpha(alpha);
106 | }
107 |
108 | public void getTextBounds(
109 | final String line, final int index, final int count, final Rect lineBounds) {
110 | interiorPaint.getTextBounds(line, index, count, lineBounds);
111 | }
112 |
113 | public void setTextAlign(final Align align) {
114 | interiorPaint.setTextAlign(align);
115 | exteriorPaint.setTextAlign(align);
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/env/ImageUtils.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo.env;
17 |
18 | import android.graphics.Bitmap;
19 | import android.graphics.Matrix;
20 | import android.os.Environment;
21 | import java.io.File;
22 | import java.io.FileOutputStream;
23 |
24 | /**
25 | * Utility class for manipulating images.
26 | **/
27 | public class ImageUtils {
28 | @SuppressWarnings("unused")
29 | private static final Logger LOGGER = new Logger();
30 |
31 | static {
32 | try {
33 | System.loadLibrary("tensorflow_demo");
34 | } catch (UnsatisfiedLinkError e) {
35 | LOGGER.w("Native library not found, native RGB -> YUV conversion may be unavailable.");
36 | }
37 | }
38 |
39 | /**
40 | * Utility method to compute the allocated size in bytes of a YUV420SP image
41 | * of the given dimensions.
42 | */
43 | public static int getYUVByteSize(final int width, final int height) {
44 | // The luminance plane requires 1 byte per pixel.
45 | final int ySize = width * height;
46 |
47 | // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
48 | // Each 2x2 block takes 2 bytes to encode, one each for U and V.
49 | final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
50 |
51 | return ySize + uvSize;
52 | }
53 |
54 | /**
55 | * Saves a Bitmap object to disk for analysis.
56 | *
57 | * @param bitmap The bitmap to save.
58 | */
59 | public static void saveBitmap(final Bitmap bitmap) {
60 | saveBitmap(bitmap, "preview.png");
61 | }
62 |
63 | /**
64 | * Saves a Bitmap object to disk for analysis.
65 | *
66 | * @param bitmap The bitmap to save.
67 | * @param filename The location to save the bitmap to.
68 | */
69 | public static void saveBitmap(final Bitmap bitmap, final String filename) {
70 | final String root =
71 | Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
72 | LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
73 | final File myDir = new File(root);
74 |
75 | if (!myDir.mkdirs()) {
76 | LOGGER.i("Make dir failed");
77 | }
78 |
79 | final String fname = filename;
80 | final File file = new File(myDir, fname);
81 | if (file.exists()) {
82 | file.delete();
83 | }
84 | try {
85 | final FileOutputStream out = new FileOutputStream(file);
86 | bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
87 | out.flush();
88 | out.close();
89 | } catch (final Exception e) {
90 | LOGGER.e(e, "Exception!");
91 | }
92 | }
93 |
94 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
95 | // are normalized to eight bits.
96 | static final int kMaxChannelValue = 262143;
97 |
98 | // Always prefer the native implementation if available.
99 | private static boolean useNativeConversion = true;
100 |
101 | public static void convertYUV420SPToARGB8888(
102 | byte[] input,
103 | int width,
104 | int height,
105 | int[] output) {
106 | if (useNativeConversion) {
107 | try {
108 | ImageUtils.convertYUV420SPToARGB8888(input, output, width, height, false);
109 | return;
110 | } catch (UnsatisfiedLinkError e) {
111 | LOGGER.w(
112 | "Native YUV420SP -> RGB implementation not found, falling back to Java implementation");
113 | useNativeConversion = false;
114 | }
115 | }
116 |
117 | // Java implementation of YUV420SP to ARGB8888 converting
118 | final int frameSize = width * height;
119 | for (int j = 0, yp = 0; j < height; j++) {
120 | int uvp = frameSize + (j >> 1) * width;
121 | int u = 0;
122 | int v = 0;
123 |
124 | for (int i = 0; i < width; i++, yp++) {
125 | int y = 0xff & input[yp];
126 | if ((i & 1) == 0) {
127 | v = 0xff & input[uvp++];
128 | u = 0xff & input[uvp++];
129 | }
130 |
131 | output[yp] = YUV2RGB(y, u, v);
132 | }
133 | }
134 | }
135 |
136 | private static int YUV2RGB(int y, int u, int v) {
137 | // Adjust and check YUV values
138 | y = (y - 16) < 0 ? 0 : (y - 16);
139 | u -= 128;
140 | v -= 128;
141 |
142 | // This is the floating point equivalent. We do the conversion in integer
143 | // because some Android devices do not have floating point in hardware.
144 | // nR = (int)(1.164 * nY + 2.018 * nU);
145 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
146 | // nB = (int)(1.164 * nY + 1.596 * nV);
147 | int y1192 = 1192 * y;
148 | int r = (y1192 + 1634 * v);
149 | int g = (y1192 - 833 * v - 400 * u);
150 | int b = (y1192 + 2066 * u);
151 |
152 | // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
153 | r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
154 | g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
155 | b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
156 |
157 | return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
158 | }
159 |
160 |
161 | public static void convertYUV420ToARGB8888(
162 | byte[] yData,
163 | byte[] uData,
164 | byte[] vData,
165 | int width,
166 | int height,
167 | int yRowStride,
168 | int uvRowStride,
169 | int uvPixelStride,
170 | int[] out) {
171 | if (useNativeConversion) {
172 | try {
173 | convertYUV420ToARGB8888(
174 | yData, uData, vData, out, width, height, yRowStride, uvRowStride, uvPixelStride, false);
175 | return;
176 | } catch (UnsatisfiedLinkError e) {
177 | LOGGER.w(
178 | "Native YUV420 -> RGB implementation not found, falling back to Java implementation");
179 | useNativeConversion = false;
180 | }
181 | }
182 |
183 | int yp = 0;
184 | for (int j = 0; j < height; j++) {
185 | int pY = yRowStride * j;
186 | int pUV = uvRowStride * (j >> 1);
187 |
188 | for (int i = 0; i < width; i++) {
189 | int uv_offset = pUV + (i >> 1) * uvPixelStride;
190 |
191 | out[yp++] = YUV2RGB(
192 | 0xff & yData[pY + i],
193 | 0xff & uData[uv_offset],
194 | 0xff & vData[uv_offset]);
195 | }
196 | }
197 | }
198 |
199 |
200 | /**
201 | * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width and height. The
202 | * input and output must already be allocated and non-null. For efficiency, no error checking is
203 | * performed.
204 | *
205 | * @param input The array of YUV 4:2:0 input data.
206 | * @param output A pre-allocated array for the ARGB 8:8:8:8 output data.
207 | * @param width The width of the input image.
208 | * @param height The height of the input image.
209 | * @param halfSize If true, downsample to 50% in each dimension, otherwise not.
210 | */
211 | private static native void convertYUV420SPToARGB8888(
212 | byte[] input, int[] output, int width, int height, boolean halfSize);
213 |
214 | /**
215 | * Converts YUV420 semi-planar data to ARGB 8888 data using the supplied width
216 | * and height. The input and output must already be allocated and non-null.
217 | * For efficiency, no error checking is performed.
218 | *
219 | * @param y
220 | * @param u
221 | * @param v
222 | * @param uvPixelStride
223 | * @param width The width of the input image.
224 | * @param height The height of the input image.
225 | * @param halfSize If true, downsample to 50% in each dimension, otherwise not.
226 | * @param output A pre-allocated array for the ARGB 8:8:8:8 output data.
227 | */
228 | private static native void convertYUV420ToARGB8888(
229 | byte[] y,
230 | byte[] u,
231 | byte[] v,
232 | int[] output,
233 | int width,
234 | int height,
235 | int yRowStride,
236 | int uvRowStride,
237 | int uvPixelStride,
238 | boolean halfSize);
239 |
240 | /**
241 | * Converts YUV420 semi-planar data to RGB 565 data using the supplied width
242 | * and height. The input and output must already be allocated and non-null.
243 | * For efficiency, no error checking is performed.
244 | *
245 | * @param input The array of YUV 4:2:0 input data.
246 | * @param output A pre-allocated array for the RGB 5:6:5 output data.
247 | * @param width The width of the input image.
248 | * @param height The height of the input image.
249 | */
250 | private static native void convertYUV420SPToRGB565(
251 | byte[] input, byte[] output, int width, int height);
252 |
253 | /**
254 | * Converts 32-bit ARGB8888 image data to YUV420SP data. This is useful, for
255 | * instance, in creating data to feed the classes that rely on raw camera
256 | * preview frames.
257 | *
258 | * @param input An array of input pixels in ARGB8888 format.
259 | * @param output A pre-allocated array for the YUV420SP output data.
260 | * @param width The width of the input image.
261 | * @param height The height of the input image.
262 | */
263 | private static native void convertARGB8888ToYUV420SP(
264 | int[] input, byte[] output, int width, int height);
265 |
266 | /**
267 | * Converts 16-bit RGB565 image data to YUV420SP data. This is useful, for
268 | * instance, in creating data to feed the classes that rely on raw camera
269 | * preview frames.
270 | *
271 | * @param input An array of input pixels in RGB565 format.
272 | * @param output A pre-allocated array for the YUV420SP output data.
273 | * @param width The width of the input image.
274 | * @param height The height of the input image.
275 | */
276 | private static native void convertRGB565ToYUV420SP(
277 | byte[] input, byte[] output, int width, int height);
278 |
279 | /**
280 | * Returns a transformation matrix from one reference frame into another.
281 | * Handles cropping (if maintaining aspect ratio is desired) and rotation.
282 | *
283 | * @param srcWidth Width of source frame.
284 | * @param srcHeight Height of source frame.
285 | * @param dstWidth Width of destination frame.
286 | * @param dstHeight Height of destination frame.
287 | * @param applyRotation Amount of rotation to apply from one frame to another.
288 | * Must be a multiple of 90.
289 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
290 | * cropping the image if necessary.
291 | * @return The transformation fulfilling the desired requirements.
292 | */
293 | public static Matrix getTransformationMatrix(
294 | final int srcWidth,
295 | final int srcHeight,
296 | final int dstWidth,
297 | final int dstHeight,
298 | final int applyRotation,
299 | final boolean maintainAspectRatio) {
300 | final Matrix matrix = new Matrix();
301 |
302 | if (applyRotation != 0) {
303 | // Translate so center of image is at origin.
304 | matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
305 |
306 | // Rotate around origin.
307 | matrix.postRotate(applyRotation);
308 | }
309 |
310 | // Account for the already applied rotation, if any, and then determine how
311 | // much scaling is needed for each axis.
312 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
313 |
314 | final int inWidth = transpose ? srcHeight : srcWidth;
315 | final int inHeight = transpose ? srcWidth : srcHeight;
316 |
317 | // Apply scaling if necessary.
318 | if (inWidth != dstWidth || inHeight != dstHeight) {
319 | final float scaleFactorX = dstWidth / (float) inWidth;
320 | final float scaleFactorY = dstHeight / (float) inHeight;
321 |
322 | if (maintainAspectRatio) {
323 | // Scale by minimum factor so that dst is filled completely while
324 | // maintaining the aspect ratio. Some image may fall off the edge.
325 | final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
326 | matrix.postScale(scaleFactor, scaleFactor);
327 | } else {
328 | // Scale exactly to fill dst from src.
329 | matrix.postScale(scaleFactorX, scaleFactorY);
330 | }
331 | }
332 |
333 | if (applyRotation != 0) {
334 | // Translate back from origin centered reference to destination frame.
335 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
336 | }
337 |
338 | return matrix;
339 | }
340 | }
341 |
--------------------------------------------------------------------------------
/app/src/main/java/org/tensorflow/demo/env/Logger.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo.env;
17 |
18 | import android.util.Log;
19 |
20 | import java.util.HashSet;
21 | import java.util.Set;
22 |
23 | /**
24 | * Wrapper for the platform log function, allows convenient message prefixing and log disabling.
25 | */
26 | public final class Logger {
27 | private static final String DEFAULT_TAG = "tensorflow";
28 | private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
29 |
30 | // Classes to be ignored when examining the stack trace
31 | private static final Set IGNORED_CLASS_NAMES;
32 |
33 | static {
34 | IGNORED_CLASS_NAMES = new HashSet(3);
35 | IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
36 | IGNORED_CLASS_NAMES.add("java.lang.Thread");
37 | IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
38 | }
39 |
40 | private final String tag;
41 | private final String messagePrefix;
42 | private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
43 |
44 | /**
45 | * Creates a Logger using the class name as the message prefix.
46 | *
47 | * @param clazz the simple name of this class is used as the message prefix.
48 | */
49 | public Logger(final Class> clazz) {
50 | this(clazz.getSimpleName());
51 | }
52 |
53 | /**
54 | * Creates a Logger using the specified message prefix.
55 | *
56 | * @param messagePrefix is prepended to the text of every message.
57 | */
58 | public Logger(final String messagePrefix) {
59 | this(DEFAULT_TAG, messagePrefix);
60 | }
61 |
62 | /**
63 | * Creates a Logger with a custom tag and a custom message prefix. If the message prefix
64 | * is set to null
, the caller's class name is used as the prefix.
65 | *
66 | * @param tag identifies the source of a log message.
67 | * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is
68 | * being used
69 | */
70 | public Logger(final String tag, final String messagePrefix) {
71 | this.tag = tag;
72 | final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
73 | this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
74 | }
75 |
76 | /**
77 | * Creates a Logger using the caller's class name as the message prefix.
78 | */
79 | public Logger() {
80 | this(DEFAULT_TAG, null);
81 | }
82 |
83 | /**
84 | * Creates a Logger using the caller's class name as the message prefix.
85 | */
86 | public Logger(final int minLogLevel) {
87 | this(DEFAULT_TAG, null);
88 | this.minLogLevel = minLogLevel;
89 | }
90 |
91 | public void setMinLogLevel(final int minLogLevel) {
92 | this.minLogLevel = minLogLevel;
93 | }
94 |
95 | public boolean isLoggable(final int logLevel) {
96 | return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
97 | }
98 |
99 | /**
100 | * Return caller's simple name.
101 | *
102 | * Android getStackTrace() returns an array that looks like this:
103 | * stackTrace[0]: dalvik.system.VMStack
104 | * stackTrace[1]: java.lang.Thread
105 | * stackTrace[2]: com.google.android.apps.unveil.env.UnveilLogger
106 | * stackTrace[3]: com.google.android.apps.unveil.BaseApplication
107 | *
108 | * This function returns the simple version of the first non-filtered name.
109 | *
110 | * @return caller's simple name
111 | */
112 | private static String getCallerSimpleName() {
113 | // Get the current callstack so we can pull the class of the caller off of it.
114 | final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
115 |
116 | for (final StackTraceElement elem : stackTrace) {
117 | final String className = elem.getClassName();
118 | if (!IGNORED_CLASS_NAMES.contains(className)) {
119 | // We're only interested in the simple name of the class, not the complete package.
120 | final String[] classParts = className.split("\\.");
121 | return classParts[classParts.length - 1];
122 | }
123 | }
124 |
125 | return Logger.class.getSimpleName();
126 | }
127 |
128 | private String toMessage(final String format, final Object... args) {
129 | return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
130 | }
131 |
132 | public void v(final String format, final Object... args) {
133 | if (isLoggable(Log.VERBOSE)) {
134 | Log.v(tag, toMessage(format, args));
135 | }
136 | }
137 |
138 | public void v(final Throwable t, final String format, final Object... args) {
139 | if (isLoggable(Log.VERBOSE)) {
140 | Log.v(tag, toMessage(format, args), t);
141 | }
142 | }
143 |
144 | public void d(final String format, final Object... args) {
145 | if (isLoggable(Log.DEBUG)) {
146 | Log.d(tag, toMessage(format, args));
147 | }
148 | }
149 |
150 | public void d(final Throwable t, final String format, final Object... args) {
151 | if (isLoggable(Log.DEBUG)) {
152 | Log.d(tag, toMessage(format, args), t);
153 | }
154 | }
155 |
156 | public void i(final String format, final Object... args) {
157 | if (isLoggable(Log.INFO)) {
158 | Log.i(tag, toMessage(format, args));
159 | }
160 | }
161 |
162 | public void i(final Throwable t, final String format, final Object... args) {
163 | if (isLoggable(Log.INFO)) {
164 | Log.i(tag, toMessage(format, args), t);
165 | }
166 | }
167 |
168 | public void w(final String format, final Object... args) {
169 | if (isLoggable(Log.WARN)) {
170 | Log.w(tag, toMessage(format, args));
171 | }
172 | }
173 |
174 | public void w(final Throwable t, final String format, final Object... args) {
175 | if (isLoggable(Log.WARN)) {
176 | Log.w(tag, toMessage(format, args), t);
177 | }
178 | }
179 |
180 | public void e(final String format, final Object... args) {
181 | if (isLoggable(Log.ERROR)) {
182 | Log.e(tag, toMessage(format, args));
183 | }
184 | }
185 |
186 | public void e(final Throwable t, final String format, final Object... args) {
187 | if (isLoggable(Log.ERROR)) {
188 | Log.e(tag, toMessage(format, args), t);
189 | }
190 | }
191 | }
192 |
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-hdpi/ic_action_info.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-hdpi/tile.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-hdpi/tile.9.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-mdpi/ic_action_info.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-xhdpi/ic_action_info.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-xxhdpi/ic_action_info.png
--------------------------------------------------------------------------------
/app/src/main/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/app/src/main/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_camera.xml:
--------------------------------------------------------------------------------
1 |
16 |
23 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/camera_connection_fragment.xml:
--------------------------------------------------------------------------------
1 |
16 |
19 |
20 |
25 |
26 |
31 |
32 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/camera_connection_fragment_stylize.xml:
--------------------------------------------------------------------------------
1 |
16 |
20 |
25 |
26 |
31 |
32 |
39 |
40 |
45 |
46 |
51 |
52 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/camera_connection_fragment_tracking.xml:
--------------------------------------------------------------------------------
1 |
16 |
19 |
20 |
24 |
25 |
29 |
30 |
34 |
35 |
--------------------------------------------------------------------------------
/app/src/main/res/values-sw600dp/template-dimens.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 | @dimen/margin_huge
22 | @dimen/margin_medium
23 |
24 |
25 |
--------------------------------------------------------------------------------
/app/src/main/res/values-sw600dp/template-styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/app/src/main/res/values-v11/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
8 |
11 |
12 |
19 |
20 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/app/src/main/res/values-v11/template-styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/app/src/main/res/values-v14/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/app/src/main/res/values-v21/base-colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/app/src/main/res/values-v21/base-template-styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 |
20 |
21 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/app/src/main/res/values/attrs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/app/src/main/res/values/base-strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 | TensorFlow Demo
20 | TF Classify
21 |
22 |
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 | #cc4285f4
19 |
20 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 | Info
18 | This sample needs camera permission.
19 | This device doesn\'t support Camera2 API.
20 |
21 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/app/src/main/res/values/template-dimens.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 | 4dp
22 | 8dp
23 | 16dp
24 | 32dp
25 | 64dp
26 |
27 |
28 |
29 | @dimen/margin_medium
30 | @dimen/margin_medium
31 |
32 |
33 |
--------------------------------------------------------------------------------
/app/src/main/res/values/template-styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
34 |
35 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    repositories {
        google()
        // NOTE(review): JCenter was sunset in 2021; consider migrating to mavenCentral().
        jcenter()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:3.2.1' // gradle-3.3.0 is not supported yet. gradle-3.2.1 works well
    }
}

// Repositories used to resolve dependencies of every module in this build.
allprojects {
    repositories {
        google()
        // NOTE(review): JCenter was sunset in 2021; consider migrating to mavenCentral().
        jcenter()
    }
}

// Deletes the build output of the root project (run with `gradlew clean`).
task clean(type: Delete) {
    delete rootProject.buildDir
}
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
# Project-wide Gradle settings.

# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.

# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html

# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
# -Xmx1536m caps the Gradle daemon heap at 1.5 GiB.
org.gradle.jvmargs=-Xmx1536m

# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nilhcem/tensorflow-classifier-android/3b3be1cc38e505a4127f74e5f514049a81d2f329/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
# Configuration read by GradleWrapperMain to locate, download, and cache the
# Gradle distribution used to build this project.
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
#!/usr/bin/env sh

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        # Absolute symlink target: follow it directly.
        PRG="$link"
    else
        # Relative symlink target: resolve it against the link's directory.
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# Compute the physical (symlink-free) directory containing this script,
# then return to the caller's working directory.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

# The wrapper jar bootstraps the real Gradle distribution at run time.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    # No JAVA_HOME: fall back to whatever 'java' the PATH provides.
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            # "maximum" requested: raise the soft limit up to the hard limit.
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi
106 |
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        # Only convert args that look like root paths and are not options.
        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    # Rebuild the positional parameters from the converted args (max 9).
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
# Single-quotes each argument (escaping embedded quotes) so the later
# `eval set --` reconstructs them without word splitting.
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "$@"
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

@rem APP_HOME is the directory containing this script (%~dp0).
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

@rem No JAVA_HOME: probe for a java.exe reachable via PATH.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
@rem Strip any surrounding quotes from JAVA_HOME before composing the path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants
@rem NOTE(review): both outcomes of the OS check fall through to the same
@rem label below; kept as-is for parity with the upstream wrapper script.

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
// Declares the modules that make up this Gradle build; :app is the only module.
include ':app'
--------------------------------------------------------------------------------