├── android
├── app
│ ├── .gitignore
│ ├── src
│ │ ├── main
│ │ │ ├── assets
│ │ │ │ └── labelmap.txt
│ │ │ ├── ml
│ │ │ │ └── detect.tflite
│ │ │ ├── res
│ │ │ │ ├── drawable-xxxhdpi
│ │ │ │ │ ├── caret.jpg
│ │ │ │ │ ├── chair.jpg
│ │ │ │ │ └── sample_image.jpg
│ │ │ │ ├── drawable-hdpi
│ │ │ │ │ └── ic_launcher.png
│ │ │ │ ├── drawable-mdpi
│ │ │ │ │ └── ic_launcher.png
│ │ │ │ ├── drawable-xxhdpi
│ │ │ │ │ ├── tfl2_logo.png
│ │ │ │ │ ├── ic_launcher.png
│ │ │ │ │ ├── icn_chevron_up.png
│ │ │ │ │ ├── tfl2_logo_dark.png
│ │ │ │ │ └── icn_chevron_down.png
│ │ │ │ ├── mipmap-hdpi
│ │ │ │ │ ├── ic_launcher.png
│ │ │ │ │ ├── ic_launcher_round.png
│ │ │ │ │ └── ic_launcher_foreground.png
│ │ │ │ ├── mipmap-mdpi
│ │ │ │ │ ├── ic_launcher.png
│ │ │ │ │ ├── ic_launcher_round.png
│ │ │ │ │ └── ic_launcher_foreground.png
│ │ │ │ ├── mipmap-xhdpi
│ │ │ │ │ ├── ic_launcher.png
│ │ │ │ │ ├── ic_launcher_round.png
│ │ │ │ │ └── ic_launcher_foreground.png
│ │ │ │ ├── mipmap-xxhdpi
│ │ │ │ │ ├── ic_launcher.png
│ │ │ │ │ ├── ic_launcher_round.png
│ │ │ │ │ └── ic_launcher_foreground.png
│ │ │ │ ├── mipmap-xxxhdpi
│ │ │ │ │ ├── ic_launcher.png
│ │ │ │ │ ├── ic_launcher_round.png
│ │ │ │ │ └── ic_launcher_foreground.png
│ │ │ │ ├── values
│ │ │ │ │ ├── dimens.xml
│ │ │ │ │ ├── colors.xml
│ │ │ │ │ ├── strings.xml
│ │ │ │ │ └── styles.xml
│ │ │ │ ├── mipmap-anydpi-v26
│ │ │ │ │ ├── ic_launcher.xml
│ │ │ │ │ └── ic_launcher_round.xml
│ │ │ │ ├── drawable
│ │ │ │ │ ├── ic_baseline_remove.xml
│ │ │ │ │ ├── ic_baseline_add.xml
│ │ │ │ │ ├── bottom_sheet_bg.xml
│ │ │ │ │ ├── rectangle.xml
│ │ │ │ │ └── ic_launcher_background.xml
│ │ │ │ ├── layout
│ │ │ │ │ ├── tfe_od_camera_connection_fragment_tracking.xml
│ │ │ │ │ ├── tfe_od_activity_camera.xml
│ │ │ │ │ └── tfe_od_layout_bottom_sheet.xml
│ │ │ │ └── drawable-v24
│ │ │ │ │ └── ic_launcher_foreground.xml
│ │ │ ├── java
│ │ │ │ └── org
│ │ │ │ │ └── tensorflow
│ │ │ │ │ └── lite
│ │ │ │ │ └── examples
│ │ │ │ │ └── detection
│ │ │ │ │ ├── customview
│ │ │ │ │ ├── ResultsView.java
│ │ │ │ │ ├── OverlayView.java
│ │ │ │ │ ├── RecognitionScoreView.java
│ │ │ │ │ └── AutoFitTextureView.java
│ │ │ │ │ ├── env
│ │ │ │ │ ├── Size.java
│ │ │ │ │ ├── BorderedText.java
│ │ │ │ │ ├── Logger.java
│ │ │ │ │ └── ImageUtils.java
│ │ │ │ │ ├── LegacyCameraConnectionFragment.java
│ │ │ │ │ ├── tracking
│ │ │ │ │ └── MultiBoxTracker.java
│ │ │ │ │ ├── DetectorActivity.java
│ │ │ │ │ ├── CameraActivity.java
│ │ │ │ │ └── CameraConnectionFragment.java
│ │ │ └── AndroidManifest.xml
│ │ ├── androidTest
│ │ │ ├── assets
│ │ │ │ ├── table.jpg
│ │ │ │ └── table_results.txt
│ │ │ └── java
│ │ │ │ ├── AndroidManifest.xml
│ │ │ │ └── org
│ │ │ │ └── tensorflow
│ │ │ │ └── lite
│ │ │ │ └── examples
│ │ │ │ └── detection
│ │ │ │ └── DetectorTest.java
│ │ └── interpreter
│ │ │ └── ml
│ │ │ └── detect.tflite
│ ├── download_model.gradle
│ ├── proguard-rules.pro
│ └── build.gradle
├── screenshot
│ ├── OD_Android_1.png
│ ├── OD_Android_2.png
│ ├── device-2018-08-10-173223.png
│ ├── device-2018-09-04-121223.png
│ └── device-2018-09-04-121257.png
├── gradle
│ └── wrapper
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
├── settings.gradle
├── lib_task_api
│ ├── src
│ │ └── main
│ │ │ ├── AndroidManifest.xml
│ │ │ └── java
│ │ │ └── org
│ │ │ └── tensorflow
│ │ │ └── lite
│ │ │ └── examples
│ │ │ └── detection
│ │ │ └── tflite
│ │ │ ├── Detector.java
│ │ │ └── TFLiteObjectDetectionAPIModel.java
│ ├── proguard-rules.pro
│ └── build.gradle
├── lib_interpreter
│ ├── src
│ │ └── main
│ │ │ ├── AndroidManifest.xml
│ │ │ └── java
│ │ │ └── org
│ │ │ └── tensorflow
│ │ │ └── lite
│ │ │ └── examples
│ │ │ └── detection
│ │ │ └── tflite
│ │ │ ├── Detector.java
│ │ │ └── TFLiteObjectDetectionAPIModel.java
│ ├── proguard-rules.pro
│ └── build.gradle
├── .gitignore
├── build.gradle
├── gradle.properties
├── gradlew.bat
├── gradlew
└── README.md
├── LICENSE
├── README.md
└── .gitignore
/android/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /build/
--------------------------------------------------------------------------------
/android/app/src/main/assets/labelmap.txt:
--------------------------------------------------------------------------------
1 | Pills
--------------------------------------------------------------------------------
/android/screenshot/OD_Android_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/screenshot/OD_Android_1.png
--------------------------------------------------------------------------------
/android/screenshot/OD_Android_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/screenshot/OD_Android_2.png
--------------------------------------------------------------------------------
/android/app/src/main/ml/detect.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/ml/detect.tflite
--------------------------------------------------------------------------------
/android/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/android/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'TFLite Object Detection Demo App'
2 | include ':app', ':lib_interpreter', ':lib_task_api'
3 |
--------------------------------------------------------------------------------
/android/app/src/androidTest/assets/table.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/androidTest/assets/table.jpg
--------------------------------------------------------------------------------
/android/app/src/interpreter/ml/detect.tflite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/interpreter/ml/detect.tflite
--------------------------------------------------------------------------------
/android/screenshot/device-2018-08-10-173223.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/screenshot/device-2018-08-10-173223.png
--------------------------------------------------------------------------------
/android/screenshot/device-2018-09-04-121223.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/screenshot/device-2018-09-04-121223.png
--------------------------------------------------------------------------------
/android/screenshot/device-2018-09-04-121257.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/screenshot/device-2018-09-04-121257.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxxhdpi/caret.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxxhdpi/caret.jpg
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxxhdpi/chair.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxxhdpi/chair.jpg
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeperfectplus/ASL/HEAD/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/android/lib_task_api/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/android/lib_interpreter/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
--------------------------------------------------------------------------------
/android/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea/libraries
5 | /.idea/modules.xml
6 | /.idea/workspace.xml
7 | .DS_Store
8 | /build
9 | /captures
10 | .externalNativeBuild
11 |
12 | /.gradle/
13 | /.idea/
14 |
--------------------------------------------------------------------------------
/android/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 15dp
4 | 8dp
5 |
--------------------------------------------------------------------------------
/android/app/src/androidTest/java/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
--------------------------------------------------------------------------------
/android/app/src/androidTest/assets/table_results.txt:
--------------------------------------------------------------------------------
1 | dining_table 27.492085 97.94615 623.1435 444.8627 0.48828125
2 | knife 342.53433 243.71082 583.89185 416.34595 0.4765625
3 | cup 68.025925 197.5857 202.02031 374.2206 0.4375
4 | book 185.43098 139.64153 244.51149 203.37737 0.3125
5 |
--------------------------------------------------------------------------------
/android/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Mar 03 11:26:15 IST 2021
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.7-all.zip
7 |
--------------------------------------------------------------------------------
/android/app/download_model.gradle:
--------------------------------------------------------------------------------
// Fetches the default SSD MobileNet v1 TFLite model from TF Hub into the app's
// asset directory (project.ext.ASSET_DIR) before compilation. The `Download`
// task type comes from the de.undercouch gradle-download-task plugin declared
// in the root build.gradle; `overwrite false` makes the download a no-op once
// the file exists, so clean builds stay offline-friendly after the first run.
1 | task downloadModelFile(type: Download) {
2 | src 'https://tfhub.dev/tensorflow/lite-model/ssd_mobilenet_v1/1/metadata/2?lite-format=tflite'
3 | dest project.ext.ASSET_DIR + '/detect.tflite'
4 | overwrite false
5 | }
6 |
// Hooking preBuild guarantees the model is present before any compile step.
7 | preBuild.dependsOn downloadModelFile
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/android/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #ffa800
4 | #ff6f00
5 | #425066
6 |
7 | #66000000
8 |
9 |
--------------------------------------------------------------------------------
/android/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | TFL Detect
3 | This device doesn\'t support Camera2 API.
4 |
5 |
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/ic_baseline_remove.xml:
--------------------------------------------------------------------------------
1 |
6 |
9 |
10 |
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/ic_baseline_add.xml:
--------------------------------------------------------------------------------
1 |
6 |
9 |
10 |
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/bottom_sheet_bg.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/android/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/rectangle.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
12 |
13 |
--------------------------------------------------------------------------------
/android/build.gradle:
--------------------------------------------------------------------------------
// NOTE(review): jcenter() was sunset in 2021 (read-only); both classpath
// dependencies below (Android Gradle Plugin 4.1.2, gradle-download-task 4.0.2)
// are available from google()/mavenCentral() — consider swapping jcenter()
// for mavenCentral() the next time this build is touched.
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | google()
6 | jcenter()
7 | mavenLocal()
8 | }
9 | dependencies {
10 | classpath 'com.android.tools.build:gradle:4.1.2'
11 | classpath 'de.undercouch:gradle-download-task:4.0.2'
12 | // NOTE: Do not place your application dependencies here; they belong
13 | // in the individual module build.gradle files
14 | }
15 | }
16 |
17 | allprojects {
18 | repositories {
19 | google()
20 | jcenter()
21 | mavenLocal()
22 | }
23 | }
24 |
// Standard root-level clean task: deletes the aggregated build directory.
25 | task clean(type: Delete) {
26 | delete rootProject.buildDir
27 | }
28 |
28 |
--------------------------------------------------------------------------------
/android/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | android.useAndroidX=true
15 | android.enableJetifier=true
16 |
--------------------------------------------------------------------------------
/android/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/android/lib_task_api/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/android/lib_interpreter/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.customview;
17 |
18 | import java.util.List;
19 | import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition;
20 |
21 | public interface ResultsView {
22 | public void setResults(final List results);
23 | }
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Deepak Raj
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/android/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
18 |
19 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/android/lib_task_api/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | android {
4 | compileSdkVersion 28
5 | buildToolsVersion "28.0.3"
6 |
7 | defaultConfig {
8 | minSdkVersion 21
9 | targetSdkVersion 28
10 | versionCode 1
11 | versionName "1.0"
12 |
13 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
14 | }
15 |
16 | buildTypes {
17 | release {
18 | minifyEnabled false
19 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
20 | }
21 | }
22 |
23 | aaptOptions {
24 | noCompress "tflite"
25 | }
26 |
27 | lintOptions {
28 | checkReleaseBuilds false
29 | // Or, if you prefer, you can continue to check for errors in release
30 | // builds, but continue the build even when errors are found.
31 | abortOnError false
32 | }
33 |
34 | compileOptions {
35 | sourceCompatibility 1.8
36 | targetCompatibility 1.8
37 | }
38 | }
39 |
40 | dependencies {
41 | implementation fileTree(dir: 'libs', include: ['*.jar'])
42 | implementation 'androidx.appcompat:appcompat:1.1.0'
43 | implementation 'org.tensorflow:tensorflow-lite-task-vision:0.1.0'
44 | }
45 |
--------------------------------------------------------------------------------
/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml:
--------------------------------------------------------------------------------
1 |
16 |
19 |
20 |
24 |
25 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/android/lib_interpreter/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.library'
2 |
3 | android {
4 | compileSdkVersion 28
5 | buildToolsVersion "28.0.3"
6 |
7 | defaultConfig {
8 | minSdkVersion 21
9 | targetSdkVersion 28
10 | versionCode 1
11 | versionName "1.0"
12 |
13 | testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
14 | }
15 |
16 | buildTypes {
17 | release {
18 | minifyEnabled false
19 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
20 | }
21 | }
22 |
23 | aaptOptions {
24 | noCompress "tflite"
25 | }
26 |
27 | lintOptions {
28 | checkReleaseBuilds false
29 | // Or, if you prefer, you can continue to check for errors in release
30 | // builds, but continue the build even when errors are found.
31 | abortOnError false
32 | }
33 |
34 | compileOptions {
35 | sourceCompatibility 1.8
36 | targetCompatibility 1.8
37 | }
38 | }
39 |
40 | dependencies {
41 | implementation fileTree(dir: 'libs', include: ['*.jar'])
42 | implementation 'androidx.appcompat:appcompat:1.1.0'
43 |
44 | // Build off of nightly TensorFlow Lite
45 | implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly-SNAPSHOT'
46 | implementation 'org.tensorflow:tensorflow-lite-metadata:0.0.0-nightly-SNAPSHOT'
47 | }
48 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.customview;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.util.AttributeSet;
21 | import android.view.View;
22 | import java.util.LinkedList;
23 | import java.util.List;
24 |
25 | /** A simple View providing a render callback to other classes. */
26 | public class OverlayView extends View {
27 | private final List callbacks = new LinkedList();
28 |
29 | public OverlayView(final Context context, final AttributeSet attrs) {
30 | super(context, attrs);
31 | }
32 |
33 | public void addCallback(final DrawCallback callback) {
34 | callbacks.add(callback);
35 | }
36 |
37 | @Override
38 | public synchronized void draw(final Canvas canvas) {
39 | for (final DrawCallback callback : callbacks) {
40 | callback.drawCallback(canvas);
41 | }
42 | }
43 |
44 | /** Interface defining the callback for client classes. */
45 | public interface DrawCallback {
46 | public void drawCallback(final Canvas canvas);
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
1 |
7 |
12 |
13 |
19 |
22 |
25 |
26 |
27 |
28 |
34 |
35 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
👉 American Sign Language Detection Model with SSD_Mobilenet trained on Google Colab 👈
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | Sign_Language_detection_SSD_Mobilenet_Colab_TFLITE.ipynb
15 |
16 | - Model Used - ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8
- DL Framework Used - TensorFlow Version 2
18 | - DataSet Used - [ASL By David Lee](https://app.roboflow.com/dataset/american-sign-language-letters-14kx4/)
19 | - Platform - Google Colab Using GPU
20 |
21 | Download the build from [Google Drive](https://drive.google.com/file/d/1UlQ-7A5yzj8CnRMTyeitIdj9WpVge9hk/view)
22 |
23 | 
24 |
Read the full article [on the CodePerfectPLus blog](https://codeperfectplus.herokuapp.com/real-time-hand-sign-recogntion-using-tesnorflow)
26 |
Thanks to [David Lee](https://www.linkedin.com/in/daviddaeshinlee/) for the amazing dataset.
28 |
Star this repository if you find it helpful.
30 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.customview;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.graphics.Paint;
21 | import android.util.AttributeSet;
22 | import android.util.TypedValue;
23 | import android.view.View;
24 | import java.util.List;
25 | import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition;
26 |
27 | public class RecognitionScoreView extends View implements ResultsView {
28 | private static final float TEXT_SIZE_DIP = 14;
29 | private final float textSizePx;
30 | private final Paint fgPaint;
31 | private final Paint bgPaint;
32 | private List results;
33 |
34 | public RecognitionScoreView(final Context context, final AttributeSet set) {
35 | super(context, set);
36 |
37 | textSizePx =
38 | TypedValue.applyDimension(
39 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
40 | fgPaint = new Paint();
41 | fgPaint.setTextSize(textSizePx);
42 |
43 | bgPaint = new Paint();
44 | bgPaint.setColor(0xcc4285f4);
45 | }
46 |
47 | @Override
48 | public void setResults(final List results) {
49 | this.results = results;
50 | postInvalidate();
51 | }
52 |
53 | @Override
54 | public void onDraw(final Canvas canvas) {
55 | final int x = 10;
56 | int y = (int) (fgPaint.getTextSize() * 1.5f);
57 |
58 | canvas.drawPaint(bgPaint);
59 |
60 | if (results != null) {
61 | for (final Recognition recog : results) {
62 | canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint);
63 | y += (int) (fgPaint.getTextSize() * 1.5f);
64 | }
65 | }
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/android/app/src/main/res/layout/tfe_od_activity_camera.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
22 |
23 |
29 |
30 |
31 |
37 |
38 |
44 |
45 |
49 |
50 |
51 |
52 |
53 |
56 |
57 |
--------------------------------------------------------------------------------
/android/app/build.gradle:
--------------------------------------------------------------------------------
apply plugin: 'com.android.application'
apply plugin: 'de.undercouch.download'

android {
    compileSdkVersion 28
    buildToolsVersion '28.0.3'
    defaultConfig {
        applicationId "org.tensorflow.lite.examples.detection"
        minSdkVersion 21
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    // Keep .tflite model files uncompressed so they can be memory-mapped at runtime.
    aaptOptions {
        noCompress "tflite"
    }
    compileOptions {
        sourceCompatibility = '1.8'
        targetCompatibility = '1.8'
    }
    lintOptions {
        abortOnError false
    }
    // Two flavors select which inference backend module is compiled in.
    flavorDimensions "tfliteInference"
    productFlavors {
        // The TFLite inference is built using the TFLite Java interpreter.
        interpreter {
            dimension "tfliteInference"
        }
        // Default: The TFLite inference is built using the TFLite Task library (high-level API).
        taskApi {
            getIsDefault().set(true)
            dimension "tfliteInference"
        }
    }
    // Generates model-binding classes for models placed under src/main/ml.
    buildFeatures {
        mlModelBinding true
    }
}

// import DownloadModels task
project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'

// Download default models; if you wish to use your own models then
// place them in the "assets" directory and comment out this line.
//apply from:'download_model.gradle'

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar','*.aar'])
    implementation 'org.tensorflow:tensorflow-lite-support:0.1.0-rc1'
    implementation 'org.tensorflow:tensorflow-lite-metadata:0.1.0-rc1'
    implementation 'org.tensorflow:tensorflow-lite-gpu:2.2.0'
    // Flavor-specific backend modules (see productFlavors above).
    interpreterImplementation project(":lib_interpreter")
    taskApiImplementation project(":lib_task_api")
    implementation 'androidx.appcompat:appcompat:1.0.0'
    implementation 'androidx.coordinatorlayout:coordinatorlayout:1.0.0'
    implementation 'com.google.android.material:material:1.0.0'

    androidTestImplementation 'androidx.test.ext:junit:1.1.1'
    androidTestImplementation 'com.google.truth:truth:1.0.1'
    androidTestImplementation 'androidx.test:runner:1.2.0'
    androidTestImplementation 'androidx.test:rules:1.1.0'
}
73 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.lite.examples.detection.customview;
18 |
19 | import android.content.Context;
20 | import android.util.AttributeSet;
21 | import android.view.TextureView;
22 |
23 | /** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
24 | public class AutoFitTextureView extends TextureView {
25 | private int ratioWidth = 0;
26 | private int ratioHeight = 0;
27 |
28 | public AutoFitTextureView(final Context context) {
29 | this(context, null);
30 | }
31 |
32 | public AutoFitTextureView(final Context context, final AttributeSet attrs) {
33 | this(context, attrs, 0);
34 | }
35 |
36 | public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
37 | super(context, attrs, defStyle);
38 | }
39 |
40 | /**
41 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
42 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
43 | * calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
44 | *
45 | * @param width Relative horizontal size
46 | * @param height Relative vertical size
47 | */
48 | public void setAspectRatio(final int width, final int height) {
49 | if (width < 0 || height < 0) {
50 | throw new IllegalArgumentException("Size cannot be negative.");
51 | }
52 | ratioWidth = width;
53 | ratioHeight = height;
54 | requestLayout();
55 | }
56 |
57 | @Override
58 | protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
59 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
60 | final int width = MeasureSpec.getSize(widthMeasureSpec);
61 | final int height = MeasureSpec.getSize(heightMeasureSpec);
62 | if (0 == ratioWidth || 0 == ratioHeight) {
63 | setMeasuredDimension(width, height);
64 | } else {
65 | if (width < height * ratioWidth / ratioHeight) {
66 | setMeasuredDimension(width, width * ratioHeight / ratioWidth);
67 | } else {
68 | setMeasuredDimension(height * ratioWidth / ratioHeight, height);
69 | }
70 | }
71 | }
72 | }
73 |
--------------------------------------------------------------------------------
/android/gradlew.bat:
--------------------------------------------------------------------------------
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem      https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem

@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

@rem No JAVA_HOME set: probe for a usable java on the PATH.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
@rem Strip any quotes from JAVA_HOME before building the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto execute

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar


@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
90 |
--------------------------------------------------------------------------------
/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.tflite;
17 |
18 | import android.graphics.Bitmap;
19 | import android.graphics.RectF;
20 | import java.util.List;
21 |
22 | /** Generic interface for interacting with different recognition engines. */
23 | public interface Detector {
24 | List recognizeImage(Bitmap bitmap);
25 |
26 | void enableStatLogging(final boolean debug);
27 |
28 | String getStatString();
29 |
30 | void close();
31 |
32 | void setNumThreads(int numThreads);
33 |
34 | void setUseNNAPI(boolean isChecked);
35 |
36 | /** An immutable result returned by a Detector describing what was recognized. */
37 | public class Recognition {
38 | /**
39 | * A unique identifier for what has been recognized. Specific to the class, not the instance of
40 | * the object.
41 | */
42 | private final String id;
43 |
44 | /** Display name for the recognition. */
45 | private final String title;
46 |
47 | /**
48 | * A sortable score for how good the recognition is relative to others. Higher should be better.
49 | */
50 | private final Float confidence;
51 |
52 | /** Optional location within the source image for the location of the recognized object. */
53 | private RectF location;
54 |
55 | public Recognition(
56 | final String id, final String title, final Float confidence, final RectF location) {
57 | this.id = id;
58 | this.title = title;
59 | this.confidence = confidence;
60 | this.location = location;
61 | }
62 |
63 | public String getId() {
64 | return id;
65 | }
66 |
67 | public String getTitle() {
68 | return title;
69 | }
70 |
71 | public Float getConfidence() {
72 | return confidence;
73 | }
74 |
75 | public RectF getLocation() {
76 | return new RectF(location);
77 | }
78 |
79 | public void setLocation(RectF location) {
80 | this.location = location;
81 | }
82 |
83 | @Override
84 | public String toString() {
85 | String resultString = "";
86 | if (id != null) {
87 | resultString += "[" + id + "] ";
88 | }
89 |
90 | if (title != null) {
91 | resultString += title + " ";
92 | }
93 |
94 | if (confidence != null) {
95 | resultString += String.format("(%.1f%%) ", confidence * 100.0f);
96 | }
97 |
98 | if (location != null) {
99 | resultString += location + " ";
100 | }
101 |
102 | return resultString.trim();
103 | }
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/Detector.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.tflite;
17 |
18 | import android.graphics.Bitmap;
19 | import android.graphics.RectF;
20 | import java.util.List;
21 |
22 | /** Generic interface for interacting with different recognition engines. */
23 | public interface Detector {
24 | List recognizeImage(Bitmap bitmap);
25 |
26 | void enableStatLogging(final boolean debug);
27 |
28 | String getStatString();
29 |
30 | void close();
31 |
32 | void setNumThreads(int numThreads);
33 |
34 | void setUseNNAPI(boolean isChecked);
35 |
36 | /** An immutable result returned by a Detector describing what was recognized. */
37 | public class Recognition {
38 | /**
39 | * A unique identifier for what has been recognized. Specific to the class, not the instance of
40 | * the object.
41 | */
42 | private final String id;
43 |
44 | /** Display name for the recognition. */
45 | private final String title;
46 |
47 | /**
48 | * A sortable score for how good the recognition is relative to others. Higher should be better.
49 | */
50 | private final Float confidence;
51 |
52 | /** Optional location within the source image for the location of the recognized object. */
53 | private RectF location;
54 |
55 | public Recognition(
56 | final String id, final String title, final Float confidence, final RectF location) {
57 | this.id = id;
58 | this.title = title;
59 | this.confidence = confidence;
60 | this.location = location;
61 | }
62 |
63 | public String getId() {
64 | return id;
65 | }
66 |
67 | public String getTitle() {
68 | return title;
69 | }
70 |
71 | public Float getConfidence() {
72 | return confidence;
73 | }
74 |
75 | public RectF getLocation() {
76 | return new RectF(location);
77 | }
78 |
79 | public void setLocation(RectF location) {
80 | this.location = location;
81 | }
82 |
83 | @Override
84 | public String toString() {
85 | String resultString = "";
86 | if (id != null) {
87 | resultString += "[" + id + "] ";
88 | }
89 |
90 | if (title != null) {
91 | resultString += title + " ";
92 | }
93 |
94 | if (confidence != null) {
95 | resultString += String.format("(%.1f%%) ", confidence * 100.0f);
96 | }
97 |
98 | if (location != null) {
99 | resultString += location + " ";
100 | }
101 |
102 | return resultString.trim();
103 | }
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.env;
17 |
18 | import android.graphics.Bitmap;
19 | import android.text.TextUtils;
20 | import java.io.Serializable;
21 | import java.util.ArrayList;
22 | import java.util.List;
23 |
24 | /** Size class independent of a Camera object. */
25 | public class Size implements Comparable, Serializable {
26 |
27 | // 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
28 | // upgrading.
29 | public static final long serialVersionUID = 7689808733290872361L;
30 |
31 | public final int width;
32 | public final int height;
33 |
34 | public Size(final int width, final int height) {
35 | this.width = width;
36 | this.height = height;
37 | }
38 |
39 | public Size(final Bitmap bmp) {
40 | this.width = bmp.getWidth();
41 | this.height = bmp.getHeight();
42 | }
43 |
44 | /**
45 | * Rotate a size by the given number of degrees.
46 | *
47 | * @param size Size to rotate.
48 | * @param rotation Degrees {0, 90, 180, 270} to rotate the size.
49 | * @return Rotated size.
50 | */
51 | public static Size getRotatedSize(final Size size, final int rotation) {
52 | if (rotation % 180 != 0) {
53 | // The phone is portrait, therefore the camera is sideways and frame should be rotated.
54 | return new Size(size.height, size.width);
55 | }
56 | return size;
57 | }
58 |
59 | public static Size parseFromString(String sizeString) {
60 | if (TextUtils.isEmpty(sizeString)) {
61 | return null;
62 | }
63 |
64 | sizeString = sizeString.trim();
65 |
66 | // The expected format is "x".
67 | final String[] components = sizeString.split("x");
68 | if (components.length == 2) {
69 | try {
70 | final int width = Integer.parseInt(components[0]);
71 | final int height = Integer.parseInt(components[1]);
72 | return new Size(width, height);
73 | } catch (final NumberFormatException e) {
74 | return null;
75 | }
76 | } else {
77 | return null;
78 | }
79 | }
80 |
81 | public static List sizeStringToList(final String sizes) {
82 | final List sizeList = new ArrayList();
83 | if (sizes != null) {
84 | final String[] pairs = sizes.split(",");
85 | for (final String pair : pairs) {
86 | final Size size = Size.parseFromString(pair);
87 | if (size != null) {
88 | sizeList.add(size);
89 | }
90 | }
91 | }
92 | return sizeList;
93 | }
94 |
95 | public static String sizeListToString(final List sizes) {
96 | String sizesString = "";
97 | if (sizes != null && sizes.size() > 0) {
98 | sizesString = sizes.get(0).toString();
99 | for (int i = 1; i < sizes.size(); i++) {
100 | sizesString += "," + sizes.get(i).toString();
101 | }
102 | }
103 | return sizesString;
104 | }
105 |
106 | public static final String dimensionsAsString(final int width, final int height) {
107 | return width + "x" + height;
108 | }
109 |
110 | public final float aspectRatio() {
111 | return (float) width / (float) height;
112 | }
113 |
114 | @Override
115 | public int compareTo(final Size other) {
116 | return width * height - other.width * other.height;
117 | }
118 |
119 | @Override
120 | public boolean equals(final Object other) {
121 | if (other == null) {
122 | return false;
123 | }
124 |
125 | if (!(other instanceof Size)) {
126 | return false;
127 | }
128 |
129 | final Size otherSize = (Size) other;
130 | return (width == otherSize.width && height == otherSize.height);
131 | }
132 |
133 | @Override
134 | public int hashCode() {
135 | return width * 32713 + height;
136 | }
137 |
138 | @Override
139 | public String toString() {
140 | return dimensionsAsString(width, height);
141 | }
142 | }
143 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.env;
17 |
18 | import android.graphics.Canvas;
19 | import android.graphics.Color;
20 | import android.graphics.Paint;
21 | import android.graphics.Paint.Align;
22 | import android.graphics.Paint.Style;
23 | import android.graphics.Rect;
24 | import android.graphics.Typeface;
25 | import java.util.Vector;
26 |
27 | /** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
28 | public class BorderedText {
29 | private final Paint interiorPaint;
30 | private final Paint exteriorPaint;
31 |
32 | private final float textSize;
33 |
34 | /**
35 | * Creates a left-aligned bordered text object with a white interior, and a black exterior with
36 | * the specified text size.
37 | *
38 | * @param textSize text size in pixels
39 | */
40 | public BorderedText(final float textSize) {
41 | this(Color.WHITE, Color.BLACK, textSize);
42 | }
43 |
44 | /**
45 | * Create a bordered text object with the specified interior and exterior colors, text size and
46 | * alignment.
47 | *
48 | * @param interiorColor the interior text color
49 | * @param exteriorColor the exterior text color
50 | * @param textSize text size in pixels
51 | */
52 | public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
53 | interiorPaint = new Paint();
54 | interiorPaint.setTextSize(textSize);
55 | interiorPaint.setColor(interiorColor);
56 | interiorPaint.setStyle(Style.FILL);
57 | interiorPaint.setAntiAlias(false);
58 | interiorPaint.setAlpha(255);
59 |
60 | exteriorPaint = new Paint();
61 | exteriorPaint.setTextSize(textSize);
62 | exteriorPaint.setColor(exteriorColor);
63 | exteriorPaint.setStyle(Style.FILL_AND_STROKE);
64 | exteriorPaint.setStrokeWidth(textSize / 8);
65 | exteriorPaint.setAntiAlias(false);
66 | exteriorPaint.setAlpha(255);
67 |
68 | this.textSize = textSize;
69 | }
70 |
71 | public void setTypeface(Typeface typeface) {
72 | interiorPaint.setTypeface(typeface);
73 | exteriorPaint.setTypeface(typeface);
74 | }
75 |
76 | public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
77 | canvas.drawText(text, posX, posY, exteriorPaint);
78 | canvas.drawText(text, posX, posY, interiorPaint);
79 | }
80 |
81 | public void drawText(
82 | final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
83 |
84 | float width = exteriorPaint.measureText(text);
85 | float textSize = exteriorPaint.getTextSize();
86 | Paint paint = new Paint(bgPaint);
87 | paint.setStyle(Paint.Style.FILL);
88 | paint.setAlpha(160);
89 | canvas.drawRect(posX, (posY + (int) (textSize)), (posX + (int) (width)), posY, paint);
90 |
91 | canvas.drawText(text, posX, (posY + textSize), interiorPaint);
92 | }
93 |
94 | public void drawLines(Canvas canvas, final float posX, final float posY, Vector lines) {
95 | int lineNum = 0;
96 | for (final String line : lines) {
97 | drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
98 | ++lineNum;
99 | }
100 | }
101 |
102 | public void setInteriorColor(final int color) {
103 | interiorPaint.setColor(color);
104 | }
105 |
106 | public void setExteriorColor(final int color) {
107 | exteriorPaint.setColor(color);
108 | }
109 |
110 | public float getTextSize() {
111 | return textSize;
112 | }
113 |
114 | public void setAlpha(final int alpha) {
115 | interiorPaint.setAlpha(alpha);
116 | exteriorPaint.setAlpha(alpha);
117 | }
118 |
119 | public void getTextBounds(
120 | final String line, final int index, final int count, final Rect lineBounds) {
121 | interiorPaint.getTextBounds(line, index, count, lineBounds);
122 | }
123 |
124 | public void setTextAlign(final Align align) {
125 | interiorPaint.setTextAlign(align);
126 | exteriorPaint.setTextAlign(align);
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Built application files
2 | *.apk
3 | *.aar
4 | *.ap_
5 | *.aab
6 |
7 | # Files for the ART/Dalvik VM
8 | *.dex
9 |
10 | # Java class files
11 | *.class
12 |
13 | # Generated files
14 | bin/
15 | gen/
16 | out/
17 | # Uncomment the following line in case you need and you don't have the release build type files in your app
18 | # release/
19 |
20 | # Gradle files
21 | .gradle/
22 | build/
23 |
24 | # Local configuration file (sdk path, etc)
25 | local.properties
26 |
27 | # Proguard folder generated by Eclipse
28 | proguard/
29 |
30 | # Log Files
31 | *.log
32 |
33 | # Android Studio Navigation editor temp files
34 | .navigation/
35 |
36 | # Android Studio captures folder
37 | captures/
38 |
39 | # IntelliJ
40 | *.iml
41 | .idea/workspace.xml
42 | .idea/tasks.xml
43 | .idea/gradle.xml
44 | .idea/assetWizardSettings.xml
45 | .idea/dictionaries
46 | .idea/libraries
47 | # Android Studio 3 in .gitignore file.
48 | .idea/caches
49 | .idea/modules.xml
50 | # Comment next line if keeping position of elements in Navigation Editor is relevant for you
51 | .idea/navEditor.xml
52 |
53 | # Keystore files
54 | # Uncomment the following lines if you do not want to check your keystore files in.
55 | #*.jks
56 | #*.keystore
57 |
58 | # External native build folder generated in Android Studio 2.2 and later
59 | .externalNativeBuild
60 | .cxx/
61 |
62 | # Google Services (e.g. APIs or Firebase)
63 | # google-services.json
64 |
65 | # Freeline
66 | freeline.py
67 | freeline/
68 | freeline_project_description.json
69 |
70 | # fastlane
71 | fastlane/report.xml
72 | fastlane/Preview.html
73 | fastlane/screenshots
74 | fastlane/test_output
75 | fastlane/readme.md
76 |
77 | # Version control
78 | vcs.xml
79 |
80 | # lint
81 | lint/intermediates/
82 | lint/generated/
83 | lint/outputs/
84 | lint/tmp/
85 | # lint/reports/
86 |
87 | # Ignore site dir
88 | _site/
89 |
90 | # Byte-compiled / optimized / DLL files
91 | __pycache__/
92 | *.py[cod]
93 | *$py.class
94 |
95 | # C extensions
96 | *.so
97 |
98 | # Distribution / packaging
99 | .Python
100 | build/
101 | develop-eggs/
102 | dist/
103 | downloads/
104 | eggs/
105 | .eggs/
106 | lib/
107 | lib64/
108 | parts/
109 | sdist/
110 | var/
111 | wheels/
112 | pip-wheel-metadata/
113 | share/python-wheels/
114 | *.egg-info/
115 | .installed.cfg
116 | *.egg
117 | MANIFEST
118 |
119 | # PyInstaller
120 | # Usually these files are written by a python script from a template
121 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
122 | *.manifest
123 | *.spec
124 |
125 | # Installer logs
126 | pip-log.txt
127 | pip-delete-this-directory.txt
128 |
129 | # Unit test / coverage reports
130 | htmlcov/
131 | .tox/
132 | .nox/
133 | .coverage
134 | .coverage.*
135 | .cache
136 | nosetests.xml
137 | coverage.xml
138 | *.cover
139 | *.py,cover
140 | .hypothesis/
141 | .pytest_cache/
142 |
143 | # Translations
144 | *.mo
145 | *.pot
146 |
147 | # Django stuff:
148 | *.log
149 | local_settings.py
150 | db.sqlite3
151 | db.sqlite3-journal
152 |
153 | # Flask stuff:
154 | instance/
155 | .webassets-cache
156 |
157 | # Scrapy stuff:
158 | .scrapy
159 |
160 | # Sphinx documentation
161 | docs/_build/
162 |
163 | # PyBuilder
164 | target/
165 |
166 | # Jupyter Notebook
167 | .ipynb_checkpoints
168 |
169 | # IPython
170 | profile_default/
171 | ipython_config.py
172 |
173 | # pyenv
174 | .python-version
175 |
176 | # pipenv
177 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
178 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
179 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
180 | # install all needed dependencies.
181 | #Pipfile.lock
182 |
183 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
184 | __pypackages__/
185 |
186 | # Celery stuff
187 | celerybeat-schedule
188 | celerybeat.pid
189 |
190 | # SageMath parsed files
191 | *.sage.py
192 |
193 | # Environments
194 | .env.test
195 |
196 | # parcel-bundler cache (https://parceljs.org/)
197 | .cache
198 |
199 | # Next.js build output
200 | .next
201 |
202 | # Nuxt.js build / generate output
203 | .nuxt
204 | dist
205 |
206 | # Gatsby files
207 | .cache/
# Uncomment the public line below if your project uses Gatsby and *not* Next.js
209 | # https://nextjs.org/blog/next-9-1#public-directory-support
210 | # public
211 |
212 | # vuepress build output
213 | .vuepress/dist
214 |
215 | # Serverless directories
216 | .serverless/
217 |
218 | # FuseBox cache
219 | .fusebox/
220 |
221 | # DynamoDB Local files
222 | .dynamodb/
223 |
224 | # TernJS port file
225 | .tern-port
226 |
227 | # VSCode config
228 | .vscode/
229 | .venv
230 | venv/
231 | env.bak/
232 | venv.bak/
233 |
234 | # Spyder project settings
235 | .spyderproject
236 | .spyproject
237 |
238 | # Rope project settings
239 | .ropeproject
240 |
241 | # mkdocs documentation
242 | /site
243 |
244 | # mypy
245 | .mypy_cache/
246 | .dmypy.json
247 | dmypy.json
248 |
249 | # Pyre type checker
250 | .pyre/
251 |
252 | # vscode
253 | .vscode
254 |
255 | # exe files
256 | *.exe
257 |
258 | # MacOS folder
259 | .DS_Store
260 |
--------------------------------------------------------------------------------
/android/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
10 |
15 |
20 |
25 |
30 |
35 |
40 |
45 |
50 |
55 |
60 |
65 |
70 |
75 |
80 |
85 |
90 |
95 |
100 |
105 |
110 |
115 |
120 |
125 |
130 |
135 |
140 |
145 |
150 |
155 |
160 |
165 |
170 |
171 |
--------------------------------------------------------------------------------
/android/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

# Print a message to stdout; used for non-fatal problems.
warn () {
    echo "$*"
}

# Print a message and abort the script with exit status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar


# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`

    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=`expr $i + 1`
    done
    case $i in
        0) set -- ;;
        1) set -- "$args0" ;;
        2) set -- "$args0" "$args1" ;;
        3) set -- "$args0" "$args1" "$args2" ;;
        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    # Wrap each argument in single quotes (escaping embedded quotes), one per line,
    # with a trailing backslash so the lines can be re-joined by the eval below.
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=`save "$@"`

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/android/lib_task_api/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.tflite;
17 |
18 | import android.content.Context;
19 | import android.graphics.Bitmap;
20 | import android.os.Trace;
21 | import java.io.IOException;
22 | import java.nio.MappedByteBuffer;
23 | import java.util.ArrayList;
24 | import java.util.List;
25 | import org.tensorflow.lite.support.common.FileUtil;
26 | import org.tensorflow.lite.support.image.TensorImage;
27 | import org.tensorflow.lite.task.vision.detector.Detection;
28 | import org.tensorflow.lite.task.vision.detector.ObjectDetector;
29 | import org.tensorflow.lite.task.vision.detector.ObjectDetector.ObjectDetectorOptions;
30 |
31 | /**
32 | * Wrapper for frozen detection models trained using the Tensorflow Object Detection API: -
33 | * https://github.com/tensorflow/models/tree/master/research/object_detection where you can find the
34 | * training code.
35 | *
36 | * To use pretrained models in the API or convert to TF Lite models, please see docs for details:
37 | * -
38 | * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1_detection_zoo.md
39 | * -
40 | * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf2_detection_zoo.md
41 | * -
42 | * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
43 | *
44 | *
For more information about Metadata and associated fields (eg: `labels.txt`), see Read the
46 | * metadata from models
47 | */
48 | public class TFLiteObjectDetectionAPIModel implements Detector {
49 | private static final String TAG = "TFLiteObjectDetectionAPIModelWithTaskApi";
50 |
51 | /** Only return this many results. */
52 | private static final int NUM_DETECTIONS = 10;
53 |
54 | private final MappedByteBuffer modelBuffer;
55 |
56 | /** An instance of the driver class to run model inference with Tensorflow Lite. */
57 | private ObjectDetector objectDetector;
58 |
59 | /** Builder of the options used to config the ObjectDetector. */
60 | private final ObjectDetectorOptions.Builder optionsBuilder;
61 |
62 | /**
63 | * Initializes a native TensorFlow session for classifying images.
64 | *
65 | *
{@code labelFilename}, {@code inputSize}, and {@code isQuantized}, are NOT required, but to
66 | * keep consistency with the implementation using the TFLite Interpreter Java API. See lib_interpreter.
68 | *
69 | * @param modelFilename The model file path relative to the assets folder
70 | * @param labelFilename The label file path relative to the assets folder
71 | * @param inputSize The size of image input
72 | * @param isQuantized Boolean representing model is quantized or not
73 | */
74 | public static Detector create(
75 | final Context context,
76 | final String modelFilename,
77 | final String labelFilename,
78 | final int inputSize,
79 | final boolean isQuantized)
80 | throws IOException {
81 | return new TFLiteObjectDetectionAPIModel(context, modelFilename);
82 | }
83 |
84 | private TFLiteObjectDetectionAPIModel(Context context, String modelFilename) throws IOException {
85 | modelBuffer = FileUtil.loadMappedFile(context, modelFilename);
86 | optionsBuilder = ObjectDetectorOptions.builder().setMaxResults(NUM_DETECTIONS);
87 | objectDetector = ObjectDetector.createFromBufferAndOptions(modelBuffer, optionsBuilder.build());
88 | }
89 |
90 | @Override
91 | public List recognizeImage(final Bitmap bitmap) {
92 | // Log this method so that it can be analyzed with systrace.
93 | Trace.beginSection("recognizeImage");
94 | List results = objectDetector.detect(TensorImage.fromBitmap(bitmap));
95 |
96 | // Converts a list of {@link Detection} objects into a list of {@link Recognition} objects
97 | // to match the interface of other inference method, such as using the TFLite
99 | // Java API..
100 | final ArrayList recognitions = new ArrayList<>();
101 | int cnt = 0;
102 | for (Detection detection : results) {
103 | recognitions.add(
104 | new Recognition(
105 | "" + cnt++,
106 | detection.getCategories().get(0).getLabel(),
107 | detection.getCategories().get(0).getScore(),
108 | detection.getBoundingBox()));
109 | }
110 | Trace.endSection(); // "recognizeImage"
111 | return recognitions;
112 | }
113 |
114 | @Override
115 | public void enableStatLogging(final boolean logStats) {}
116 |
117 | @Override
118 | public String getStatString() {
119 | return "";
120 | }
121 |
122 | @Override
123 | public void close() {
124 | if (objectDetector != null) {
125 | objectDetector.close();
126 | }
127 | }
128 |
129 | @Override
130 | public void setNumThreads(int numThreads) {
131 | if (objectDetector != null) {
132 | optionsBuilder.setNumThreads(numThreads);
133 | recreateDetector();
134 | }
135 | }
136 |
137 | @Override
138 | public void setUseNNAPI(boolean isChecked) {
139 | throw new UnsupportedOperationException(
140 | "Manipulating the hardware accelerators is not allowed in the Task"
141 | + " library currently. Only CPU is allowed.");
142 | }
143 |
144 | private void recreateDetector() {
145 | objectDetector.close();
146 | objectDetector = ObjectDetector.createFromBufferAndOptions(modelBuffer, optionsBuilder.build());
147 | }
148 | }
149 |
--------------------------------------------------------------------------------
/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.lite.examples.detection;
18 |
19 | import static com.google.common.truth.Truth.assertThat;
20 | import static java.lang.Math.abs;
21 | import static java.lang.Math.max;
22 | import static java.lang.Math.min;
23 |
24 | import android.content.res.AssetManager;
25 | import android.graphics.Bitmap;
26 | import android.graphics.Bitmap.Config;
27 | import android.graphics.BitmapFactory;
28 | import android.graphics.Canvas;
29 | import android.graphics.Matrix;
30 | import android.graphics.RectF;
31 | import android.util.Size;
32 | import androidx.test.ext.junit.runners.AndroidJUnit4;
33 | import androidx.test.platform.app.InstrumentationRegistry;
34 | import java.io.IOException;
35 | import java.io.InputStream;
36 | import java.util.ArrayList;
37 | import java.util.List;
38 | import java.util.Scanner;
39 | import org.junit.Before;
40 | import org.junit.Test;
41 | import org.junit.runner.RunWith;
42 | import org.tensorflow.lite.examples.detection.env.ImageUtils;
43 | import org.tensorflow.lite.examples.detection.tflite.Detector;
44 | import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition;
45 | import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;
46 |
47 | /** Golden test for Object Detection Reference app. */
48 | @RunWith(AndroidJUnit4.class)
49 | public class DetectorTest {
50 |
51 | private static final int MODEL_INPUT_SIZE = 320;
52 | private static final boolean IS_MODEL_QUANTIZED = true;
53 | private static final String MODEL_FILE = "detect.tflite";
54 | private static final String LABELS_FILE = "labelmap.txt";
55 | private static final Size IMAGE_SIZE = new Size(640, 480);
56 |
57 | private Detector detector;
58 | private Bitmap croppedBitmap;
59 | private Matrix frameToCropTransform;
60 | private Matrix cropToFrameTransform;
61 |
62 | @Before
63 | public void setUp() throws IOException {
64 | detector =
65 | TFLiteObjectDetectionAPIModel.create(
66 | InstrumentationRegistry.getInstrumentation().getContext(),
67 | MODEL_FILE,
68 | LABELS_FILE,
69 | MODEL_INPUT_SIZE,
70 | IS_MODEL_QUANTIZED);
71 | int cropSize = MODEL_INPUT_SIZE;
72 | int previewWidth = IMAGE_SIZE.getWidth();
73 | int previewHeight = IMAGE_SIZE.getHeight();
74 | int sensorOrientation = 0;
75 | croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
76 |
77 | frameToCropTransform =
78 | ImageUtils.getTransformationMatrix(
79 | previewWidth, previewHeight,
80 | cropSize, cropSize,
81 | sensorOrientation, false);
82 | cropToFrameTransform = new Matrix();
83 | frameToCropTransform.invert(cropToFrameTransform);
84 | }
85 |
86 | @Test
87 | public void detectionResultsShouldNotChange() throws Exception {
88 | Canvas canvas = new Canvas(croppedBitmap);
89 | canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null);
90 | final List results = detector.recognizeImage(croppedBitmap);
91 | final List expected = loadRecognitions("table_results.txt");
92 |
93 | for (Recognition target : expected) {
94 | // Find a matching result in results
95 | boolean matched = false;
96 | for (Recognition item : results) {
97 | RectF bbox = new RectF();
98 | cropToFrameTransform.mapRect(bbox, item.getLocation());
99 | if (item.getTitle().equals(target.getTitle())
100 | && matchBoundingBoxes(bbox, target.getLocation())
101 | && matchConfidence(item.getConfidence(), target.getConfidence())) {
102 | matched = true;
103 | break;
104 | }
105 | }
106 | assertThat(matched).isTrue();
107 | }
108 | }
109 |
110 | // Confidence tolerance: absolute 1%
111 | private static boolean matchConfidence(float a, float b) {
112 | return abs(a - b) < 0.01;
113 | }
114 |
115 | // Bounding Box tolerance: overlapped area > 95% of each one
116 | private static boolean matchBoundingBoxes(RectF a, RectF b) {
117 | float areaA = a.width() * a.height();
118 | float areaB = b.width() * b.height();
119 |
120 | RectF overlapped =
121 | new RectF(
122 | max(a.left, b.left), max(a.top, b.top), min(a.right, b.right), min(a.bottom, b.bottom));
123 | float overlappedArea = overlapped.width() * overlapped.height();
124 | return overlappedArea > 0.95 * areaA && overlappedArea > 0.95 * areaB;
125 | }
126 |
127 | private static Bitmap loadImage(String fileName) throws Exception {
128 | AssetManager assetManager =
129 | InstrumentationRegistry.getInstrumentation().getContext().getAssets();
130 | InputStream inputStream = assetManager.open(fileName);
131 | return BitmapFactory.decodeStream(inputStream);
132 | }
133 |
134 | // The format of result:
135 | // category bbox.left bbox.top bbox.right bbox.bottom confidence
136 | // ...
137 | // Example:
138 | // Apple 99 25 30 75 80 0.99
139 | // Banana 25 90 75 200 0.98
140 | // ...
141 | private static List loadRecognitions(String fileName) throws Exception {
142 | AssetManager assetManager =
143 | InstrumentationRegistry.getInstrumentation().getContext().getAssets();
144 | InputStream inputStream = assetManager.open(fileName);
145 | Scanner scanner = new Scanner(inputStream);
146 | List result = new ArrayList<>();
147 | while (scanner.hasNext()) {
148 | String category = scanner.next();
149 | category = category.replace('_', ' ');
150 | if (!scanner.hasNextFloat()) {
151 | break;
152 | }
153 | float left = scanner.nextFloat();
154 | float top = scanner.nextFloat();
155 | float right = scanner.nextFloat();
156 | float bottom = scanner.nextFloat();
157 | RectF boundingBox = new RectF(left, top, right, bottom);
158 | float confidence = scanner.nextFloat();
159 | Recognition recognition = new Recognition(null, category, confidence, boundingBox);
160 | result.add(recognition);
161 | }
162 | return result;
163 | }
164 | }
165 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.env;
17 |
18 | import android.util.Log;
19 | import java.util.HashSet;
20 | import java.util.Set;
21 |
22 | /** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */
23 | public final class Logger {
24 | private static final String DEFAULT_TAG = "tensorflow";
25 | private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
26 |
27 | // Classes to be ignored when examining the stack trace
28 | private static final Set IGNORED_CLASS_NAMES;
29 |
30 | static {
31 | IGNORED_CLASS_NAMES = new HashSet(3);
32 | IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
33 | IGNORED_CLASS_NAMES.add("java.lang.Thread");
34 | IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
35 | }
36 |
37 | private final String tag;
38 | private final String messagePrefix;
39 | private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
40 |
41 | /**
42 | * Creates a Logger using the class name as the message prefix.
43 | *
44 | * @param clazz the simple name of this class is used as the message prefix.
45 | */
46 | public Logger(final Class> clazz) {
47 | this(clazz.getSimpleName());
48 | }
49 |
50 | /**
51 | * Creates a Logger using the specified message prefix.
52 | *
53 | * @param messagePrefix is prepended to the text of every message.
54 | */
55 | public Logger(final String messagePrefix) {
56 | this(DEFAULT_TAG, messagePrefix);
57 | }
58 |
59 | /**
60 | * Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set to
61 | *
62 | * null
63 | *
64 | * , the caller's class name is used as the prefix.
65 | *
66 | * @param tag identifies the source of a log message.
67 | * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is
68 | * being used
69 | */
70 | public Logger(final String tag, final String messagePrefix) {
71 | this.tag = tag;
72 | final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
73 | this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
74 | }
75 |
76 | /** Creates a Logger using the caller's class name as the message prefix. */
77 | public Logger() {
78 | this(DEFAULT_TAG, null);
79 | }
80 |
81 | /** Creates a Logger using the caller's class name as the message prefix. */
82 | public Logger(final int minLogLevel) {
83 | this(DEFAULT_TAG, null);
84 | this.minLogLevel = minLogLevel;
85 | }
86 |
87 | /**
88 | * Return caller's simple name.
89 | *
90 | * Android getStackTrace() returns an array that looks like this: stackTrace[0]:
91 | * dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
92 | * com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
93 | * com.google.android.apps.unveil.BaseApplication
94 | *
95 | *
This function returns the simple version of the first non-filtered name.
96 | *
97 | * @return caller's simple name
98 | */
99 | private static String getCallerSimpleName() {
100 | // Get the current callstack so we can pull the class of the caller off of it.
101 | final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
102 |
103 | for (final StackTraceElement elem : stackTrace) {
104 | final String className = elem.getClassName();
105 | if (!IGNORED_CLASS_NAMES.contains(className)) {
106 | // We're only interested in the simple name of the class, not the complete package.
107 | final String[] classParts = className.split("\\.");
108 | return classParts[classParts.length - 1];
109 | }
110 | }
111 |
112 | return Logger.class.getSimpleName();
113 | }
114 |
115 | public void setMinLogLevel(final int minLogLevel) {
116 | this.minLogLevel = minLogLevel;
117 | }
118 |
119 | public boolean isLoggable(final int logLevel) {
120 | return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
121 | }
122 |
123 | private String toMessage(final String format, final Object... args) {
124 | return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
125 | }
126 |
127 | public void v(final String format, final Object... args) {
128 | if (isLoggable(Log.VERBOSE)) {
129 | Log.v(tag, toMessage(format, args));
130 | }
131 | }
132 |
133 | public void v(final Throwable t, final String format, final Object... args) {
134 | if (isLoggable(Log.VERBOSE)) {
135 | Log.v(tag, toMessage(format, args), t);
136 | }
137 | }
138 |
139 | public void d(final String format, final Object... args) {
140 | if (isLoggable(Log.DEBUG)) {
141 | Log.d(tag, toMessage(format, args));
142 | }
143 | }
144 |
145 | public void d(final Throwable t, final String format, final Object... args) {
146 | if (isLoggable(Log.DEBUG)) {
147 | Log.d(tag, toMessage(format, args), t);
148 | }
149 | }
150 |
151 | public void i(final String format, final Object... args) {
152 | if (isLoggable(Log.INFO)) {
153 | Log.i(tag, toMessage(format, args));
154 | }
155 | }
156 |
157 | public void i(final Throwable t, final String format, final Object... args) {
158 | if (isLoggable(Log.INFO)) {
159 | Log.i(tag, toMessage(format, args), t);
160 | }
161 | }
162 |
163 | public void w(final String format, final Object... args) {
164 | if (isLoggable(Log.WARN)) {
165 | Log.w(tag, toMessage(format, args));
166 | }
167 | }
168 |
169 | public void w(final Throwable t, final String format, final Object... args) {
170 | if (isLoggable(Log.WARN)) {
171 | Log.w(tag, toMessage(format, args), t);
172 | }
173 | }
174 |
175 | public void e(final String format, final Object... args) {
176 | if (isLoggable(Log.ERROR)) {
177 | Log.e(tag, toMessage(format, args));
178 | }
179 | }
180 |
181 | public void e(final Throwable t, final String format, final Object... args) {
182 | if (isLoggable(Log.ERROR)) {
183 | Log.e(tag, toMessage(format, args), t);
184 | }
185 | }
186 | }
187 |
--------------------------------------------------------------------------------
/android/README.md:
--------------------------------------------------------------------------------
1 | # TensorFlow Lite Object Detection Android Demo
2 |
3 | ### Overview
4 |
5 | This is a camera app that continuously detects the objects (bounding boxes and
6 | classes) in the frames seen by your device's back camera, using a quantized
7 | [MobileNet SSD](https://github.com/tensorflow/models/tree/master/research/object_detection)
8 | model trained on the [COCO dataset](http://cocodataset.org/). These instructions
9 | walk you through building and running the demo on an Android device.
10 |
11 | The model files are downloaded via Gradle scripts when you build and run. You
12 | don't need to do any steps to download TFLite models into the project
13 | explicitly.
14 |
15 | Application can run either on device or emulator.
16 |
17 |
18 |
19 | ## Build the demo using Android Studio
20 |
21 | ### Prerequisites
22 |
* If you don't already have it, install
24 | **[Android Studio](https://developer.android.com/studio/index.html)**,
25 | following the instructions on the website.
26 |
27 | * You need an Android device and Android development environment with minimum
28 | API 21.
29 |
30 | * Android Studio 3.2 or later.
31 |
32 | ### Building
33 |
34 | * Open Android Studio, and from the Welcome screen, select Open an existing
35 | Android Studio project.
36 |
37 | * From the Open File or Project window that appears, navigate to and select
38 | the tensorflow-lite/examples/object_detection/android directory from
39 | wherever you cloned the TensorFlow Lite sample GitHub repo. Click OK.
40 |
41 | * If it asks you to do a Gradle Sync, click OK.
42 |
43 | * You may also need to install various platforms and tools, if you get errors
44 | like "Failed to find target with hash string 'android-21'" and similar.
45 | Click the `Run` button (the green arrow) or select `Run > Run 'android'`
46 | from the top menu. You may need to rebuild the project using `Build >
47 | Rebuild` Project.
48 |
49 | * If it asks you to use Instant Run, click Proceed Without Instant Run.
50 |
51 | * Also, you need to have an Android device plugged in with developer options
52 | enabled at this point. See
53 | **[here](https://developer.android.com/studio/run/device)** for more details
54 | on setting up developer devices.
55 |
56 | #### Switch between inference solutions (Task library vs TFLite Interpreter)
57 |
58 | This object detection Android reference app demonstrates two implementation
59 | solutions:
60 |
61 | (1)
62 | [`lib_task_api`](https://github.com/tensorflow/examples/tree/master/lite/examples/object_detection/android/lib_task_api)
63 | that leverages the out-of-box API from the
64 | [TensorFlow Lite Task Library](https://www.tensorflow.org/lite/inference_with_metadata/task_library/object_detector);
65 |
66 | (2)
67 | [`lib_interpreter`](https://github.com/tensorflow/examples/tree/master/lite/examples/object_detection/android/lib_interpreter)
68 | that creates the custom inference pipeline using the
69 | [TensorFlow Lite Interpreter Java API](https://www.tensorflow.org/lite/guide/inference#load_and_run_a_model_in_java).
70 |
71 | The [`build.gradle`](app/build.gradle) inside `app` folder shows how to change
72 | `flavorDimensions "tfliteInference"` to switch between the two solutions.
73 |
74 | Inside **Android Studio**, you can change the build variant to whichever one you
75 | want to build and run—just go to `Build > Select Build Variant` and select one
76 | from the drop-down menu. See
77 | [configure product flavors in Android Studio](https://developer.android.com/studio/build/build-variants#product-flavors)
78 | for more details.
79 |
80 | For gradle CLI, running `./gradlew build` can create APKs for both solutions
81 | under `app/build/outputs/apk`.
82 |
83 | *Note: If you simply want the out-of-box API to run the app, we recommend
84 | `lib_task_api` for inference. If you want to customize your own models and
85 | control the detail of inputs and outputs, it might be easier to adapt your model
86 | inputs and outputs by using `lib_interpreter`.*
87 |
88 | ### Model used
89 |
90 | Downloading, extracting, and placing the model in the assets folder is managed
91 | automatically by download.gradle.
92 |
93 | If you explicitly want to download the model, you can download from
94 | **[here](http://storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip)**.
95 | Extract the zip to get the .tflite and label file.
96 |
97 | ### Custom model used
98 |
99 | This example shows you how to perform TensorFlow Lite object detection using a
100 | custom model. * Clone the TensorFlow models GitHub repository to your computer.
101 | `git clone https://github.com/tensorflow/models/` * Build and install this
102 | repository. `cd models/research python3 setup.py build && python3 setup.py
103 | install` * Download the MobileNet SSD trained on
104 | **[Open Images v4](https://storage.googleapis.com/openimages/web/factsfigures_v4.html)**
105 | **[here](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1_detection_zoo.md)**.
106 | Extract the pretrained TensorFlow model files. * Go to `models/research`
107 | directory and execute this code to get the frozen TensorFlow Lite graph.
108 | `python3 object_detection/export_tflite_ssd_graph.py \ --pipeline_config_path
109 | object_detection/samples/configs/ssd_mobilenet_v2_oid_v4.config \
110 | --trained_checkpoint_prefix /model.ckpt \ --output_directory
112 | exported_model` * Convert the frozen graph to the TFLite model. `tflite_convert
113 | \ --input_shape=1,300,300,3 \ --input_arrays=normalized_input_image_tensor \
114 | --output_arrays=TFLite_Detection_PostProcess,TFLite_Detection_PostProcess:1,TFLite_Detection_PostProcess:2,TFLite_Detection_PostProcess:3
115 | \ --allow_custom_ops \ --graph_def_file=exported_model/tflite_graph.pb \
116 | --output_file=/lite/examples/object_detection/android/app/src/main/assets/detect.tflite`
118 | Use `input_shape=1,300,300,3` because the pretrained model works only with that
119 | input shape.
120 |
121 | `allow_custom_ops` is necessary to allow TFLite_Detection_PostProcess operation.
122 |
123 | `input_arrays` and `output_arrays` can be drawn from the visualized graph of the
124 | example detection model. `bazel run //tensorflow/lite/tools:visualize \
125 | "/lite/examples/object_detection/android/app/src/main/assets/detect.tflite"
127 | \ detect.html`
128 |
129 | * Get `labelmap.txt` from the second column of
130 | **[class-descriptions-boxable](https://storage.googleapis.com/openimages/2018_04/class-descriptions-boxable.csv)**.
131 | * In `DetectorActivity.java` set `TF_OD_API_IS_QUANTIZED` to `false`.
132 |
133 | ### Additional Note
134 |
135 | _Please do not delete the assets folder content_. If you explicitly deleted the
136 | files, then please choose *Build*->*Rebuild* from menu to re-download the
137 | deleted model files into assets folder.
138 |
--------------------------------------------------------------------------------
/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml:
--------------------------------------------------------------------------------
1 |
2 |
14 |
15 |
22 |
23 |
29 |
30 |
31 |
32 |
36 |
37 |
44 |
45 |
53 |
54 |
55 |
59 |
60 |
67 |
68 |
76 |
77 |
78 |
79 |
80 |
84 |
85 |
92 |
93 |
101 |
102 |
103 |
108 |
109 |
114 |
115 |
121 |
122 |
130 |
131 |
136 |
137 |
146 |
147 |
152 |
153 |
154 |
155 |
160 |
161 |
162 |
167 |
168 |
175 |
176 |
185 |
186 |
187 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java:
--------------------------------------------------------------------------------
1 | package org.tensorflow.lite.examples.detection;
2 |
3 | /*
4 | * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
5 | *
6 | * Licensed under the Apache License, Version 2.0 (the "License");
7 | * you may not use this file except in compliance with the License.
8 | * You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 |
19 | import android.app.Fragment;
20 | import android.graphics.SurfaceTexture;
21 | import android.hardware.Camera;
22 | import android.hardware.Camera.CameraInfo;
23 | import android.os.Bundle;
24 | import android.os.Handler;
25 | import android.os.HandlerThread;
26 | import android.util.Size;
27 | import android.util.SparseIntArray;
28 | import android.view.LayoutInflater;
29 | import android.view.Surface;
30 | import android.view.TextureView;
31 | import android.view.View;
32 | import android.view.ViewGroup;
33 | import java.io.IOException;
34 | import java.util.List;
35 | import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
36 | import org.tensorflow.lite.examples.detection.env.ImageUtils;
37 | import org.tensorflow.lite.examples.detection.env.Logger;
38 |
39 | public class LegacyCameraConnectionFragment extends Fragment {
40 | private static final Logger LOGGER = new Logger();
41 | /** Conversion from screen rotation to JPEG orientation. */
42 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
43 |
44 | static {
45 | ORIENTATIONS.append(Surface.ROTATION_0, 90);
46 | ORIENTATIONS.append(Surface.ROTATION_90, 0);
47 | ORIENTATIONS.append(Surface.ROTATION_180, 270);
48 | ORIENTATIONS.append(Surface.ROTATION_270, 180);
49 | }
50 |
51 | private Camera camera;
52 | private Camera.PreviewCallback imageListener;
53 | private Size desiredSize;
54 | /** The layout identifier to inflate for this Fragment. */
55 | private int layout;
56 | /** An {@link AutoFitTextureView} for camera preview. */
57 | private AutoFitTextureView textureView;
58 | private SurfaceTexture availableSurfaceTexture = null;
59 |
60 | /**
61 | * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
62 | * TextureView}.
63 | */
64 | private final TextureView.SurfaceTextureListener surfaceTextureListener =
65 | new TextureView.SurfaceTextureListener() {
66 | @Override
67 | public void onSurfaceTextureAvailable(
68 | final SurfaceTexture texture, final int width, final int height) {
69 | availableSurfaceTexture = texture;
70 | startCamera();
71 | }
72 |
73 | @Override
74 | public void onSurfaceTextureSizeChanged(
75 | final SurfaceTexture texture, final int width, final int height) {}
76 |
77 | @Override
78 | public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
79 | return true;
80 | }
81 |
82 | @Override
83 | public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
84 | };
85 | /** An additional thread for running tasks that shouldn't block the UI. */
86 | private HandlerThread backgroundThread;
87 |
88 | public LegacyCameraConnectionFragment(
89 | final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
90 | this.imageListener = imageListener;
91 | this.layout = layout;
92 | this.desiredSize = desiredSize;
93 | }
94 |
95 | @Override
96 | public View onCreateView(
97 | final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
98 | return inflater.inflate(layout, container, false);
99 | }
100 |
101 | @Override
102 | public void onViewCreated(final View view, final Bundle savedInstanceState) {
103 | textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
104 | }
105 |
106 | @Override
107 | public void onActivityCreated(final Bundle savedInstanceState) {
108 | super.onActivityCreated(savedInstanceState);
109 | }
110 |
111 | @Override
112 | public void onResume() {
113 | super.onResume();
114 | startBackgroundThread();
115 | // When the screen is turned off and turned back on, the SurfaceTexture is already
116 | // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
117 | // a camera and start preview from here (otherwise, we wait until the surface is ready in
118 | // the SurfaceTextureListener).
119 |
120 | if (textureView.isAvailable()) {
121 | startCamera();
122 | } else {
123 | textureView.setSurfaceTextureListener(surfaceTextureListener);
124 | }
125 | }
126 |
127 | @Override
128 | public void onPause() {
129 | stopCamera();
130 | stopBackgroundThread();
131 | super.onPause();
132 | }
133 |
134 | /** Starts a background thread and its {@link Handler}. */
135 | private void startBackgroundThread() {
136 | backgroundThread = new HandlerThread("CameraBackground");
137 | backgroundThread.start();
138 | }
139 |
140 | /** Stops the background thread and its {@link Handler}. */
141 | private void stopBackgroundThread() {
142 | backgroundThread.quitSafely();
143 | try {
144 | backgroundThread.join();
145 | backgroundThread = null;
146 | } catch (final InterruptedException e) {
147 | LOGGER.e(e, "Exception!");
148 | }
149 | }
150 |
151 | private void startCamera() {
152 | int index = getCameraId();
153 | camera = Camera.open(index);
154 |
155 | try {
156 | Camera.Parameters parameters = camera.getParameters();
157 | List focusModes = parameters.getSupportedFocusModes();
158 | if (focusModes != null
159 | && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
160 | parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
161 | }
162 | List cameraSizes = parameters.getSupportedPreviewSizes();
163 | Size[] sizes = new Size[cameraSizes.size()];
164 | int i = 0;
165 | for (Camera.Size size : cameraSizes) {
166 | sizes[i++] = new Size(size.width, size.height);
167 | }
168 | Size previewSize =
169 | CameraConnectionFragment.chooseOptimalSize(
170 | sizes, desiredSize.getWidth(), desiredSize.getHeight());
171 | parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
172 | camera.setDisplayOrientation(90);
173 | camera.setParameters(parameters);
174 | camera.setPreviewTexture(availableSurfaceTexture);
175 | } catch (IOException exception) {
176 | camera.release();
177 | }
178 |
179 | camera.setPreviewCallbackWithBuffer(imageListener);
180 | Camera.Size s = camera.getParameters().getPreviewSize();
181 | camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
182 |
183 | textureView.setAspectRatio(s.height, s.width);
184 |
185 | camera.startPreview();
186 | }
187 |
188 | protected void stopCamera() {
189 | if (camera != null) {
190 | camera.stopPreview();
191 | camera.setPreviewCallback(null);
192 | camera.release();
193 | camera = null;
194 | }
195 | }
196 |
197 | private int getCameraId() {
198 | CameraInfo ci = new CameraInfo();
199 | for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
200 | Camera.getCameraInfo(i, ci);
201 | if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
202 | }
203 | return -1; // No camera found
204 | }
205 | }
206 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.env;
17 |
18 | import android.graphics.Bitmap;
19 | import android.graphics.Matrix;
20 | import android.os.Environment;
21 | import java.io.File;
22 | import java.io.FileOutputStream;
23 |
24 | /** Utility class for manipulating images. */
25 | public class ImageUtils {
26 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
27 | // are normalized to eight bits.
28 | static final int kMaxChannelValue = 262143;
29 |
30 | @SuppressWarnings("unused")
31 | private static final Logger LOGGER = new Logger();
32 |
33 | /**
34 | * Utility method to compute the allocated size in bytes of a YUV420SP image of the given
35 | * dimensions.
36 | */
37 | public static int getYUVByteSize(final int width, final int height) {
38 | // The luminance plane requires 1 byte per pixel.
39 | final int ySize = width * height;
40 |
41 | // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
42 | // Each 2x2 block takes 2 bytes to encode, one each for U and V.
43 | final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
44 |
45 | return ySize + uvSize;
46 | }
47 |
48 | /**
49 | * Saves a Bitmap object to disk for analysis.
50 | *
51 | * @param bitmap The bitmap to save.
52 | */
53 | public static void saveBitmap(final Bitmap bitmap) {
54 | saveBitmap(bitmap, "preview.png");
55 | }
56 |
57 | /**
58 | * Saves a Bitmap object to disk for analysis.
59 | *
60 | * @param bitmap The bitmap to save.
61 | * @param filename The location to save the bitmap to.
62 | */
63 | public static void saveBitmap(final Bitmap bitmap, final String filename) {
64 | final String root =
65 | Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
66 | LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
67 | final File myDir = new File(root);
68 |
69 | if (!myDir.mkdirs()) {
70 | LOGGER.i("Make dir failed");
71 | }
72 |
73 | final String fname = filename;
74 | final File file = new File(myDir, fname);
75 | if (file.exists()) {
76 | file.delete();
77 | }
78 | try {
79 | final FileOutputStream out = new FileOutputStream(file);
80 | bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
81 | out.flush();
82 | out.close();
83 | } catch (final Exception e) {
84 | LOGGER.e(e, "Exception!");
85 | }
86 | }
87 |
88 | public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
89 | final int frameSize = width * height;
90 | for (int j = 0, yp = 0; j < height; j++) {
91 | int uvp = frameSize + (j >> 1) * width;
92 | int u = 0;
93 | int v = 0;
94 |
95 | for (int i = 0; i < width; i++, yp++) {
96 | int y = 0xff & input[yp];
97 | if ((i & 1) == 0) {
98 | v = 0xff & input[uvp++];
99 | u = 0xff & input[uvp++];
100 | }
101 |
102 | output[yp] = YUV2RGB(y, u, v);
103 | }
104 | }
105 | }
106 |
107 | private static int YUV2RGB(int y, int u, int v) {
108 | // Adjust and check YUV values
109 | y = (y - 16) < 0 ? 0 : (y - 16);
110 | u -= 128;
111 | v -= 128;
112 |
113 | // This is the floating point equivalent. We do the conversion in integer
114 | // because some Android devices do not have floating point in hardware.
115 | // nR = (int)(1.164 * nY + 2.018 * nU);
116 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
117 | // nB = (int)(1.164 * nY + 1.596 * nV);
118 | int y1192 = 1192 * y;
119 | int r = (y1192 + 1634 * v);
120 | int g = (y1192 - 833 * v - 400 * u);
121 | int b = (y1192 + 2066 * u);
122 |
123 | // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
124 | r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
125 | g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
126 | b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
127 |
128 | return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
129 | }
130 |
131 | public static void convertYUV420ToARGB8888(
132 | byte[] yData,
133 | byte[] uData,
134 | byte[] vData,
135 | int width,
136 | int height,
137 | int yRowStride,
138 | int uvRowStride,
139 | int uvPixelStride,
140 | int[] out) {
141 | int yp = 0;
142 | for (int j = 0; j < height; j++) {
143 | int pY = yRowStride * j;
144 | int pUV = uvRowStride * (j >> 1);
145 |
146 | for (int i = 0; i < width; i++) {
147 | int uv_offset = pUV + (i >> 1) * uvPixelStride;
148 |
149 | out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
150 | }
151 | }
152 | }
153 |
154 | /**
155 | * Returns a transformation matrix from one reference frame into another. Handles cropping (if
156 | * maintaining aspect ratio is desired) and rotation.
157 | *
158 | * @param srcWidth Width of source frame.
159 | * @param srcHeight Height of source frame.
160 | * @param dstWidth Width of destination frame.
161 | * @param dstHeight Height of destination frame.
162 | * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
163 | * of 90.
164 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
165 | * cropping the image if necessary.
166 | * @return The transformation fulfilling the desired requirements.
167 | */
168 | public static Matrix getTransformationMatrix(
169 | final int srcWidth,
170 | final int srcHeight,
171 | final int dstWidth,
172 | final int dstHeight,
173 | final int applyRotation,
174 | final boolean maintainAspectRatio) {
175 | final Matrix matrix = new Matrix();
176 |
177 | if (applyRotation != 0) {
178 | if (applyRotation % 90 != 0) {
179 | LOGGER.w("Rotation of %d % 90 != 0", applyRotation);
180 | }
181 |
182 | // Translate so center of image is at origin.
183 | matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
184 |
185 | // Rotate around origin.
186 | matrix.postRotate(applyRotation);
187 | }
188 |
189 | // Account for the already applied rotation, if any, and then determine how
190 | // much scaling is needed for each axis.
191 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
192 |
193 | final int inWidth = transpose ? srcHeight : srcWidth;
194 | final int inHeight = transpose ? srcWidth : srcHeight;
195 |
196 | // Apply scaling if necessary.
197 | if (inWidth != dstWidth || inHeight != dstHeight) {
198 | final float scaleFactorX = dstWidth / (float) inWidth;
199 | final float scaleFactorY = dstHeight / (float) inHeight;
200 |
201 | if (maintainAspectRatio) {
202 | // Scale by minimum factor so that dst is filled completely while
203 | // maintaining the aspect ratio. Some image may fall off the edge.
204 | final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
205 | matrix.postScale(scaleFactor, scaleFactor);
206 | } else {
207 | // Scale exactly to fill dst from src.
208 | matrix.postScale(scaleFactorX, scaleFactorY);
209 | }
210 | }
211 |
212 | if (applyRotation != 0) {
213 | // Translate back from origin centered reference to destination frame.
214 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
215 | }
216 |
217 | return matrix;
218 | }
219 | }
220 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.tracking;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.graphics.Color;
21 | import android.graphics.Matrix;
22 | import android.graphics.Paint;
23 | import android.graphics.Paint.Cap;
24 | import android.graphics.Paint.Join;
25 | import android.graphics.Paint.Style;
26 | import android.graphics.RectF;
27 | import android.text.TextUtils;
28 | import android.util.Pair;
29 | import android.util.TypedValue;
30 | import java.util.LinkedList;
31 | import java.util.List;
32 | import java.util.Queue;
33 | import org.tensorflow.lite.examples.detection.env.BorderedText;
34 | import org.tensorflow.lite.examples.detection.env.ImageUtils;
35 | import org.tensorflow.lite.examples.detection.env.Logger;
36 | import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition;
37 |
38 | /** A tracker that handles non-max suppression and matches existing objects to new detections. */
39 | public class MultiBoxTracker {
40 | private static final float TEXT_SIZE_DIP = 18;
41 | private static final float MIN_SIZE = 16.0f;
42 | private static final int[] COLORS = {
43 | Color.BLUE,
44 | Color.RED,
45 | Color.GREEN,
46 | Color.YELLOW,
47 | Color.CYAN,
48 | Color.MAGENTA,
49 | Color.WHITE,
50 | Color.parseColor("#55FF55"),
51 | Color.parseColor("#FFA500"),
52 | Color.parseColor("#FF8888"),
53 | Color.parseColor("#AAAAFF"),
54 | Color.parseColor("#FFFFAA"),
55 | Color.parseColor("#55AAAA"),
56 | Color.parseColor("#AA33AA"),
57 | Color.parseColor("#0D0068")
58 | };
59 | final List> screenRects = new LinkedList>();
60 | private final Logger logger = new Logger();
61 | private final Queue availableColors = new LinkedList();
62 | private final List trackedObjects = new LinkedList();
63 | private final Paint boxPaint = new Paint();
64 | private final float textSizePx;
65 | private final BorderedText borderedText;
66 | private Matrix frameToCanvasMatrix;
67 | private int frameWidth;
68 | private int frameHeight;
69 | private int sensorOrientation;
70 |
71 | public MultiBoxTracker(final Context context) {
72 | for (final int color : COLORS) {
73 | availableColors.add(color);
74 | }
75 |
76 | boxPaint.setColor(Color.RED);
77 | boxPaint.setStyle(Style.STROKE);
78 | boxPaint.setStrokeWidth(10.0f);
79 | boxPaint.setStrokeCap(Cap.ROUND);
80 | boxPaint.setStrokeJoin(Join.ROUND);
81 | boxPaint.setStrokeMiter(100);
82 |
83 | textSizePx =
84 | TypedValue.applyDimension(
85 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
86 | borderedText = new BorderedText(textSizePx);
87 | }
88 |
89 | public synchronized void setFrameConfiguration(
90 | final int width, final int height, final int sensorOrientation) {
91 | frameWidth = width;
92 | frameHeight = height;
93 | this.sensorOrientation = sensorOrientation;
94 | }
95 |
96 | public synchronized void drawDebug(final Canvas canvas) {
97 | final Paint textPaint = new Paint();
98 | textPaint.setColor(Color.WHITE);
99 | textPaint.setTextSize(60.0f);
100 |
101 | final Paint boxPaint = new Paint();
102 | boxPaint.setColor(Color.RED);
103 | boxPaint.setAlpha(200);
104 | boxPaint.setStyle(Style.STROKE);
105 |
106 | for (final Pair detection : screenRects) {
107 | final RectF rect = detection.second;
108 | canvas.drawRect(rect, boxPaint);
109 | canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
110 | borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
111 | }
112 | }
113 |
114 | public synchronized void trackResults(final List results, final long timestamp) {
115 | logger.i("Processing %d results from %d", results.size(), timestamp);
116 | processResults(results);
117 | }
118 |
119 | private Matrix getFrameToCanvasMatrix() {
120 | return frameToCanvasMatrix;
121 | }
122 |
123 | public synchronized void draw(final Canvas canvas) {
124 | final boolean rotated = sensorOrientation % 180 == 90;
125 | final float multiplier =
126 | Math.min(
127 | canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
128 | canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
129 | frameToCanvasMatrix =
130 | ImageUtils.getTransformationMatrix(
131 | frameWidth,
132 | frameHeight,
133 | (int) (multiplier * (rotated ? frameHeight : frameWidth)),
134 | (int) (multiplier * (rotated ? frameWidth : frameHeight)),
135 | sensorOrientation,
136 | false);
137 | for (final TrackedRecognition recognition : trackedObjects) {
138 | final RectF trackedPos = new RectF(recognition.location);
139 |
140 | getFrameToCanvasMatrix().mapRect(trackedPos);
141 | boxPaint.setColor(recognition.color);
142 |
143 | float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
144 | canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
145 |
146 | final String labelString =
147 | !TextUtils.isEmpty(recognition.title)
148 | ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
149 | : String.format("%.2f", (100 * recognition.detectionConfidence));
150 | // borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
151 | // labelString);
152 | borderedText.drawText(
153 | canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
154 | }
155 | }
156 |
157 | private void processResults(final List results) {
158 | final List> rectsToTrack = new LinkedList>();
159 |
160 | screenRects.clear();
161 | final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());
162 |
163 | for (final Recognition result : results) {
164 | if (result.getLocation() == null) {
165 | continue;
166 | }
167 | final RectF detectionFrameRect = new RectF(result.getLocation());
168 |
169 | final RectF detectionScreenRect = new RectF();
170 | rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
171 |
172 | logger.v(
173 | "Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
174 |
175 | screenRects.add(new Pair(result.getConfidence(), detectionScreenRect));
176 |
177 | if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
178 | logger.w("Degenerate rectangle! " + detectionFrameRect);
179 | continue;
180 | }
181 |
182 | rectsToTrack.add(new Pair(result.getConfidence(), result));
183 | }
184 |
185 | trackedObjects.clear();
186 | if (rectsToTrack.isEmpty()) {
187 | logger.v("Nothing to track, aborting.");
188 | return;
189 | }
190 |
191 | for (final Pair potential : rectsToTrack) {
192 | final TrackedRecognition trackedRecognition = new TrackedRecognition();
193 | trackedRecognition.detectionConfidence = potential.first;
194 | trackedRecognition.location = new RectF(potential.second.getLocation());
195 | trackedRecognition.title = potential.second.getTitle();
196 | trackedRecognition.color = COLORS[trackedObjects.size()];
197 | trackedObjects.add(trackedRecognition);
198 |
199 | if (trackedObjects.size() >= COLORS.length) {
200 | break;
201 | }
202 | }
203 | }
204 |
205 | private static class TrackedRecognition {
206 | RectF location;
207 | float detectionConfidence;
208 | int color;
209 | String title;
210 | }
211 | }
212 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.lite.examples.detection;
18 |
19 | import android.graphics.Bitmap;
20 | import android.graphics.Bitmap.Config;
21 | import android.graphics.Canvas;
22 | import android.graphics.Color;
23 | import android.graphics.Matrix;
24 | import android.graphics.Paint;
25 | import android.graphics.Paint.Style;
26 | import android.graphics.RectF;
27 | import android.graphics.Typeface;
28 | import android.media.ImageReader.OnImageAvailableListener;
29 | import android.os.SystemClock;
30 | import android.util.Size;
31 | import android.util.TypedValue;
32 | import android.widget.Toast;
33 | import java.io.IOException;
34 | import java.util.ArrayList;
35 | import java.util.List;
36 | import org.tensorflow.lite.examples.detection.customview.OverlayView;
37 | import org.tensorflow.lite.examples.detection.customview.OverlayView.DrawCallback;
38 | import org.tensorflow.lite.examples.detection.env.BorderedText;
39 | import org.tensorflow.lite.examples.detection.env.ImageUtils;
40 | import org.tensorflow.lite.examples.detection.env.Logger;
41 | import org.tensorflow.lite.examples.detection.tflite.Detector;
42 | import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;
43 | import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker;
44 |
45 | /**
46 | * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
47 | * objects.
48 | */
49 | public class DetectorActivity extends CameraActivity implements OnImageAvailableListener {
50 | private static final Logger LOGGER = new Logger();
51 |
52 | // Configuration values for the prepackaged SSD model.
53 | private static final int TF_OD_API_INPUT_SIZE = 320;
54 | private static final boolean TF_OD_API_IS_QUANTIZED = true;
55 | private static final String TF_OD_API_MODEL_FILE = "detect.tflite";
56 | private static final String TF_OD_API_LABELS_FILE = "labelmap.txt";
57 | private static final DetectorMode MODE = DetectorMode.TF_OD_API;
58 | // Minimum detection confidence to track a detection.
59 | private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
60 | private static final boolean MAINTAIN_ASPECT = false;
61 | private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
62 | private static final boolean SAVE_PREVIEW_BITMAP = false;
63 | private static final float TEXT_SIZE_DIP = 10;
64 | OverlayView trackingOverlay;
65 | private Integer sensorOrientation;
66 |
67 | private Detector detector;
68 |
69 | private long lastProcessingTimeMs;
70 | private Bitmap rgbFrameBitmap = null;
71 | private Bitmap croppedBitmap = null;
72 | private Bitmap cropCopyBitmap = null;
73 |
74 | private boolean computingDetection = false;
75 |
76 | private long timestamp = 0;
77 |
78 | private Matrix frameToCropTransform;
79 | private Matrix cropToFrameTransform;
80 |
81 | private MultiBoxTracker tracker;
82 |
83 | private BorderedText borderedText;
84 |
85 | @Override
86 | public void onPreviewSizeChosen(final Size size, final int rotation) {
87 | final float textSizePx =
88 | TypedValue.applyDimension(
89 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
90 | borderedText = new BorderedText(textSizePx);
91 | borderedText.setTypeface(Typeface.MONOSPACE);
92 |
93 | tracker = new MultiBoxTracker(this);
94 |
95 | int cropSize = TF_OD_API_INPUT_SIZE;
96 |
97 | try {
98 | detector =
99 | TFLiteObjectDetectionAPIModel.create(
100 | this,
101 | TF_OD_API_MODEL_FILE,
102 | TF_OD_API_LABELS_FILE,
103 | TF_OD_API_INPUT_SIZE,
104 | TF_OD_API_IS_QUANTIZED);
105 | cropSize = TF_OD_API_INPUT_SIZE;
106 | } catch (final IOException e) {
107 | e.printStackTrace();
108 | LOGGER.e(e, "Exception initializing Detector!");
109 | Toast toast =
110 | Toast.makeText(
111 | getApplicationContext(), "Detector could not be initialized", Toast.LENGTH_SHORT);
112 | toast.show();
113 | finish();
114 | }
115 |
116 | previewWidth = size.getWidth();
117 | previewHeight = size.getHeight();
118 |
119 | sensorOrientation = rotation - getScreenOrientation();
120 | LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
121 |
122 | LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
123 | rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
124 | croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
125 |
126 | frameToCropTransform =
127 | ImageUtils.getTransformationMatrix(
128 | previewWidth, previewHeight,
129 | cropSize, cropSize,
130 | sensorOrientation, MAINTAIN_ASPECT);
131 |
132 | cropToFrameTransform = new Matrix();
133 | frameToCropTransform.invert(cropToFrameTransform);
134 |
135 | trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
136 | trackingOverlay.addCallback(
137 | new DrawCallback() {
138 | @Override
139 | public void drawCallback(final Canvas canvas) {
140 | tracker.draw(canvas);
141 | if (isDebug()) {
142 | tracker.drawDebug(canvas);
143 | }
144 | }
145 | });
146 |
147 | tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation);
148 | }
149 |
150 | @Override
151 | protected void processImage() {
152 | ++timestamp;
153 | final long currTimestamp = timestamp;
154 | trackingOverlay.postInvalidate();
155 |
156 | // No mutex needed as this method is not reentrant.
157 | if (computingDetection) {
158 | readyForNextImage();
159 | return;
160 | }
161 | computingDetection = true;
162 | LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
163 |
164 | rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
165 |
166 | readyForNextImage();
167 |
168 | final Canvas canvas = new Canvas(croppedBitmap);
169 | canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
170 | // For examining the actual TF input.
171 | if (SAVE_PREVIEW_BITMAP) {
172 | ImageUtils.saveBitmap(croppedBitmap);
173 | }
174 |
175 | runInBackground(
176 | new Runnable() {
177 | @Override
178 | public void run() {
179 | LOGGER.i("Running detection on image " + currTimestamp);
180 | final long startTime = SystemClock.uptimeMillis();
181 | final List results = detector.recognizeImage(croppedBitmap);
182 | lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
183 |
184 | cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
185 | final Canvas canvas = new Canvas(cropCopyBitmap);
186 | final Paint paint = new Paint();
187 | paint.setColor(Color.RED);
188 | paint.setStyle(Style.STROKE);
189 | paint.setStrokeWidth(2.0f);
190 |
191 | float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
192 | switch (MODE) {
193 | case TF_OD_API:
194 | minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
195 | break;
196 | }
197 |
198 | final List mappedRecognitions =
199 | new ArrayList();
200 |
201 | for (final Detector.Recognition result : results) {
202 | final RectF location = result.getLocation();
203 | if (location != null && result.getConfidence() >= minimumConfidence) {
204 | canvas.drawRect(location, paint);
205 |
206 | cropToFrameTransform.mapRect(location);
207 |
208 | result.setLocation(location);
209 | mappedRecognitions.add(result);
210 | }
211 | }
212 |
213 | tracker.trackResults(mappedRecognitions, currTimestamp);
214 | trackingOverlay.postInvalidate();
215 |
216 | computingDetection = false;
217 |
218 | runOnUiThread(
219 | new Runnable() {
220 | @Override
221 | public void run() {
222 | showFrameInfo(previewWidth + "x" + previewHeight);
223 | showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
224 | showInference(lastProcessingTimeMs + "ms");
225 | }
226 | });
227 | }
228 | });
229 | }
230 |
231 | @Override
232 | protected int getLayoutId() {
233 | return R.layout.tfe_od_camera_connection_fragment_tracking;
234 | }
235 |
236 | @Override
237 | protected Size getDesiredPreviewFrameSize() {
238 | return DESIRED_PREVIEW_SIZE;
239 | }
240 |
241 | // Which detection model to use: by default uses Tensorflow Object Detection API frozen
242 | // checkpoints.
243 | private enum DetectorMode {
244 | TF_OD_API;
245 | }
246 |
247 | @Override
248 | protected void setUseNNAPI(final boolean isChecked) {
249 | runInBackground(
250 | () -> {
251 | try {
252 | detector.setUseNNAPI(isChecked);
253 | } catch (UnsupportedOperationException e) {
254 | LOGGER.e(e, "Failed to set \"Use NNAPI\".");
255 | runOnUiThread(
256 | () -> {
257 | Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
258 | });
259 | }
260 | });
261 | }
262 |
263 | @Override
264 | protected void setNumThreads(final int numThreads) {
265 | runInBackground(() -> detector.setNumThreads(numThreads));
266 | }
267 | }
268 |
--------------------------------------------------------------------------------
/android/lib_interpreter/src/main/java/org/tensorflow/lite/examples/detection/tflite/TFLiteObjectDetectionAPIModel.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.lite.examples.detection.tflite;
17 |
18 | import static java.lang.Math.min;
19 |
20 | import android.content.Context;
21 | import android.content.res.AssetFileDescriptor;
22 | import android.content.res.AssetManager;
23 | import android.graphics.Bitmap;
24 | import android.graphics.RectF;
25 | import android.os.Trace;
26 | import android.util.Log;
27 | import java.io.BufferedReader;
28 | import java.io.FileInputStream;
29 | import java.io.IOException;
30 | import java.io.InputStreamReader;
31 | import java.nio.ByteBuffer;
32 | import java.nio.ByteOrder;
33 | import java.nio.MappedByteBuffer;
34 | import java.nio.channels.FileChannel;
35 | import java.nio.charset.Charset;
36 | import java.util.ArrayList;
37 | import java.util.HashMap;
38 | import java.util.List;
39 | import java.util.Map;
40 | import org.tensorflow.lite.Interpreter;
41 | import org.tensorflow.lite.support.metadata.MetadataExtractor;
42 |
43 | /**
44 | * Wrapper for frozen detection models trained using the Tensorflow Object Detection API: -
45 | * https://github.com/tensorflow/models/tree/master/research/object_detection where you can find the
46 | * training code.
47 | *
48 | * To use pretrained models in the API or convert to TF Lite models, please see docs for details:
49 | * -
50 | * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1_detection_zoo.md
51 | * -
52 | * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf2_detection_zoo.md
53 | * -
54 | * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
55 | */
56 | public class TFLiteObjectDetectionAPIModel implements Detector {
57 | private static final String TAG = "TFLiteObjectDetectionAPIModelWithInterpreter";
58 |
59 | // Only return this many results.
60 | private static final int NUM_DETECTIONS = 10;
61 | // Float model
62 | private static final float IMAGE_MEAN = 127.5f;
63 | private static final float IMAGE_STD = 127.5f;
64 | // Number of threads in the java app
65 | private static final int NUM_THREADS = 4;
66 | private boolean isModelQuantized;
67 | // Config values.
68 | private int inputSize;
69 | // Pre-allocated buffers.
70 | private final List labels = new ArrayList<>();
71 | private int[] intValues;
72 | // outputLocations: array of shape [Batchsize, NUM_DETECTIONS,4]
73 | // contains the location of detected boxes
74 | private float[][][] outputLocations;
75 | // outputClasses: array of shape [Batchsize, NUM_DETECTIONS]
76 | // contains the classes of detected boxes
77 | private float[][] outputClasses;
78 | // outputScores: array of shape [Batchsize, NUM_DETECTIONS]
79 | // contains the scores of detected boxes
80 | private float[][] outputScores;
81 | // numDetections: array of shape [Batchsize]
82 | // contains the number of detected boxes
83 | private float[] numDetections;
84 |
85 | private ByteBuffer imgData;
86 |
87 | private MappedByteBuffer tfLiteModel;
88 | private Interpreter.Options tfLiteOptions;
89 | private Interpreter tfLite;
90 |
91 | private TFLiteObjectDetectionAPIModel() {}
92 |
93 | /** Memory-map the model file in Assets. */
94 | private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
95 | throws IOException {
96 | AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
97 | FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
98 | FileChannel fileChannel = inputStream.getChannel();
99 | long startOffset = fileDescriptor.getStartOffset();
100 | long declaredLength = fileDescriptor.getDeclaredLength();
101 | return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
102 | }
103 |
104 | /**
105 | * Initializes a native TensorFlow session for classifying images.
106 | *
107 | * @param modelFilename The model file path relative to the assets folder
108 | * @param labelFilename The label file path relative to the assets folder
109 | * @param inputSize The size of image input
110 | * @param isQuantized Boolean representing model is quantized or not
111 | */
112 | public static Detector create(
113 | final Context context,
114 | final String modelFilename,
115 | final String labelFilename,
116 | final int inputSize,
117 | final boolean isQuantized)
118 | throws IOException {
119 | final TFLiteObjectDetectionAPIModel d = new TFLiteObjectDetectionAPIModel();
120 |
121 | MappedByteBuffer modelFile = loadModelFile(context.getAssets(), modelFilename);
122 | MetadataExtractor metadata = new MetadataExtractor(modelFile);
123 | try (BufferedReader br =
124 | new BufferedReader(
125 | new InputStreamReader(
126 | metadata.getAssociatedFile(labelFilename), Charset.defaultCharset()))) {
127 | String line;
128 | while ((line = br.readLine()) != null) {
129 | Log.w(TAG, line);
130 | d.labels.add(line);
131 | }
132 | }
133 |
134 | d.inputSize = inputSize;
135 |
136 | try {
137 | Interpreter.Options options = new Interpreter.Options();
138 | options.setNumThreads(NUM_THREADS);
139 | options.setUseXNNPACK(true);
140 | d.tfLite = new Interpreter(modelFile, options);
141 | d.tfLiteModel = modelFile;
142 | d.tfLiteOptions = options;
143 | } catch (Exception e) {
144 | throw new RuntimeException(e);
145 | }
146 |
147 | d.isModelQuantized = isQuantized;
148 | // Pre-allocate buffers.
149 | int numBytesPerChannel;
150 | if (isQuantized) {
151 | numBytesPerChannel = 1; // Quantized
152 | } else {
153 | numBytesPerChannel = 4; // Floating point
154 | }
155 | d.imgData = ByteBuffer.allocateDirect(1 * d.inputSize * d.inputSize * 3 * numBytesPerChannel);
156 | d.imgData.order(ByteOrder.nativeOrder());
157 | d.intValues = new int[d.inputSize * d.inputSize];
158 |
159 | d.outputLocations = new float[1][NUM_DETECTIONS][4];
160 | d.outputClasses = new float[1][NUM_DETECTIONS];
161 | d.outputScores = new float[1][NUM_DETECTIONS];
162 | d.numDetections = new float[1];
163 | return d;
164 | }
165 |
166 | @Override
167 | public List recognizeImage(final Bitmap bitmap) {
168 | // Log this method so that it can be analyzed with systrace.
169 | Trace.beginSection("recognizeImage");
170 |
171 | Trace.beginSection("preprocessBitmap");
172 | // Preprocess the image data from 0-255 int to normalized float based
173 | // on the provided parameters.
174 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
175 |
176 | imgData.rewind();
177 | for (int i = 0; i < inputSize; ++i) {
178 | for (int j = 0; j < inputSize; ++j) {
179 | int pixelValue = intValues[i * inputSize + j];
180 | if (isModelQuantized) {
181 | // Quantized model
182 | imgData.put((byte) ((pixelValue >> 16) & 0xFF));
183 | imgData.put((byte) ((pixelValue >> 8) & 0xFF));
184 | imgData.put((byte) (pixelValue & 0xFF));
185 | } else { // Float model
186 | imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
187 | imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
188 | imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
189 | }
190 | }
191 | }
192 | Trace.endSection(); // preprocessBitmap
193 |
194 | // Copy the input data into TensorFlow.
195 | Trace.beginSection("feed");
196 | outputLocations = new float[1][NUM_DETECTIONS][4];
197 | outputClasses = new float[1][NUM_DETECTIONS];
198 | outputScores = new float[1][NUM_DETECTIONS];
199 | numDetections = new float[1];
200 |
201 | Object[] inputArray = {imgData};
202 | Map outputMap = new HashMap<>();
203 | outputMap.put(0, outputLocations);
204 | outputMap.put(1, outputClasses);
205 | outputMap.put(2, outputScores);
206 | outputMap.put(3, numDetections);
207 | Trace.endSection();
208 |
209 | // Run the inference call.
210 | Trace.beginSection("run");
211 | tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
212 | Trace.endSection();
213 |
214 | // Show the best detections.
215 | // after scaling them back to the input size.
216 | // You need to use the number of detections from the output and not the NUM_DETECTONS variable
217 | // declared on top
218 | // because on some models, they don't always output the same total number of detections
219 | // For example, your model's NUM_DETECTIONS = 20, but sometimes it only outputs 16 predictions
220 | // If you don't use the output's numDetections, you'll get nonsensical data
221 | int numDetectionsOutput =
222 | min(
223 | NUM_DETECTIONS,
224 | (int) numDetections[0]); // cast from float to integer, use min for safety
225 |
226 | final ArrayList recognitions = new ArrayList<>(numDetectionsOutput);
227 | for (int i = 0; i < numDetectionsOutput; ++i) {
228 | final RectF detection =
229 | new RectF(
230 | outputLocations[0][i][1] * inputSize,
231 | outputLocations[0][i][0] * inputSize,
232 | outputLocations[0][i][3] * inputSize,
233 | outputLocations[0][i][2] * inputSize);
234 |
235 | recognitions.add(
236 | new Recognition(
237 | "" + i, labels.get((int) outputClasses[0][i]), outputScores[0][i], detection));
238 | }
239 | Trace.endSection(); // "recognizeImage"
240 | return recognitions;
241 | }
242 |
243 | @Override
244 | public void enableStatLogging(final boolean logStats) {}
245 |
246 | @Override
247 | public String getStatString() {
248 | return "";
249 | }
250 |
251 | @Override
252 | public void close() {
253 | if (tfLite != null) {
254 | tfLite.close();
255 | tfLite = null;
256 | }
257 | }
258 |
259 | @Override
260 | public void setNumThreads(int numThreads) {
261 | if (tfLite != null) {
262 | tfLiteOptions.setNumThreads(numThreads);
263 | recreateInterpreter();
264 | }
265 | }
266 |
267 | @Override
268 | public void setUseNNAPI(boolean isChecked) {
269 | if (tfLite != null) {
270 | tfLiteOptions.setUseNNAPI(isChecked);
271 | recreateInterpreter();
272 | }
273 | }
274 |
275 | private void recreateInterpreter() {
276 | tfLite.close();
277 | tfLite = new Interpreter(tfLiteModel, tfLiteOptions);
278 | }
279 | }
280 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.lite.examples.detection;
18 |
19 | import android.Manifest;
20 | import android.app.Fragment;
21 | import android.content.Context;
22 | import android.content.pm.PackageManager;
23 | import android.hardware.Camera;
24 | import android.hardware.camera2.CameraAccessException;
25 | import android.hardware.camera2.CameraCharacteristics;
26 | import android.hardware.camera2.CameraManager;
27 | import android.hardware.camera2.params.StreamConfigurationMap;
28 | import android.media.Image;
29 | import android.media.Image.Plane;
30 | import android.media.ImageReader;
31 | import android.media.ImageReader.OnImageAvailableListener;
32 | import android.os.Build;
33 | import android.os.Bundle;
34 | import android.os.Handler;
35 | import android.os.HandlerThread;
36 | import android.os.Trace;
37 | import androidx.appcompat.app.AppCompatActivity;
38 | import androidx.appcompat.widget.SwitchCompat;
39 | import androidx.appcompat.widget.Toolbar;
40 | import android.util.Size;
41 | import android.view.Surface;
42 | import android.view.View;
43 | import android.view.ViewTreeObserver;
44 | import android.view.WindowManager;
45 | import android.widget.CompoundButton;
46 | import android.widget.ImageView;
47 | import android.widget.LinearLayout;
48 | import android.widget.TextView;
49 | import android.widget.Toast;
50 | import androidx.annotation.NonNull;
51 | import com.google.android.material.bottomsheet.BottomSheetBehavior;
52 | import java.nio.ByteBuffer;
53 | import org.tensorflow.lite.examples.detection.env.ImageUtils;
54 | import org.tensorflow.lite.examples.detection.env.Logger;
55 |
56 | public abstract class CameraActivity extends AppCompatActivity
57 | implements OnImageAvailableListener,
58 | Camera.PreviewCallback,
59 | CompoundButton.OnCheckedChangeListener,
60 | View.OnClickListener {
61 | private static final Logger LOGGER = new Logger();
62 |
63 | private static final int PERMISSIONS_REQUEST = 1;
64 |
65 | private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
66 | protected int previewWidth = 0;
67 | protected int previewHeight = 0;
68 | private boolean debug = false;
69 | private Handler handler;
70 | private HandlerThread handlerThread;
71 | private boolean useCamera2API;
72 | private boolean isProcessingFrame = false;
73 | private byte[][] yuvBytes = new byte[3][];
74 | private int[] rgbBytes = null;
75 | private int yRowStride;
76 | private Runnable postInferenceCallback;
77 | private Runnable imageConverter;
78 |
79 | private LinearLayout bottomSheetLayout;
80 | private LinearLayout gestureLayout;
81 | private BottomSheetBehavior sheetBehavior;
82 |
83 | protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView;
84 | protected ImageView bottomSheetArrowImageView;
85 | private ImageView plusImageView, minusImageView;
86 | private SwitchCompat apiSwitchCompat;
87 | private TextView threadsTextView;
88 |
  /**
   * Inflates the camera UI, wires up the bottom-sheet controls, and either starts the
   * camera fragment (when the camera permission is already held) or requests it.
   */
  @Override
  protected void onCreate(final Bundle savedInstanceState) {
    LOGGER.d("onCreate " + this);
    // NOTE(review): passes null instead of savedInstanceState, so previously saved
    // fragment/view state is never restored — confirm this is intentional.
    super.onCreate(null);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.tfe_od_activity_camera);
    Toolbar toolbar = findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    getSupportActionBar().setDisplayShowTitleEnabled(false);

    if (hasPermission()) {
      setFragment();
    } else {
      requestPermission();
    }

    threadsTextView = findViewById(R.id.threads);
    plusImageView = findViewById(R.id.plus);
    minusImageView = findViewById(R.id.minus);
    apiSwitchCompat = findViewById(R.id.api_info_switch);
    bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
    gestureLayout = findViewById(R.id.gesture_layout);
    sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout);
    bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow);

    // One-shot layout listener: once the gesture area has been measured, use its
    // height as the sheet's collapsed (peek) height, then unregister.
    ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
    vto.addOnGlobalLayoutListener(
        new ViewTreeObserver.OnGlobalLayoutListener() {
          @Override
          public void onGlobalLayout() {
            // removeGlobalOnLayoutListener was renamed in API 16 (JELLY_BEAN).
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
              gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
            } else {
              gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
            }
            // int width = bottomSheetLayout.getMeasuredWidth();
            int height = gestureLayout.getMeasuredHeight();

            sheetBehavior.setPeekHeight(height);
          }
        });
    sheetBehavior.setHideable(false);

    // Flip the chevron icon to match the sheet's expanded/collapsed state.
    sheetBehavior.setBottomSheetCallback(
        new BottomSheetBehavior.BottomSheetCallback() {
          @Override
          public void onStateChanged(@NonNull View bottomSheet, int newState) {
            switch (newState) {
              case BottomSheetBehavior.STATE_HIDDEN:
                break;
              case BottomSheetBehavior.STATE_EXPANDED:
                {
                  bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
                }
                break;
              case BottomSheetBehavior.STATE_COLLAPSED:
                {
                  bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
                }
                break;
              case BottomSheetBehavior.STATE_DRAGGING:
                break;
              case BottomSheetBehavior.STATE_SETTLING:
                bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
                break;
            }
          }

          @Override
          public void onSlide(@NonNull View bottomSheet, float slideOffset) {}
        });

    frameValueTextView = findViewById(R.id.frame_info);
    cropValueTextView = findViewById(R.id.crop_info);
    inferenceTimeTextView = findViewById(R.id.inference_info);

    apiSwitchCompat.setOnCheckedChangeListener(this);

    plusImageView.setOnClickListener(this);
    minusImageView.setOnClickListener(this);
  }
171 |
  /**
   * Runs the pending YUV-to-ARGB conversion for the latest frame and returns the
   * converted pixel array. Only valid after a frame callback has set imageConverter.
   */
  protected int[] getRgbBytes() {
    imageConverter.run();
    return rgbBytes;
  }
176 |
  /** Returns the row stride of the luminance (Y) plane of the latest frame. */
  protected int getLuminanceStride() {
    return yRowStride;
  }
180 |
  /** Returns the raw luminance (Y) plane bytes of the latest frame. */
  protected byte[] getLuminance() {
    return yuvBytes[0];
  }
184 |
  /**
   * Callback for android.hardware.Camera API. Records the frame buffer, sets up the
   * lazy YUV420SP-to-ARGB converter and the post-inference cleanup callback, then
   * kicks off processImage(). Frames arriving while one is in flight are dropped.
   */
  @Override
  public void onPreviewFrame(final byte[] bytes, final Camera camera) {
    if (isProcessingFrame) {
      LOGGER.w("Dropping frame!");
      return;
    }

    try {
      // Initialize the storage bitmaps once when the resolution is known.
      if (rgbBytes == null) {
        Camera.Size previewSize = camera.getParameters().getPreviewSize();
        previewHeight = previewSize.height;
        previewWidth = previewSize.width;
        rgbBytes = new int[previewWidth * previewHeight];
        // Rotation is hard-coded to 90 — presumably a portrait device with the back
        // camera; TODO(review) confirm for other orientations.
        onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
      }
    } catch (final Exception e) {
      LOGGER.e(e, "Exception!");
      return;
    }

    isProcessingFrame = true;
    yuvBytes[0] = bytes;
    yRowStride = previewWidth;

    // Conversion runs lazily, only when getRgbBytes() is actually called.
    imageConverter =
        new Runnable() {
          @Override
          public void run() {
            ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
          }
        };

    // Returns the buffer to the camera and clears the in-flight flag once inference
    // is done (invoked via readyForNextImage()).
    postInferenceCallback =
        new Runnable() {
          @Override
          public void run() {
            camera.addCallbackBuffer(bytes);
            isProcessingFrame = false;
          }
        };
    processImage();
  }
229 |
  /**
   * Callback for Camera2 API. Acquires the latest image, captures its YUV planes, sets
   * up the lazy YUV420-to-ARGB converter and the post-inference cleanup (which closes
   * the image), then runs processImage(). Frames are dropped while one is in flight.
   */
  @Override
  public void onImageAvailable(final ImageReader reader) {
    // We need wait until we have some size from onPreviewSizeChosen
    if (previewWidth == 0 || previewHeight == 0) {
      return;
    }
    if (rgbBytes == null) {
      rgbBytes = new int[previewWidth * previewHeight];
    }
    try {
      final Image image = reader.acquireLatestImage();

      if (image == null) {
        return;
      }

      // Must close the image before bailing out, or the reader's buffer queue fills.
      if (isProcessingFrame) {
        image.close();
        return;
      }
      isProcessingFrame = true;
      Trace.beginSection("imageAvailable");
      final Plane[] planes = image.getPlanes();
      fillBytes(planes, yuvBytes);
      yRowStride = planes[0].getRowStride();
      final int uvRowStride = planes[1].getRowStride();
      final int uvPixelStride = planes[1].getPixelStride();

      // Conversion runs lazily, only when getRgbBytes() is actually called.
      imageConverter =
          new Runnable() {
            @Override
            public void run() {
              ImageUtils.convertYUV420ToARGB8888(
                  yuvBytes[0],
                  yuvBytes[1],
                  yuvBytes[2],
                  previewWidth,
                  previewHeight,
                  yRowStride,
                  uvRowStride,
                  uvPixelStride,
                  rgbBytes);
            }
          };

      // Releases the image and clears the in-flight flag once inference is done.
      postInferenceCallback =
          new Runnable() {
            @Override
            public void run() {
              image.close();
              isProcessingFrame = false;
            }
          };

      processImage();
    } catch (final Exception e) {
      LOGGER.e(e, "Exception!");
      Trace.endSection();
      return;
    }
    Trace.endSection();
  }
293 |
  /** Logs the lifecycle transition; no extra work beyond the superclass. */
  @Override
  public synchronized void onStart() {
    LOGGER.d("onStart " + this);
    super.onStart();
  }
299 |
  /** Starts the dedicated background thread and handler used for inference work. */
  @Override
  public synchronized void onResume() {
    LOGGER.d("onResume " + this);
    super.onResume();

    handlerThread = new HandlerThread("inference");
    handlerThread.start();
    handler = new Handler(handlerThread.getLooper());
  }
309 |
  /**
   * Shuts down the inference thread, waiting for queued work to drain, before pausing.
   * Synchronized with runInBackground() so no post races with the teardown.
   */
  @Override
  public synchronized void onPause() {
    LOGGER.d("onPause " + this);

    handlerThread.quitSafely();
    try {
      handlerThread.join();
      handlerThread = null;
      handler = null;
    } catch (final InterruptedException e) {
      // NOTE(review): if join() is interrupted, handlerThread/handler are left
      // non-null pointing at a quit thread — confirm acceptable.
      LOGGER.e(e, "Exception!");
    }

    super.onPause();
  }
325 |
  /** Logs the lifecycle transition; no extra work beyond the superclass. */
  @Override
  public synchronized void onStop() {
    LOGGER.d("onStop " + this);
    super.onStop();
  }
331 |
  /** Logs the lifecycle transition; no extra work beyond the superclass. */
  @Override
  public synchronized void onDestroy() {
    LOGGER.d("onDestroy " + this);
    super.onDestroy();
  }
337 |
  /**
   * Posts r to the inference thread. Silently drops the task when the handler is null
   * (i.e. outside the onResume/onPause window).
   */
  protected synchronized void runInBackground(final Runnable r) {
    if (handler != null) {
      handler.post(r);
    }
  }
343 |
344 | @Override
345 | public void onRequestPermissionsResult(
346 | final int requestCode, final String[] permissions, final int[] grantResults) {
347 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
348 | if (requestCode == PERMISSIONS_REQUEST) {
349 | if (allPermissionsGranted(grantResults)) {
350 | setFragment();
351 | } else {
352 | requestPermission();
353 | }
354 | }
355 | }
356 |
357 | private static boolean allPermissionsGranted(final int[] grantResults) {
358 | for (int result : grantResults) {
359 | if (result != PackageManager.PERMISSION_GRANTED) {
360 | return false;
361 | }
362 | }
363 | return true;
364 | }
365 |
  /**
   * Returns whether the camera permission is held. Pre-M permissions are granted at
   * install time, so this is trivially true there.
   */
  private boolean hasPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
      return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
    } else {
      return true;
    }
  }
373 |
  /**
   * Requests the camera permission (M+ only), first showing a rationale toast when the
   * platform says the user previously declined.
   */
  private void requestPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
      if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
        Toast.makeText(
                CameraActivity.this,
                "Camera permission is required for this demo",
                Toast.LENGTH_LONG)
            .show();
      }
      requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
    }
  }
386 |
  // Returns true if the device supports the required hardware level, or better.
  private boolean isHardwareLevelSupported(
      CameraCharacteristics characteristics, int requiredLevel) {
    // NOTE(review): get() returns a boxed Integer; auto-unboxing would NPE if the key
    // were ever absent — presumably always present for this key, confirm.
    int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
    if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
      // LEGACY does not fit the numeric ordering, so require an exact match.
      return requiredLevel == deviceLevel;
    }
    // deviceLevel is not LEGACY, can use numerical sort
    return requiredLevel <= deviceLevel;
  }
397 |
398 | private String chooseCamera() {
399 | final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
400 | try {
401 | for (final String cameraId : manager.getCameraIdList()) {
402 | final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
403 |
404 | // We don't use a front facing camera in this sample.
405 | final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
406 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
407 | continue;
408 | }
409 |
410 | final StreamConfigurationMap map =
411 | characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
412 |
413 | if (map == null) {
414 | continue;
415 | }
416 |
417 | // Fallback to camera1 API for internal cameras that don't have full support.
418 | // This should help with legacy situations where using the camera2 API causes
419 | // distorted or otherwise broken previews.
420 | useCamera2API =
421 | (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
422 | || isHardwareLevelSupported(
423 | characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
424 | LOGGER.i("Camera API lv2?: %s", useCamera2API);
425 | return cameraId;
426 | }
427 | } catch (CameraAccessException e) {
428 | LOGGER.e(e, "Not allowed to access camera");
429 | }
430 |
431 | return null;
432 | }
433 |
434 | protected void setFragment() {
435 | String cameraId = chooseCamera();
436 |
437 | Fragment fragment;
438 | if (useCamera2API) {
439 | CameraConnectionFragment camera2Fragment =
440 | CameraConnectionFragment.newInstance(
441 | new CameraConnectionFragment.ConnectionCallback() {
442 | @Override
443 | public void onPreviewSizeChosen(final Size size, final int rotation) {
444 | previewHeight = size.getHeight();
445 | previewWidth = size.getWidth();
446 | CameraActivity.this.onPreviewSizeChosen(size, rotation);
447 | }
448 | },
449 | this,
450 | getLayoutId(),
451 | getDesiredPreviewFrameSize());
452 |
453 | camera2Fragment.setCamera(cameraId);
454 | fragment = camera2Fragment;
455 | } else {
456 | fragment =
457 | new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
458 | }
459 |
460 | getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
461 | }
462 |
463 | protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
464 | // Because of the variable row stride it's not possible to know in
465 | // advance the actual necessary dimensions of the yuv planes.
466 | for (int i = 0; i < planes.length; ++i) {
467 | final ByteBuffer buffer = planes[i].getBuffer();
468 | if (yuvBytes[i] == null) {
469 | LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
470 | yuvBytes[i] = new byte[buffer.capacity()];
471 | }
472 | buffer.get(yuvBytes[i]);
473 | }
474 | }
475 |
  /** Returns whether the debug overlay/logging flag is enabled for this activity. */
  public boolean isDebug() {
    return debug;
  }
479 |
  /**
   * Signals that processing of the current frame has finished, running the post-inference
   * callback (if one is registered) so the camera pipeline can deliver the next image.
   */
  protected void readyForNextImage() {
    if (postInferenceCallback != null) {
      postInferenceCallback.run();
    }
  }
485 |
486 | protected int getScreenOrientation() {
487 | switch (getWindowManager().getDefaultDisplay().getRotation()) {
488 | case Surface.ROTATION_270:
489 | return 270;
490 | case Surface.ROTATION_180:
491 | return 180;
492 | case Surface.ROTATION_90:
493 | return 90;
494 | default:
495 | return 0;
496 | }
497 | }
498 |
499 | @Override
500 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
501 | setUseNNAPI(isChecked);
502 | if (isChecked) apiSwitchCompat.setText("NNAPI");
503 | else apiSwitchCompat.setText("TFLITE");
504 | }
505 |
506 | @Override
507 | public void onClick(View v) {
508 | if (v.getId() == R.id.plus) {
509 | String threads = threadsTextView.getText().toString().trim();
510 | int numThreads = Integer.parseInt(threads);
511 | if (numThreads >= 9) return;
512 | numThreads++;
513 | threadsTextView.setText(String.valueOf(numThreads));
514 | setNumThreads(numThreads);
515 | } else if (v.getId() == R.id.minus) {
516 | String threads = threadsTextView.getText().toString().trim();
517 | int numThreads = Integer.parseInt(threads);
518 | if (numThreads == 1) {
519 | return;
520 | }
521 | numThreads--;
522 | threadsTextView.setText(String.valueOf(numThreads));
523 | setNumThreads(numThreads);
524 | }
525 | }
526 |
  /** Displays the camera frame resolution string in the bottom-sheet stats view. */
  protected void showFrameInfo(String frameInfo) {
    frameValueTextView.setText(frameInfo);
  }
530 |
  /** Displays the model input (crop) resolution string in the bottom-sheet stats view. */
  protected void showCropInfo(String cropInfo) {
    cropValueTextView.setText(cropInfo);
  }
534 |
  /** Displays the latest inference latency string in the bottom-sheet stats view. */
  protected void showInference(String inferenceTime) {
    inferenceTimeTextView.setText(inferenceTime);
  }
538 |
  /** Runs detection on the frame most recently delivered by the camera. */
  protected abstract void processImage();

  /** Called once the preview size and sensor rotation have been negotiated with the camera. */
  protected abstract void onPreviewSizeChosen(final Size size, final int rotation);

  /** Returns the layout resource id for the concrete activity's camera fragment. */
  protected abstract int getLayoutId();

  /** Returns the preview frame size the concrete activity wants from the camera. */
  protected abstract Size getDesiredPreviewFrameSize();

  /** Reconfigures the interpreter to use {@code numThreads} inference threads. */
  protected abstract void setNumThreads(int numThreads);

  /** Enables or disables the NNAPI delegate on the interpreter. */
  protected abstract void setUseNNAPI(boolean isChecked);
550 | }
551 |
--------------------------------------------------------------------------------
/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.lite.examples.detection;
18 |
19 | import android.annotation.SuppressLint;
20 | import android.app.Activity;
21 | import android.app.AlertDialog;
22 | import android.app.Dialog;
23 | import android.app.DialogFragment;
24 | import android.app.Fragment;
25 | import android.content.Context;
26 | import android.content.DialogInterface;
27 | import android.content.res.Configuration;
28 | import android.graphics.ImageFormat;
29 | import android.graphics.Matrix;
30 | import android.graphics.RectF;
31 | import android.graphics.SurfaceTexture;
32 | import android.hardware.camera2.CameraAccessException;
33 | import android.hardware.camera2.CameraCaptureSession;
34 | import android.hardware.camera2.CameraCharacteristics;
35 | import android.hardware.camera2.CameraDevice;
36 | import android.hardware.camera2.CameraManager;
37 | import android.hardware.camera2.CaptureRequest;
38 | import android.hardware.camera2.CaptureResult;
39 | import android.hardware.camera2.TotalCaptureResult;
40 | import android.hardware.camera2.params.StreamConfigurationMap;
41 | import android.media.ImageReader;
42 | import android.media.ImageReader.OnImageAvailableListener;
43 | import android.os.Bundle;
44 | import android.os.Handler;
45 | import android.os.HandlerThread;
46 | import android.text.TextUtils;
47 | import android.util.Size;
48 | import android.util.SparseIntArray;
49 | import android.view.LayoutInflater;
50 | import android.view.Surface;
51 | import android.view.TextureView;
52 | import android.view.View;
53 | import android.view.ViewGroup;
54 | import android.widget.Toast;
55 | import java.util.ArrayList;
56 | import java.util.Arrays;
57 | import java.util.Collections;
58 | import java.util.Comparator;
59 | import java.util.List;
60 | import java.util.concurrent.Semaphore;
61 | import java.util.concurrent.TimeUnit;
62 | import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
63 | import org.tensorflow.lite.examples.detection.env.Logger;
64 |
65 | @SuppressLint("ValidFragment")
66 | public class CameraConnectionFragment extends Fragment {
67 | private static final Logger LOGGER = new Logger();
68 |
69 | /**
70 | * The camera preview size will be chosen to be the smallest frame by pixel size capable of
71 | * containing a DESIRED_SIZE x DESIRED_SIZE square.
72 | */
73 | private static final int MINIMUM_PREVIEW_SIZE = 320;
74 |
75 | /** Conversion from screen rotation to JPEG orientation. */
76 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
77 |
78 | private static final String FRAGMENT_DIALOG = "dialog";
79 |
80 | static {
81 | ORIENTATIONS.append(Surface.ROTATION_0, 90);
82 | ORIENTATIONS.append(Surface.ROTATION_90, 0);
83 | ORIENTATIONS.append(Surface.ROTATION_180, 270);
84 | ORIENTATIONS.append(Surface.ROTATION_270, 180);
85 | }
86 |
87 | /** A {@link Semaphore} to prevent the app from exiting before closing the camera. */
88 | private final Semaphore cameraOpenCloseLock = new Semaphore(1);
89 | /** A {@link OnImageAvailableListener} to receive frames as they are available. */
90 | private final OnImageAvailableListener imageListener;
91 | /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
92 | private final Size inputSize;
93 | /** The layout identifier to inflate for this Fragment. */
94 | private final int layout;
95 |
96 | private final ConnectionCallback cameraConnectionCallback;
97 | private final CameraCaptureSession.CaptureCallback captureCallback =
98 | new CameraCaptureSession.CaptureCallback() {
99 | @Override
100 | public void onCaptureProgressed(
101 | final CameraCaptureSession session,
102 | final CaptureRequest request,
103 | final CaptureResult partialResult) {}
104 |
105 | @Override
106 | public void onCaptureCompleted(
107 | final CameraCaptureSession session,
108 | final CaptureRequest request,
109 | final TotalCaptureResult result) {}
110 | };
111 | /** ID of the current {@link CameraDevice}. */
112 | private String cameraId;
113 | /** An {@link AutoFitTextureView} for camera preview. */
114 | private AutoFitTextureView textureView;
115 | /** A {@link CameraCaptureSession } for camera preview. */
116 | private CameraCaptureSession captureSession;
117 | /** A reference to the opened {@link CameraDevice}. */
118 | private CameraDevice cameraDevice;
119 | /** The rotation in degrees of the camera sensor from the display. */
120 | private Integer sensorOrientation;
121 | /** The {@link Size} of camera preview. */
122 | private Size previewSize;
123 | /** An additional thread for running tasks that shouldn't block the UI. */
124 | private HandlerThread backgroundThread;
125 | /** A {@link Handler} for running tasks in the background. */
126 | private Handler backgroundHandler;
127 | /** An {@link ImageReader} that handles preview frame capture. */
128 | private ImageReader previewReader;
129 | /** {@link CaptureRequest.Builder} for the camera preview */
130 | private CaptureRequest.Builder previewRequestBuilder;
131 | /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
132 | private CaptureRequest previewRequest;
133 | /** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */
134 | private final CameraDevice.StateCallback stateCallback =
135 | new CameraDevice.StateCallback() {
136 | @Override
137 | public void onOpened(final CameraDevice cd) {
138 | // This method is called when the camera is opened. We start camera preview here.
139 | cameraOpenCloseLock.release();
140 | cameraDevice = cd;
141 | createCameraPreviewSession();
142 | }
143 |
144 | @Override
145 | public void onDisconnected(final CameraDevice cd) {
146 | cameraOpenCloseLock.release();
147 | cd.close();
148 | cameraDevice = null;
149 | }
150 |
151 | @Override
152 | public void onError(final CameraDevice cd, final int error) {
153 | cameraOpenCloseLock.release();
154 | cd.close();
155 | cameraDevice = null;
156 | final Activity activity = getActivity();
157 | if (null != activity) {
158 | activity.finish();
159 | }
160 | }
161 | };
162 | /**
163 | * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
164 | * TextureView}.
165 | */
166 | private final TextureView.SurfaceTextureListener surfaceTextureListener =
167 | new TextureView.SurfaceTextureListener() {
168 | @Override
169 | public void onSurfaceTextureAvailable(
170 | final SurfaceTexture texture, final int width, final int height) {
171 | openCamera(width, height);
172 | }
173 |
174 | @Override
175 | public void onSurfaceTextureSizeChanged(
176 | final SurfaceTexture texture, final int width, final int height) {
177 | configureTransform(width, height);
178 | }
179 |
180 | @Override
181 | public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
182 | return true;
183 | }
184 |
185 | @Override
186 | public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
187 | };
188 |
189 | private CameraConnectionFragment(
190 | final ConnectionCallback connectionCallback,
191 | final OnImageAvailableListener imageListener,
192 | final int layout,
193 | final Size inputSize) {
194 | this.cameraConnectionCallback = connectionCallback;
195 | this.imageListener = imageListener;
196 | this.layout = layout;
197 | this.inputSize = inputSize;
198 | }
199 |
200 | /**
201 | * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
202 | * width and height are at least as large as the minimum of both, or an exact match if possible.
203 | *
204 | * @param choices The list of sizes that the camera supports for the intended output class
205 | * @param width The minimum desired width
206 | * @param height The minimum desired height
207 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough
208 | */
209 | protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
210 | final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
211 | final Size desiredSize = new Size(width, height);
212 |
213 | // Collect the supported resolutions that are at least as big as the preview Surface
214 | boolean exactSizeFound = false;
215 | final List bigEnough = new ArrayList();
216 | final List tooSmall = new ArrayList();
217 | for (final Size option : choices) {
218 | if (option.equals(desiredSize)) {
219 | // Set the size but don't return yet so that remaining sizes will still be logged.
220 | exactSizeFound = true;
221 | }
222 |
223 | if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
224 | bigEnough.add(option);
225 | } else {
226 | tooSmall.add(option);
227 | }
228 | }
229 |
230 | LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
231 | LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
232 | LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
233 |
234 | if (exactSizeFound) {
235 | LOGGER.i("Exact size match found.");
236 | return desiredSize;
237 | }
238 |
239 | // Pick the smallest of those, assuming we found any
240 | if (bigEnough.size() > 0) {
241 | final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
242 | LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
243 | return chosenSize;
244 | } else {
245 | LOGGER.e("Couldn't find any suitable preview size");
246 | return choices[0];
247 | }
248 | }
249 |
250 | public static CameraConnectionFragment newInstance(
251 | final ConnectionCallback callback,
252 | final OnImageAvailableListener imageListener,
253 | final int layout,
254 | final Size inputSize) {
255 | return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
256 | }
257 |
258 | /**
259 | * Shows a {@link Toast} on the UI thread.
260 | *
261 | * @param text The message to show
262 | */
263 | private void showToast(final String text) {
264 | final Activity activity = getActivity();
265 | if (activity != null) {
266 | activity.runOnUiThread(
267 | new Runnable() {
268 | @Override
269 | public void run() {
270 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
271 | }
272 | });
273 | }
274 | }
275 |
276 | @Override
277 | public View onCreateView(
278 | final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
279 | return inflater.inflate(layout, container, false);
280 | }
281 |
282 | @Override
283 | public void onViewCreated(final View view, final Bundle savedInstanceState) {
284 | textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
285 | }
286 |
287 | @Override
288 | public void onActivityCreated(final Bundle savedInstanceState) {
289 | super.onActivityCreated(savedInstanceState);
290 | }
291 |
292 | @Override
293 | public void onResume() {
294 | super.onResume();
295 | startBackgroundThread();
296 |
297 | // When the screen is turned off and turned back on, the SurfaceTexture is already
298 | // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
299 | // a camera and start preview from here (otherwise, we wait until the surface is ready in
300 | // the SurfaceTextureListener).
301 | if (textureView.isAvailable()) {
302 | openCamera(textureView.getWidth(), textureView.getHeight());
303 | } else {
304 | textureView.setSurfaceTextureListener(surfaceTextureListener);
305 | }
306 | }
307 |
308 | @Override
309 | public void onPause() {
310 | closeCamera();
311 | stopBackgroundThread();
312 | super.onPause();
313 | }
314 |
315 | public void setCamera(String cameraId) {
316 | this.cameraId = cameraId;
317 | }
318 |
319 | /** Sets up member variables related to camera. */
320 | private void setUpCameraOutputs() {
321 | final Activity activity = getActivity();
322 | final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
323 | try {
324 | final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
325 |
326 | final StreamConfigurationMap map =
327 | characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
328 |
329 | sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
330 |
331 | // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
332 | // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
333 | // garbage capture data.
334 | previewSize =
335 | chooseOptimalSize(
336 | map.getOutputSizes(SurfaceTexture.class),
337 | inputSize.getWidth(),
338 | inputSize.getHeight());
339 |
340 | // We fit the aspect ratio of TextureView to the size of preview we picked.
341 | final int orientation = getResources().getConfiguration().orientation;
342 | if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
343 | textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
344 | } else {
345 | textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
346 | }
347 | } catch (final CameraAccessException e) {
348 | LOGGER.e(e, "Exception!");
349 | } catch (final NullPointerException e) {
350 | // Currently an NPE is thrown when the Camera2API is used but not supported on the
351 | // device this code runs.
352 | ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error))
353 | .show(getChildFragmentManager(), FRAGMENT_DIALOG);
354 | throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
355 | }
356 |
357 | cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
358 | }
359 |
360 | /** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */
361 | private void openCamera(final int width, final int height) {
362 | setUpCameraOutputs();
363 | configureTransform(width, height);
364 | final Activity activity = getActivity();
365 | final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
366 | try {
367 | if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
368 | throw new RuntimeException("Time out waiting to lock camera opening.");
369 | }
370 | manager.openCamera(cameraId, stateCallback, backgroundHandler);
371 | } catch (final CameraAccessException e) {
372 | LOGGER.e(e, "Exception!");
373 | } catch (final InterruptedException e) {
374 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
375 | }
376 | }
377 |
378 | /** Closes the current {@link CameraDevice}. */
379 | private void closeCamera() {
380 | try {
381 | cameraOpenCloseLock.acquire();
382 | if (null != captureSession) {
383 | captureSession.close();
384 | captureSession = null;
385 | }
386 | if (null != cameraDevice) {
387 | cameraDevice.close();
388 | cameraDevice = null;
389 | }
390 | if (null != previewReader) {
391 | previewReader.close();
392 | previewReader = null;
393 | }
394 | } catch (final InterruptedException e) {
395 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
396 | } finally {
397 | cameraOpenCloseLock.release();
398 | }
399 | }
400 |
401 | /** Starts a background thread and its {@link Handler}. */
402 | private void startBackgroundThread() {
403 | backgroundThread = new HandlerThread("ImageListener");
404 | backgroundThread.start();
405 | backgroundHandler = new Handler(backgroundThread.getLooper());
406 | }
407 |
408 | /** Stops the background thread and its {@link Handler}. */
409 | private void stopBackgroundThread() {
410 | backgroundThread.quitSafely();
411 | try {
412 | backgroundThread.join();
413 | backgroundThread = null;
414 | backgroundHandler = null;
415 | } catch (final InterruptedException e) {
416 | LOGGER.e(e, "Exception!");
417 | }
418 | }
419 |
420 | /** Creates a new {@link CameraCaptureSession} for camera preview. */
421 | private void createCameraPreviewSession() {
422 | try {
423 | final SurfaceTexture texture = textureView.getSurfaceTexture();
424 | assert texture != null;
425 |
426 | // We configure the size of default buffer to be the size of camera preview we want.
427 | texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
428 |
429 | // This is the output Surface we need to start preview.
430 | final Surface surface = new Surface(texture);
431 |
432 | // We set up a CaptureRequest.Builder with the output Surface.
433 | previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
434 | previewRequestBuilder.addTarget(surface);
435 |
436 | LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
437 |
438 | // Create the reader for the preview frames.
439 | previewReader =
440 | ImageReader.newInstance(
441 | previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
442 |
443 | previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
444 | previewRequestBuilder.addTarget(previewReader.getSurface());
445 |
446 | // Here, we create a CameraCaptureSession for camera preview.
447 | cameraDevice.createCaptureSession(
448 | Arrays.asList(surface, previewReader.getSurface()),
449 | new CameraCaptureSession.StateCallback() {
450 |
451 | @Override
452 | public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
453 | // The camera is already closed
454 | if (null == cameraDevice) {
455 | return;
456 | }
457 |
458 | // When the session is ready, we start displaying the preview.
459 | captureSession = cameraCaptureSession;
460 | try {
461 | // Auto focus should be continuous for camera preview.
462 | previewRequestBuilder.set(
463 | CaptureRequest.CONTROL_AF_MODE,
464 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
465 | // Flash is automatically enabled when necessary.
466 | previewRequestBuilder.set(
467 | CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
468 |
469 | // Finally, we start displaying the camera preview.
470 | previewRequest = previewRequestBuilder.build();
471 | captureSession.setRepeatingRequest(
472 | previewRequest, captureCallback, backgroundHandler);
473 | } catch (final CameraAccessException e) {
474 | LOGGER.e(e, "Exception!");
475 | }
476 | }
477 |
478 | @Override
479 | public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
480 | showToast("Failed");
481 | }
482 | },
483 | null);
484 | } catch (final CameraAccessException e) {
485 | LOGGER.e(e, "Exception!");
486 | }
487 | }
488 |
489 | /**
490 | * Configures the necessary {@link Matrix} transformation to `mTextureView`. This method should be
491 | * called after the camera preview size is determined in setUpCameraOutputs and also the size of
492 | * `mTextureView` is fixed.
493 | *
494 | * @param viewWidth The width of `mTextureView`
495 | * @param viewHeight The height of `mTextureView`
496 | */
497 | private void configureTransform(final int viewWidth, final int viewHeight) {
498 | final Activity activity = getActivity();
499 | if (null == textureView || null == previewSize || null == activity) {
500 | return;
501 | }
502 | final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
503 | final Matrix matrix = new Matrix();
504 | final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
505 | final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
506 | final float centerX = viewRect.centerX();
507 | final float centerY = viewRect.centerY();
508 | if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
509 | bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
510 | matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
511 | final float scale =
512 | Math.max(
513 | (float) viewHeight / previewSize.getHeight(),
514 | (float) viewWidth / previewSize.getWidth());
515 | matrix.postScale(scale, scale, centerX, centerY);
516 | matrix.postRotate(90 * (rotation - 2), centerX, centerY);
517 | } else if (Surface.ROTATION_180 == rotation) {
518 | matrix.postRotate(180, centerX, centerY);
519 | }
520 | textureView.setTransform(matrix);
521 | }
522 |
523 | /**
524 | * Callback for Activities to use to initialize their data once the selected preview size is
525 | * known.
526 | */
527 | public interface ConnectionCallback {
528 | void onPreviewSizeChosen(Size size, int cameraRotation);
529 | }
530 |
531 | /** Compares two {@code Size}s based on their areas. */
532 | static class CompareSizesByArea implements Comparator {
533 | @Override
534 | public int compare(final Size lhs, final Size rhs) {
535 | // We cast here to ensure the multiplications won't overflow
536 | return Long.signum(
537 | (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
538 | }
539 | }
540 |
541 | /** Shows an error message dialog. */
542 | public static class ErrorDialog extends DialogFragment {
543 | private static final String ARG_MESSAGE = "message";
544 |
545 | public static ErrorDialog newInstance(final String message) {
546 | final ErrorDialog dialog = new ErrorDialog();
547 | final Bundle args = new Bundle();
548 | args.putString(ARG_MESSAGE, message);
549 | dialog.setArguments(args);
550 | return dialog;
551 | }
552 |
553 | @Override
554 | public Dialog onCreateDialog(final Bundle savedInstanceState) {
555 | final Activity activity = getActivity();
556 | return new AlertDialog.Builder(activity)
557 | .setMessage(getArguments().getString(ARG_MESSAGE))
558 | .setPositiveButton(
559 | android.R.string.ok,
560 | new DialogInterface.OnClickListener() {
561 | @Override
562 | public void onClick(final DialogInterface dialogInterface, final int i) {
563 | activity.finish();
564 | }
565 | })
566 | .create();
567 | }
568 | }
569 | }
570 |
--------------------------------------------------------------------------------