├── .idea
└── vcs.xml
├── AndroidManifest.xml
├── BUILD
├── README.md
├── build.gradle
├── download-models.gradle
├── gradle.properties
├── gradlew
├── gradlew.bat
├── libs
└── libandroid_tensorflow_inference_java.jar
├── local.properties
├── res
├── drawable-hdpi
│ ├── ic_action_info.png
│ ├── ic_launcher.png
│ └── tile.9.png
├── drawable-mdpi
│ ├── ic_action_info.png
│ └── ic_launcher.png
├── drawable-xhdpi
│ ├── ic_action_info.png
│ └── ic_launcher.png
├── drawable-xxhdpi
│ ├── ic_action_info.png
│ └── ic_launcher.png
├── layout
│ ├── activity_camera.xml
│ ├── activity_photo_stylize.xml
│ ├── camera_connection_fragment.xml
│ ├── camera_connection_fragment_stylize.xml
│ └── camera_connection_fragment_tracking.xml
├── values-sw600dp
│ ├── template-dimens.xml
│ └── template-styles.xml
├── values-v11
│ ├── styles.xml
│ └── template-styles.xml
├── values-v14
│ └── styles.xml
├── values-v21
│ ├── base-colors.xml
│ └── base-template-styles.xml
├── values-w820dp
│ └── dimens.xml
└── values
│ ├── attrs.xml
│ ├── base-strings.xml
│ ├── colors.xml
│ ├── dimens.xml
│ ├── strings.xml
│ ├── styles.xml
│ ├── template-dimens.xml
│ └── template-styles.xml
├── sample_images
├── classify1.jpg
├── detect1.jpg
├── photo1.png
└── stylize1.jpg
└── src
└── org
└── tensorflow
└── demo
├── AutoFitTextureView.java
├── CameraActivity.java
├── CameraConnectionFragment.java
├── Classifier.java
├── ClassifierActivity.java
├── DetectorActivity.java
├── OverlayView.java
├── PhotoStylizeActivity.java
├── RecognitionScoreView.java
├── ResultsView.java
├── StylizeActivity.java
├── TensorFlowImageClassifier.java
├── TensorFlowMultiBoxDetector.java
├── TensorFlowYoloDetector.java
├── env
├── BorderedText.java
├── ImageUtils.java
├── Logger.java
├── Size.java
└── SplitTimer.java
├── tracking
├── MultiBoxTracker.java
└── ObjectTracker.java
└── util
├── BitmapUtil.java
├── FileUtil.java
└── MediaScanner.java
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
16 |
18 |
19 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
39 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
--------------------------------------------------------------------------------
/BUILD:
--------------------------------------------------------------------------------
1 | # Description:
2 | # TensorFlow camera demo app for Android.
3 |
4 | package(default_visibility = ["//visibility:public"])
5 |
6 | licenses(["notice"]) # Apache 2.0
7 |
8 | load(
9 | "//tensorflow:tensorflow.bzl",
10 | "tf_copts",
11 | "tf_opts_nortti_if_android",
12 | )
13 |
14 | exports_files(["LICENSE"])
15 |
16 | LINKER_SCRIPT = "//tensorflow/contrib/android:jni/version_script.lds"
17 |
18 | # libtensorflow_demo.so contains the native code for image colorspace conversion
19 | # and object tracking used by the demo. It does not require TF as a dependency
20 | # to build if STANDALONE_DEMO_LIB is defined.
21 | # TF support for the demo is provided separately by libtensorflow_inference.so.
22 | cc_binary(
23 | name = "libtensorflow_demo.so",
24 | srcs = glob([
25 | "jni/**/*.cc",
26 | "jni/**/*.h",
27 | ]),
28 | copts = tf_copts(),
29 | defines = ["STANDALONE_DEMO_LIB"],
30 | linkopts = [
31 | "-landroid",
32 | "-ljnigraphics",
33 | "-llog",
34 | "-lm",
35 | "-z defs",
36 | "-s",
37 | "-Wl,--version-script", # This line must be directly followed by LINKER_SCRIPT.
38 | LINKER_SCRIPT,
39 | ],
40 | linkshared = 1,
41 | linkstatic = 1,
42 | tags = [
43 | "manual",
44 | "notap",
45 | ],
46 | deps = [
47 | LINKER_SCRIPT,
48 | ],
49 | )
50 |
51 | cc_library(
52 | name = "tensorflow_native_libs",
53 | srcs = [
54 | ":libtensorflow_demo.so",
55 | "//tensorflow/contrib/android:libtensorflow_inference.so",
56 | ],
57 | tags = [
58 | "manual",
59 | "notap",
60 | ],
61 | )
62 |
63 | android_binary(
64 | name = "tensorflow_demo",
65 | srcs = glob([
66 | "src/**/*.java",
67 | ]),
68 | # Package assets from assets dir as well as all model targets. Remove undesired models
69 | # (and corresponding Activities in source) to reduce APK size.
70 | assets = [
71 | "//tensorflow/examples/android/assets:asset_files",
72 | ":external_assets",
73 | ],
74 | assets_dir = "",
75 | custom_package = "org.tensorflow.demo",
76 | inline_constants = 1,
77 | manifest = "AndroidManifest.xml",
78 | manifest_merger = "legacy",
79 | resource_files = glob(["res/**"]),
80 | tags = [
81 | "manual",
82 | "notap",
83 | ],
84 | deps = [
85 | ":tensorflow_native_libs",
86 | "//tensorflow/contrib/android:android_tensorflow_inference_java",
87 | ],
88 | )
89 |
90 | # LINT.IfChange
91 | filegroup(
92 | name = "external_assets",
93 | srcs = [
94 | "@inception5h//:model_files",
95 | "@mobile_multibox//:model_files",
96 | "@stylize//:model_files",
97 | ],
98 | )
99 | # LINT.ThenChange(//tensorflow/examples/android/download-models.gradle)
100 |
101 | filegroup(
102 | name = "all_files",
103 | srcs = glob(
104 | ["**/*"],
105 | exclude = [
106 | "**/METADATA",
107 | "**/OWNERS",
108 | "bin/**",
109 | "gen/**",
110 | "gradleBuild/**",
111 | "libs/**",
112 | ],
113 | ),
114 | visibility = ["//tensorflow:__subpackages__"],
115 | )
116 |
117 | filegroup(
118 | name = "java_files",
119 | srcs = glob(["src/**/*.java"]),
120 | )
121 |
122 | filegroup(
123 | name = "jni_files",
124 | srcs = glob([
125 | "jni/**/*.cc",
126 | "jni/**/*.h",
127 | ]),
128 | )
129 |
130 | filegroup(
131 | name = "resource_files",
132 | srcs = glob(["res/**"]),
133 | )
134 |
135 | exports_files(["AndroidManifest.xml"])
136 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | TensorFlow官方Android端示例中,风格迁移TF Stylize是对摄像头获取的画面动态渲染,
3 | 本人渣机运行起来非常卡,基本看不出效果,因此改为对图片进行风格迁移。
5 |
6 | 截图
7 | -----------------------------------
8 | 
9 |
10 |
11 | > Written with [StackEdit](https://stackedit.io/).
12 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // This file provides basic support for building the TensorFlow demo
2 | // in Android Studio with Gradle.
3 | //
4 | // Note that Bazel is still used by default to compile the native libs,
5 | // and should be installed at the location noted below. This build file
6 | // automates the process of calling out to it and copying the compiled
7 | // libraries back into the appropriate directory.
8 | //
9 | // Alternatively, experimental support for Makefile builds is provided by
10 | // setting buildWithMake below to true. This will allow building the demo
11 | // on Windows machines, but note that full equivalence with the Bazel
12 | // build is not yet guaranteed. See comments below for caveats and tips
13 | // for speeding up the build, such as enabling ccache.
14 |
15 | // Set to true to build with make.
16 | // NOTE: Running a make build will cause subsequent Bazel builds to *fail*
17 | // unless the contrib/makefile/downloads/ and gen/ dirs are deleted afterwards.
18 | def buildWithMake = false
19 |
20 | // Controls output directory in APK and CPU type for Bazel builds.
21 | // NOTE: Does not affect the Makefile build target API (yet), which currently
22 | // assumes armeabi-v7a. If building with make, changing this will require
23 | // editing the Makefile as well.
24 | def cpuType = 'armeabi-v7a'
25 |
26 | // Output directory in the local directory for packaging into the APK.
27 | def nativeOutDir = 'libs/' + cpuType
28 |
29 | // Default to building with Bazel and override with make if requested.
30 | def nativeBuildRule = 'buildNativeBazel'
31 | def demoLibPath = '../../../bazel-bin/tensorflow/examples/android/libtensorflow_demo.so'
32 | def inferenceLibPath = '../../../bazel-bin/tensorflow/contrib/android/libtensorflow_inference.so'
33 | if (buildWithMake) {
34 | nativeBuildRule = 'buildNativeMake'
35 | demoLibPath = '../../../tensorflow/contrib/makefile/gen/lib/libtensorflow_demo.so'
36 | inferenceLibPath = '../../../tensorflow/contrib/makefile/gen/lib/libtensorflow_inference.so'
37 | }
38 |
39 | // Defines the NDK location for Makefile builds. Does *not* affect Bazel builds.
40 | // Override with your absolute NDK location if this fails to get the location
41 | // automatically.
42 | def makeNdkRoot = System.getenv('NDK_ROOT')
43 |
44 | // If building with Bazel, this is the location of the bazel binary.
45 | // NOTE: Bazel does not yet support building for Android on Windows,
46 | // so in this case the Makefile build must be used as described above.
47 | def bazelLocation = '/usr/local/bin/bazel'
48 |
49 | project.buildDir = 'gradleBuild'
50 | getProject().setBuildDir('gradleBuild')
51 |
52 | // import DownloadModels task
53 | project.ext.ASSET_DIR = projectDir.toString() + '/assets'
54 | project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'
55 |
56 | buildscript {
57 | repositories {
58 | jcenter()
59 | }
60 |
61 | dependencies {
62 | classpath 'com.android.tools.build:gradle:2.3.0'
63 | }
64 | }
65 |
66 | apply plugin: 'com.android.application'
67 |
68 | android {
69 | compileSdkVersion 23
70 | buildToolsVersion "25.0.1"
71 |
72 | lintOptions {
73 | abortOnError false
74 | }
75 |
76 | sourceSets {
77 | main {
78 | // TensorFlow Java API sources.
79 | java {
80 | srcDir '../../java/src/main/java'
81 | exclude '**/examples/**'
82 | }
83 |
84 | // Android TensorFlow wrappers, etc.
85 | java {
86 | srcDir '../../contrib/android/java'
87 | }
88 |
89 | // Android demo app sources.
90 | java {
91 | srcDir 'src'
92 | }
93 |
94 | manifest.srcFile 'AndroidManifest.xml'
95 | resources.srcDirs = ['src']
96 | aidl.srcDirs = ['src']
97 | renderscript.srcDirs = ['src']
98 | res.srcDirs = ['res']
99 | assets.srcDirs = [project.ext.ASSET_DIR]
100 | jniLibs.srcDirs = ['jniLibs']
101 | }
102 |
103 | debug.setRoot('build-types/debug')
104 | release.setRoot('build-types/release')
105 | }
106 | }
107 |
108 | //task buildNativeBazel(type: Exec) {
109 | // workingDir '../../..'
110 | // commandLine bazelLocation, 'build', '-c', 'opt', \
111 | // 'tensorflow/examples/android:tensorflow_native_libs', \
112 | // '--crosstool_top=//external:android/crosstool', \
113 | // '--cpu=' + cpuType, \
114 | // '--host_crosstool_top=@bazel_tools//tools/cpp:toolchain'
115 | //}
116 | //
117 | //task buildNativeMake(type: Exec) {
118 | // environment "NDK_ROOT", makeNdkRoot
119 | // // Tip: install ccache and uncomment the following to speed up
120 | // // builds significantly.
121 | // // environment "CC_PREFIX", 'ccache'
122 | // workingDir '../../..'
123 | // commandLine 'tensorflow/contrib/makefile/build_all_android.sh', \
124 | // '-s', \
125 | // 'tensorflow/contrib/makefile/sub_makefiles/android/Makefile.in', \
126 | // '-t', \
127 | // 'libtensorflow_inference.so libtensorflow_demo.so' \
128 | // //, '-T' // Uncomment to skip protobuf and speed up subsequent builds.
129 | //}
130 | //
131 | //
132 | //task copyNativeLibs(type: Copy) {
133 | // from demoLibPath
134 | // from inferenceLibPath
135 | // into nativeOutDir
136 | // duplicatesStrategy = 'include'
137 | // dependsOn nativeBuildRule
138 | // fileMode 0644
139 | //}
140 | //
141 | //assemble.dependsOn copyNativeLibs
142 | //afterEvaluate {
143 | // assembleDebug.dependsOn copyNativeLibs
144 | // assembleRelease.dependsOn copyNativeLibs
145 | //}
146 |
147 | // Download default models; if you wish to use your own models then
148 | // place them in the "assets" directory and comment out this line.
149 | //apply from: "download-models.gradle"
150 |
151 | dependencies {
152 | compile fileTree(include: ['*.jar'], dir: 'libs')
153 | compile 'com.android.support:appcompat-v7:23.4.0'
154 | }
155 |
--------------------------------------------------------------------------------
/download-models.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * download-models.gradle
3 | * Downloads model files from ${MODEL_URL} into application's asset folder
4 | * Input:
5 | * project.ext.TMP_DIR: absolute path to hold downloaded zip files
6 | * project.ext.ASSET_DIR: absolute path to save unzipped model files
7 | * Output:
8 | * 3 model files will be downloaded into given folder of ext.ASSET_DIR
9 | */
10 | // hard coded model files
11 | // LINT.IfChange
12 | def models = ['inception5h.zip',
13 | 'mobile_multibox_v1a.zip',
14 | 'stylize_v1.zip']
15 | // LINT.ThenChange(//tensorflow/examples/android/BUILD)
16 |
17 | // Root URL for model archives
18 | def MODEL_URL = 'https://storage.googleapis.com/download.tensorflow.org/models'
19 |
20 | buildscript {
21 | repositories {
22 | jcenter()
23 | }
24 | dependencies {
25 | classpath 'de.undercouch:gradle-download-task:3.2.0'
26 | }
27 | }
28 |
29 | import de.undercouch.gradle.tasks.download.Download
30 | task downloadFile(type: Download){
31 | for (f in models) {
32 | src "${MODEL_URL}/" + f
33 | }
34 | dest new File(project.ext.TMP_DIR)
35 | overwrite true
36 | }
37 |
38 | task extractModels(type: Copy) {
39 | for (f in models) {
40 | from zipTree(project.ext.TMP_DIR + '/' + f)
41 | }
42 |
43 | into file(project.ext.ASSET_DIR)
44 | fileMode 0644
45 | exclude '**/LICENSE'
46 |
47 | dependsOn downloadFile
48 | }
49 |
50 | afterEvaluate {
51 | // If any model zip is missing from TMP_DIR, download & unzip them.
52 | // NOTE(review): this checks the downloaded zip files, not the extracted
53 | // assets in ASSET_DIR — if assets are deleted but the zips remain in
54 | // TMP_DIR, extraction will be skipped. Confirm this is intended.
52 | def needDownload = false
53 | for (f in models) {
54 | if (!(new File(project.ext.TMP_DIR + '/' + f)).exists()) {
55 | needDownload = true
56 | }
57 | }
58 |
59 | if (needDownload) {
60 | assembleDebug.dependsOn extractModels
61 | assembleRelease.dependsOn extractModels
62 | }
63 | }
64 |
65 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | ## Project-wide Gradle settings.
2 | #
3 | # For more details on how to configure your build environment visit
4 | # http://www.gradle.org/docs/current/userguide/build_environment.html
5 | #
6 | # Specifies the JVM arguments used for the daemon process.
7 | # The setting is particularly useful for tweaking memory settings.
8 | # Default value: -Xmx1024m -XX:MaxPermSize=256m
9 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
10 | #
11 | # When configured, Gradle will run in incubating parallel mode.
12 | # This option should only be used with decoupled projects. More details, visit
13 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
14 | # org.gradle.parallel=true
15 | #Sun Mar 26 13:48:36 GMT+08:00 2017
16 | # NOTE(review): machine-specific proxy settings committed to VCS. These will
17 | # break dependency resolution and model downloads for anyone not running a
18 | # local HTTP proxy on 127.0.0.1:1080 — remove or adjust for your environment.
19 | systemProp.http.proxyHost=127.0.0.1
20 | systemProp.http.proxyPort=1080
18 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/libs/libandroid_tensorflow_inference_java.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/libs/libandroid_tensorflow_inference_java.jar
--------------------------------------------------------------------------------
/local.properties:
--------------------------------------------------------------------------------
1 | ## This file is automatically generated by Android Studio.
2 | # Do not modify this file -- YOUR CHANGES WILL BE ERASED!
3 | #
4 | # This file must *NOT* be checked into Version Control Systems,
5 | # as it contains information specific to your local configuration.
6 | #
7 | # Location of the SDK. This is only used by Gradle.
8 | # For customization when using a Version Control System, please read the
9 | # header note.
10 | #Sun Mar 26 13:48:34 GMT+08:00 2017
11 | ndk.dir=D\:\\Simon\\Software\\adt-bundle-windows-x86_64-20140321\\sdk\\ndk-bundle
12 | sdk.dir=D\:\\Simon\\Software\\adt-bundle-windows-x86_64-20140321\\sdk
13 |
--------------------------------------------------------------------------------
/res/drawable-hdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-hdpi/ic_action_info.png
--------------------------------------------------------------------------------
/res/drawable-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/drawable-hdpi/tile.9.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-hdpi/tile.9.png
--------------------------------------------------------------------------------
/res/drawable-mdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-mdpi/ic_action_info.png
--------------------------------------------------------------------------------
/res/drawable-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/drawable-xhdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-xhdpi/ic_action_info.png
--------------------------------------------------------------------------------
/res/drawable-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/drawable-xxhdpi/ic_action_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-xxhdpi/ic_action_info.png
--------------------------------------------------------------------------------
/res/drawable-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/res/drawable-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/res/layout/activity_camera.xml:
--------------------------------------------------------------------------------
1 |
16 |
23 |
--------------------------------------------------------------------------------
/res/layout/activity_photo_stylize.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
18 |
19 |
27 |
28 |
33 |
34 |
40 |
41 |
47 |
48 |
49 |
56 |
57 |
64 |
65 |
--------------------------------------------------------------------------------
/res/layout/camera_connection_fragment.xml:
--------------------------------------------------------------------------------
1 |
16 |
19 |
20 |
25 |
26 |
31 |
32 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/res/layout/camera_connection_fragment_stylize.xml:
--------------------------------------------------------------------------------
1 |
16 |
20 |
25 |
26 |
31 |
32 |
39 |
40 |
45 |
46 |
51 |
52 |
--------------------------------------------------------------------------------
/res/layout/camera_connection_fragment_tracking.xml:
--------------------------------------------------------------------------------
1 |
16 |
19 |
20 |
24 |
25 |
29 |
30 |
34 |
35 |
--------------------------------------------------------------------------------
/res/values-sw600dp/template-dimens.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 | @dimen/margin_huge
22 | @dimen/margin_medium
23 |
24 |
25 |
--------------------------------------------------------------------------------
/res/values-sw600dp/template-styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/res/values-v11/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
8 |
11 |
12 |
19 |
20 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/res/values-v11/template-styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/res/values-v14/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/res/values-v21/base-colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/res/values-v21/base-template-styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 |
20 |
21 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 64dp
6 |
7 |
--------------------------------------------------------------------------------
/res/values/attrs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/res/values/base-strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
19 | TensorFlow Demo
20 | TF Classify
21 | TF Detect
22 | TF Stylize
23 | Photo Stylize
24 |
25 |
--------------------------------------------------------------------------------
/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 | #cc4285f4
19 |
20 |
--------------------------------------------------------------------------------
/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 16dp
4 | 16dp
5 |
6 |
--------------------------------------------------------------------------------
/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 | Info
18 | This sample needs camera permission.
19 | This device doesn\'t support Camera2 API.
20 |
21 |
--------------------------------------------------------------------------------
/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/res/values/template-dimens.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 | 4dp
22 | 8dp
23 | 16dp
24 | 32dp
25 | 64dp
26 |
27 |
28 |
29 | @dimen/margin_medium
30 | @dimen/margin_medium
31 |
32 |
33 |
--------------------------------------------------------------------------------
/res/values/template-styles.xml:
--------------------------------------------------------------------------------
1 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
34 |
35 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/sample_images/classify1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/sample_images/classify1.jpg
--------------------------------------------------------------------------------
/sample_images/detect1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/sample_images/detect1.jpg
--------------------------------------------------------------------------------
/sample_images/photo1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/sample_images/photo1.png
--------------------------------------------------------------------------------
/sample_images/stylize1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SimonCherryGZ/TensorFlow_Android/929cf902450ebbff3fc61bc161ceefcab8930342/sample_images/stylize1.jpg
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/AutoFitTextureView.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.content.Context;
20 | import android.util.AttributeSet;
21 | import android.view.TextureView;
22 |
23 | /**
24 | * A {@link TextureView} that can be adjusted to a specified aspect ratio.
25 | */
26 | public class AutoFitTextureView extends TextureView {
27 | private int ratioWidth = 0;
28 | private int ratioHeight = 0;
29 |
30 | public AutoFitTextureView(final Context context) {
31 | this(context, null);
32 | }
33 |
34 | public AutoFitTextureView(final Context context, final AttributeSet attrs) {
35 | this(context, attrs, 0);
36 | }
37 |
38 | public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
39 | super(context, attrs, defStyle);
40 | }
41 |
42 | /**
43 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
44 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
45 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
46 | *
47 | * @param width Relative horizontal size
48 | * @param height Relative vertical size
49 | */
50 | public void setAspectRatio(final int width, final int height) {
51 | if (width < 0 || height < 0) {
52 | throw new IllegalArgumentException("Size cannot be negative.");
53 | }
54 | ratioWidth = width;
55 | ratioHeight = height;
56 | requestLayout();
57 | }
58 |
59 | @Override
60 | protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
61 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
62 | final int width = MeasureSpec.getSize(widthMeasureSpec);
63 | final int height = MeasureSpec.getSize(heightMeasureSpec);
64 | if (0 == ratioWidth || 0 == ratioHeight) {
65 | setMeasuredDimension(width, height);
66 | } else {
67 | if (width < height * ratioWidth / ratioHeight) {
68 | setMeasuredDimension(width, width * ratioHeight / ratioWidth);
69 | } else {
70 | setMeasuredDimension(height * ratioWidth / ratioHeight, height);
71 | }
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/CameraActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.Manifest;
20 | import android.app.Activity;
21 | import android.app.Fragment;
22 | import android.content.pm.PackageManager;
23 | import android.media.Image.Plane;
24 | import android.media.ImageReader.OnImageAvailableListener;
25 | import android.os.Build;
26 | import android.os.Bundle;
27 | import android.os.Handler;
28 | import android.os.HandlerThread;
29 | import android.util.Size;
30 | import android.view.KeyEvent;
31 | import android.view.WindowManager;
32 | import android.widget.Toast;
33 | import java.nio.ByteBuffer;
34 | import org.tensorflow.demo.env.Logger;
35 | import org.tensorflow.demo.R;
36 |
37 | public abstract class CameraActivity extends Activity implements OnImageAvailableListener {
38 | private static final Logger LOGGER = new Logger();
39 |
40 | private static final int PERMISSIONS_REQUEST = 1;
41 |
42 | private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
43 | private static final String PERMISSION_STORAGE = Manifest.permission.WRITE_EXTERNAL_STORAGE;
44 |
45 | private boolean debug = false;
46 |
47 | private Handler handler;
48 | private HandlerThread handlerThread;
49 |
50 | @Override
51 | protected void onCreate(final Bundle savedInstanceState) {
52 | LOGGER.d("onCreate " + this);
53 | super.onCreate(null);
54 | getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
55 |
56 | setContentView(R.layout.activity_camera);
57 |
58 | if (hasPermission()) {
59 | setFragment();
60 | } else {
61 | requestPermission();
62 | }
63 | }
64 |
65 | @Override
66 | public synchronized void onStart() {
67 | LOGGER.d("onStart " + this);
68 | super.onStart();
69 | }
70 |
71 | @Override
72 | public synchronized void onResume() {
73 | LOGGER.d("onResume " + this);
74 | super.onResume();
75 |
76 | handlerThread = new HandlerThread("inference");
77 | handlerThread.start();
78 | handler = new Handler(handlerThread.getLooper());
79 | }
80 |
81 | @Override
82 | public synchronized void onPause() {
83 | LOGGER.d("onPause " + this);
84 |
85 | if (!isFinishing()) {
86 | LOGGER.d("Requesting finish");
87 | finish();
88 | }
89 |
90 | handlerThread.quitSafely();
91 | try {
92 | handlerThread.join();
93 | handlerThread = null;
94 | handler = null;
95 | } catch (final InterruptedException e) {
96 | LOGGER.e(e, "Exception!");
97 | }
98 |
99 | super.onPause();
100 | }
101 |
102 | @Override
103 | public synchronized void onStop() {
104 | LOGGER.d("onStop " + this);
105 | super.onStop();
106 | }
107 |
108 | @Override
109 | public synchronized void onDestroy() {
110 | LOGGER.d("onDestroy " + this);
111 | super.onDestroy();
112 | }
113 |
114 | protected synchronized void runInBackground(final Runnable r) {
115 | if (handler != null) {
116 | handler.post(r);
117 | }
118 | }
119 |
120 | @Override
121 | public void onRequestPermissionsResult(
122 | final int requestCode, final String[] permissions, final int[] grantResults) {
123 | switch (requestCode) {
124 | case PERMISSIONS_REQUEST: {
125 | if (grantResults.length > 0
126 | && grantResults[0] == PackageManager.PERMISSION_GRANTED
127 | && grantResults[1] == PackageManager.PERMISSION_GRANTED) {
128 | setFragment();
129 | } else {
130 | requestPermission();
131 | }
132 | }
133 | }
134 | }
135 |
136 | private boolean hasPermission() {
137 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
138 | return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED && checkSelfPermission(PERMISSION_STORAGE) == PackageManager.PERMISSION_GRANTED;
139 | } else {
140 | return true;
141 | }
142 | }
143 |
144 | private void requestPermission() {
145 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
146 | if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA) || shouldShowRequestPermissionRationale(PERMISSION_STORAGE)) {
147 | Toast.makeText(CameraActivity.this, "Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
148 | }
149 | requestPermissions(new String[] {PERMISSION_CAMERA, PERMISSION_STORAGE}, PERMISSIONS_REQUEST);
150 | }
151 | }
152 |
153 | protected void setFragment() {
154 | final Fragment fragment = CameraConnectionFragment.newInstance(
155 | new CameraConnectionFragment.ConnectionCallback(){
156 | @Override
157 | public void onPreviewSizeChosen(final Size size, final int rotation) {
158 | CameraActivity.this.onPreviewSizeChosen(size, rotation);
159 | }
160 | },
161 | this, getLayoutId(), getDesiredPreviewFrameSize());
162 |
163 | getFragmentManager()
164 | .beginTransaction()
165 | .replace(R.id.container, fragment)
166 | .commit();
167 | }
168 |
169 | protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
170 | // Because of the variable row stride it's not possible to know in
171 | // advance the actual necessary dimensions of the yuv planes.
172 | for (int i = 0; i < planes.length; ++i) {
173 | final ByteBuffer buffer = planes[i].getBuffer();
174 | if (yuvBytes[i] == null) {
175 | LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
176 | yuvBytes[i] = new byte[buffer.capacity()];
177 | }
178 | buffer.get(yuvBytes[i]);
179 | }
180 | }
181 |
182 | public boolean isDebug() {
183 | return debug;
184 | }
185 |
186 | public void requestRender() {
187 | final OverlayView overlay = (OverlayView) findViewById(R.id.debug_overlay);
188 | if (overlay != null) {
189 | overlay.postInvalidate();
190 | }
191 | }
192 |
193 | public void addCallback(final OverlayView.DrawCallback callback) {
194 | final OverlayView overlay = (OverlayView) findViewById(R.id.debug_overlay);
195 | if (overlay != null) {
196 | overlay.addCallback(callback);
197 | }
198 | }
199 |
200 | public void onSetDebug(final boolean debug) {}
201 |
202 | @Override
203 | public boolean onKeyDown(final int keyCode, final KeyEvent event) {
204 | if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN || keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
205 | debug = !debug;
206 | requestRender();
207 | onSetDebug(debug);
208 | return true;
209 | }
210 | return super.onKeyDown(keyCode, event);
211 | }
212 |
213 | protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
214 | protected abstract int getLayoutId();
215 | protected abstract int getDesiredPreviewFrameSize();
216 | }
217 |
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/Classifier.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import android.graphics.Bitmap;
19 | import android.graphics.RectF;
20 | import java.util.List;
21 |
22 | /**
23 | * Generic interface for interacting with different recognition engines.
24 | */
25 | public interface Classifier {
26 | /**
27 | * An immutable result returned by a Classifier describing what was recognized.
28 | */
29 | public class Recognition {
30 | /**
31 | * A unique identifier for what has been recognized. Specific to the class, not the instance of
32 | * the object.
33 | */
34 | private final String id;
35 |
36 | /**
37 | * Display name for the recognition.
38 | */
39 | private final String title;
40 |
41 | /**
42 | * A sortable score for how good the recognition is relative to others. Higher should be better.
43 | */
44 | private final Float confidence;
45 |
46 | /** Optional location within the source image for the location of the recognized object. */
47 | private RectF location;
48 |
49 | public Recognition(
50 | final String id, final String title, final Float confidence, final RectF location) {
51 | this.id = id;
52 | this.title = title;
53 | this.confidence = confidence;
54 | this.location = location;
55 | }
56 |
57 | public String getId() {
58 | return id;
59 | }
60 |
61 | public String getTitle() {
62 | return title;
63 | }
64 |
65 | public Float getConfidence() {
66 | return confidence;
67 | }
68 |
69 | public RectF getLocation() {
70 | return new RectF(location);
71 | }
72 |
73 | public void setLocation(RectF location) {
74 | this.location = location;
75 | }
76 |
77 | @Override
78 | public String toString() {
79 | String resultString = "";
80 | if (id != null) {
81 | resultString += "[" + id + "] ";
82 | }
83 |
84 | if (title != null) {
85 | resultString += title + " ";
86 | }
87 |
88 | if (confidence != null) {
89 | resultString += String.format("(%.1f%%) ", confidence * 100.0f);
90 | }
91 |
92 | if (location != null) {
93 | resultString += location + " ";
94 | }
95 |
96 | return resultString.trim();
97 | }
98 | }
99 |
100 | List recognizeImage(Bitmap bitmap);
101 |
102 | void enableStatLogging(final boolean debug);
103 |
104 | String getStatString();
105 |
106 | void close();
107 | }
108 |
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/ClassifierActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.graphics.Bitmap;
20 | import android.graphics.Bitmap.Config;
21 | import android.graphics.Canvas;
22 | import android.graphics.Matrix;
23 | import android.graphics.Paint;
24 | import android.graphics.Typeface;
25 | import android.media.Image;
26 | import android.media.Image.Plane;
27 | import android.media.ImageReader;
28 | import android.media.ImageReader.OnImageAvailableListener;
29 | import android.os.SystemClock;
30 | import android.os.Trace;
31 | import android.util.Size;
32 | import android.util.TypedValue;
33 | import android.view.Display;
34 | import java.util.List;
35 | import java.util.Vector;
36 | import org.tensorflow.demo.OverlayView.DrawCallback;
37 | import org.tensorflow.demo.env.BorderedText;
38 | import org.tensorflow.demo.env.ImageUtils;
39 | import org.tensorflow.demo.env.Logger;
40 | import org.tensorflow.demo.R;
41 |
42 | public class ClassifierActivity extends CameraActivity implements OnImageAvailableListener {
43 | private static final Logger LOGGER = new Logger();
44 |
45 | // These are the settings for the original v1 Inception model. If you want to
46 | // use a model that's been produced from the TensorFlow for Poets codelab,
47 | // you'll need to set IMAGE_SIZE = 299, IMAGE_MEAN = 128, IMAGE_STD = 128,
48 | // INPUT_NAME = "Mul", and OUTPUT_NAME = "final_result".
49 | // You'll also need to update the MODEL_FILE and LABEL_FILE paths to point to
50 | // the ones you produced.
51 | //
52 | // To use v3 Inception model, strip the DecodeJpeg Op from your retrained
53 | // model first:
54 | //
55 | // python strip_unused.py \
56 | // --input_graph= \
57 | // --output_graph= \
58 | // --input_node_names="Mul" \
59 | // --output_node_names="final_result" \
60 | // --input_binary=true
61 | private static final int INPUT_SIZE = 224;
62 | private static final int IMAGE_MEAN = 117;
63 | private static final float IMAGE_STD = 1;
64 | private static final String INPUT_NAME = "input";
65 | private static final String OUTPUT_NAME = "output";
66 |
67 | private static final String MODEL_FILE = "file:///android_asset/tensorflow_inception_graph.pb";
68 | private static final String LABEL_FILE =
69 | "file:///android_asset/imagenet_comp_graph_label_strings.txt";
70 |
71 | private static final boolean SAVE_PREVIEW_BITMAP = false;
72 |
73 | private static final boolean MAINTAIN_ASPECT = true;
74 |
75 | private Classifier classifier;
76 |
77 | private Integer sensorOrientation;
78 |
79 | private int previewWidth = 0;
80 | private int previewHeight = 0;
81 | private byte[][] yuvBytes;
82 | private int[] rgbBytes = null;
83 | private Bitmap rgbFrameBitmap = null;
84 | private Bitmap croppedBitmap = null;
85 |
86 | private Bitmap cropCopyBitmap;
87 |
88 | private boolean computing = false;
89 |
90 | private Matrix frameToCropTransform;
91 | private Matrix cropToFrameTransform;
92 |
93 | private ResultsView resultsView;
94 |
95 | private BorderedText borderedText;
96 |
97 | private long lastProcessingTimeMs;
98 |
99 | @Override
100 | protected int getLayoutId() {
101 | return R.layout.camera_connection_fragment;
102 | }
103 |
104 | @Override
105 | protected int getDesiredPreviewFrameSize() {
106 | return INPUT_SIZE;
107 | }
108 |
109 | private static final float TEXT_SIZE_DIP = 10;
110 |
111 | @Override
112 | public void onPreviewSizeChosen(final Size size, final int rotation) {
113 | final float textSizePx =
114 | TypedValue.applyDimension(
115 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
116 | borderedText = new BorderedText(textSizePx);
117 | borderedText.setTypeface(Typeface.MONOSPACE);
118 |
119 | classifier =
120 | TensorFlowImageClassifier.create(
121 | getAssets(),
122 | MODEL_FILE,
123 | LABEL_FILE,
124 | INPUT_SIZE,
125 | IMAGE_MEAN,
126 | IMAGE_STD,
127 | INPUT_NAME,
128 | OUTPUT_NAME);
129 |
130 | resultsView = (ResultsView) findViewById(R.id.results);
131 | previewWidth = size.getWidth();
132 | previewHeight = size.getHeight();
133 |
134 | final Display display = getWindowManager().getDefaultDisplay();
135 | final int screenOrientation = display.getRotation();
136 |
137 | LOGGER.i("Sensor orientation: %d, Screen orientation: %d", rotation, screenOrientation);
138 |
139 | sensorOrientation = rotation + screenOrientation;
140 |
141 | LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
142 | rgbBytes = new int[previewWidth * previewHeight];
143 | rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
144 | croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Config.ARGB_8888);
145 |
146 | frameToCropTransform =
147 | ImageUtils.getTransformationMatrix(
148 | previewWidth, previewHeight,
149 | INPUT_SIZE, INPUT_SIZE,
150 | sensorOrientation, MAINTAIN_ASPECT);
151 |
152 | cropToFrameTransform = new Matrix();
153 | frameToCropTransform.invert(cropToFrameTransform);
154 |
155 | yuvBytes = new byte[3][];
156 |
157 | addCallback(
158 | new DrawCallback() {
159 | @Override
160 | public void drawCallback(final Canvas canvas) {
161 | renderDebug(canvas);
162 | }
163 | });
164 | }
165 |
166 | @Override
167 | public void onImageAvailable(final ImageReader reader) {
168 | Image image = null;
169 |
170 | try {
171 | image = reader.acquireLatestImage();
172 |
173 | if (image == null) {
174 | return;
175 | }
176 |
177 | if (computing) {
178 | image.close();
179 | return;
180 | }
181 | computing = true;
182 |
183 | Trace.beginSection("imageAvailable");
184 |
185 | final Plane[] planes = image.getPlanes();
186 | fillBytes(planes, yuvBytes);
187 |
188 | final int yRowStride = planes[0].getRowStride();
189 | final int uvRowStride = planes[1].getRowStride();
190 | final int uvPixelStride = planes[1].getPixelStride();
191 | ImageUtils.convertYUV420ToARGB8888(
192 | yuvBytes[0],
193 | yuvBytes[1],
194 | yuvBytes[2],
195 | rgbBytes,
196 | previewWidth,
197 | previewHeight,
198 | yRowStride,
199 | uvRowStride,
200 | uvPixelStride,
201 | false);
202 |
203 | image.close();
204 | } catch (final Exception e) {
205 | if (image != null) {
206 | image.close();
207 | }
208 | LOGGER.e(e, "Exception!");
209 | Trace.endSection();
210 | return;
211 | }
212 |
213 | rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
214 | final Canvas canvas = new Canvas(croppedBitmap);
215 | canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
216 |
217 | // For examining the actual TF input.
218 | if (SAVE_PREVIEW_BITMAP) {
219 | ImageUtils.saveBitmap(croppedBitmap);
220 | }
221 |
222 | runInBackground(
223 | new Runnable() {
224 | @Override
225 | public void run() {
226 | final long startTime = SystemClock.uptimeMillis();
227 | final List results = classifier.recognizeImage(croppedBitmap);
228 | lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
229 |
230 | cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
231 | resultsView.setResults(results);
232 | requestRender();
233 | computing = false;
234 | }
235 | });
236 |
237 | Trace.endSection();
238 | }
239 |
240 | @Override
241 | public void onSetDebug(boolean debug) {
242 | classifier.enableStatLogging(debug);
243 | }
244 |
245 | private void renderDebug(final Canvas canvas) {
246 | if (!isDebug()) {
247 | return;
248 | }
249 | final Bitmap copy = cropCopyBitmap;
250 | if (copy != null) {
251 | final Matrix matrix = new Matrix();
252 | final float scaleFactor = 2;
253 | matrix.postScale(scaleFactor, scaleFactor);
254 | matrix.postTranslate(
255 | canvas.getWidth() - copy.getWidth() * scaleFactor,
256 | canvas.getHeight() - copy.getHeight() * scaleFactor);
257 | canvas.drawBitmap(copy, matrix, new Paint());
258 |
259 | final Vector lines = new Vector();
260 | if (classifier != null) {
261 | String statString = classifier.getStatString();
262 | String[] statLines = statString.split("\n");
263 | for (String line : statLines) {
264 | lines.add(line);
265 | }
266 | }
267 |
268 | lines.add("Frame: " + previewWidth + "x" + previewHeight);
269 | lines.add("Crop: " + copy.getWidth() + "x" + copy.getHeight());
270 | lines.add("View: " + canvas.getWidth() + "x" + canvas.getHeight());
271 | lines.add("Rotation: " + sensorOrientation);
272 | lines.add("Inference time: " + lastProcessingTimeMs + "ms");
273 |
274 | borderedText.drawLines(canvas, 10, canvas.getHeight() - 10, lines);
275 | }
276 | }
277 | }
278 |
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/DetectorActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2016 The TensorFlow Authors. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package org.tensorflow.demo;
18 |
19 | import android.graphics.Bitmap;
20 | import android.graphics.Bitmap.Config;
21 | import android.graphics.Canvas;
22 | import android.graphics.Color;
23 | import android.graphics.Matrix;
24 | import android.graphics.Paint;
25 | import android.graphics.Paint.Style;
26 | import android.graphics.RectF;
27 | import android.graphics.Typeface;
28 | import android.media.Image;
29 | import android.media.Image.Plane;
30 | import android.media.ImageReader;
31 | import android.media.ImageReader.OnImageAvailableListener;
32 | import android.os.SystemClock;
33 | import android.os.Trace;
34 | import android.util.Size;
35 | import android.util.TypedValue;
36 | import android.view.Display;
37 | import java.util.LinkedList;
38 | import java.util.List;
39 | import java.util.Vector;
40 | import org.tensorflow.demo.OverlayView.DrawCallback;
41 | import org.tensorflow.demo.env.BorderedText;
42 | import org.tensorflow.demo.env.ImageUtils;
43 | import org.tensorflow.demo.env.Logger;
44 | import org.tensorflow.demo.tracking.MultiBoxTracker;
45 | import org.tensorflow.demo.R;
46 |
47 | /**
48 | * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
49 | * objects.
50 | */
51 | public class DetectorActivity extends CameraActivity implements OnImageAvailableListener {
52 | private static final Logger LOGGER = new Logger();
53 |
54 | // Configuration values for the prepackaged multibox model.
55 | private static final int MB_INPUT_SIZE = 224;
56 | private static final int MB_IMAGE_MEAN = 128;
57 | private static final float MB_IMAGE_STD = 128;
58 | private static final String MB_INPUT_NAME = "ResizeBilinear";
59 | private static final String MB_OUTPUT_LOCATIONS_NAME = "output_locations/Reshape";
60 | private static final String MB_OUTPUT_SCORES_NAME = "output_scores/Reshape";
61 | private static final String MB_MODEL_FILE = "file:///android_asset/multibox_model.pb";
62 | private static final String MB_LOCATION_FILE =
63 | "file:///android_asset/multibox_location_priors.txt";
64 |
65 | // Configuration values for tiny-yolo-voc. Note that the graph is not included with TensorFlow and
66 | // must be manually placed in the assets/ directory by the user.
67 | // Graphs and models downloaded from http://pjreddie.com/darknet/yolo/ may be converted e.g. via
68 | // DarkFlow (https://github.com/thtrieu/darkflow). Sample command:
69 | // ./flow --model cfg/tiny-yolo-voc.cfg --load bin/tiny-yolo-voc.weights --savepb --verbalise=True
70 | private static final String YOLO_MODEL_FILE = "file:///android_asset/graph-tiny-yolo-voc.pb";
71 | private static final int YOLO_INPUT_SIZE = 416;
72 | private static final String YOLO_INPUT_NAME = "input";
73 | private static final String YOLO_OUTPUT_NAMES = "output";
74 | private static final int YOLO_BLOCK_SIZE = 32;
75 |
76 | // Default to the included multibox model.
77 | private static final boolean USE_YOLO = false;
78 |
79 | private static final int CROP_SIZE = USE_YOLO ? YOLO_INPUT_SIZE : MB_INPUT_SIZE;
80 |
81 | // Minimum detection confidence to track a detection.
82 | private static final float MINIMUM_CONFIDENCE = USE_YOLO ? 0.25f : 0.1f;
83 |
84 | private static final boolean MAINTAIN_ASPECT = USE_YOLO;
85 |
86 | private static final boolean SAVE_PREVIEW_BITMAP = false;
87 | private static final float TEXT_SIZE_DIP = 10;
88 |
89 | private Integer sensorOrientation;
90 |
91 | private Classifier detector;
92 |
93 | private int previewWidth = 0;
94 | private int previewHeight = 0;
95 | private byte[][] yuvBytes;
96 | private int[] rgbBytes = null;
97 | private Bitmap rgbFrameBitmap = null;
98 | private Bitmap croppedBitmap = null;
99 |
100 | private boolean computing = false;
101 |
102 | private long timestamp = 0;
103 |
104 | private Matrix frameToCropTransform;
105 | private Matrix cropToFrameTransform;
106 |
107 | private Bitmap cropCopyBitmap;
108 |
109 | private MultiBoxTracker tracker;
110 |
111 | private byte[] luminance;
112 |
113 | private BorderedText borderedText;
114 |
115 | private long lastProcessingTimeMs;
116 |
  /**
   * One-time setup once the camera preview size is known: creates the text
   * renderer, the tracker, the detector (YOLO or multibox depending on
   * USE_YOLO), the conversion bitmaps/transforms, and the overlay callbacks.
   */
  @Override
  public void onPreviewSizeChosen(final Size size, final int rotation) {
    final float textSizePx =
        TypedValue.applyDimension(
            TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
    borderedText = new BorderedText(textSizePx);
    borderedText.setTypeface(Typeface.MONOSPACE);

    tracker = new MultiBoxTracker(getResources().getDisplayMetrics());

    // Detector selection is a compile-time switch (USE_YOLO constant).
    if (USE_YOLO) {
      detector =
          TensorFlowYoloDetector.create(
              getAssets(),
              YOLO_MODEL_FILE,
              YOLO_INPUT_SIZE,
              YOLO_INPUT_NAME,
              YOLO_OUTPUT_NAMES,
              YOLO_BLOCK_SIZE);
    } else {
      detector =
          TensorFlowMultiBoxDetector.create(
              getAssets(),
              MB_MODEL_FILE,
              MB_LOCATION_FILE,
              MB_IMAGE_MEAN,
              MB_IMAGE_STD,
              MB_INPUT_NAME,
              MB_OUTPUT_LOCATIONS_NAME,
              MB_OUTPUT_SCORES_NAME);
    }

    previewWidth = size.getWidth();
    previewHeight = size.getHeight();

    final Display display = getWindowManager().getDefaultDisplay();
    final int screenOrientation = display.getRotation();

    LOGGER.i("Sensor orientation: %d, Screen orientation: %d", rotation, screenOrientation);

    sensorOrientation = rotation + screenOrientation;

    LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
    rgbBytes = new int[previewWidth * previewHeight];
    rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
    croppedBitmap = Bitmap.createBitmap(CROP_SIZE, CROP_SIZE, Config.ARGB_8888);

    // Maps full preview frames into the CROP_SIZE square fed to the detector.
    frameToCropTransform =
        ImageUtils.getTransformationMatrix(
            previewWidth, previewHeight,
            CROP_SIZE, CROP_SIZE,
            sensorOrientation, MAINTAIN_ASPECT);

    // Inverse transform: maps detection boxes back into preview coordinates.
    cropToFrameTransform = new Matrix();
    frameToCropTransform.invert(cropToFrameTransform);
    // One buffer per YUV plane; allocated lazily in fillBytes.
    yuvBytes = new byte[3][];

    // Overlay that draws tracked boxes on every frame (debug info only in debug mode).
    trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
    trackingOverlay.addCallback(
        new DrawCallback() {
          @Override
          public void drawCallback(final Canvas canvas) {
            tracker.draw(canvas);
            if (isDebug()) {
              tracker.drawDebug(canvas);
            }
          }
        });

    // Separate debug overlay: dimmed background, last crop, and stat text.
    addCallback(
        new DrawCallback() {
          @Override
          public void drawCallback(final Canvas canvas) {
            if (!isDebug()) {
              return;
            }
            final Bitmap copy = cropCopyBitmap;
            if (copy == null) {
              return;
            }

            final int backgroundColor = Color.argb(100, 0, 0, 0);
            canvas.drawColor(backgroundColor);

            final Matrix matrix = new Matrix();
            final float scaleFactor = 2;
            matrix.postScale(scaleFactor, scaleFactor);
            // Anchor the scaled crop in the bottom-right corner.
            matrix.postTranslate(
                canvas.getWidth() - copy.getWidth() * scaleFactor,
                canvas.getHeight() - copy.getHeight() * scaleFactor);
            canvas.drawBitmap(copy, matrix, new Paint());

            final Vector lines = new Vector();
            if (detector != null) {
              final String statString = detector.getStatString();
              final String[] statLines = statString.split("\n");
              for (final String line : statLines) {
                lines.add(line);
              }
            }
            lines.add("");

            lines.add("Frame: " + previewWidth + "x" + previewHeight);
            lines.add("Crop: " + copy.getWidth() + "x" + copy.getHeight());
            lines.add("View: " + canvas.getWidth() + "x" + canvas.getHeight());
            lines.add("Rotation: " + sensorOrientation);
            lines.add("Inference time: " + lastProcessingTimeMs + "ms");

            borderedText.drawLines(canvas, 10, canvas.getHeight() - 10, lines);
          }
        });
  }
229 |
  // Overlay the tracker draws its boxes on; looked up from the layout during
  // setup (see R.id.tracking_overlay) and invalidated on every camera frame.
  OverlayView trackingOverlay;
231 |
232 | @Override
233 | public void onImageAvailable(final ImageReader reader) {
234 | Image image = null;
235 |
236 | ++timestamp;
237 | final long currTimestamp = timestamp;
238 |
239 | try {
240 | image = reader.acquireLatestImage();
241 |
242 | if (image == null) {
243 | return;
244 | }
245 |
246 | Trace.beginSection("imageAvailable");
247 |
248 | final Plane[] planes = image.getPlanes();
249 | fillBytes(planes, yuvBytes);
250 |
251 | tracker.onFrame(
252 | previewWidth,
253 | previewHeight,
254 | planes[0].getRowStride(),
255 | sensorOrientation,
256 | yuvBytes[0],
257 | timestamp);
258 | trackingOverlay.postInvalidate();
259 |
260 | // No mutex needed as this method is not reentrant.
261 | if (computing) {
262 | image.close();
263 | return;
264 | }
265 | computing = true;
266 |
267 | final int yRowStride = planes[0].getRowStride();
268 | final int uvRowStride = planes[1].getRowStride();
269 | final int uvPixelStride = planes[1].getPixelStride();
270 | ImageUtils.convertYUV420ToARGB8888(
271 | yuvBytes[0],
272 | yuvBytes[1],
273 | yuvBytes[2],
274 | rgbBytes,
275 | previewWidth,
276 | previewHeight,
277 | yRowStride,
278 | uvRowStride,
279 | uvPixelStride,
280 | false);
281 |
282 | image.close();
283 | } catch (final Exception e) {
284 | if (image != null) {
285 | image.close();
286 | }
287 | LOGGER.e(e, "Exception!");
288 | Trace.endSection();
289 | return;
290 | }
291 |
292 | rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
293 | final Canvas canvas = new Canvas(croppedBitmap);
294 | canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
295 |
296 | // For examining the actual TF input.
297 | if (SAVE_PREVIEW_BITMAP) {
298 | ImageUtils.saveBitmap(croppedBitmap);
299 | }
300 |
301 | if (luminance == null) {
302 | luminance = new byte[yuvBytes[0].length];
303 | }
304 | System.arraycopy(yuvBytes[0], 0, luminance, 0, luminance.length);
305 |
306 | runInBackground(
307 | new Runnable() {
308 | @Override
309 | public void run() {
310 | final long startTime = SystemClock.uptimeMillis();
311 | final List results = detector.recognizeImage(croppedBitmap);
312 | lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
313 |
314 | cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
315 | final Canvas canvas = new Canvas(cropCopyBitmap);
316 | final Paint paint = new Paint();
317 | paint.setColor(Color.RED);
318 | paint.setStyle(Style.STROKE);
319 | paint.setStrokeWidth(2.0f);
320 |
321 | final List mappedRecognitions =
322 | new LinkedList();
323 |
324 | for (final Classifier.Recognition result : results) {
325 | final RectF location = result.getLocation();
326 | if (location != null && result.getConfidence() >= MINIMUM_CONFIDENCE) {
327 | canvas.drawRect(location, paint);
328 |
329 | cropToFrameTransform.mapRect(location);
330 | result.setLocation(location);
331 | mappedRecognitions.add(result);
332 | }
333 | }
334 |
335 | tracker.trackResults(mappedRecognitions, luminance, currTimestamp);
336 | trackingOverlay.postInvalidate();
337 |
338 | requestRender();
339 | computing = false;
340 | }
341 | });
342 |
343 | Trace.endSection();
344 | }
345 |
  /** Layout resource containing the camera preview plus the tracking overlay. */
  @Override
  protected int getLayoutId() {
    return R.layout.camera_connection_fragment_tracking;
  }
350 |
  /**
   * Requested preview dimension; CROP_SIZE is reused so the preview is at
   * least as large as the detector's input crop. NOTE(review): presumably
   * consumed by the base CameraActivity when choosing a camera size — confirm.
   */
  @Override
  protected int getDesiredPreviewFrameSize() {
    return CROP_SIZE;
  }
355 |
356 | @Override
357 | public void onSetDebug(final boolean debug) {
358 | detector.enableStatLogging(debug);
359 | }
360 | }
361 |
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/OverlayView.java:
--------------------------------------------------------------------------------
1 | /* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 |
16 | package org.tensorflow.demo;
17 |
18 | import android.content.Context;
19 | import android.graphics.Canvas;
20 | import android.util.AttributeSet;
21 | import android.view.View;
22 | import java.util.LinkedList;
23 | import java.util.List;
24 |
25 | /**
26 | * A simple View providing a render callback to other classes.
27 | */
28 | public class OverlayView extends View {
29 | private final List callbacks = new LinkedList();
30 |
31 | public OverlayView(final Context context, final AttributeSet attrs) {
32 | super(context, attrs);
33 | }
34 |
35 | /**
36 | * Interface defining the callback for client classes.
37 | */
38 | public interface DrawCallback {
39 | public void drawCallback(final Canvas canvas);
40 | }
41 |
42 | public void addCallback(final DrawCallback callback) {
43 | callbacks.add(callback);
44 | }
45 |
46 | @Override
47 | public synchronized void draw(final Canvas canvas) {
48 | for (final DrawCallback callback : callbacks) {
49 | callback.drawCallback(canvas);
50 | }
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/org/tensorflow/demo/PhotoStylizeActivity.java:
--------------------------------------------------------------------------------
1 | package org.tensorflow.demo;
2 |
3 | import android.app.Activity;
4 | import android.content.Context;
5 | import android.content.Intent;
6 | import android.graphics.Bitmap;
7 | import android.graphics.Canvas;
8 | import android.graphics.Color;
9 | import android.graphics.Paint;
10 | import android.graphics.Rect;
11 | import android.net.Uri;
12 | import android.os.AsyncTask;
13 | import android.os.Bundle;
14 | import android.os.Environment;
15 | import android.view.MotionEvent;
16 | import android.view.View;
17 | import android.view.ViewGroup;
18 | import android.webkit.MimeTypeMap;
19 | import android.widget.BaseAdapter;
20 | import android.widget.Button;
21 | import android.widget.GridView;
22 | import android.widget.ImageView;
23 | import android.widget.ProgressBar;
24 | import android.widget.Toast;
25 |
26 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
27 | import org.tensorflow.demo.env.ImageUtils;
28 | import org.tensorflow.demo.env.Logger;
29 | import org.tensorflow.demo.util.BitmapUtil;
30 | import org.tensorflow.demo.util.FileUtil;
31 | import org.tensorflow.demo.util.MediaScanner;
32 |
33 | import java.io.File;
34 | import java.util.ArrayList;
35 |
36 | import static org.tensorflow.demo.StylizeActivity.getBitmapFromAsset;
37 |
38 | public class PhotoStylizeActivity extends Activity {
39 |
  static {
    // Native library backing TensorFlowInferenceInterface.
    System.loadLibrary("tensorflow_demo");
  }

  private static final Logger LOGGER = new Logger();

  // Graph file and node names of the bundled style-transfer model.
  private static final String MODEL_FILE = "file:///android_asset/stylize_quantized.pb";
  private static final String INPUT_NODE = "input";
  private static final String STYLE_NODE = "style_num";
  private static final String OUTPUT_NODE = "transformer/expand/conv3/conv/Sigmoid";
  // Number of styles the model blends between.
  private static final int NUM_STYLES = 26;

  // Whether to actively manipulate non-selected sliders so that sum of activations always appears
  // to be 1.0. The actual style input tensor will be normalized to sum to 1.0 regardless.
  private static final boolean NORMALIZE_SLIDERS = true;

  // When true, stylizeImage() feeds a synthetic moving-square test pattern.
  private static final boolean DEBUG_MODEL = false;

  private static final int[] SIZES = {32, 48, 64, 96, 128, 192, 256, 384, 512, 768, 1024};

  // Start at a medium size, but let the user step up through smaller sizes so they don't get
  // immediately stuck processing a large image.
  private int desiredSizeIndex = -1;
  private int desiredSize = 256;

  // Per-style weights fed to STYLE_NODE; maintained by setStyle().
  private final float[] styleVals = new float[NUM_STYLES];

  // Frame counter, only used by the DEBUG_MODEL test pattern.
  private int frameNum = 0;

  // Original photo and its stylized copy.
  private Bitmap srcBitmap;
  private Bitmap dstBitmap;

  private TensorFlowInferenceInterface inferenceInterface;

  // Index of the slider pushed up when the selected one goes down (see setStyle).
  private int lastOtherStyle = 1;

  // True when every slider is at zero; styles are then mixed equally.
  private boolean allZero = false;

  // UI handles, bound in onCreate().
  private ImageGridAdapter adapter;
  private GridView grid;
  private ImageView ivPhoto;
  private ProgressBar progressBar;
  private View viewMask;

  private StylizeTask stylizeTask = new StylizeTask();
86 |
87 | @Override
88 | protected void onCreate(Bundle savedInstanceState) {
89 | super.onCreate(savedInstanceState);
90 | setContentView(R.layout.activity_photo_stylize);
91 |
92 | grid = (GridView) findViewById(R.id.grid_layout);
93 | ivPhoto = (ImageView) findViewById(R.id.iv_photo);
94 | progressBar = (ProgressBar) findViewById(R.id.progress_bar);
95 | viewMask = findViewById(R.id.view_mask);
96 |
97 | init();
98 | }
99 |
100 | @Override
101 | protected void onDestroy() {
102 | super.onDestroy();
103 | if (stylizeTask.getStatus() != AsyncTask.Status.FINISHED) {
104 | stylizeTask.cancel(true);
105 | stylizeTask = null;
106 | }
107 | }
108 |
109 | @Override
110 | public void onWindowFocusChanged(boolean hasFocus) {
111 | super.onWindowFocusChanged(hasFocus);
112 | if (hasFocus) {
113 | srcBitmap = getBitmapFromImageView(ivPhoto);
114 | dstBitmap = Bitmap.createBitmap(srcBitmap);
115 | }
116 | }
117 |
  /** Loads the TensorFlow model and wires up the button listeners and style grid. */
  private void init() {
    inferenceInterface = new TensorFlowInferenceInterface(getAssets(), MODEL_FILE);
    initListener();
    initStyleGrid();
  }
123 |
124 | private void initListener() {
125 | findViewById(R.id.btn_reset).setOnClickListener(new View.OnClickListener() {
126 | @Override
127 | public void onClick(View v) {
128 | if (stylizeTask.getStatus() != AsyncTask.Status.FINISHED) {
129 | stylizeTask.cancel(true);
130 | }
131 | ivPhoto.setImageBitmap(srcBitmap);
132 | }
133 | });
134 |
135 | findViewById(R.id.btn_stylize).setOnClickListener(new View.OnClickListener() {
136 | @Override
137 | public void onClick(View v) {
138 | if (stylizeTask.getStatus() == AsyncTask.Status.FINISHED) {
139 | stylizeTask = new StylizeTask();
140 | }
141 | if (stylizeTask.getStatus() != AsyncTask.Status.RUNNING) {
142 | progressBar.setVisibility(View.VISIBLE);
143 | viewMask.setVisibility(View.VISIBLE);
144 | stylizeTask.execute();
145 | }
146 | }
147 | });
148 |
149 | ivPhoto.setOnClickListener(new View.OnClickListener() {
150 | @Override
151 | public void onClick(View v) {
152 | startFileManager();
153 | }
154 | });
155 | }
156 |
  /**
   * Initializes the style grid, then selects the first style at full strength.
   */
  private void initStyleGrid() {
    adapter = new ImageGridAdapter();
    grid.setAdapter(adapter);
    grid.setOnTouchListener(gridTouchAdapter);

    setStyle(adapter.items[0], 1.0f);
  }
167 |
168 | /**
169 | * 获取ImageView中的Bitmap
170 | * @param imageView
171 | * @return
172 | */
173 | private Bitmap getBitmapFromImageView(ImageView imageView) {
174 | imageView.setDrawingCacheEnabled(true);
175 | Bitmap bitmap = Bitmap.createBitmap(imageView.getDrawingCache());
176 | imageView.setDrawingCacheEnabled(false);
177 | return bitmap;
178 | }
179 |
180 | /**
181 | * 打开文件管理器
182 | */
183 | private void startFileManager() {
184 | Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
185 | intent.setType("image/*");
186 | intent.addCategory(Intent.CATEGORY_OPENABLE);
187 | startActivityForResult(intent, 1024);
188 | }
189 |
190 | @Override
191 | protected void onActivityResult(int requestCode, int resultCode, Intent data) {
192 | super.onActivityResult(requestCode, resultCode, data);
193 | if (resultCode == RESULT_OK) {
194 | if (requestCode == 1024) {
195 | Uri uri = data.getData();
196 | String path = FileUtil.getFileAbsolutePath(PhotoStylizeActivity.this, uri);
197 | if (path != null) {
198 | srcBitmap = BitmapUtil.decodeSampledBitmapFromFilePath(path, ivPhoto.getWidth(), ivPhoto.getHeight());
199 | ivPhoto.setImageBitmap(srcBitmap);
200 | } else {
201 | Toast.makeText(this, "cannot get path", Toast.LENGTH_SHORT).show();
202 | }
203 | }
204 | }
205 | }
206 |
207 | /**
208 | * 保存图片后,调用MediaScanner通知系统图库刷新
209 | * @param fileName
210 | */
211 | private void callMediaScanner(String fileName) {
212 | String root = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
213 | String filePath = root + "/" + fileName;
214 |
215 | MediaScanner mediaScanner = new MediaScanner(getApplicationContext());
216 | String[] filePaths = new String[]{filePath};
217 | String[] mimeTypes = new String[]{MimeTypeMap.getSingleton().getMimeTypeFromExtension("png")};
218 | mediaScanner.scanFiles(filePaths, mimeTypes);
219 | }
220 |
  /**
   * Sets one style slider to {@code value} and rebalances the rest.
   *
   * <p>With NORMALIZE_SLIDERS enabled, the non-selected sliders are rescaled so
   * that all slider values visually sum to 1.0. Finally styleVals (the tensor
   * input) is recomputed: equal mix when everything is zero, otherwise
   * normalized to sum to 1.0.
   *
   * @param slider the slider being adjusted
   * @param value its new value in [0, 1]
   */
  private void setStyle(final ImageSlider slider, final float value) {
    slider.setValue(value);

    if (NORMALIZE_SLIDERS) {
      // Slider vals correspond directly to the input tensor vals, and normalization is visually
      // maintained by remanipulating non-selected sliders.
      float otherSum = 0.0f;

      for (int i = 0; i < NUM_STYLES; ++i) {
        if (adapter.items[i] != slider) {
          otherSum += adapter.items[i].value;
        }
      }

      if (otherSum > 0.0) {
        float highestOtherVal = 0;
        // Scale the others so they absorb exactly the remaining (1 - value).
        final float factor = otherSum > 0.0f ? (1.0f - value) / otherSum : 0.0f;
        for (int i = 0; i < NUM_STYLES; ++i) {
          final ImageSlider child = adapter.items[i];
          if (child == slider) {
            continue;
          }
          final float newVal = child.value * factor;
          // Snap tiny residues to zero to avoid lingering near-invisible styles.
          child.setValue(newVal > 0.01f ? newVal : 0.0f);

          // Remember the strongest other style as the rebalance target.
          if (child.value > highestOtherVal) {
            lastOtherStyle = i;
            highestOtherVal = child.value;
          }
        }
      } else {
        // Everything else is 0, so just pick a suitable slider to push up when the
        // selected one goes down.
        if (adapter.items[lastOtherStyle] == slider) {
          lastOtherStyle = (lastOtherStyle + 1) % NUM_STYLES;
        }
        adapter.items[lastOtherStyle].setValue(1.0f - value);
      }
    }

    final boolean lastAllZero = allZero;
    float sum = 0.0f;
    for (int i = 0; i < NUM_STYLES; ++i) {
      sum += adapter.items[i].value;
    }
    allZero = sum == 0.0f;

    // Now update the values used for the input tensor. If nothing is set, mix in everything
    // equally. Otherwise everything is normalized to sum to 1.0.
    for (int i = 0; i < NUM_STYLES; ++i) {
      styleVals[i] = allZero ? 1.0f / NUM_STYLES : adapter.items[i].value / sum;

      // Redraw all sliders when the all-zero state flips, since shading changes.
      if (lastAllZero != allZero) {
        adapter.items[i].postInvalidate();
      }
    }
  }
283 |
284 | /**
285 | * 对指定Bitmap进行Stylize
286 | * @param bitmap
287 | * @return
288 | */
289 | private Bitmap stylizeImage(final Bitmap bitmap) {
290 | desiredSize = bitmap.getWidth();
291 | int[] intValues = new int[desiredSize * desiredSize];
292 | float[] floatValues = new float[desiredSize * desiredSize * 3];
293 | ++frameNum;
294 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
295 |
296 | if (DEBUG_MODEL) {
297 | // Create a white square that steps through a black background 1 pixel per frame.
298 | final int centerX = (frameNum + bitmap.getWidth() / 2) % bitmap.getWidth();
299 | final int centerY = bitmap.getHeight() / 2;
300 | final int squareSize = 10;
301 | for (int i = 0; i < intValues.length; ++i) {
302 | final int x = i % bitmap.getWidth();
303 | final int y = i / bitmap.getHeight();
304 | final float val =
305 | Math.abs(x - centerX) < squareSize && Math.abs(y - centerY) < squareSize ? 1.0f : 0.0f;
306 | floatValues[i * 3] = val;
307 | floatValues[i * 3 + 1] = val;
308 | floatValues[i * 3 + 2] = val;
309 | }
310 | } else {
311 | for (int i = 0; i < intValues.length; ++i) {
312 | final int val = intValues[i];
313 | floatValues[i * 3] = ((val >> 16) & 0xFF) / 255.0f;
314 | floatValues[i * 3 + 1] = ((val >> 8) & 0xFF) / 255.0f;
315 | floatValues[i * 3 + 2] = (val & 0xFF) / 255.0f;
316 | }
317 | }
318 |
319 | // Copy the input data into TensorFlow.
320 | inferenceInterface.feed(
321 | INPUT_NODE, floatValues, 1, bitmap.getWidth(), bitmap.getHeight(), 3);
322 | inferenceInterface.feed(STYLE_NODE, styleVals, NUM_STYLES);
323 |
324 | inferenceInterface.run(new String[] {OUTPUT_NODE}, false);
325 | inferenceInterface.fetch(OUTPUT_NODE, floatValues);
326 |
327 | for (int i = 0; i < intValues.length; ++i) {
328 | intValues[i] =
329 | 0xFF000000
330 | | (((int) (floatValues[i * 3] * 255)) << 16)
331 | | (((int) (floatValues[i * 3 + 1] * 255)) << 8)
332 | | ((int) (floatValues[i * 3 + 2] * 255));
333 | }
334 |
335 | //bitmap.setPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
336 | Bitmap newBitmap = Bitmap.createBitmap(bitmap);
337 | newBitmap.setPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
338 | return newBitmap;
339 | }
340 |
  /**
   * Touch listener for the style grid. Dragging vertically inside a cell sets
   * that style's weight from 0 (bottom edge) to 1 (top edge); several styles
   * can be blended by adjusting multiple cells.
   */
  private final View.OnTouchListener gridTouchAdapter =
      new View.OnTouchListener() {
        // Cell currently being dragged; null when no drag is in progress.
        ImageSlider slider = null;

        @Override
        public boolean onTouch(final View v, final MotionEvent event) {
          switch (event.getActionMasked()) {
            case MotionEvent.ACTION_DOWN:
              // Locate the cell under the touch point and highlight it.
              for (int i = 0; i < NUM_STYLES; ++i) {
                final ImageSlider child = adapter.items[i];
                final Rect rect = new Rect();
                child.getHitRect(rect);
                if (rect.contains((int) event.getX(), (int) event.getY())) {
                  slider = child;
                  slider.setHilighted(true);
                }
              }
              break;

            case MotionEvent.ACTION_MOVE:
              if (slider != null) {
                final Rect rect = new Rect();
                slider.getHitRect(rect);

                // Map touch height within the cell to a value in [0, 1].
                final float newSliderVal =
                    (float)
                        Math.min(
                            1.0,
                            Math.max(
                                0.0, 1.0 - (event.getY() - slider.getTop()) / slider.getHeight()));

                setStyle(slider, newSliderVal);
              }
              break;

            case MotionEvent.ACTION_UP:
              if (slider != null) {
                slider.setHilighted(false);
                slider = null;
              }
              break;

            default: // fall out

          }
          // Consume every event so the grid does not scroll while dragging.
          return true;
        }
      };
392 |
393 | /**
394 | * Style表格的Item控件。
395 | */
396 | private class ImageSlider extends ImageView {
397 | private float value = 0.0f;
398 | private boolean hilighted = false;
399 |
400 | private final Paint boxPaint;
401 | private final Paint linePaint;
402 |
403 | public ImageSlider(final Context context) {
404 | super(context);
405 | value = 0.0f;
406 |
407 | boxPaint = new Paint();
408 | boxPaint.setColor(Color.BLACK);
409 | boxPaint.setAlpha(128);
410 |
411 | linePaint = new Paint();
412 | linePaint.setColor(Color.WHITE);
413 | linePaint.setStrokeWidth(10.0f);
414 | linePaint.setStyle(Paint.Style.STROKE);
415 | }
416 |
417 | @Override
418 | public void onDraw(final Canvas canvas) {
419 | super.onDraw(canvas);
420 | final float y = (1.0f - value) * canvas.getHeight();
421 |
422 | // If all sliders are zero, don't bother shading anything.
423 | if (!allZero) {
424 | canvas.drawRect(0, 0, canvas.getWidth(), y, boxPaint);
425 | }
426 |
427 | if (value > 0.0f) {
428 | canvas.drawLine(0, y, canvas.getWidth(), y, linePaint);
429 | }
430 |
431 | if (hilighted) {
432 | canvas.drawRect(0, 0, getWidth(), getHeight(), linePaint);
433 | }
434 | }
435 |
436 | @Override
437 | protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
438 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
439 | setMeasuredDimension(getMeasuredWidth(), getMeasuredWidth());
440 | }
441 |
442 | public void setValue(final float value) {
443 | this.value = value;
444 | postInvalidate();
445 | }
446 |
447 | public void setHilighted(final boolean highlighted) {
448 | this.hilighted = highlighted;
449 | this.postInvalidate();
450 | }
451 | }
452 |
453 | /**
454 | * Style表格适配器
455 | */
456 | private class ImageGridAdapter extends BaseAdapter {
457 | final ImageSlider[] items = new ImageSlider[NUM_STYLES];
458 | final ArrayList