├── .gitignore
├── README.md
├── app
│   ├── .gitignore
│   ├── apk
│   │   └── app-debug.apk
│   ├── build.gradle
│   ├── download-models.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── com
│       │           └── busradeniz
│       │               └── detection
│       │                   └── ExampleInstrumentedTest.java
│       ├── main
│       │   ├── AndroidManifest.xml
│       │   ├── java
│       │   │   └── com
│       │   │       └── busradeniz
│       │   │           └── detection
│       │   │               ├── AutoFitTextureView.java
│       │   │               ├── CameraActivity.java
│       │   │               ├── CameraConnectionFragment.java
│       │   │               ├── Classifier.java
│       │   │               ├── DetectorActivity.java
│       │   │               ├── OverlayView.java
│       │   │               ├── TensorFlowObjectDetectionAPIModel.java
│       │   │               ├── env
│       │   │               │   ├── BorderedText.java
│       │   │               │   ├── ImageUtils.java
│       │   │               │   └── Logger.java
│       │   │               └── tracking
│       │   │                   └── MultiBoxTracker.java
│       │   └── res
│       │       ├── drawable
│       │       │   └── ic_launcher_background.xml
│       │       ├── layout
│       │       │   ├── activity_camera.xml
│       │       │   └── camera_connection_fragment_tracking.xml
│       │       ├── mipmap-anydpi-v26
│       │       │   ├── ic_launcher.xml
│       │       │   └── ic_launcher_round.xml
│       │       ├── mipmap-hdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-mdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xhdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       ├── mipmap-xxxhdpi
│       │       │   ├── ic_launcher.png
│       │       │   ├── ic_launcher_foreground.png
│       │       │   └── ic_launcher_round.png
│       │       └── values
│       │           ├── colors.xml
│       │           ├── strings.xml
│       │           └── styles.xml
│       └── test
│           └── java
│               └── com
│                   └── busradeniz
│                       └── detection
│                           └── ExampleUnitTest.java
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | /local.properties
2 | /.idea
3 | /build
4 | /captures
5 | /app/src/main/assets
6 | *.iml
7 | .gradle
8 | .DS_Store
9 | .externalNativeBuild
10 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # android-tensorflow-detection
2 |
3 | This application performs real-time object detection with TensorFlow for Mobile and verbally announces the detected objects and their locations to the user.
4 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /main/assets
--------------------------------------------------------------------------------
/app/apk/app-debug.apk:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/apk/app-debug.apk
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
4 | project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'
5 | apply from: "download-models.gradle"
6 |
7 | android {
8 | compileSdkVersion 26
9 | buildToolsVersion '27.0.1'
10 | defaultConfig {
11 | applicationId "com.busradeniz.detection"
12 | minSdkVersion 25
13 | targetSdkVersion 26
14 | versionCode 1
15 | versionName "1.0"
16 | testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
17 | }
18 | buildTypes {
19 | release {
20 | minifyEnabled false
21 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
22 | }
23 | }
24 |
25 | sourceSets {
26 | main {
27 | assets.srcDirs = [project.ext.ASSET_DIR]
28 | }
29 | }
30 |
31 | lintOptions {
32 | abortOnError false
33 | }
34 | }
35 |
36 | dependencies {
37 | implementation fileTree(dir: 'libs', include: ['*.jar'])
38 |
39 | // Support libraries
40 | implementation 'com.android.support:appcompat-v7:26.1.0'
41 | implementation 'com.android.support.constraint:constraint-layout:1.0.2'
42 |
43 | // Tensorflow
44 | implementation 'org.tensorflow:tensorflow-android:+'
45 |
46 | // Test Dependencies
47 | testImplementation 'junit:junit:4.12'
48 | androidTestImplementation('com.android.support.test.espresso:espresso-core:3.0.1', {
49 | exclude group: 'com.android.support', module: 'support-annotations'
50 | })
51 | }
52 |
--------------------------------------------------------------------------------
/app/download-models.gradle:
--------------------------------------------------------------------------------
1 |
2 | // Hard-coded model archive files
3 | def models = ['object_detection/ssd_mobilenet_v1_android_export.zip']
4 | // Root URL for model archives
5 | def MODEL_URL = 'https://storage.googleapis.com/download.tensorflow.org/models'
6 |
7 | buildscript {
8 | repositories {
9 | jcenter()
10 | }
11 | dependencies {
12 | classpath 'de.undercouch:gradle-download-task:3.2.0'
13 | }
14 | }
15 |
16 | import de.undercouch.gradle.tasks.download.Download
17 |
18 | task downloadFile(type: Download){
19 | for (f in models) {
20 | src "${MODEL_URL}/" + f
21 | }
22 | dest new File(project.ext.TMP_DIR)
23 | overwrite true
24 | }
25 |
26 | task extractModels(type: Copy) {
27 | if (!new File(project.ext.TMP_DIR).exists()) {
28 | new File(project.ext.TMP_DIR).mkdirs()
29 | }
30 |
31 | if (!new File(project.ext.ASSET_DIR).exists()) {
32 | new File(project.ext.ASSET_DIR).mkdirs()
33 | }
34 |
35 | for (f in models) {
36 | def localFile = f.split("/")[-1]
37 | from zipTree(project.ext.TMP_DIR + '/' + localFile)
38 | }
39 |
40 | into file(project.ext.ASSET_DIR)
41 | fileMode 0644
42 | exclude '**/LICENSE'
43 |
44 | def needDownload = false
45 | for (f in models) {
46 | def localFile = f.split("/")[-1]
47 | if (!(new File(project.ext.TMP_DIR + '/' + localFile)).exists()) {
48 | needDownload = true
49 | }
50 | }
51 |
52 | if (needDownload) {
53 | dependsOn downloadFile
54 | }
55 | }
56 |
57 | tasks.whenTaskAdded { task ->
58 | if (task.name == 'assembleDebug') {
59 | task.dependsOn 'extractModels'
60 | }
61 | if (task.name == 'assembleRelease') {
62 | task.dependsOn 'extractModels'
63 | }
64 | }
65 |
66 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/busradeniz/detection/ExampleInstrumentedTest.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.content.Context;
4 | import android.support.test.InstrumentationRegistry;
5 | import android.support.test.runner.AndroidJUnit4;
6 |
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | /**
13 | * Instrumented test, which will execute on an Android device.
14 | *
15 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
16 | */
17 | @RunWith(AndroidJUnit4.class)
18 | public class ExampleInstrumentedTest {
19 | @Test
20 | public void useAppContext() throws Exception {
21 | // Context of the app under test.
22 | Context appContext = InstrumentationRegistry.getTargetContext();
23 |
24 | assertEquals("com.busradeniz.detection", appContext.getPackageName());
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <manifest xmlns:android="http://schemas.android.com/apk/res/android"
3 |     package="com.busradeniz.detection">
4 |
5 |     <uses-permission android:name="android.permission.CAMERA" />
6 |     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
7 |
8 |     <uses-feature android:name="android.hardware.camera" />
9 |     <uses-feature android:name="android.hardware.camera.autofocus" />
10 |
11 |     <application
12 |         android:allowBackup="true"
13 |         android:icon="@mipmap/ic_launcher"
14 |         android:label="@string/app_name"
15 |         android:roundIcon="@mipmap/ic_launcher_round"
16 |         android:theme="@style/AppTheme">
17 |         <activity android:name=".DetectorActivity">
18 |             <intent-filter>
19 |                 <action android:name="android.intent.action.MAIN" />
20 |                 <category android:name="android.intent.category.LAUNCHER" />
21 |             </intent-filter>
22 |         </activity>
23 |     </application>
24 | </manifest>
--------------------------------------------------------------------------------
/app/src/main/java/com/busradeniz/detection/AutoFitTextureView.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.content.Context;
4 | import android.util.AttributeSet;
5 | import android.view.TextureView;
6 |
7 | /**
8 | * A {@link TextureView} that can be adjusted to a specified aspect ratio.
9 | */
10 | public class AutoFitTextureView extends TextureView {
11 | private int ratioWidth = 0;
12 | private int ratioHeight = 0;
13 |
14 | public AutoFitTextureView(final Context context) {
15 | this(context, null);
16 | }
17 |
18 | public AutoFitTextureView(final Context context, final AttributeSet attrs) {
19 | this(context, attrs, 0);
20 | }
21 |
22 | public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
23 | super(context, attrs, defStyle);
24 | }
25 |
26 | /**
27 | * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
28 | * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
29 | * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) produce the same result.
30 | *
31 | * @param width Relative horizontal size
32 | * @param height Relative vertical size
33 | */
34 | public void setAspectRatio(final int width, final int height) {
35 | if (width < 0 || height < 0) {
36 | throw new IllegalArgumentException("Size cannot be negative.");
37 | }
38 | ratioWidth = width;
39 | ratioHeight = height;
40 | requestLayout();
41 | }
42 |
43 | @Override
44 | protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
45 | super.onMeasure(widthMeasureSpec, heightMeasureSpec);
46 | final int width = MeasureSpec.getSize(widthMeasureSpec);
47 | final int height = MeasureSpec.getSize(heightMeasureSpec);
48 | if (0 == ratioWidth || 0 == ratioHeight) {
49 | setMeasuredDimension(width, height);
50 | } else {
51 | if (width < height * ratioWidth / ratioHeight) {
52 | setMeasuredDimension(width, width * ratioHeight / ratioWidth);
53 | } else {
54 | setMeasuredDimension(height * ratioWidth / ratioHeight, height);
55 | }
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
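Note: the measuring contract above is easiest to see with a concrete call. A
hypothetical usage sketch (the `context` value is assumed, not part of the
project):

    // Only the ratio of the two arguments matters:
    // setAspectRatio(640, 480) behaves exactly like setAspectRatio(4, 3).
    AutoFitTextureView textureView = new AutoFitTextureView(context);
    textureView.setAspectRatio(640, 480);
    // On a 1080x1920 portrait screen, onMeasure() keeps the width and
    // shrinks the height: 1080 x (1080 * 480 / 640) = 1080x810.
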
/app/src/main/java/com/busradeniz/detection/CameraActivity.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.Manifest;
4 | import android.app.Activity;
5 | import android.content.Context;
6 | import android.content.pm.PackageManager;
7 | import android.hardware.camera2.CameraAccessException;
8 | import android.hardware.camera2.CameraCharacteristics;
9 | import android.hardware.camera2.CameraManager;
10 | import android.hardware.camera2.params.StreamConfigurationMap;
11 | import android.media.Image;
12 | import android.media.Image.Plane;
13 | import android.media.ImageReader;
14 | import android.media.ImageReader.OnImageAvailableListener;
15 | import android.os.Build;
16 | import android.os.Bundle;
17 | import android.os.Handler;
18 | import android.os.HandlerThread;
19 | import android.os.Trace;
20 | import android.speech.tts.TextToSpeech;
21 | import android.util.Size;
22 | import android.view.Surface;
23 | import android.view.WindowManager;
24 | import android.widget.Toast;
25 |
26 | import com.busradeniz.detection.env.ImageUtils;
27 | import com.busradeniz.detection.env.Logger;
28 |
29 | import java.nio.ByteBuffer;
30 | import java.util.Collections;
31 | import java.util.HashSet;
32 | import java.util.List;
33 | import java.util.Set;
34 | import java.util.concurrent.atomic.AtomicBoolean;
35 |
36 | public abstract class CameraActivity extends Activity
37 | implements OnImageAvailableListener {
38 | private static final Logger LOGGER = new Logger();
39 |
40 | private static final int PERMISSIONS_REQUEST = 1;
41 |
42 | private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
43 | private static final String PERMISSION_STORAGE = Manifest.permission.WRITE_EXTERNAL_STORAGE;
44 |
45 | private Handler handler;
46 | private HandlerThread handlerThread;
47 | private boolean isProcessingFrame = false;
48 | private byte[][] yuvBytes = new byte[3][];
49 | private int[] rgbBytes = null;
50 | private int yRowStride;
51 |
52 | protected int previewWidth = 0;
53 | protected int previewHeight = 0;
54 |
55 | private Runnable postInferenceCallback;
56 | private Runnable imageConverter;
57 |
58 | private TextToSpeech textToSpeech;
59 |
60 | @Override
61 | protected void onCreate(final Bundle savedInstanceState) {
62 | LOGGER.d("onCreate " + this);
63 | super.onCreate(null);
64 | getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
65 |
66 | setContentView(R.layout.activity_camera);
67 |
68 | if (hasPermission()) {
69 | setFragment();
70 | } else {
71 | requestPermission();
72 | }
73 |
74 | this.textToSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
75 | @Override
76 | public void onInit(int status) {
77 | if (status == TextToSpeech.SUCCESS) {
78 | LOGGER.i("TextToSpeech is initialised");
79 | } else {
80 | LOGGER.e("Cannot initialise text to speech!");
81 | }
82 | }
83 | });
84 |
85 | }
86 |
87 | protected int[] getRgbBytes() {
88 | imageConverter.run();
89 | return rgbBytes;
90 | }
91 |
92 | protected byte[] getLuminance() {
93 | return yuvBytes[0];
94 | }
95 |
96 | /**
97 | * Callback for Camera2 API
98 | */
99 | @Override
100 | public void onImageAvailable(final ImageReader reader) {
101 | // We need to wait until we have a size from onPreviewSizeChosen.
102 | if (previewWidth == 0 || previewHeight == 0) {
103 | return;
104 | }
105 | if (rgbBytes == null) {
106 | rgbBytes = new int[previewWidth * previewHeight];
107 | }
108 | try {
109 | final Image image = reader.acquireLatestImage();
110 |
111 | if (image == null) {
112 | return;
113 | }
114 |
115 | if (isProcessingFrame) {
116 | image.close();
117 | return;
118 | }
119 | isProcessingFrame = true;
120 | Trace.beginSection("imageAvailable");
121 | final Plane[] planes = image.getPlanes();
122 | fillBytes(planes, yuvBytes);
123 | yRowStride = planes[0].getRowStride();
124 | final int uvRowStride = planes[1].getRowStride();
125 | final int uvPixelStride = planes[1].getPixelStride();
126 |
127 | imageConverter =
128 | new Runnable() {
129 | @Override
130 | public void run() {
131 | ImageUtils.convertYUV420ToARGB8888(
132 | yuvBytes[0],
133 | yuvBytes[1],
134 | yuvBytes[2],
135 | previewWidth,
136 | previewHeight,
137 | yRowStride,
138 | uvRowStride,
139 | uvPixelStride,
140 | rgbBytes);
141 | }
142 | };
143 |
144 | postInferenceCallback =
145 | new Runnable() {
146 | @Override
147 | public void run() {
148 | image.close();
149 | isProcessingFrame = false;
150 | }
151 | };
152 |
153 | processImage();
154 | } catch (final Exception e) {
155 | LOGGER.e(e, "Exception!");
156 | Trace.endSection();
157 | return;
158 | }
159 | Trace.endSection();
160 | }
161 |
162 | @Override
163 | public synchronized void onStart() {
164 | LOGGER.d("onStart " + this);
165 | super.onStart();
166 | }
167 |
168 | @Override
169 | public synchronized void onResume() {
170 | LOGGER.d("onResume " + this);
171 | super.onResume();
172 |
173 | handlerThread = new HandlerThread("inference");
174 | handlerThread.start();
175 | handler = new Handler(handlerThread.getLooper());
176 | }
177 |
178 | @Override
179 | public synchronized void onPause() {
180 | LOGGER.d("onPause " + this);
181 |
182 | if (!isFinishing()) {
183 | LOGGER.d("Requesting finish");
184 | finish();
185 | }
186 |
187 | handlerThread.quitSafely();
188 | try {
189 | handlerThread.join();
190 | handlerThread = null;
191 | handler = null;
192 | } catch (final InterruptedException e) {
193 | LOGGER.e(e, "Exception!");
194 | }
195 |
196 | if (textToSpeech != null) {
197 | textToSpeech.stop();
198 | textToSpeech.shutdown();
199 | }
200 |
201 | super.onPause();
202 | }
203 |
204 | @Override
205 | public synchronized void onStop() {
206 | LOGGER.d("onStop " + this);
207 | super.onStop();
208 | }
209 |
210 | @Override
211 | public synchronized void onDestroy() {
212 | LOGGER.d("onDestroy " + this);
213 | super.onDestroy();
214 | }
215 |
216 | protected synchronized void runInBackground(final Runnable r) {
217 | if (handler != null) {
218 | handler.post(r);
219 | }
220 | }
221 |
222 | @Override
223 | public void onRequestPermissionsResult(
224 | final int requestCode, final String[] permissions, final int[] grantResults) {
225 | if (requestCode == PERMISSIONS_REQUEST) {
226 | if (grantResults.length > 0
227 | && grantResults[0] == PackageManager.PERMISSION_GRANTED
228 | && grantResults[1] == PackageManager.PERMISSION_GRANTED) {
229 | setFragment();
230 | } else {
231 | requestPermission();
232 | }
233 | }
234 | }
235 |
236 | private boolean hasPermission() {
237 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
238 | return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED &&
239 | checkSelfPermission(PERMISSION_STORAGE) == PackageManager.PERMISSION_GRANTED;
240 | } else {
241 | return true;
242 | }
243 | }
244 |
245 | private void requestPermission() {
246 | if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
247 | if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA) ||
248 | shouldShowRequestPermissionRationale(PERMISSION_STORAGE)) {
249 | Toast.makeText(CameraActivity.this,
250 | "Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
251 | }
252 | requestPermissions(new String[]{PERMISSION_CAMERA, PERMISSION_STORAGE}, PERMISSIONS_REQUEST);
253 | }
254 | }
255 |
256 | // Returns true if the device supports the required hardware level, or better.
257 | private boolean isHardwareLevelSupported(
258 | CameraCharacteristics characteristics, int requiredLevel) {
259 | int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
260 | if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
261 | return requiredLevel == deviceLevel;
262 | }
263 | // deviceLevel is not LEGACY, can use numerical sort
264 | return requiredLevel <= deviceLevel;
265 | }
266 |
267 | private String chooseCamera() {
268 | final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
269 | try {
270 | for (final String cameraId : manager.getCameraIdList()) {
271 | final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
272 |
273 | // We don't use a front facing camera in this sample.
274 | final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
275 | if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
276 | continue;
277 | }
278 |
279 | final StreamConfigurationMap map =
280 | characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
281 |
282 | if (map == null) {
283 | continue;
284 | }
285 |
286 | boolean useCamera2API = isHardwareLevelSupported(characteristics,
287 | CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
288 | LOGGER.i("Camera API lv2?: %s", useCamera2API);
289 | return cameraId;
290 | }
291 | } catch (CameraAccessException e) {
292 | LOGGER.e(e, "Not allowed to access camera");
293 | }
294 |
295 | return null;
296 | }
297 |
298 | protected void setFragment() {
299 | String cameraId = chooseCamera();
300 |
301 | CameraConnectionFragment camera2Fragment =
302 | CameraConnectionFragment.newInstance(
303 | new CameraConnectionFragment.ConnectionCallback() {
304 | @Override
305 | public void onPreviewSizeChosen(final Size size, final int rotation) {
306 | previewHeight = size.getHeight();
307 | previewWidth = size.getWidth();
308 | CameraActivity.this.onPreviewSizeChosen(size, rotation);
309 | }
310 | },
311 | this,
312 | getLayoutId(),
313 | getDesiredPreviewFrameSize());
314 |
315 | camera2Fragment.setCamera(cameraId);
316 |
317 | getFragmentManager()
318 | .beginTransaction()
319 | .replace(R.id.container, camera2Fragment)
320 | .commit();
321 | }
322 |
323 | protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
324 | // Because of the variable row stride it's not possible to know in
325 | // advance the actual necessary dimensions of the yuv planes.
326 | for (int i = 0; i < planes.length; ++i) {
327 | final ByteBuffer buffer = planes[i].getBuffer();
328 | if (yuvBytes[i] == null) {
329 | LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
330 | yuvBytes[i] = new byte[buffer.capacity()];
331 | }
332 | buffer.get(yuvBytes[i]);
333 | }
334 | }
335 |
336 | protected void readyForNextImage() {
337 | if (postInferenceCallback != null) {
338 | postInferenceCallback.run();
339 | }
340 | }
341 |
342 | protected int getScreenOrientation() {
343 | switch (getWindowManager().getDefaultDisplay().getRotation()) {
344 | case Surface.ROTATION_270:
345 | return 270;
346 | case Surface.ROTATION_180:
347 | return 180;
348 | case Surface.ROTATION_90:
349 | return 90;
350 | default:
351 | return 0;
352 | }
353 | }
354 |
355 |
356 | private List<Classifier.Recognition> currentRecognitions;
357 |
358 | protected void toSpeech(List<Classifier.Recognition> recognitions) {
359 | if (recognitions.isEmpty() || textToSpeech.isSpeaking()) {
360 | currentRecognitions = Collections.emptyList();
361 | return;
362 | }
363 |
364 | if (currentRecognitions != null) {
365 |
366 | // Ignore if current and new are same.
367 | if (currentRecognitions.equals(recognitions)) {
368 | return;
369 | }
370 | final Set<Classifier.Recognition> intersection = new HashSet<>(recognitions);
371 | intersection.retainAll(currentRecognitions);
372 |
373 | // Ignore if new is sub set of the current
374 | if (intersection.equals(recognitions)) {
375 | return;
376 | }
377 | }
378 |
379 | currentRecognitions = recognitions;
380 |
381 | speak();
382 | }
383 |
384 | private void speak() {
385 |
386 | final double rightStart = previewWidth / 2 - 0.10 * previewWidth;
387 | final double rightFinish = previewWidth;
388 | final double leftStart = 0;
389 | final double leftFinish = previewWidth / 2 + 0.10 * previewWidth;
390 | final double previewArea = previewWidth * previewHeight;
391 |
392 | StringBuilder stringBuilder = new StringBuilder();
393 |
394 | for (int i = 0; i < currentRecognitions.size(); i++) {
395 | Classifier.Recognition recognition = currentRecognitions.get(i);
396 | stringBuilder.append(recognition.getTitle());
397 |
398 | float start = recognition.getLocation().top;
399 | float end = recognition.getLocation().bottom;
400 | double objArea = recognition.getLocation().width() * recognition.getLocation().height();
401 |
402 | if (objArea > previewArea / 2) {
403 | stringBuilder.append(" in front of you ");
404 | } else {
405 |
406 |
407 | if (start > leftStart && end < leftFinish) {
408 | stringBuilder.append(" on the left ");
409 | } else if (start > rightStart && end < rightFinish) {
410 | stringBuilder.append(" on the right ");
411 | } else {
412 | stringBuilder.append(" in front of you ");
413 | }
414 | }
415 |
416 | if (i + 1 < currentRecognitions.size()) {
417 | stringBuilder.append(" and ");
418 | }
419 | }
420 | stringBuilder.append(" detected.");
421 |
422 | textToSpeech.speak(stringBuilder.toString(), TextToSpeech.QUEUE_FLUSH, null);
423 | }
424 |
425 | protected abstract void processImage();
426 |
427 | protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
428 |
429 | protected abstract int getLayoutId();
430 |
431 | protected abstract Size getDesiredPreviewFrameSize();
432 | }
433 |
--------------------------------------------------------------------------------
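Note: a condensed, hypothetical sketch of the position logic in speak() above
(assumed example values; the real method first handles the case where the
object covers more than half of the preview area):

    // Same thresholds as speak(): a 10% overlap band around the midline of
    // the examined axis, so the "left" and "right" regions share the center.
    static String positionPhrase(float start, float end, int previewWidth) {
        double rightStart = previewWidth / 2.0 - 0.10 * previewWidth;
        double leftFinish = previewWidth / 2.0 + 0.10 * previewWidth;
        if (start > 0 && end < leftFinish) {
            return "on the left";
        } else if (start > rightStart && end < previewWidth) {
            return "on the right";
        }
        return "in front of you";
    }
    // With previewWidth = 640: leftFinish = 384, rightStart = 256, so
    // positionPhrase(20f, 300f, 640) returns "on the left".
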
/app/src/main/java/com/busradeniz/detection/CameraConnectionFragment.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.app.Activity;
5 | import android.app.AlertDialog;
6 | import android.app.Dialog;
7 | import android.app.DialogFragment;
8 | import android.app.Fragment;
9 | import android.content.Context;
10 | import android.content.DialogInterface;
11 | import android.content.res.Configuration;
12 | import android.graphics.ImageFormat;
13 | import android.graphics.Matrix;
14 | import android.graphics.RectF;
15 | import android.graphics.SurfaceTexture;
16 | import android.hardware.camera2.CameraAccessException;
17 | import android.hardware.camera2.CameraCaptureSession;
18 | import android.hardware.camera2.CameraCharacteristics;
19 | import android.hardware.camera2.CameraDevice;
20 | import android.hardware.camera2.CameraManager;
21 | import android.hardware.camera2.CaptureRequest;
22 | import android.hardware.camera2.CaptureResult;
23 | import android.hardware.camera2.TotalCaptureResult;
24 | import android.hardware.camera2.params.StreamConfigurationMap;
25 | import android.media.ImageReader;
26 | import android.media.ImageReader.OnImageAvailableListener;
27 | import android.os.Bundle;
28 | import android.os.Handler;
29 | import android.os.HandlerThread;
30 | import android.text.TextUtils;
31 | import android.util.Size;
32 | import android.util.SparseIntArray;
33 | import android.view.LayoutInflater;
34 | import android.view.Surface;
35 | import android.view.TextureView;
36 | import android.view.View;
37 | import android.view.ViewGroup;
38 | import android.widget.Toast;
39 |
40 | import com.busradeniz.detection.env.Logger;
41 |
42 | import java.util.ArrayList;
43 | import java.util.Arrays;
44 | import java.util.Collections;
45 | import java.util.Comparator;
46 | import java.util.List;
47 | import java.util.concurrent.Semaphore;
48 | import java.util.concurrent.TimeUnit;
49 |
50 | public class CameraConnectionFragment extends Fragment {
51 | private static final Logger LOGGER = new Logger();
52 |
53 | /**
54 | * The camera preview size will be chosen to be the smallest frame by pixel size capable of
55 | * containing a DESIRED_SIZE x DESIRED_SIZE square.
56 | */
57 | private static final int MINIMUM_PREVIEW_SIZE = 320;
58 |
59 | /**
60 | * Conversion from screen rotation to JPEG orientation.
61 | */
62 | private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
63 | private static final String FRAGMENT_DIALOG = "dialog";
64 |
65 | static {
66 | ORIENTATIONS.append(Surface.ROTATION_0, 90);
67 | ORIENTATIONS.append(Surface.ROTATION_90, 0);
68 | ORIENTATIONS.append(Surface.ROTATION_180, 270);
69 | ORIENTATIONS.append(Surface.ROTATION_270, 180);
70 | }
71 |
72 | /**
73 | * {@link android.view.TextureView.SurfaceTextureListener} handles several lifecycle events on a
74 | * {@link TextureView}.
75 | */
76 | private final TextureView.SurfaceTextureListener surfaceTextureListener =
77 | new TextureView.SurfaceTextureListener() {
78 | @Override
79 | public void onSurfaceTextureAvailable(
80 | final SurfaceTexture texture, final int width, final int height) {
81 | openCamera(width, height);
82 | }
83 |
84 | @Override
85 | public void onSurfaceTextureSizeChanged(
86 | final SurfaceTexture texture, final int width, final int height) {
87 | configureTransform(width, height);
88 | }
89 |
90 | @Override
91 | public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
92 | return true;
93 | }
94 |
95 | @Override
96 | public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
97 | };
98 |
99 | /**
100 | * Callback for Activities to use to initialize their data once the
101 | * selected preview size is known.
102 | */
103 | public interface ConnectionCallback {
104 | void onPreviewSizeChosen(Size size, int cameraRotation);
105 | }
106 |
107 | /**
108 | * ID of the current {@link CameraDevice}.
109 | */
110 | private String cameraId;
111 |
112 | /**
113 | * An {@link AutoFitTextureView} for camera preview.
114 | */
115 | private AutoFitTextureView textureView;
116 |
117 | /**
118 | * A {@link CameraCaptureSession } for camera preview.
119 | */
120 | private CameraCaptureSession captureSession;
121 |
122 | /**
123 | * A reference to the opened {@link CameraDevice}.
124 | */
125 | private CameraDevice cameraDevice;
126 |
127 | /**
128 | * The rotation in degrees of the camera sensor from the display.
129 | */
130 | private Integer sensorOrientation;
131 |
132 | /**
133 | * The {@link android.util.Size} of camera preview.
134 | */
135 | private Size previewSize;
136 |
137 | /**
138 | * {@link android.hardware.camera2.CameraDevice.StateCallback}
139 | * is called when {@link CameraDevice} changes its state.
140 | */
141 | private final CameraDevice.StateCallback stateCallback =
142 | new CameraDevice.StateCallback() {
143 | @Override
144 | public void onOpened(final CameraDevice cd) {
145 | // This method is called when the camera is opened. We start camera preview here.
146 | cameraOpenCloseLock.release();
147 | cameraDevice = cd;
148 | createCameraPreviewSession();
149 | }
150 |
151 | @Override
152 | public void onDisconnected(final CameraDevice cd) {
153 | cameraOpenCloseLock.release();
154 | cd.close();
155 | cameraDevice = null;
156 | }
157 |
158 | @Override
159 | public void onError(final CameraDevice cd, final int error) {
160 | cameraOpenCloseLock.release();
161 | cd.close();
162 | cameraDevice = null;
163 | final Activity activity = getActivity();
164 | if (null != activity) {
165 | activity.finish();
166 | }
167 | }
168 | };
169 |
170 | /**
171 | * An additional thread for running tasks that shouldn't block the UI.
172 | */
173 | private HandlerThread backgroundThread;
174 |
175 | /**
176 | * A {@link Handler} for running tasks in the background.
177 | */
178 | private Handler backgroundHandler;
179 |
180 | /**
181 | * An {@link ImageReader} that handles preview frame capture.
182 | */
183 | private ImageReader previewReader;
184 |
185 | /**
186 | * {@link android.hardware.camera2.CaptureRequest.Builder} for the camera preview
187 | */
188 | private CaptureRequest.Builder previewRequestBuilder;
189 |
190 | /**
191 | * {@link CaptureRequest} generated by {@link #previewRequestBuilder}
192 | */
193 | private CaptureRequest previewRequest;
194 |
195 | /**
196 | * A {@link Semaphore} to prevent the app from exiting before closing the camera.
197 | */
198 | private final Semaphore cameraOpenCloseLock = new Semaphore(1);
199 |
200 | /**
201 | * A {@link OnImageAvailableListener} to receive frames as they are available.
202 | */
203 | private final OnImageAvailableListener imageListener;
204 |
205 | /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
206 | private final Size inputSize;
207 |
208 | /**
209 | * The layout identifier to inflate for this Fragment.
210 | */
211 | private final int layout;
212 |
213 |
214 | private final ConnectionCallback cameraConnectionCallback;
215 |
216 | @SuppressLint("ValidFragment")
217 | private CameraConnectionFragment(
218 | final ConnectionCallback connectionCallback,
219 | final OnImageAvailableListener imageListener,
220 | final int layout,
221 | final Size inputSize) {
222 | this.cameraConnectionCallback = connectionCallback;
223 | this.imageListener = imageListener;
224 | this.layout = layout;
225 | this.inputSize = inputSize;
226 | }
227 |
228 | /**
229 | * Shows a {@link Toast} on the UI thread.
230 | *
231 | * @param text The message to show
232 | */
233 | private void showToast(final String text) {
234 | final Activity activity = getActivity();
235 | if (activity != null) {
236 | activity.runOnUiThread(
237 | new Runnable() {
238 | @Override
239 | public void run() {
240 | Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
241 | }
242 | });
243 | }
244 | }
245 |
246 | /**
247 | * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
248 | * width and height are at least as large as the minimum of both, or an exact match if possible.
249 | *
250 | * @param choices The list of sizes that the camera supports for the intended output class
251 | * @param width The minimum desired width
252 | * @param height The minimum desired height
253 | * @return The optimal {@code Size}, or an arbitrary one if none were big enough
254 | */
255 | protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
256 | final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
257 | final Size desiredSize = new Size(width, height);
258 |
259 | // Collect the supported resolutions that are at least as big as the preview Surface
260 | boolean exactSizeFound = false;
261 | final List<Size> bigEnough = new ArrayList<Size>();
262 | final List<Size> tooSmall = new ArrayList<Size>();
263 | for (final Size option : choices) {
264 | if (option.equals(desiredSize)) {
265 | // Set the size but don't return yet so that remaining sizes will still be logged.
266 | exactSizeFound = true;
267 | }
268 |
269 | if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
270 | bigEnough.add(option);
271 | } else {
272 | tooSmall.add(option);
273 | }
274 | }
275 |
276 | LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
277 | LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
278 | LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
279 |
280 | if (exactSizeFound) {
281 | LOGGER.i("Exact size match found.");
282 | return desiredSize;
283 | }
284 |
285 | // Pick the smallest of those, assuming we found any
286 | if (bigEnough.size() > 0) {
287 | final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
288 | LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
289 | return chosenSize;
290 | } else {
291 | LOGGER.e("Couldn't find any suitable preview size");
292 | return choices[0];
293 | }
294 | }
295 |
296 | public static CameraConnectionFragment newInstance(
297 | final ConnectionCallback callback,
298 | final OnImageAvailableListener imageListener,
299 | final int layout,
300 | final Size inputSize) {
301 | return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
302 | }
303 |
304 | @Override
305 | public View onCreateView(
306 | final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
307 | return inflater.inflate(layout, container, false);
308 | }
309 |
310 | @Override
311 | public void onViewCreated(final View view, final Bundle savedInstanceState) {
312 | textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
313 | }
314 |
315 | @Override
316 | public void onActivityCreated(final Bundle savedInstanceState) {
317 | super.onActivityCreated(savedInstanceState);
318 | }
319 |
320 | @Override
321 | public void onResume() {
322 | super.onResume();
323 | startBackgroundThread();
324 |
325 | // When the screen is turned off and turned back on, the SurfaceTexture is already
326 | // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
327 | // a camera and start preview from here (otherwise, we wait until the surface is ready in
328 | // the SurfaceTextureListener).
329 | if (textureView.isAvailable()) {
330 | openCamera(textureView.getWidth(), textureView.getHeight());
331 | } else {
332 | textureView.setSurfaceTextureListener(surfaceTextureListener);
333 | }
334 | }
335 |
336 | @Override
337 | public void onPause() {
338 | closeCamera();
339 | stopBackgroundThread();
340 | super.onPause();
341 | }
342 |
343 | public void setCamera(String cameraId) {
344 | this.cameraId = cameraId;
345 | }
346 |
347 | /**
348 | * Sets up member variables related to camera.
349 | */
350 | private void setUpCameraOutputs() {
351 | final Activity activity = getActivity();
352 | final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
353 | try {
354 | final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
355 |
356 | final StreamConfigurationMap map =
357 | characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
358 |
359 | // For still image captures, we use the largest available size.
360 | final Size largest =
361 | Collections.max(
362 | Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
363 | new CompareSizesByArea());
364 |
365 | sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
366 |
367 | // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
368 | // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
369 | // garbage capture data.
370 | previewSize =
371 | chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
372 | inputSize.getWidth(),
373 | inputSize.getHeight());
374 |
375 | // We fit the aspect ratio of TextureView to the size of preview we picked.
376 | final int orientation = getResources().getConfiguration().orientation;
377 | if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
378 | textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
379 | } else {
380 | textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
381 | }
382 | } catch (final CameraAccessException e) {
383 | LOGGER.e(e, "Exception!");
384 | } catch (final NullPointerException e) {
385 | // Currently an NPE is thrown when the Camera2API is used but not supported on the
386 | // device this code runs.
387 | // TODO(andrewharp): abstract ErrorDialog/RuntimeException handling out into new method and
388 | // reuse throughout app.
389 | ErrorDialog.newInstance(getString(R.string.camera_error))
390 | .show(getChildFragmentManager(), FRAGMENT_DIALOG);
391 | throw new RuntimeException(getString(R.string.camera_error));
392 | }
393 |
394 | cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
395 | }
396 |
397 | /**
398 | * Opens the camera specified by {@link CameraConnectionFragment#cameraId}.
399 | */
400 | private void openCamera(final int width, final int height) {
401 | setUpCameraOutputs();
402 | configureTransform(width, height);
403 | final Activity activity = getActivity();
404 | final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
405 | try {
406 | if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
407 | throw new RuntimeException("Time out waiting to lock camera opening.");
408 | }
409 | manager.openCamera(cameraId, stateCallback, backgroundHandler);
410 | } catch (final CameraAccessException e) {
411 | LOGGER.e(e, "Exception!");
412 | } catch (final InterruptedException e) {
413 | throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
414 | }
415 | }
416 |
417 | /**
418 | * Closes the current {@link CameraDevice}.
419 | */
420 | private void closeCamera() {
421 | try {
422 | cameraOpenCloseLock.acquire();
423 | if (null != captureSession) {
424 | captureSession.close();
425 | captureSession = null;
426 | }
427 | if (null != cameraDevice) {
428 | cameraDevice.close();
429 | cameraDevice = null;
430 | }
431 | if (null != previewReader) {
432 | previewReader.close();
433 | previewReader = null;
434 | }
435 | } catch (final InterruptedException e) {
436 | throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
437 | } finally {
438 | cameraOpenCloseLock.release();
439 | }
440 | }
441 |
442 | /**
443 | * Starts a background thread and its {@link Handler}.
444 | */
445 | private void startBackgroundThread() {
446 | backgroundThread = new HandlerThread("ImageListener");
447 | backgroundThread.start();
448 | backgroundHandler = new Handler(backgroundThread.getLooper());
449 | }
450 |
451 | /**
452 | * Stops the background thread and its {@link Handler}.
453 | */
454 | private void stopBackgroundThread() {
455 | backgroundThread.quitSafely();
456 | try {
457 | backgroundThread.join();
458 | backgroundThread = null;
459 | backgroundHandler = null;
460 | } catch (final InterruptedException e) {
461 | LOGGER.e(e, "Exception!");
462 | }
463 | }
464 |
465 | private final CameraCaptureSession.CaptureCallback captureCallback =
466 | new CameraCaptureSession.CaptureCallback() {
467 | @Override
468 | public void onCaptureProgressed(
469 | final CameraCaptureSession session,
470 | final CaptureRequest request,
471 | final CaptureResult partialResult) {}
472 |
473 | @Override
474 | public void onCaptureCompleted(
475 | final CameraCaptureSession session,
476 | final CaptureRequest request,
477 | final TotalCaptureResult result) {}
478 | };
479 |
480 | /**
481 | * Creates a new {@link CameraCaptureSession} for camera preview.
482 | */
483 | private void createCameraPreviewSession() {
484 | try {
485 | final SurfaceTexture texture = textureView.getSurfaceTexture();
486 | assert texture != null;
487 |
488 | // We configure the size of default buffer to be the size of camera preview we want.
489 | texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
490 |
491 | // This is the output Surface we need to start preview.
492 | final Surface surface = new Surface(texture);
493 |
494 | // We set up a CaptureRequest.Builder with the output Surface.
495 | previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
496 | previewRequestBuilder.addTarget(surface);
497 |
498 | LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
499 |
500 | // Create the reader for the preview frames.
501 | previewReader =
502 | ImageReader.newInstance(
503 | previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
504 |
505 | previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
506 | previewRequestBuilder.addTarget(previewReader.getSurface());
507 |
508 | // Here, we create a CameraCaptureSession for camera preview.
509 | cameraDevice.createCaptureSession(
510 | Arrays.asList(surface, previewReader.getSurface()),
511 | new CameraCaptureSession.StateCallback() {
512 |
513 | @Override
514 | public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
515 | // The camera is already closed
516 | if (null == cameraDevice) {
517 | return;
518 | }
519 |
520 | // When the session is ready, we start displaying the preview.
521 | captureSession = cameraCaptureSession;
522 | try {
523 | // Auto focus should be continuous for camera preview.
524 | previewRequestBuilder.set(
525 | CaptureRequest.CONTROL_AF_MODE,
526 | CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
527 | // Flash is automatically enabled when necessary.
528 | previewRequestBuilder.set(
529 | CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
530 |
531 | // Finally, we start displaying the camera preview.
532 | previewRequest = previewRequestBuilder.build();
533 | captureSession.setRepeatingRequest(
534 | previewRequest, captureCallback, backgroundHandler);
535 | } catch (final CameraAccessException e) {
536 | LOGGER.e(e, "Exception!");
537 | }
538 | }
539 |
540 | @Override
541 | public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
542 | showToast("Failed");
543 | }
544 | },
545 | null);
546 | } catch (final CameraAccessException e) {
547 | LOGGER.e(e, "Exception!");
548 | }
549 | }
550 |
551 | /**
552 | * Configures the necessary {@link android.graphics.Matrix} transformation to `textureView`.
553 | * This method should be called after the camera preview size is determined in
554 | * setUpCameraOutputs and also the size of `textureView` is fixed.
555 | *
556 | * @param viewWidth The width of `textureView`
557 | * @param viewHeight The height of `textureView`
558 | */
559 | private void configureTransform(final int viewWidth, final int viewHeight) {
560 | final Activity activity = getActivity();
561 | if (null == textureView || null == previewSize || null == activity) {
562 | return;
563 | }
564 | final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
565 | final Matrix matrix = new Matrix();
566 | final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
567 | final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
568 | final float centerX = viewRect.centerX();
569 | final float centerY = viewRect.centerY();
570 | if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
571 | bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
572 | matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
573 | final float scale =
574 | Math.max(
575 | (float) viewHeight / previewSize.getHeight(),
576 | (float) viewWidth / previewSize.getWidth());
577 | matrix.postScale(scale, scale, centerX, centerY);
578 | matrix.postRotate(90 * (rotation - 2), centerX, centerY);
579 | } else if (Surface.ROTATION_180 == rotation) {
580 | matrix.postRotate(180, centerX, centerY);
581 | }
582 | textureView.setTransform(matrix);
583 | }
584 |
585 | /**
586 | * Compares two {@code Size}s based on their areas.
587 | */
588 | static class CompareSizesByArea implements Comparator<Size> {
589 | @Override
590 | public int compare(final Size lhs, final Size rhs) {
591 | // We cast here to ensure the multiplications won't overflow
592 | return Long.signum(
593 | (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
594 | }
595 | }
596 |
597 | /**
598 | * Shows an error message dialog.
599 | */
600 | public static class ErrorDialog extends DialogFragment {
601 | private static final String ARG_MESSAGE = "message";
602 |
603 | public static ErrorDialog newInstance(final String message) {
604 | final ErrorDialog dialog = new ErrorDialog();
605 | final Bundle args = new Bundle();
606 | args.putString(ARG_MESSAGE, message);
607 | dialog.setArguments(args);
608 | return dialog;
609 | }
610 |
611 | @Override
612 | public Dialog onCreateDialog(final Bundle savedInstanceState) {
613 | final Activity activity = getActivity();
614 | return new AlertDialog.Builder(activity)
615 | .setMessage(getArguments().getString(ARG_MESSAGE))
616 | .setPositiveButton(
617 | android.R.string.ok,
618 | new DialogInterface.OnClickListener() {
619 | @Override
620 | public void onClick(final DialogInterface dialogInterface, final int i) {
621 | activity.finish();
622 | }
623 | })
624 | .create();
625 | }
626 | }
627 | }
628 |
--------------------------------------------------------------------------------
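Note: a hedged example of what chooseOptimalSize() above returns for made-up
camera output sizes (the 300x300 request matches the detector's input size):

    // minSize = max(min(300, 300), MINIMUM_PREVIEW_SIZE) = 320, so any
    // option smaller than 320 in either dimension is rejected, and the
    // smallest remaining size by area wins.
    Size[] choices = {
        new Size(176, 144),    // rejected: below the 320px floor
        new Size(1280, 720),   // big enough, area 921600
        new Size(640, 480)     // big enough, area 307200 -> chosen
    };
    Size chosen = CameraConnectionFragment.chooseOptimalSize(choices, 300, 300);
    // chosen == 640x480, the smallest size that still covers the minimum.
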
/app/src/main/java/com/busradeniz/detection/Classifier.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.RectF;
5 |
6 | import java.util.List;
7 | import java.util.Objects;
8 |
9 | /**
10 | * Generic interface for interacting with different recognition engines.
11 | */
12 | public interface Classifier {
13 | /**
14 | * An immutable result returned by a Classifier describing what was recognized.
15 | */
16 | public class Recognition {
17 | /**
18 | * A unique identifier for what has been recognized. Specific to the class, not the instance of
19 | * the object.
20 | */
21 | private final String id;
22 |
23 | /**
24 | * Display name for the recognition.
25 | */
26 | private final String title;
27 |
28 | /**
29 | * A sortable score for how good the recognition is relative to others. Higher should be better.
30 | */
31 | private final Float confidence;
32 |
33 | /** Optional location within the source image for the location of the recognized object. */
34 | private RectF location;
35 |
36 | public Recognition(
37 | final String id, final String title, final Float confidence, final RectF location) {
38 | this.id = id;
39 | this.title = title;
40 | this.confidence = confidence;
41 | this.location = location;
42 | }
43 |
44 | public String getId() {
45 | return id;
46 | }
47 |
48 | public String getTitle() {
49 | return title;
50 | }
51 |
52 | public Float getConfidence() {
53 | return confidence;
54 | }
55 |
56 | public RectF getLocation() {
57 | return new RectF(location);
58 | }
59 |
60 | public void setLocation(RectF location) {
61 | this.location = location;
62 | }
63 |
64 | @Override
65 | public String toString() {
66 | String resultString = "";
67 | if (id != null) {
68 | resultString += "[" + id + "] ";
69 | }
70 |
71 | if (title != null) {
72 | resultString += title + " ";
73 | }
74 |
75 | if (confidence != null) {
76 | resultString += String.format("(%.1f%%) ", confidence * 100.0f);
77 | }
78 |
79 | if (location != null) {
80 | resultString += location + " ";
81 | }
82 |
83 | return resultString.trim();
84 | }
85 |
86 | @Override
87 | public boolean equals(Object o) {
88 | if (this == o) return true;
89 | if (o == null || getClass() != o.getClass()) return false;
90 | Recognition that = (Recognition) o;
91 | return Objects.equals(title, that.title);
92 | }
93 |
94 | @Override
95 | public int hashCode() {
96 | return Objects.hash(title);
97 | }
98 | }
99 |
100 | List<Recognition> recognizeImage(Bitmap bitmap);
101 |
102 | void enableStatLogging(final boolean debug);
103 |
104 | String getStatString();
105 |
106 | void close();
107 | }
108 |
--------------------------------------------------------------------------------
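Note: a hypothetical sketch of how a caller consumes this interface (the
`detector` and `bitmap` values are assumed; the 0.6f threshold mirrors
DetectorActivity):

    // Run one frame through a Classifier and keep only confident results.
    List<Classifier.Recognition> results = detector.recognizeImage(bitmap);
    for (Classifier.Recognition r : results) {
        if (r.getLocation() != null && r.getConfidence() >= 0.6f) {
            RectF location = r.getLocation();  // defensive copy, safe to mutate
            // ... map back to preview coordinates, track, announce
        }
    }
    detector.close();  // release the native session when finished
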
/app/src/main/java/com/busradeniz/detection/DetectorActivity.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.graphics.Bitmap;
4 | import android.graphics.Bitmap.Config;
5 | import android.graphics.Canvas;
6 | import android.graphics.Color;
7 | import android.graphics.Matrix;
8 | import android.graphics.Paint;
9 | import android.graphics.Paint.Style;
10 | import android.graphics.RectF;
11 | import android.graphics.Typeface;
12 | import android.media.ImageReader.OnImageAvailableListener;
13 | import android.util.Size;
14 | import android.util.TypedValue;
15 | import android.widget.Toast;
16 |
17 | import com.busradeniz.detection.env.BorderedText;
18 | import com.busradeniz.detection.env.ImageUtils;
19 | import com.busradeniz.detection.env.Logger;
20 | import com.busradeniz.detection.tracking.MultiBoxTracker;
21 |
22 | import java.io.IOException;
23 | import java.util.LinkedList;
24 | import java.util.List;
25 |
26 |
27 | public class DetectorActivity extends CameraActivity implements OnImageAvailableListener {
28 | private static final Logger LOGGER = new Logger();
29 |
30 | private static final int TF_OD_API_INPUT_SIZE = 300;
31 | private static final String TF_OD_API_MODEL_FILE =
32 | "file:///android_asset/ssd_mobilenet_v1_android_export.pb";
33 | private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/coco_labels_list.txt";
34 |
35 | // Minimum detection confidence to track a detection.
36 | private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.6f;
37 |
38 | private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
39 |
40 | private static final float TEXT_SIZE_DIP = 10;
41 |
42 | private Integer sensorOrientation;
43 |
44 | private Classifier detector;
45 |
46 | private Bitmap rgbFrameBitmap = null;
47 | private Bitmap croppedBitmap = null;
48 | private Bitmap cropCopyBitmap = null;
49 |
50 | private boolean computingDetection = false;
51 |
52 | private long timestamp = 0;
53 |
54 | private Matrix frameToCropTransform;
55 | private Matrix cropToFrameTransform;
56 |
57 | private MultiBoxTracker tracker;
58 |
59 | private byte[] luminanceCopy;
60 |
61 | private BorderedText borderedText;
62 |
63 | @Override
64 | public void onPreviewSizeChosen(final Size size, final int rotation) {
65 | final float textSizePx =
66 | TypedValue.applyDimension(
67 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
68 | borderedText = new BorderedText(textSizePx);
69 | borderedText.setTypeface(Typeface.MONOSPACE);
70 |
71 | tracker = new MultiBoxTracker(this);
72 |
73 | int cropSize = TF_OD_API_INPUT_SIZE;
74 |
75 | try {
76 | detector = TensorFlowObjectDetectionAPIModel.create(
77 | getAssets(), TF_OD_API_MODEL_FILE, TF_OD_API_LABELS_FILE, TF_OD_API_INPUT_SIZE);
78 | cropSize = TF_OD_API_INPUT_SIZE;
79 | } catch (final IOException e) {
80 | LOGGER.e("Exception initializing classifier!", e);
81 | Toast toast =
82 | Toast.makeText(
83 | getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
84 | toast.show();
85 | finish();
86 | }
87 |
88 | previewWidth = size.getWidth();
89 | previewHeight = size.getHeight();
90 |
91 | sensorOrientation = rotation - getScreenOrientation();
92 | LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
93 |
94 | LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
95 | rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
96 | croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
97 |
98 | frameToCropTransform =
99 | ImageUtils.getTransformationMatrix(
100 | previewWidth, previewHeight,
101 | cropSize, cropSize,
102 | sensorOrientation, false);
103 |
104 | cropToFrameTransform = new Matrix();
105 | frameToCropTransform.invert(cropToFrameTransform);
106 |
107 | trackingOverlay = findViewById(R.id.tracking_overlay);
108 | trackingOverlay.addCallback(
109 | new OverlayView.DrawCallback() {
110 | @Override
111 | public void drawCallback(final Canvas canvas) {
112 | tracker.draw(canvas);
113 | }
114 | });
115 | }
116 |
117 | OverlayView trackingOverlay;
118 |
119 | @Override
120 | protected void processImage() {
121 | ++timestamp;
122 | final long currTimestamp = timestamp;
123 | byte[] originalLuminance = getLuminance();
124 | tracker.onFrame(
125 | previewWidth,
126 | previewHeight,
127 | sensorOrientation);
128 | trackingOverlay.postInvalidate();
129 |
130 | // No mutex needed as this method is not reentrant.
131 | if (computingDetection) {
132 | readyForNextImage();
133 | return;
134 | }
135 | computingDetection = true;
136 | LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
137 |
138 | rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
139 |
140 | if (luminanceCopy == null) {
141 | luminanceCopy = new byte[originalLuminance.length];
142 | }
143 | System.arraycopy(originalLuminance, 0, luminanceCopy, 0, originalLuminance.length);
144 | readyForNextImage();
145 |
146 | final Canvas canvas = new Canvas(croppedBitmap);
147 | canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
148 |
149 | runInBackground(
150 | new Runnable() {
151 | @Override
152 | public void run() {
153 | LOGGER.i("Running detection on image " + currTimestamp);
154 | final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
155 |
156 | cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
157 | final Canvas canvas = new Canvas(cropCopyBitmap);
158 | final Paint paint = new Paint();
159 | paint.setColor(Color.RED);
160 | paint.setStyle(Style.STROKE);
161 | paint.setStrokeWidth(2.0f);
162 |
163 | final List<Classifier.Recognition> mappedRecognitions =
164 | new LinkedList<>();
165 |
166 | for (final Classifier.Recognition result : results) {
167 | final RectF location = result.getLocation();
168 | if (location != null && result.getConfidence() >= MINIMUM_CONFIDENCE_TF_OD_API) {
169 | LOGGER.i("Title: " + result.getTitle());
170 | canvas.drawRect(location, paint);
171 |
172 | cropToFrameTransform.mapRect(location);
173 | result.setLocation(location);
174 | mappedRecognitions.add(result);
175 | }
176 | }
177 |
178 | tracker.trackResults(mappedRecognitions);
179 | toSpeech(mappedRecognitions);
180 | trackingOverlay.postInvalidate();
181 |
182 | computingDetection = false;
183 | }
184 | });
185 | }
186 |
187 | @Override
188 | protected int getLayoutId() {
189 | return R.layout.camera_connection_fragment_tracking;
190 | }
191 |
192 | @Override
193 | protected Size getDesiredPreviewFrameSize() {
194 | return DESIRED_PREVIEW_SIZE;
195 | }
196 |
197 | }
198 |
--------------------------------------------------------------------------------
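Note: frameToCropTransform and cropToFrameTransform above are inverses; a
hedged sketch of the round trip (the signature of getTransformationMatrix is
taken from the call in onPreviewSizeChosen, and 90 is an assumed rotation):

    // Map a detection box from the 300x300 model crop back into 640x480
    // preview coordinates, as processImage() does before tracking.
    Matrix frameToCrop = ImageUtils.getTransformationMatrix(
        640, 480,   // preview size
        300, 300,   // model input size
        90,         // sensorOrientation (assumed portrait device)
        false);     // maintainAspectRatio
    Matrix cropToFrame = new Matrix();
    frameToCrop.invert(cropToFrame);

    RectF location = new RectF(30, 30, 150, 150);  // box in crop coordinates
    cropToFrame.mapRect(location);                 // now in preview coordinates
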
/app/src/main/java/com/busradeniz/detection/OverlayView.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.content.Context;
4 | import android.graphics.Canvas;
5 | import android.util.AttributeSet;
6 | import android.view.View;
7 |
8 | import java.util.LinkedList;
9 | import java.util.List;
10 |
11 | /**
12 | * A simple View providing a render callback to other classes.
13 | */
14 | public class OverlayView extends View {
15 | private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();
16 |
17 | public OverlayView(final Context context, final AttributeSet attrs) {
18 | super(context, attrs);
19 | }
20 |
21 | /**
22 | * Interface defining the callback for client classes.
23 | */
24 | public interface DrawCallback {
25 | public void drawCallback(final Canvas canvas);
26 | }
27 |
28 | public void addCallback(final DrawCallback callback) {
29 | callbacks.add(callback);
30 | }
31 |
32 | @Override
33 | public synchronized void draw(final Canvas canvas) {
34 | for (final DrawCallback callback : callbacks) {
35 | callback.drawCallback(canvas);
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/app/src/main/java/com/busradeniz/detection/TensorFlowObjectDetectionAPIModel.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import android.content.res.AssetManager;
4 | import android.graphics.Bitmap;
5 | import android.graphics.RectF;
6 | import android.os.Trace;
7 |
8 | import com.busradeniz.detection.env.Logger;
9 |
10 | import org.tensorflow.Graph;
11 | import org.tensorflow.Operation;
12 | import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
13 |
14 | import java.io.BufferedReader;
15 | import java.io.IOException;
16 | import java.io.InputStream;
17 | import java.io.InputStreamReader;
18 | import java.util.ArrayList;
19 | import java.util.Comparator;
20 | import java.util.List;
21 | import java.util.PriorityQueue;
22 | import java.util.Vector;
23 |
24 | /**
25 | * Wrapper for frozen detection models trained using the Tensorflow Object Detection API:
26 | * github.com/tensorflow/models/tree/master/research/object_detection
27 | */
28 | public class TensorFlowObjectDetectionAPIModel implements Classifier {
29 | private static final Logger LOGGER = new Logger();
30 |
31 | // Only return this many results.
32 | private static final int MAX_RESULTS = 100;
33 |
34 | // Config values.
35 | private String inputName;
36 | private int inputSize;
37 |
38 | // Pre-allocated buffers.
39 | private Vector<String> labels = new Vector<String>();
40 | private int[] intValues;
41 | private byte[] byteValues;
42 | private float[] outputLocations;
43 | private float[] outputScores;
44 | private float[] outputClasses;
45 | private float[] outputNumDetections;
46 | private String[] outputNames;
47 |
48 | private boolean logStats = false;
49 |
50 | private TensorFlowInferenceInterface inferenceInterface;
51 |
52 | /**
53 | * Initializes a native TensorFlow session for classifying images.
54 | *
55 | * @param assetManager The asset manager to be used to load assets.
56 | * @param modelFilename The filepath of the model GraphDef protocol buffer.
57 | * @param labelFilename The filepath of label file for classes.
58 | */
59 | public static Classifier create(
60 | final AssetManager assetManager,
61 | final String modelFilename,
62 | final String labelFilename,
63 | final int inputSize) throws IOException {
64 | final TensorFlowObjectDetectionAPIModel d = new TensorFlowObjectDetectionAPIModel();
65 |
66 | InputStream labelsInput = null;
67 | String actualFilename = labelFilename.split("file:///android_asset/")[1];
68 | labelsInput = assetManager.open(actualFilename);
69 | BufferedReader br = null;
70 | br = new BufferedReader(new InputStreamReader(labelsInput));
71 | String line;
72 | while ((line = br.readLine()) != null) {
73 | LOGGER.w(line);
74 | d.labels.add(line);
75 | }
76 | br.close();
77 |
78 |
79 | d.inferenceInterface = new TensorFlowInferenceInterface(assetManager, modelFilename);
80 |
81 | final Graph g = d.inferenceInterface.graph();
82 |
83 | d.inputName = "image_tensor";
84 | // The inputName node has a shape of [N, H, W, C], where
85 | // N is the batch size
86 | // H = W are the height and width
87 | // C is the number of channels (3 for our purposes - RGB)
88 | final Operation inputOp = g.operation(d.inputName);
89 | if (inputOp == null) {
90 | throw new RuntimeException("Failed to find input Node '" + d.inputName + "'");
91 | }
92 | d.inputSize = inputSize;
93 | // The outputScoresName node has a shape of [N, NumLocations], where N
94 | // is the batch size.
95 | final Operation outputOp1 = g.operation("detection_scores");
96 | if (outputOp1 == null) {
97 | throw new RuntimeException("Failed to find output Node 'detection_scores'");
98 | }
99 | final Operation outputOp2 = g.operation("detection_boxes");
100 | if (outputOp2 == null) {
101 | throw new RuntimeException("Failed to find output Node 'detection_boxes'");
102 | }
103 | final Operation outputOp3 = g.operation("detection_classes");
104 | if (outputOp3 == null) {
105 | throw new RuntimeException("Failed to find output Node 'detection_classes'");
106 | }
107 |
108 | // Pre-allocate buffers.
109 | d.outputNames = new String[] {"detection_boxes", "detection_scores",
110 | "detection_classes", "num_detections"};
111 | d.intValues = new int[d.inputSize * d.inputSize];
112 | d.byteValues = new byte[d.inputSize * d.inputSize * 3];
113 | d.outputScores = new float[MAX_RESULTS];
114 | d.outputLocations = new float[MAX_RESULTS * 4];
115 | d.outputClasses = new float[MAX_RESULTS];
116 | d.outputNumDetections = new float[1];
117 | return d;
118 | }
119 |
120 | private TensorFlowObjectDetectionAPIModel() {}
121 |
122 | @Override
123 | public List<Recognition> recognizeImage(final Bitmap bitmap) {
124 | // Log this method so that it can be analyzed with systrace.
125 | Trace.beginSection("recognizeImage");
126 |
127 | Trace.beginSection("preprocessBitmap");
128 | // Preprocess the image data from 0-255 int to normalized float based
129 | // on the provided parameters.
130 | bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
131 |
132 | for (int i = 0; i < intValues.length; ++i) {
133 | byteValues[i * 3 + 2] = (byte) (intValues[i] & 0xFF);
134 | byteValues[i * 3 + 1] = (byte) ((intValues[i] >> 8) & 0xFF);
135 | byteValues[i * 3 + 0] = (byte) ((intValues[i] >> 16) & 0xFF);
136 | }
137 | Trace.endSection(); // preprocessBitmap
138 |
139 | // Copy the input data into TensorFlow.
140 | Trace.beginSection("feed");
141 | inferenceInterface.feed(inputName, byteValues, 1, inputSize, inputSize, 3);
142 | Trace.endSection();
143 |
144 | // Run the inference call.
145 | Trace.beginSection("run");
146 | inferenceInterface.run(outputNames, logStats);
147 | Trace.endSection();
148 |
149 | // Copy the output Tensor back into the output array.
150 | Trace.beginSection("fetch");
151 | outputLocations = new float[MAX_RESULTS * 4];
152 | outputScores = new float[MAX_RESULTS];
153 | outputClasses = new float[MAX_RESULTS];
154 | outputNumDetections = new float[1];
155 | inferenceInterface.fetch(outputNames[0], outputLocations);
156 | inferenceInterface.fetch(outputNames[1], outputScores);
157 | inferenceInterface.fetch(outputNames[2], outputClasses);
158 | inferenceInterface.fetch(outputNames[3], outputNumDetections);
159 | Trace.endSection();
160 |
161 | // Find the best detections.
162 | final PriorityQueue<Recognition> pq =
163 | new PriorityQueue<Recognition>(
164 | 1,
165 | new Comparator<Recognition>() {
166 | @Override
167 | public int compare(final Recognition lhs, final Recognition rhs) {
168 | // Intentionally reversed to put high confidence at the head of the queue.
169 | return Float.compare(rhs.getConfidence(), lhs.getConfidence());
170 | }
171 | });
172 |
173 | // Scale them back to the input size.
174 | for (int i = 0; i < outputScores.length; ++i) {
175 | final RectF detection =
176 | new RectF(
177 | outputLocations[4 * i + 1] * inputSize,
178 | outputLocations[4 * i] * inputSize,
179 | outputLocations[4 * i + 3] * inputSize,
180 | outputLocations[4 * i + 2] * inputSize);
181 | pq.add(
182 | new Recognition("" + i, labels.get((int) outputClasses[i]), outputScores[i], detection));
183 | }
184 |
185 | final ArrayList<Recognition> recognitions = new ArrayList<Recognition>();
186 | for (int i = 0; i < Math.min(pq.size(), MAX_RESULTS); ++i) {
187 | recognitions.add(pq.poll());
188 | }
189 | Trace.endSection(); // "recognizeImage"
190 | return recognitions;
191 | }
192 |
193 | @Override
194 | public void enableStatLogging(final boolean logStats) {
195 | this.logStats = logStats;
196 | }
197 |
198 | @Override
199 | public String getStatString() {
200 | return inferenceInterface.getStatString();
201 | }
202 |
203 | @Override
204 | public void close() {
205 | inferenceInterface.close();
206 | }
207 | }
208 |
--------------------------------------------------------------------------------
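Two conventions in recognizeImage() above are easy to trip over: the model's image_tensor input takes packed R,G,B bytes unpacked from ARGB_8888 ints, and detection_boxes come back as [ymin, xmin, ymax, xmax] normalized to [0, 1], which is why the RectF is built from indices 1, 0, 3, 2 and scaled by inputSize. A standalone sketch of both conversions (plain Java; the sample values are illustrative):

public class DetectionIoDemo {
    public static void main(String[] args) {
        // 1) ARGB_8888 int -> packed RGB bytes, as in the preprocessBitmap loop.
        int[] intValues = {0xFF336699}; // one opaque pixel: R=0x33, G=0x66, B=0x99
        byte[] byteValues = new byte[intValues.length * 3];
        for (int i = 0; i < intValues.length; ++i) {
            byteValues[i * 3 + 0] = (byte) ((intValues[i] >> 16) & 0xFF); // R
            byteValues[i * 3 + 1] = (byte) ((intValues[i] >> 8) & 0xFF);  // G
            byteValues[i * 3 + 2] = (byte) (intValues[i] & 0xFF);         // B
        }
        System.out.printf("R=%02x G=%02x B=%02x%n",
                byteValues[0], byteValues[1], byteValues[2]); // R=33 G=66 B=99

        // 2) Normalized [ymin, xmin, ymax, xmax] box -> pixel box, as in the fetch loop.
        float[] box = {0.25f, 0.10f, 0.75f, 0.60f}; // illustrative model output
        int inputSize = 300;
        float left = box[1] * inputSize, top = box[0] * inputSize;
        float right = box[3] * inputSize, bottom = box[2] * inputSize;
        System.out.printf("left=%.0f top=%.0f right=%.0f bottom=%.0f%n",
                left, top, right, bottom); // left=30 top=75 right=180 bottom=225
    }
}
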
/app/src/main/java/com/busradeniz/detection/env/BorderedText.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection.env;
2 |
3 | import android.graphics.Canvas;
4 | import android.graphics.Color;
5 | import android.graphics.Paint;
6 | import android.graphics.Paint.Align;
7 | import android.graphics.Paint.Style;
8 | import android.graphics.Rect;
9 | import android.graphics.Typeface;
10 |
11 | import java.util.Vector;
12 |
13 | /**
14 | * A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas.
15 | */
16 | public class BorderedText {
17 | private final Paint interiorPaint;
18 | private final Paint exteriorPaint;
19 |
20 | private final float textSize;
21 |
22 | /**
23 | * Creates a left-aligned bordered text object with a white interior and a black exterior,
24 | * at the specified text size.
25 | *
26 | * @param textSize text size in pixels
27 | */
28 | public BorderedText(final float textSize) {
29 | this(Color.WHITE, Color.BLACK, textSize);
30 | }
31 |
32 | /**
33 | * Create a bordered text object with the specified interior and exterior colors, text size and
34 | * alignment.
35 | *
36 | * @param interiorColor the interior text color
37 | * @param exteriorColor the exterior text color
38 | * @param textSize text size in pixels
39 | */
40 | public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
41 | interiorPaint = new Paint();
42 | interiorPaint.setTextSize(textSize);
43 | interiorPaint.setColor(interiorColor);
44 | interiorPaint.setStyle(Style.FILL);
45 | interiorPaint.setAntiAlias(false);
46 | interiorPaint.setAlpha(255);
47 |
48 | exteriorPaint = new Paint();
49 | exteriorPaint.setTextSize(textSize);
50 | exteriorPaint.setColor(exteriorColor);
51 | exteriorPaint.setStyle(Style.FILL_AND_STROKE);
52 | exteriorPaint.setStrokeWidth(textSize / 8);
53 | exteriorPaint.setAntiAlias(false);
54 | exteriorPaint.setAlpha(255);
55 |
56 | this.textSize = textSize;
57 | }
58 |
59 | public void setTypeface(Typeface typeface) {
60 | interiorPaint.setTypeface(typeface);
61 | exteriorPaint.setTypeface(typeface);
62 | }
63 |
64 | public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
65 | canvas.drawText(text, posX, posY, exteriorPaint);
66 | canvas.drawText(text, posX, posY, interiorPaint);
67 | }
68 |
69 | }
70 |
--------------------------------------------------------------------------------
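BorderedText stays legible over arbitrary camera frames because drawText() paints the same string twice: the stroked exterior paint first, then the filled interior on top. A minimal sketch of wiring it into a custom View (Android code; the view class, coordinates, and label are illustrative, and the 18dp size mirrors MultiBoxTracker's TEXT_SIZE_DIP):

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Typeface;
import android.util.TypedValue;
import android.view.View;

import com.busradeniz.detection.env.BorderedText;

public class LabelDemoView extends View {
    private final BorderedText borderedText;

    public LabelDemoView(Context context) {
        super(context);
        // Convert 18dp to pixels, as MultiBoxTracker does for its labels.
        float textSizePx = TypedValue.applyDimension(
                TypedValue.COMPLEX_UNIT_DIP, 18, context.getResources().getDisplayMetrics());
        borderedText = new BorderedText(textSizePx); // white interior, black exterior
        borderedText.setTypeface(Typeface.MONOSPACE);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Black outline drawn first, white fill second, so the label reads on any background.
        borderedText.drawText(canvas, 16f, 48f, "person 0.87");
    }
}
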
/app/src/main/java/com/busradeniz/detection/env/ImageUtils.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection.env;
2 |
3 | import android.graphics.Matrix;
4 |
5 | /**
6 | * Utility class for manipulating images.
7 | **/
8 | public class ImageUtils {
9 | @SuppressWarnings("unused")
10 | private static final Logger LOGGER = new Logger();
11 |
12 | // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
13 | // are normalized to eight bits.
14 | static final int kMaxChannelValue = 262143;
15 |
16 | public static void convertYUV420SPToARGB8888(
17 | byte[] input,
18 | int width,
19 | int height,
20 | int[] output) {
21 |
22 | // Java implementation of YUV420SP-to-ARGB8888 conversion.
23 | final int frameSize = width * height;
24 | for (int j = 0, yp = 0; j < height; j++) {
25 | int uvp = frameSize + (j >> 1) * width;
26 | int u = 0;
27 | int v = 0;
28 |
29 | for (int i = 0; i < width; i++, yp++) {
30 | int y = 0xff & input[yp];
31 | if ((i & 1) == 0) {
32 | v = 0xff & input[uvp++];
33 | u = 0xff & input[uvp++];
34 | }
35 |
36 | output[yp] = YUV2RGB(y, u, v);
37 | }
38 | }
39 | }
40 |
41 | private static int YUV2RGB(int y, int u, int v) {
42 | // Adjust and check YUV values
43 | y = (y - 16) < 0 ? 0 : (y - 16);
44 | u -= 128;
45 | v -= 128;
46 |
47 | // This is the floating point equivalent. We do the conversion in integer
48 | // because some Android devices do not have floating point in hardware.
49 | // nR = (int)(1.164 * nY + 1.596 * nV);
50 | // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
51 | // nB = (int)(1.164 * nY + 2.018 * nU);
52 | int y1192 = 1192 * y;
53 | int r = (y1192 + 1634 * v);
54 | int g = (y1192 - 833 * v - 400 * u);
55 | int b = (y1192 + 2066 * u);
56 |
57 | // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
58 | r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
59 | g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
60 | b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
61 |
62 | return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
63 | }
64 |
65 |
66 | public static void convertYUV420ToARGB8888(
67 | byte[] yData,
68 | byte[] uData,
69 | byte[] vData,
70 | int width,
71 | int height,
72 | int yRowStride,
73 | int uvRowStride,
74 | int uvPixelStride,
75 | int[] out) {
76 | int yp = 0;
77 | for (int j = 0; j < height; j++) {
78 | int pY = yRowStride * j;
79 | int pUV = uvRowStride * (j >> 1);
80 |
81 | for (int i = 0; i < width; i++) {
82 | int uv_offset = pUV + (i >> 1) * uvPixelStride;
83 |
84 | out[yp++] = YUV2RGB(
85 | 0xff & yData[pY + i],
86 | 0xff & uData[uv_offset],
87 | 0xff & vData[uv_offset]);
88 | }
89 | }
90 | }
91 |
92 | /**
93 | * Returns a transformation matrix from one reference frame into another.
94 | * Handles cropping (if maintaining aspect ratio is desired) and rotation.
95 | *
96 | * @param srcWidth Width of source frame.
97 | * @param srcHeight Height of source frame.
98 | * @param dstWidth Width of destination frame.
99 | * @param dstHeight Height of destination frame.
100 | * @param applyRotation Amount of rotation to apply from one frame to another.
101 | * Must be a multiple of 90.
102 | * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
103 | * cropping the image if necessary.
104 | * @return The transformation fulfilling the desired requirements.
105 | */
106 | public static Matrix getTransformationMatrix(
107 | final int srcWidth,
108 | final int srcHeight,
109 | final int dstWidth,
110 | final int dstHeight,
111 | final int applyRotation,
112 | final boolean maintainAspectRatio) {
113 | final Matrix matrix = new Matrix();
114 |
115 | if (applyRotation != 0) {
116 | if (applyRotation % 90 != 0) {
117 | LOGGER.w("Rotation of %d mod 90 != 0", applyRotation);
118 | }
119 |
120 | // Translate so center of image is at origin.
121 | matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
122 |
123 | // Rotate around origin.
124 | matrix.postRotate(applyRotation);
125 | }
126 |
127 | // Account for the already applied rotation, if any, and then determine how
128 | // much scaling is needed for each axis.
129 | final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
130 |
131 | final int inWidth = transpose ? srcHeight : srcWidth;
132 | final int inHeight = transpose ? srcWidth : srcHeight;
133 |
134 | // Apply scaling if necessary.
135 | if (inWidth != dstWidth || inHeight != dstHeight) {
136 | final float scaleFactorX = dstWidth / (float) inWidth;
137 | final float scaleFactorY = dstHeight / (float) inHeight;
138 |
139 | if (maintainAspectRatio) {
140 | // Scale by the larger factor so that dst is filled completely while
141 | // maintaining the aspect ratio. Some image may fall off the edge.
142 | final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
143 | matrix.postScale(scaleFactor, scaleFactor);
144 | } else {
145 | // Scale exactly to fill dst from src.
146 | matrix.postScale(scaleFactorX, scaleFactorY);
147 | }
148 | }
149 |
150 | if (applyRotation != 0) {
151 | // Translate back from origin centered reference to destination frame.
152 | matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
153 | }
154 |
155 | return matrix;
156 | }
157 | }
158 |
--------------------------------------------------------------------------------
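The fixed-point constants in YUV2RGB() above are the familiar BT.601 coefficients scaled by 1024 (1.164 -> 1192, 1.596 -> 1634, 0.813 -> 833, 0.391 -> 400, 2.018 -> 2066), so the conversion needs no floating point. A standalone check of one conversion (plain Java; the sample YUV triple is illustrative):

public class Yuv2RgbCheck {
    static final int kMaxChannelValue = 262143; // 2^18 - 1

    static int yuv2rgb(int y, int u, int v) {
        y = Math.max(y - 16, 0);
        u -= 128;
        v -= 128;
        int y1192 = 1192 * y;
        int r = y1192 + 1634 * v;
        int g = y1192 - 833 * v - 400 * u;
        int b = y1192 + 2066 * u;
        r = Math.min(Math.max(r, 0), kMaxChannelValue);
        g = Math.min(Math.max(g, 0), kMaxChannelValue);
        b = Math.min(Math.max(b, 0), kMaxChannelValue);
        // Results are in 18-bit range; the shifts bring each channel down to 8 bits.
        return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
    }

    public static void main(String[] args) {
        int argb = yuv2rgb(81, 90, 240); // roughly pure red in BT.601 studio range
        System.out.printf("0x%08x%n", argb); // prints 0xfffe0000, i.e. (almost) pure red
    }
}
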
/app/src/main/java/com/busradeniz/detection/env/Logger.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection.env;
2 |
3 | import android.util.Log;
4 |
5 | import java.util.HashSet;
6 | import java.util.Set;
7 |
8 | /**
9 | * Wrapper for the platform log function, allows convenient message prefixing and log disabling.
10 | */
11 | public final class Logger {
12 | private static final String DEFAULT_TAG = "tensorflow";
13 | private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
14 |
15 | // Classes to be ignored when examining the stack trace
16 | private static final Set<String> IGNORED_CLASS_NAMES;
17 |
18 | static {
19 | IGNORED_CLASS_NAMES = new HashSet<String>(3);
20 | IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
21 | IGNORED_CLASS_NAMES.add("java.lang.Thread");
22 | IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
23 | }
24 |
25 | private final String tag;
26 | private final String messagePrefix;
27 | private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
28 |
29 | /**
30 | * Creates a Logger using the class name as the message prefix.
31 | *
32 | * @param clazz the class whose simple name is used as the message prefix.
33 | */
34 | public Logger(final Class<?> clazz) {
35 | this(clazz.getSimpleName());
36 | }
37 |
38 | /**
39 | * Creates a Logger using the specified message prefix.
40 | *
41 | * @param messagePrefix is prepended to the text of every message.
42 | */
43 | public Logger(final String messagePrefix) {
44 | this(DEFAULT_TAG, messagePrefix);
45 | }
46 |
47 | /**
48 | * Creates a Logger with a custom tag and a custom message prefix. If the message prefix
49 | * is set to null, the caller's class name is used as the prefix.
50 | *
51 | * @param tag identifies the source of a log message.
52 | * @param messagePrefix prepended to every message if non-null. If null, the caller's class
53 | * name is used instead.
54 | */
55 | public Logger(final String tag, final String messagePrefix) {
56 | this.tag = tag;
57 | final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
58 | this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
59 | }
60 |
61 | /**
62 | * Creates a Logger using the caller's class name as the message prefix.
63 | */
64 | public Logger() {
65 | this(DEFAULT_TAG, null);
66 | }
67 |
68 | /**
69 | * Creates a Logger using the caller's class name as the message prefix, logging at or above the given minimum level.
70 | */
71 | public Logger(final int minLogLevel) {
72 | this(DEFAULT_TAG, null);
73 | this.minLogLevel = minLogLevel;
74 | }
75 |
76 | public void setMinLogLevel(final int minLogLevel) {
77 | this.minLogLevel = minLogLevel;
78 | }
79 |
80 | public boolean isLoggable(final int logLevel) {
81 | return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
82 | }
83 |
84 | /**
85 | * Return caller's simple name.
86 | *
87 | * Android getStackTrace() returns an array that looks like this:
88 | * stackTrace[0]: dalvik.system.VMStack
89 | * stackTrace[1]: java.lang.Thread
90 | * stackTrace[2]: com.google.android.apps.unveil.env.UnveilLogger
91 | * stackTrace[3]: com.google.android.apps.unveil.BaseApplication
92 | *
93 | * This function returns the simple version of the first non-filtered name.
94 | *
95 | * @return caller's simple name
96 | */
97 | private static String getCallerSimpleName() {
98 | // Get the current callstack so we can pull the class of the caller off of it.
99 | final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
100 |
101 | for (final StackTraceElement elem : stackTrace) {
102 | final String className = elem.getClassName();
103 | if (!IGNORED_CLASS_NAMES.contains(className)) {
104 | // We're only interested in the simple name of the class, not the complete package.
105 | final String[] classParts = className.split("\\.");
106 | return classParts[classParts.length - 1];
107 | }
108 | }
109 |
110 | return Logger.class.getSimpleName();
111 | }
112 |
113 | private String toMessage(final String format, final Object... args) {
114 | return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
115 | }
116 |
117 | public void v(final String format, final Object... args) {
118 | if (isLoggable(Log.VERBOSE)) {
119 | Log.v(tag, toMessage(format, args));
120 | }
121 | }
122 |
123 | public void v(final Throwable t, final String format, final Object... args) {
124 | if (isLoggable(Log.VERBOSE)) {
125 | Log.v(tag, toMessage(format, args), t);
126 | }
127 | }
128 |
129 | public void d(final String format, final Object... args) {
130 | if (isLoggable(Log.DEBUG)) {
131 | Log.d(tag, toMessage(format, args));
132 | }
133 | }
134 |
135 | public void d(final Throwable t, final String format, final Object... args) {
136 | if (isLoggable(Log.DEBUG)) {
137 | Log.d(tag, toMessage(format, args), t);
138 | }
139 | }
140 |
141 | public void i(final String format, final Object... args) {
142 | if (isLoggable(Log.INFO)) {
143 | Log.i(tag, toMessage(format, args));
144 | }
145 | }
146 |
147 | public void i(final Throwable t, final String format, final Object... args) {
148 | if (isLoggable(Log.INFO)) {
149 | Log.i(tag, toMessage(format, args), t);
150 | }
151 | }
152 |
153 | public void w(final String format, final Object... args) {
154 | if (isLoggable(Log.WARN)) {
155 | Log.w(tag, toMessage(format, args));
156 | }
157 | }
158 |
159 | public void w(final Throwable t, final String format, final Object... args) {
160 | if (isLoggable(Log.WARN)) {
161 | Log.w(tag, toMessage(format, args), t);
162 | }
163 | }
164 |
165 | public void e(final String format, final Object... args) {
166 | if (isLoggable(Log.ERROR)) {
167 | Log.e(tag, toMessage(format, args));
168 | }
169 | }
170 |
171 | public void e(final Throwable t, final String format, final Object... args) {
172 | if (isLoggable(Log.ERROR)) {
173 | Log.e(tag, toMessage(format, args), t);
174 | }
175 | }
176 | }
177 |
--------------------------------------------------------------------------------
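The only non-obvious part of Logger is getCallerSimpleName(): when no prefix is given, it walks the current stack trace and takes the first class not on the ignore list. A standalone demonstration of that trick (plain Java, no android.util.Log; class names here are illustrative):

import java.util.HashSet;
import java.util.Set;

public class CallerNameDemo {
    private static final Set<String> IGNORED = new HashSet<String>();
    static {
        IGNORED.add("java.lang.Thread");
        IGNORED.add(CallerNameDemo.class.getCanonicalName());
    }

    static String callerSimpleName() {
        // First non-ignored frame on the stack is the real caller.
        for (StackTraceElement elem : Thread.currentThread().getStackTrace()) {
            String className = elem.getClassName();
            if (!IGNORED.contains(className)) {
                String[] parts = className.split("\\.");
                return parts[parts.length - 1];
            }
        }
        return "unknown";
    }

    static class Client {
        void log() {
            System.out.println("prefix = " + callerSimpleName());
        }
    }

    public static void main(String[] args) {
        new Client().log(); // prints "prefix = CallerNameDemo$Client"
    }
}
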
/app/src/main/java/com/busradeniz/detection/tracking/MultiBoxTracker.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection.tracking;
2 |
3 | import android.content.Context;
4 | import android.graphics.Canvas;
5 | import android.graphics.Color;
6 | import android.graphics.Matrix;
7 | import android.graphics.Paint;
8 | import android.graphics.Paint.Cap;
9 | import android.graphics.Paint.Join;
10 | import android.graphics.Paint.Style;
11 | import android.graphics.RectF;
12 | import android.text.TextUtils;
13 | import android.util.Pair;
14 | import android.util.TypedValue;
15 |
16 | import com.busradeniz.detection.Classifier;
17 | import com.busradeniz.detection.env.BorderedText;
18 | import com.busradeniz.detection.env.ImageUtils;
19 | import com.busradeniz.detection.env.Logger;
20 |
21 | import java.util.LinkedList;
22 | import java.util.List;
23 | import java.util.Queue;
24 |
25 | /**
26 | * A tracker wrapping ObjectTracker that also handles non-max suppression and matching existing
27 | * objects to new detections.
28 | */
29 | public class MultiBoxTracker {
30 | private final Logger logger = new Logger();
31 |
32 | private static final float TEXT_SIZE_DIP = 18;
33 |
34 | private static final float MIN_SIZE = 16.0f;
35 |
36 | private static final int[] COLORS = {
37 | Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA, Color.WHITE,
38 | Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
39 | Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
40 | Color.parseColor("#AA33AA"), Color.parseColor("#0D0068")
41 | };
42 |
43 | private final Queue<Integer> availableColors = new LinkedList<>();
44 |
45 | final List<Pair<Float, RectF>> screenRects = new LinkedList<>();
46 |
47 | private static class TrackedRecognition {
48 | RectF location;
49 | float detectionConfidence;
50 | int color;
51 | String title;
52 | }
53 |
54 | private final List<TrackedRecognition> trackedObjects = new LinkedList<>();
55 |
56 | private final Paint boxPaint = new Paint();
57 |
58 | private final float textSizePx;
59 | private final BorderedText borderedText;
60 |
61 | private Matrix frameToCanvasMatrix;
62 |
63 | private int frameWidth;
64 | private int frameHeight;
65 |
66 | private int sensorOrientation;
67 |
68 | public MultiBoxTracker(final Context context) {
69 | for (final int color : COLORS) {
70 | availableColors.add(color);
71 | }
72 |
73 | boxPaint.setColor(Color.RED);
74 | boxPaint.setStyle(Style.STROKE);
75 | boxPaint.setStrokeWidth(12.0f);
76 | boxPaint.setStrokeCap(Cap.ROUND);
77 | boxPaint.setStrokeJoin(Join.ROUND);
78 | boxPaint.setStrokeMiter(100);
79 |
80 | textSizePx =
81 | TypedValue.applyDimension(
82 | TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
83 | borderedText = new BorderedText(textSizePx);
84 | }
85 |
86 | private Matrix getFrameToCanvasMatrix() {
87 | return frameToCanvasMatrix;
88 | }
89 |
90 | public synchronized void trackResults(final List<Classifier.Recognition> results) {
91 | logger.i("Processing %d results", results.size());
92 | processResults(results);
93 | }
94 |
95 | public synchronized void draw(final Canvas canvas) {
96 | final boolean rotated = sensorOrientation % 180 == 90;
97 | final float multiplier =
98 | Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
99 | canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
100 | frameToCanvasMatrix =
101 | ImageUtils.getTransformationMatrix(
102 | frameWidth,
103 | frameHeight,
104 | (int) (multiplier * (rotated ? frameHeight : frameWidth)),
105 | (int) (multiplier * (rotated ? frameWidth : frameHeight)),
106 | sensorOrientation,
107 | false);
108 | for (final TrackedRecognition recognition : trackedObjects) {
109 | final RectF trackedPos = new RectF(recognition.location);
110 |
111 | getFrameToCanvasMatrix().mapRect(trackedPos);
112 | boxPaint.setColor(recognition.color);
113 |
114 | final float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
115 | canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
116 |
117 | final String labelString =
118 | !TextUtils.isEmpty(recognition.title)
119 | ? String.format("%s %.2f", recognition.title, recognition.detectionConfidence)
120 | : String.format("%.2f", recognition.detectionConfidence);
121 | borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.bottom, labelString);
122 | }
123 | }
124 |
125 | private boolean initialized = false;
126 |
127 | public synchronized void onFrame(
128 | final int w,
129 | final int h,
130 | final int sensorOrientation) {
131 | if (!initialized) {
132 | frameWidth = w;
133 | frameHeight = h;
134 | this.sensorOrientation = sensorOrientation;
135 | initialized = true;
136 | }
137 | }
138 |
139 | private void processResults(final List<Classifier.Recognition> results) {
140 | final List<Pair<Float, Classifier.Recognition>> rectsToTrack = new LinkedList<>();
141 |
142 | screenRects.clear();
143 | final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());
144 |
145 | for (final Classifier.Recognition result : results) {
146 | if (result.getLocation() == null) {
147 | continue;
148 | }
149 | final RectF detectionFrameRect = new RectF(result.getLocation());
150 |
151 | final RectF detectionScreenRect = new RectF();
152 | rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
153 |
154 | logger.v(
155 | "Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
156 |
157 | screenRects.add(new Pair<>(result.getConfidence(), detectionScreenRect));
158 |
159 | if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
160 | logger.w("Degenerate rectangle! " + detectionFrameRect);
161 | continue;
162 | }
163 |
164 | rectsToTrack.add(new Pair<>(result.getConfidence(), result));
165 | }
166 |
167 | if (rectsToTrack.isEmpty()) {
168 | logger.v("Nothing to track, aborting.");
169 | return;
170 | }
171 |
172 | trackedObjects.clear();
173 | for (final Pair<Float, Classifier.Recognition> potential : rectsToTrack) {
174 | final TrackedRecognition trackedRecognition = new TrackedRecognition();
175 | trackedRecognition.detectionConfidence = potential.first;
176 | trackedRecognition.location = new RectF(potential.second.getLocation());
177 | trackedRecognition.title = potential.second.getTitle();
178 | trackedRecognition.color = COLORS[trackedObjects.size()];
179 | trackedObjects.add(trackedRecognition);
180 |
181 | if (trackedObjects.size() >= COLORS.length) {
182 | break;
183 | }
184 | }
185 | }
186 | }
187 |
--------------------------------------------------------------------------------
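MultiBoxTracker.draw() has to letterbox the camera frame onto a canvas whose orientation may differ from the sensor's: a 90- or 270-degree sensorOrientation swaps the frame's width and height, and the smaller of the two candidate scale factors is chosen so the whole frame stays visible. A standalone sketch of that computation (plain Java; the sizes are illustrative):

public class CanvasScaleDemo {
    public static void main(String[] args) {
        int frameWidth = 640, frameHeight = 480;     // landscape sensor frame
        int canvasWidth = 1080, canvasHeight = 1920; // portrait screen canvas
        int sensorOrientation = 90;

        boolean rotated = sensorOrientation % 180 == 90;
        // After rotation the frame is 480x640 on screen; take the limiting axis.
        float multiplier = Math.min(
                canvasHeight / (float) (rotated ? frameWidth : frameHeight),
                canvasWidth / (float) (rotated ? frameHeight : frameWidth));
        System.out.println("scale = " + multiplier); // 1080 / 480 = 2.25
    }
}
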
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
[vector drawable XML (launcher background) stripped during text extraction; path data not recoverable]
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_camera.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 | xmlns:tools="http://schemas.android.com/tools"
4 | android:id="@+id/container"
5 | android:layout_width="match_parent"
6 | android:layout_height="match_parent"
7 | android:background="#000"
8 | tools:context="com.busradeniz.detection.CameraActivity" />
9 |
--------------------------------------------------------------------------------
/app/src/main/res/layout/camera_connection_fragment_tracking.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
3 | android:layout_width="match_parent"
4 | android:layout_height="match_parent">
5 |
6 | <com.busradeniz.detection.AutoFitTextureView
7 | android:id="@+id/texture"
8 | android:layout_width="wrap_content"
9 | android:layout_height="wrap_content" />
10 |
11 | <com.busradeniz.detection.OverlayView
12 | android:id="@+id/tracking_overlay"
13 | android:layout_width="match_parent"
14 | android:layout_height="match_parent" />
15 |
16 | </FrameLayout>
17 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 | <background android:drawable="@drawable/ic_launcher_background" />
4 | <foreground android:drawable="@mipmap/ic_launcher_foreground" />
5 | </adaptive-icon>
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
3 | <background android:drawable="@drawable/ic_launcher_background" />
4 | <foreground android:drawable="@mipmap/ic_launcher_foreground" />
5 | </adaptive-icon>
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <resources>
3 | <color name="colorPrimary">#3F51B5</color>
4 | <color name="colorPrimaryDark">#303F9F</color>
5 | <color name="colorAccent">#FF4081</color>
6 | </resources>
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 | <string name="app_name">android-tensorflow-detection</string>
3 | <string name="camera_error">This device doesn\'t support Camera2 API.</string>
4 | </resources>
5 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 | <resources>
2 |
3 | <!-- Base application theme. -->
4 | <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
5 | <!-- Customize your theme here. -->
6 | <item name="colorPrimary">@color/colorPrimary</item>
7 | <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
8 | <item name="colorAccent">@color/colorAccent</item>
9 | </style>
10 |
11 | </resources>
12 |
--------------------------------------------------------------------------------
/app/src/test/java/com/busradeniz/detection/ExampleUnitTest.java:
--------------------------------------------------------------------------------
1 | package com.busradeniz.detection;
2 |
3 | import org.junit.Test;
4 |
5 | import static org.junit.Assert.*;
6 |
7 | /**
8 | * Example local unit test, which will execute on the development machine (host).
9 | *
10 | * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
11 | */
12 | public class ExampleUnitTest {
13 | @Test
14 | public void addition_isCorrect() throws Exception {
15 | assertEquals(4, 2 + 2);
16 | }
17 | }
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 |
5 | repositories {
6 | google()
7 | jcenter()
8 | }
9 | dependencies {
10 | classpath 'com.android.tools.build:gradle:3.1.0-alpha04'
11 | classpath 'org.apache.httpcomponents:httpclient:4.5.2'
12 | // NOTE: Do not place your application dependencies here; they belong
13 | // in the individual module build.gradle files
14 | }
15 | }
16 |
17 | allprojects {
18 | repositories {
19 | google()
20 | jcenter()
21 | }
22 | }
23 |
24 | task clean(type: Delete) {
25 | delete rootProject.buildDir
26 | }
27 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | org.gradle.jvmargs=-Xmx1536m
13 |
14 | # When configured, Gradle will run in incubating parallel mode.
15 | # This option should only be used with decoupled projects. More details, visit
16 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
17 | # org.gradle.parallel=true
18 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/busradeniz/android-tensorflow-detection/36565c3e1e35b334f30fa26e46fee0ed7d51fd77/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Nov 27 20:34:05 GMT 2017
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions-snapshots/gradle-4.4-20171031235950+0000-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------