11 |
12 |
--------------------------------------------------------------------------------
/vision-quickstart/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/vision-quickstart/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/vision-quickstart/app/build.gradle:
--------------------------------------------------------------------------------
// Module-level Gradle build script for the data-collector app.
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'

android {
    compileSdkVersion 29

    defaultConfig {
        applicationId "com.mobed.mlkit.vision.datacollector"
        minSdkVersion 24
        multiDexEnabled true
        targetSdkVersion 29

        versionCode 11
        versionName "1.11"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        vectorDrawables.useSupportLibrary = true
    }
    buildTypes {
        debug {
            // NOTE(review): shrinking is off, so the proguard file below is inert
            // for debug builds — confirm whether a release block was intended.
            minifyEnabled false
            proguardFiles 'proguard-rules.pro'
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    // Do NOT compress tflite model files (need to call out to developers!)
    aaptOptions {
        noCompress "tflite"
    }
}

repositories {
    maven { url 'https://google.bintray.com/tensorflow' }
}

dependencies {
    //gif Loading
    implementation ("com.github.bumptech.glide:glide:4.11.0")
    //TF Lite
    implementation 'androidx.multidex:multidex:2.0.1'
    implementation 'com.google.firebase:firebase-ml-model-interpreter:22.0.3'
    implementation 'com.google.firebase:firebase-analytics:17.4.4'
    // Face features
    implementation 'com.google.mlkit:face-detection:16.0.1'

    // -------------------------------------------------------

    implementation 'com.google.code.gson:gson:2.8.6'
    // NOTE(review): guava 17.0 is very old; left as-is to avoid resolution changes.
    implementation 'com.google.guava:guava:17.0'

    // For how to setup gradle dependencies in Android X, see:
    // https://developer.android.com/training/testing/set-up-project#gradle-dependencies
    // Core library
    androidTestImplementation 'androidx.test:core:1.2.0'

    // AndroidJUnitRunner and JUnit Rules
    androidTestImplementation 'androidx.test:runner:1.2.0'
    androidTestImplementation 'androidx.test:rules:1.2.0'

    // Assertions
    androidTestImplementation 'androidx.test.ext:junit:1.1.1'

    // ViewModel and LiveData
    implementation "androidx.lifecycle:lifecycle-livedata:2.2.0"
    implementation "androidx.lifecycle:lifecycle-viewmodel:2.2.0"

    implementation 'androidx.appcompat:appcompat:1.1.0'
    implementation 'androidx.annotation:annotation:1.1.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
}

configurations {
    // Resolves dependency conflict caused by some dependencies use
    // com.google.guava:guava and com.google.guava:listenablefuture together.
    all*.exclude group: 'com.google.guava', module: 'listenablefuture'
}

// google-services plugin applied at the end of the file (processes google-services.json).
apply plugin: 'com.google.gms.google-services'
--------------------------------------------------------------------------------
/vision-quickstart/app/google-services.json:
--------------------------------------------------------------------------------
1 | {
2 | "project_info": {
3 | "project_number": "421372490474",
4 | "firebase_url": "https://mobigaze.firebaseio.com",
5 | "project_id": "mobigaze",
6 | "storage_bucket": "mobigaze.appspot.com"
7 | },
8 | "client": [
9 | {
10 | "client_info": {
11 | "mobilesdk_app_id": "1:421372490474:android:6de66840625cd8ea269d93",
12 | "android_client_info": {
13 | "package_name": "com.mobed.mlkit.vision.datacollector"
14 | }
15 | },
16 | "oauth_client": [
17 | {
18 | "client_id": "421372490474-74hb5l5f21vipt6mn439jtivhtbam2r3.apps.googleusercontent.com",
19 | "client_type": 3
20 | }
21 | ],
22 | "api_key": [
23 | {
24 | "current_key": "AIzaSyArm4eAJImKYpUW4Ged54oeSh549-kvRzo"
25 | }
26 | ],
27 | "services": {
28 | "appinvite_service": {
29 | "other_platform_oauth_client": [
30 | {
31 | "client_id": "421372490474-74hb5l5f21vipt6mn439jtivhtbam2r3.apps.googleusercontent.com",
32 | "client_type": 3
33 | }
34 | ]
35 | }
36 | }
37 | }
38 | ],
39 | "configuration_version": "1"
40 | }
--------------------------------------------------------------------------------
/vision-quickstart/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
13 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
31 |
32 |
35 |
36 |
38 |
41 |
42 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
57 |
58 |
59 |
60 |
61 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/BitmapUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.annotation.TargetApi;
20 | import android.content.ContentResolver;
21 | import android.content.Context;
22 | import android.graphics.Bitmap;
23 | import android.graphics.BitmapFactory;
24 | import android.graphics.ImageFormat;
25 | import android.graphics.Matrix;
26 | import android.graphics.Rect;
27 | import android.graphics.YuvImage;
28 | import android.media.Image;
29 | import android.media.Image.Plane;
30 | import android.net.Uri;
31 | import android.os.Build.VERSION_CODES;
32 | import android.provider.MediaStore;
33 | import android.util.Log;
34 |
35 | import androidx.annotation.Nullable;
36 | import androidx.annotation.RequiresApi;
37 | import androidx.exifinterface.media.ExifInterface;
38 |
39 | import java.io.ByteArrayOutputStream;
40 | import java.io.IOException;
41 | import java.io.InputStream;
42 | import java.nio.ByteBuffer;
43 |
44 | /**
45 | * Utils functions for bitmap conversions.
46 | */
47 | public class BitmapUtils {
48 | private static final String TAG = "MOBED_BitmapUtils";
49 |
50 | /**
51 | * Converts NV21 format byte buffer to bitmap.
52 | */
53 | @Nullable
54 | public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) {
55 | data.rewind();
56 | byte[] imageInBuffer = new byte[data.limit()];
57 | data.get(imageInBuffer, 0, imageInBuffer.length);
58 | try {
59 | YuvImage image =
60 | new YuvImage(
61 | imageInBuffer, ImageFormat.NV21, metadata.getWidth(), metadata.getHeight(), null);
62 | ByteArrayOutputStream stream = new ByteArrayOutputStream();
63 | image.compressToJpeg(new Rect(0, 0, metadata.getWidth(), metadata.getHeight()), 80, stream);
64 |
65 | Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
66 |
67 | stream.close();
68 | return rotateBitmap(bmp, metadata.getRotation(), false, false);
69 | } catch (Exception e) {
70 | Log.e("VisionProcessorBase", "Error: " + e.getMessage());
71 | }
72 | return null;
73 | }
74 |
75 |
76 | /**
77 | * Rotates a bitmap if it is converted from a bytebuffer.
78 | */
79 | private static Bitmap rotateBitmap(
80 | Bitmap bitmap, int rotationDegrees, boolean flipX, boolean flipY) {
81 | Matrix matrix = new Matrix();
82 |
83 | // Rotate the image back to straight.
84 | matrix.postRotate(rotationDegrees);
85 |
86 | // Mirror the image along the X or Y axis.
87 | matrix.postScale(flipX ? -1.0f : 1.0f, flipY ? -1.0f : 1.0f);
88 | Bitmap rotatedBitmap =
89 | Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
90 |
91 | // Recycle the old bitmap if it has changed.
92 | if (rotatedBitmap != bitmap) {
93 | bitmap.recycle();
94 | }
95 | return rotatedBitmap;
96 | }
97 |
98 | @Nullable
99 | public static Bitmap getBitmapFromAsset(Context context, String fileName) {
100 | InputStream inputStream = null;
101 | try {
102 | inputStream = context.getAssets().open(fileName);
103 | return BitmapFactory.decodeStream(inputStream);
104 | } catch (IOException e) {
105 | Log.e(TAG, "Error reading asset: " + fileName, e);
106 | } finally {
107 | if (inputStream != null) {
108 | try {
109 | inputStream.close();
110 | } catch (IOException e) {
111 | Log.e(TAG, "Failed to close input stream: ", e);
112 | }
113 | }
114 | }
115 |
116 | return null;
117 | }
118 |
    /**
     * Loads a bitmap from a content URI and normalizes it using the image's EXIF
     * orientation tag, so the returned bitmap is always upright.
     *
     * @param contentResolver resolver used to open the image
     * @param imageUri content/file URI of the image
     * @return the upright bitmap, or null if decoding produced no bitmap
     * @throws IOException if the image cannot be loaded from the resolver
     */
    @Nullable
    public static Bitmap getBitmapFromContentUri(ContentResolver contentResolver, Uri imageUri)
        throws IOException {
        Bitmap decodedBitmap = MediaStore.Images.Media.getBitmap(contentResolver, imageUri);
        if (decodedBitmap==null) {
            return null;
        }
        int orientation = getExifOrientationTag(contentResolver, imageUri);

        int rotationDegrees = 0;
        boolean flipX = false;
        boolean flipY = false;
        // See e.g. https://magnushoff.com/articles/jpeg-orientation/ for a detailed explanation on each
        // orientation.
        switch (orientation) {
            case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
                flipX = true;
                break;
            case ExifInterface.ORIENTATION_ROTATE_90:
                rotationDegrees = 90;
                break;
            case ExifInterface.ORIENTATION_TRANSPOSE:
                // Transpose = rotate 90 plus horizontal mirror.
                rotationDegrees = 90;
                flipX = true;
                break;
            case ExifInterface.ORIENTATION_ROTATE_180:
                rotationDegrees = 180;
                break;
            case ExifInterface.ORIENTATION_FLIP_VERTICAL:
                flipY = true;
                break;
            case ExifInterface.ORIENTATION_ROTATE_270:
                rotationDegrees = -90;
                break;
            case ExifInterface.ORIENTATION_TRANSVERSE:
                // Transverse = rotate -90 plus horizontal mirror.
                rotationDegrees = -90;
                flipX = true;
                break;
            case ExifInterface.ORIENTATION_UNDEFINED:
            case ExifInterface.ORIENTATION_NORMAL:
            default:
                // No transformations necessary in this case.
        }

        return rotateBitmap(decodedBitmap, rotationDegrees, flipX, flipY);
    }
165 |
    /**
     * Reads the EXIF orientation tag of the image behind a URI.
     *
     * @return one of the {@link ExifInterface} ORIENTATION_* constants, or 0 when the URI
     *     scheme is not content/file or the stream cannot be opened/read
     */
    private static int getExifOrientationTag(ContentResolver resolver, Uri imageUri) {
        // We only support parsing EXIF orientation tag from local file on the device.
        // See also:
        // https://android-developers.googleblog.com/2016/12/introducing-the-exifinterface-support-library.html
        if (!ContentResolver.SCHEME_CONTENT.equals(imageUri.getScheme())
            && !ContentResolver.SCHEME_FILE.equals(imageUri.getScheme())) {
            return 0;
        }

        ExifInterface exif;
        try (InputStream inputStream = resolver.openInputStream(imageUri)) {
            if (inputStream == null) {
                return 0;
            }

            exif = new ExifInterface(inputStream);
        } catch (IOException e) {
            Log.e(TAG, "failed to open file to read rotation meta data: " + imageUri, e);
            return 0;
        }

        // Fall back to ORIENTATION_NORMAL when no tag is present.
        return exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
    }
189 |
    /** Converts a bitmap to an NV21 byte buffer (see {@link #convertBitmapToNv21Bytes}). */
    public static ByteBuffer convertBitmapToNv21Buffer(Bitmap bitmap) {
        return ByteBuffer.wrap(convertBitmapToNv21Bytes(bitmap));
    }
193 |
    /**
     * Converts a bitmap to an NV21 byte array: a full-resolution Y plane followed by
     * interleaved VU chroma subsampled 2x2 (see {@link #encodeToNv21}).
     */
    public static byte[] convertBitmapToNv21Bytes(Bitmap bitmap) {
        int inputWidth = bitmap.getWidth();
        int inputHeight = bitmap.getHeight();
        int[] argb = new int[inputWidth * inputHeight];

        bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);

        // Y plane (width*height) plus one V and one U byte per 2x2 block; ceil()
        // sizes the chroma planes correctly for odd dimensions.
        byte[] nv21Bytes =
            new byte
                [inputHeight * inputWidth
                    + 2 * (int) Math.ceil(inputHeight / 2.0) * (int) Math.ceil(inputWidth / 2.0)];
        encodeToNv21(nv21Bytes, argb, inputWidth, inputHeight);
        return nv21Bytes;
    }
208 |
209 | private static void encodeToNv21(byte[] nv21Bytes, int[] argb, int width, int height) {
210 | int frameSize = width * height;
211 |
212 | int yIndex = 0;
213 | int uvIndex = frameSize;
214 |
215 | int red;
216 | int green;
217 | int blue;
218 | int y;
219 | int u;
220 | int v;
221 | int index = 0;
222 | for (int j = 0; j < height; j++) {
223 | for (int i = 0; i < width; i++) {
224 |
225 | // first byte is alpha, but is unused
226 | red = (argb[index] & 0xff0000) >> 16;
227 | green = (argb[index] & 0xff00) >> 8;
228 | blue = (argb[index] & 0xff) >> 0;
229 |
230 | // well known RGB to YUV algorithm
231 | y = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
232 | u = ((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128;
233 | v = ((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128;
234 |
235 | // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
236 | // meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
237 | // pixel AND every other scanline.
238 | nv21Bytes[yIndex++] = (byte) ((y < 0) ? 0 : ((y > 255) ? 255 : y));
239 | if (j % 2 == 0 && index % 2 == 0) {
240 | nv21Bytes[uvIndex++] = (byte) ((v < 0) ? 0 : ((v > 255) ? 255 : v));
241 | nv21Bytes[uvIndex++] = (byte) ((u < 0) ? 0 : ((u > 255) ? 255 : u));
242 | }
243 |
244 | index++;
245 | }
246 | }
247 | }
248 |
    /** Converts a bitmap to a YV12 byte buffer (see {@link #convertBitmapToYv12Bytes}). */
    public static ByteBuffer convertBitmapToYv12Buffer(Bitmap bitmap) {
        return ByteBuffer.wrap(convertBitmapToYv12Bytes(bitmap));
    }
252 |
    /** Converts a bitmap to YV12 bytes by encoding NV21 first, then de-interleaving chroma. */
    public static byte[] convertBitmapToYv12Bytes(Bitmap bitmap) {
        byte[] nv21Bytes = convertBitmapToNv21Bytes(bitmap);
        return nv21Toyv12(nv21Bytes);
    }
257 |
258 | /**
259 | * Converts nv21 byte[] to yv12 byte[].
260 | *
261 | *
NV21 (4:2:0) Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y V U V U V U V U V U V U
262 | *
263 | *
YV12 (4:2:0) Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y V V V V V V U U U U U U
264 | */
265 | private static byte[] nv21Toyv12(byte[] nv21Bytes) {
266 | int totalBytes = nv21Bytes.length;
267 | int rowSize = totalBytes / 6; // 4+2+0
268 | byte[] yv12Bytes = new byte[totalBytes];
269 | System.arraycopy(nv21Bytes, 0, yv12Bytes, 0, rowSize * 4);
270 | int offSet = totalBytes / 6 * 4;
271 | for (int i = 0; i < rowSize; i++) {
272 | yv12Bytes[offSet + i] = nv21Bytes[offSet + 2 * i]; // V
273 | yv12Bytes[offSet + rowSize + i] = nv21Bytes[offSet + 2 * i + 1]; // U
274 | }
275 |
276 | return yv12Bytes;
277 | }
278 |
    /**
     * Converts YUV_420_888 to NV21 bytebuffer.
     *
     * <p>The NV21 format consists of a single byte array containing the Y, U and V values. For an
     * image of size S, the first S positions of the array contain all the Y values. The remaining
     * positions contain interleaved V and U values. U and V are subsampled by a factor of 2 in both
     * dimensions, so there are S/4 U values and S/4 V values. In summary, the NV21 array will contain
     * S Y values followed by S/4 VU values: YYYYYYYYYYYYYY(...)YVUVUVUVU(...)VU
     *
     * <p>YUV_420_888 is a generic format that can describe any YUV image where U and V are subsampled
     * by a factor of 2 in both dimensions. {@link Image#getPlanes} returns an array with the Y, U and
     * V planes. The Y plane is guaranteed not to be interleaved, so we can just copy its values into
     * the first part of the NV21 array. The U and V planes may already have the representation in the
     * NV21 format. This happens if the planes share the same buffer, the V buffer is one position
     * before the U buffer and the planes have a pixelStride of 2. If this is case, we can just copy
     * them to the NV21 array.
     */
    @RequiresApi(VERSION_CODES.KITKAT)
    private static ByteBuffer yuv420ThreePlanesToNV21(
        Plane[] yuv420888planes, int width, int height) {
        int imageSize = width * height;
        byte[] out = new byte[imageSize + 2 * (imageSize / 4)];

        if (areUVPlanesNV21(yuv420888planes, width, height)) {
            // Fast path: the UV data already has NV21 layout, so it can be bulk-copied.
            // Copy the Y values.
            yuv420888planes[0].getBuffer().get(out, 0, imageSize);

            ByteBuffer uBuffer = yuv420888planes[1].getBuffer();
            ByteBuffer vBuffer = yuv420888planes[2].getBuffer();
            // Get the first V value from the V buffer, since the U buffer does not contain it.
            vBuffer.get(out, imageSize, 1);
            // Copy the first U value and the remaining VU values from the U buffer.
            uBuffer.get(out, imageSize + 1, 2 * imageSize / 4 - 1);
        } else {
            // Fallback to copying the UV values one by one, which is slower but also works.
            // Unpack Y.
            unpackPlane(yuv420888planes[0], width, height, out, 0, 1);
            // Unpack U.
            unpackPlane(yuv420888planes[1], width, height, out, imageSize + 1, 2);
            // Unpack V.
            unpackPlane(yuv420888planes[2], width, height, out, imageSize, 2);
        }

        return ByteBuffer.wrap(out);
    }
324 |
    /**
     * Checks if the UV plane buffers of a YUV_420_888 image are in the NV21 format.
     *
     * <p>True when V shifted forward by one byte compares equal to U truncated by one byte —
     * i.e. the planes interleave as VUVU... in shared memory. Buffer position/limit are
     * restored before returning, so this check has no lasting side effects.
     */
    @RequiresApi(VERSION_CODES.KITKAT)
    private static boolean areUVPlanesNV21(Plane[] planes, int width, int height) {
        int imageSize = width * height;

        ByteBuffer uBuffer = planes[1].getBuffer();
        ByteBuffer vBuffer = planes[2].getBuffer();

        // Backup buffer properties.
        int vBufferPosition = vBuffer.position();
        int uBufferLimit = uBuffer.limit();

        // Advance the V buffer by 1 byte, since the U buffer will not contain the first V value.
        vBuffer.position(vBufferPosition + 1);
        // Chop off the last byte of the U buffer, since the V buffer will not contain the last U value.
        uBuffer.limit(uBufferLimit - 1);

        // Check that the buffers are equal and have the expected number of elements.
        boolean areNV21 =
            (vBuffer.remaining() == (2 * imageSize / 4 - 2)) && (vBuffer.compareTo(uBuffer) == 0);

        // Restore buffers to their initial state.
        vBuffer.position(vBufferPosition);
        uBuffer.limit(uBufferLimit);

        return areNV21;
    }
354 |
    /**
     * Unpack an image plane into a byte array.
     *
     * <p>The input plane data will be copied in 'out', starting at 'offset' and every pixel will be
     * spaced by 'pixelStride'. Note that there is no row padding on the output.
     */
    @TargetApi(VERSION_CODES.KITKAT)
    private static void unpackPlane(
        Plane plane, int width, int height, byte[] out, int offset, int pixelStride) {
        ByteBuffer buffer = plane.getBuffer();
        buffer.rewind();

        // Compute the size of the current plane.
        // We assume that it has the aspect ratio as the original image.
        int numRow = (buffer.limit() + plane.getRowStride() - 1) / plane.getRowStride();
        if (numRow == 0) {
            return;
        }
        // scaleFactor is 1 for the full-resolution Y plane; for a 2x-subsampled chroma
        // plane (half the rows) it comes out as 2, halving numCol accordingly.
        int scaleFactor = height / numRow;
        int numCol = width / scaleFactor;

        // Extract the data in the output buffer.
        int outputPos = offset;
        int rowStart = 0;
        for (int row = 0; row < numRow; row++) {
            int inputPos = rowStart;
            for (int col = 0; col < numCol; col++) {
                // Read with the plane's own pixel stride; write with the caller's stride.
                out[outputPos] = buffer.get(inputPos);
                outputPos += pixelStride;
                inputPos += plane.getPixelStride();
            }
            rowStart += plane.getRowStride();
        }
    }
389 | }
390 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/CameraImageGraphic.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.graphics.Bitmap;
20 | import android.graphics.Canvas;
21 |
22 | import com.mobed.mlkit.vision.datacollector.GraphicOverlay.Graphic;
23 |
/**
 * Draw camera image to background.
 */
public class CameraImageGraphic extends Graphic {

    // Camera frame drawn beneath the other overlay graphics.
    private final Bitmap bitmap;

    public CameraImageGraphic(GraphicOverlay overlay, Bitmap bitmap) {
        super(overlay);
        this.bitmap = bitmap;
    }

    @Override
    public void draw(Canvas canvas) {
        // Drawn with the overlay-supplied transformation matrix; null paint = default drawing.
        canvas.drawBitmap(bitmap, getTransformationMatrix(), null);
    }
}
41 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/CameraSource.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.Manifest;
20 | import android.annotation.SuppressLint;
21 | import android.app.Activity;
22 | import android.content.Context;
23 | import android.graphics.ImageFormat;
24 | import android.graphics.SurfaceTexture;
25 | import android.hardware.Camera;
26 | import android.hardware.Camera.CameraInfo;
27 | import android.util.Log;
28 | import android.view.Surface;
29 | import android.view.SurfaceHolder;
30 | import android.view.WindowManager;
31 |
32 | import androidx.annotation.Nullable;
33 | import androidx.annotation.RequiresPermission;
34 |
35 | import com.google.android.gms.common.images.Size;
36 | import com.mobed.mlkit.vision.datacollector.preference.PreferenceUtils;
37 |
38 | import java.io.IOException;
39 | import java.lang.Thread.State;
40 | import java.nio.ByteBuffer;
41 | import java.util.ArrayList;
42 | import java.util.IdentityHashMap;
43 | import java.util.List;
44 | import java.util.Map;
45 |
46 | /**
47 | * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics or
48 | * displaying extra information). This receives preview frames from the camera at a specified rate,
49 | * sending those frames to child classes' detectors / classifiers as fast as it is able to process.
50 | */
51 | public class CameraSource {
52 | @SuppressLint("InlinedApi")
53 | public static final int CAMERA_FACING_BACK = CameraInfo.CAMERA_FACING_BACK;
54 |
55 | @SuppressLint("InlinedApi")
56 | public static final int CAMERA_FACING_FRONT = CameraInfo.CAMERA_FACING_FRONT;
57 |
58 | public static final int IMAGE_FORMAT = ImageFormat.NV21;
59 | //public static final int IMAGE_FORMAT = ImageFormat.YV12;
60 | public static final int DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 480;
61 | public static final int DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360;
62 |
63 | private static final String TAG = "MOBED_CameraSource";
64 |
65 | /**
66 | * The dummy surface texture must be assigned a chosen name. Since we never use an OpenGL context,
67 | * we can choose any ID we want here. The dummy surface texture is not a crazy hack - it is
68 | * actually how the camera team recommends using the camera without a preview.
69 | */
70 | private static final int DUMMY_TEXTURE_NAME = 100;
71 |
72 | /**
73 | * If the absolute difference between a preview size aspect ratio and a picture size aspect ratio
74 | * is less than this tolerance, they are considered to be the same aspect ratio.
75 | */
76 | private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
77 |
78 | protected Activity activity;
79 |
80 | private Camera camera;
81 |
82 | private int facing = CAMERA_FACING_FRONT;
83 |
84 | /**
85 | * Rotation of the device, and thus the associated preview images captured from the device.
86 | */
87 | private int rotationDegrees;
88 |
89 | private Size previewSize;
90 |
91 | private final float requestedFps = 30.0f;
92 | private final boolean requestedAutoFocus = true;
93 |
94 | // These instances need to be held onto to avoid GC of their underlying resources. Even though
95 | // these aren't used outside of the method that creates them, they still must have hard
96 | // references maintained to them.
97 | private SurfaceTexture dummySurfaceTexture;
98 | private final GraphicOverlay graphicOverlay;
99 |
100 | // True if a SurfaceTexture is being used for the preview, false if a SurfaceHolder is being
101 | // used for the preview. We want to be compatible back to Gingerbread, but SurfaceTexture
102 | // wasn't introduced until Honeycomb. Since the interface cannot use a SurfaceTexture, if the
103 | // developer wants to display a preview we must use a SurfaceHolder. If the developer doesn't
104 | // want to display a preview we use a SurfaceTexture if we are running at least Honeycomb.
105 | private boolean usingSurfaceTexture;
106 |
107 | /**
108 | * Dedicated thread and associated runnable for calling into the detector with frames, as the
109 | * frames become available from the camera.
110 | */
111 | private Thread processingThread;
112 |
113 | private final FrameProcessingRunnable processingRunnable;
114 |
115 | private final Object processorLock = new Object();
116 | // TODO(b/74400062) Re-enable the annotaion
117 | // @GuardedBy("processorLock")
118 | private VisionImageProcessor frameProcessor;
119 |
120 | /**
121 | * Map to convert between a byte array, received from the camera, and its associated byte buffer.
122 | * We use byte buffers internally because this is a more efficient way to call into native code
123 | * later (avoids a potential copy).
124 | *
125 | *
Note: uses IdentityHashMap here instead of HashMap because the behavior of an array's
126 | * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces
127 | * identity ('==') check on the keys.
128 | */
129 | private final Map bytesToByteBuffer = new IdentityHashMap<>();
130 |
    /**
     * Creates a camera source bound to the given activity and overlay; clears any
     * graphics previously drawn on the overlay.
     */
    public CameraSource(Activity activity, GraphicOverlay overlay) {
        this.activity = activity;
        graphicOverlay = overlay;
        graphicOverlay.clear();
        processingRunnable = new FrameProcessingRunnable();
    }
137 |
138 | // ==============================================================================================
139 | // Public
140 | // ==============================================================================================
141 |
    /**
     * Stops the camera and releases the resources of the camera and underlying detector.
     */
    public void release() {
        synchronized (processorLock) {
            // Tear down the camera and frame pipeline first, then stop the processor.
            stop();
            processingRunnable.release();
            cleanScreen();

            // frameProcessor may never have been set.
            if (frameProcessor != null) {
                frameProcessor.stop();
            }
        }
    }
156 |
    /**
     * Opens the camera and starts sending preview frames to the underlying detector. The preview
     * frames are not displayed.
     *
     * @return this camera source, for chaining
     * @throws IOException if the camera's preview texture or display could not be initialized
     */
    @RequiresPermission(Manifest.permission.CAMERA)
    public synchronized CameraSource start() throws IOException {
        // Already started — calling start() again is a no-op.
        if (camera != null) {
            return this;
        }

        camera = createCamera();
        // Frames are delivered into an off-screen SurfaceTexture since nothing is displayed.
        dummySurfaceTexture = new SurfaceTexture(DUMMY_TEXTURE_NAME);
        camera.setPreviewTexture(dummySurfaceTexture);
        usingSurfaceTexture = true;
        camera.startPreview();

        processingThread = new Thread(processingRunnable);
        processingRunnable.setActive(true);
        processingThread.start();
        return this;
    }
180 |
    /**
     * Opens the camera and starts sending preview frames to the underlying detector. The supplied
     * surface holder is used for the preview so frames can be displayed to the user.
     *
     * @param surfaceHolder the surface holder to use for the preview frames
     * @return this camera source, for chaining
     * @throws IOException if the supplied surface holder could not be used as the preview display
     */
    @RequiresPermission(Manifest.permission.CAMERA)
    public synchronized CameraSource start(SurfaceHolder surfaceHolder) throws IOException {
        // Already started — calling start() again is a no-op.
        if (camera != null) {
            return this;
        }

        camera = createCamera();
        camera.setPreviewDisplay(surfaceHolder);
        camera.startPreview();

        processingThread = new Thread(processingRunnable);
        processingRunnable.setActive(true);
        processingThread.start();

        usingSurfaceTexture = false;
        return this;
    }
205 |
    /**
     * Closes the camera and stops sending frames to the underlying frame detector.
     *
     * <p>This camera source may be restarted again by calling {@link #start()} or {@link
     * #start(SurfaceHolder)}.
     *
     * <p>Call {@link #release()} instead to completely shut down this camera source and release the
     * resources of the underlying detector.
     */
    public synchronized void stop() {
        processingRunnable.setActive(false);
        if (processingThread != null) {
            try {
                // Wait for the thread to complete to ensure that we can't have multiple threads
                // executing at the same time (i.e., which would happen if we called start too
                // quickly after stop).
                processingThread.join();
            } catch (InterruptedException e) {
                Log.d(TAG, "Frame processing thread interrupted on release.");
            }
            processingThread = null;
        }

        if (camera != null) {
            camera.stopPreview();
            camera.setPreviewCallbackWithBuffer(null);
            try {
                // Detach whichever preview target was attached by the matching start() overload.
                if (usingSurfaceTexture) {
                    camera.setPreviewTexture(null);
                } else {
                    camera.setPreviewDisplay(null);
                }
            } catch (Exception e) {
                Log.e(TAG, "Failed to clear camera preview: " + e);
            }
            camera.release();
            camera = null;
        }

        // Release the reference to any image buffers, since these will no longer be in use.
        bytesToByteBuffer.clear();
    }
248 |
249 | /**
250 | * Changes the facing of the camera.
251 | */
252 | public synchronized void setFacing(int facing) {
253 | if ((facing != CAMERA_FACING_BACK) && (facing != CAMERA_FACING_FRONT)) {
254 | throw new IllegalArgumentException("Invalid camera: " + facing);
255 | }
256 | this.facing = facing;
257 | }
258 |
/**
 * Returns the preview size that is currently in use by the underlying camera. May be null
 * before the camera has been started (previewSize is assigned inside createCamera()).
 */
public Size getPreviewSize() {
  return previewSize;
}
265 |
/**
 * Returns the selected camera; one of {@link #CAMERA_FACING_BACK} or {@link
 * #CAMERA_FACING_FRONT}.
 */
public int getCameraFacing() {
  return facing;
}
273 |
/**
 * Opens the camera and applies the user settings.
 *
 * @return the opened and fully configured camera, ready for startPreview()
 * @throws IOException if camera cannot be found or preview cannot be processed
 */
@SuppressLint("InlinedApi")
private Camera createCamera() throws IOException {
  int requestedCameraId = getIdForRequestedCamera(facing);
  if (requestedCameraId == -1) {
    throw new IOException("Could not find requested camera.");
  }
  Camera camera = Camera.open(requestedCameraId);

  // A preview size saved in app preferences takes priority; otherwise pick the closest
  // supported match to the default requested dimensions.
  SizePair sizePair = PreferenceUtils.getCameraPreviewSizePair(activity, requestedCameraId);
  if (sizePair == null) {
    sizePair =
        selectSizePair(
            camera,
            DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH,
            DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT);
  }

  if (sizePair == null) {
    throw new IOException("Could not find suitable preview size.");
  }

  previewSize = sizePair.preview;
  Log.v(TAG, "Camera preview size: " + previewSize);

  int[] previewFpsRange = selectPreviewFpsRange(camera, requestedFps);
  if (previewFpsRange == null) {
    throw new IOException("Could not find suitable preview frames per second range.");
  }

  Camera.Parameters parameters = camera.getParameters();

  // Setting a same-aspect-ratio picture size avoids distorted previews on some devices;
  // a null picture size means no matching aspect ratio was found (see SizePair).
  Size pictureSize = sizePair.picture;
  if (pictureSize != null) {
    Log.v(TAG, "Camera picture size: " + pictureSize);
    parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
  }
  parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
  parameters.setPreviewFpsRange(
      previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
      previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
  // Preview format is fixed by the IMAGE_FORMAT constant declared elsewhere in this class;
  // createPreviewBuffer() sizes its buffers from the same constant, so they must agree.
  parameters.setPreviewFormat(IMAGE_FORMAT);

  setRotation(camera, parameters, requestedCameraId);

  if (requestedAutoFocus) {
    if (parameters
        .getSupportedFocusModes()
        .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    } else {
      Log.i(TAG, "Camera auto focus is not supported on this device.");
    }
  }

  camera.setParameters(parameters);

  // Four frame buffers are needed for working with the camera:
  //
  //   one for the frame that is currently being executed upon in doing detection
  //   one for the next pending frame to process immediately upon completing detection
  //   two for the frames that the camera uses to populate future preview images
  //
  // Through trial and error it appears that two free buffers, in addition to the two buffers
  // used in this code, are needed for the camera to work properly. Perhaps the camera has
  // one thread for acquiring images, and another thread for calling into user code. If only
  // three buffers are used, then the camera will spew thousands of warning messages when
  // detection takes a non-trivial amount of time.
  camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback());
  camera.addCallbackBuffer(createPreviewBuffer(previewSize));
  camera.addCallbackBuffer(createPreviewBuffer(previewSize));
  camera.addCallbackBuffer(createPreviewBuffer(previewSize));
  camera.addCallbackBuffer(createPreviewBuffer(previewSize));

  return camera;
}
355 |
356 | /**
357 | * Gets the id for the camera specified by the direction it is facing. Returns -1 if no such
358 | * camera was found.
359 | *
360 | * @param facing the desired camera (front-facing or rear-facing)
361 | */
362 | private static int getIdForRequestedCamera(int facing) {
363 | CameraInfo cameraInfo = new CameraInfo();
364 | for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
365 | Camera.getCameraInfo(i, cameraInfo);
366 | if (cameraInfo.facing == facing) {
367 | return i;
368 | }
369 | }
370 | return -1;
371 | }
372 |
373 | /**
374 | * Selects the most suitable preview and picture size, given the desired width and height.
375 | *
376 | *
Even though we only need to find the preview size, it's necessary to find both the preview
377 | * size and the picture size of the camera together, because these need to have the same aspect
378 | * ratio. On some hardware, if you would only set the preview size, you will get a distorted
379 | * image.
380 | *
381 | * @param camera the camera to select a preview size from
382 | * @param desiredWidth the desired width of the camera preview frames
383 | * @param desiredHeight the desired height of the camera preview frames
384 | * @return the selected preview and picture size pair
385 | */
386 | public static SizePair selectSizePair(Camera camera, int desiredWidth, int desiredHeight) {
387 | List validPreviewSizes = generateValidPreviewSizeList(camera);
388 |
389 | // The method for selecting the best size is to minimize the sum of the differences between
390 | // the desired values and the actual values for width and height. This is certainly not the
391 | // only way to select the best size, but it provides a decent tradeoff between using the
392 | // closest aspect ratio vs. using the closest pixel area.
393 | SizePair selectedPair = null;
394 | int minDiff = Integer.MAX_VALUE;
395 | for (SizePair sizePair : validPreviewSizes) {
396 | Size size = sizePair.preview;
397 | int diff =
398 | Math.abs(size.getWidth() - desiredWidth) + Math.abs(size.getHeight() - desiredHeight);
399 | if (diff < minDiff) {
400 | selectedPair = sizePair;
401 | minDiff = diff;
402 | }
403 | }
404 |
405 | return selectedPair;
406 | }
407 |
408 | /**
409 | * Stores a preview size and a corresponding same-aspect-ratio picture size. To avoid distorted
410 | * preview images on some devices, the picture size must be set to a size that is the same aspect
411 | * ratio as the preview size or the preview may end up being distorted. If the picture size is
412 | * null, then there is no picture size with the same aspect ratio as the preview size.
413 | */
414 | public static class SizePair {
415 | public final Size preview;
416 | @Nullable
417 | public final Size picture;
418 |
419 | SizePair(
420 | Camera.Size previewSize,
421 | @Nullable Camera.Size pictureSize) {
422 | preview = new Size(previewSize.width, previewSize.height);
423 | picture = pictureSize != null ? new Size(pictureSize.width, pictureSize.height) : null;
424 | }
425 |
426 | public SizePair(Size previewSize, @Nullable Size pictureSize) {
427 | preview = previewSize;
428 | picture = pictureSize;
429 | }
430 | }
431 |
432 | /**
433 | * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not
434 | * a corresponding picture size of the same aspect ratio. If there is a corresponding picture size
435 | * of the same aspect ratio, the picture size is paired up with the preview size.
436 | *
437 | *
This is necessary because even if we don't use still pictures, the still picture size must
438 | * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the
439 | * preview images may be distorted on some devices.
440 | */
441 | public static List generateValidPreviewSizeList(Camera camera) {
442 | Camera.Parameters parameters = camera.getParameters();
443 | List supportedPreviewSizes =
444 | parameters.getSupportedPreviewSizes();
445 | List supportedPictureSizes =
446 | parameters.getSupportedPictureSizes();
447 | List validPreviewSizes = new ArrayList<>();
448 | for (Camera.Size previewSize : supportedPreviewSizes) {
449 | float previewAspectRatio = (float) previewSize.width / (float) previewSize.height;
450 |
451 | // By looping through the picture sizes in order, we favor the higher resolutions.
452 | // We choose the highest resolution in order to support taking the full resolution
453 | // picture later.
454 | for (Camera.Size pictureSize : supportedPictureSizes) {
455 | float pictureAspectRatio = (float) pictureSize.width / (float) pictureSize.height;
456 | if (Math.abs(previewAspectRatio - pictureAspectRatio) < ASPECT_RATIO_TOLERANCE) {
457 | validPreviewSizes.add(new SizePair(previewSize, pictureSize));
458 | break;
459 | }
460 | }
461 | }
462 |
463 | // If there are no picture sizes with the same aspect ratio as any preview sizes, allow all
464 | // of the preview sizes and hope that the camera can handle it. Probably unlikely, but we
465 | // still account for it.
466 | if (validPreviewSizes.size() == 0) {
467 | Log.w(TAG, "No preview sizes have a corresponding same-aspect-ratio picture size");
468 | for (Camera.Size previewSize : supportedPreviewSizes) {
469 | // The null picture size will let us know that we shouldn't set a picture size.
470 | validPreviewSizes.add(new SizePair(previewSize, null));
471 | }
472 | }
473 |
474 | return validPreviewSizes;
475 | }
476 |
477 | /**
478 | * Selects the most suitable preview frames per second range, given the desired frames per second.
479 | *
480 | * @param camera the camera to select a frames per second range from
481 | * @param desiredPreviewFps the desired frames per second for the camera preview frames
482 | * @return the selected preview frames per second range
483 | */
484 | @SuppressLint("InlinedApi")
485 | private static int[] selectPreviewFpsRange(Camera camera, float desiredPreviewFps) {
486 | // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame
487 | // rates.
488 | int desiredPreviewFpsScaled = (int) (desiredPreviewFps * 1000.0f);
489 |
490 | // The method for selecting the best range is to minimize the sum of the differences between
491 | // the desired value and the upper and lower bounds of the range. This may select a range
492 | // that the desired value is outside of, but this is often preferred. For example, if the
493 | // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the
494 | // range (15, 30).
495 | int[] selectedFpsRange = null;
496 | int minDiff = Integer.MAX_VALUE;
497 | List previewFpsRangeList = camera.getParameters().getSupportedPreviewFpsRange();
498 | for (int[] range : previewFpsRangeList) {
499 | int deltaMin = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
500 | int deltaMax = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
501 | int diff = Math.abs(deltaMin) + Math.abs(deltaMax);
502 | if (diff < minDiff) {
503 | selectedFpsRange = range;
504 | minDiff = diff;
505 | }
506 | }
507 | return selectedFpsRange;
508 | }
509 |
/**
 * Calculates the correct rotation for the given camera id and sets the rotation in the
 * parameters. It also sets the camera's display orientation and rotation.
 *
 * @param camera the camera whose display orientation will be set
 * @param parameters the camera parameters for which to set the rotation
 * @param cameraId the camera id to set rotation based on
 */
private void setRotation(Camera camera, Camera.Parameters parameters, int cameraId) {
  WindowManager windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
  int degrees = 0;
  int rotation = windowManager.getDefaultDisplay().getRotation();
  // Map the display's Surface.ROTATION_* constant to an angle in degrees.
  switch (rotation) {
    case Surface.ROTATION_0:
      degrees = 0;
      break;
    case Surface.ROTATION_90:
      degrees = 90;
      break;
    case Surface.ROTATION_180:
      degrees = 180;
      break;
    case Surface.ROTATION_270:
      degrees = 270;
      break;
    default:
      // Unknown rotation: fall back to 0 degrees but log the unexpected value.
      Log.e(TAG, "Bad rotation value: " + rotation);
  }

  CameraInfo cameraInfo = new CameraInfo();
  Camera.getCameraInfo(cameraId, cameraInfo);

  int displayAngle;
  if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
    // Front camera: sensor orientation adds to the display rotation, and the display angle
    // is inverted because the preview is mirrored.
    this.rotationDegrees = (cameraInfo.orientation + degrees) % 360;
    displayAngle = (360 - this.rotationDegrees) % 360; // compensate for it being mirrored
  } else { // back-facing
    this.rotationDegrees = (cameraInfo.orientation - degrees + 360) % 360;
    displayAngle = this.rotationDegrees;
  }
  Log.d(TAG, "Display rotation is: " + rotation);
  Log.d(TAG, "Camera face is: " + cameraInfo.facing);
  Log.d(TAG, "Camera rotation is: " + cameraInfo.orientation);
  // This value should be one of the degrees that ImageMetadata accepts: 0, 90, 180 or 270.
  Log.d(TAG, "RotationDegrees is: " + this.rotationDegrees);

  camera.setDisplayOrientation(displayAngle);
  parameters.setRotation(this.rotationDegrees);
}
558 |
559 | /**
560 | * Creates one buffer for the camera preview callback. The size of the buffer is based off of the
561 | * camera preview size and the format of the camera image.
562 | *
563 | * @return a new preview buffer of the appropriate size for the current camera settings
564 | */
565 | @SuppressLint("InlinedApi")
566 | private byte[] createPreviewBuffer(Size previewSize) {
567 | int bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT);
568 | long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
569 | int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;
570 |
571 | // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
572 | // should guarantee that there will be an array to work with.
573 | byte[] byteArray = new byte[bufferSize];
574 | ByteBuffer buffer = ByteBuffer.wrap(byteArray);
575 | if (!buffer.hasArray() || (buffer.array() != byteArray)) {
576 | // I don't think that this will ever happen. But if it does, then we wouldn't be
577 | // passing the preview content to the underlying detector later.
578 | throw new IllegalStateException("Failed to create valid buffer for camera source.");
579 | }
580 |
581 | bytesToByteBuffer.put(byteArray, buffer);
582 | return byteArray;
583 | }
584 |
585 | // ==============================================================================================
586 | // Frame processing
587 | // ==============================================================================================
588 |
/**
 * Called when the camera has a new preview frame. Hands the raw frame bytes to the frame
 * processing runnable, which pairs them back up with their wrapping ByteBuffer.
 */
private class CameraPreviewCallback implements Camera.PreviewCallback {
  @Override
  public void onPreviewFrame(byte[] data, Camera camera) {
    processingRunnable.setNextFrame(data, camera);
  }
}
598 |
/**
 * Swaps in the processor that will receive camera frames, stopping the previous one (if any)
 * first. Clears the graphic overlay so stale detection graphics are not left on screen.
 */
public void setMachineLearningFrameProcessor(VisionImageProcessor processor) {
  synchronized (processorLock) {
    cleanScreen();
    if (frameProcessor != null) {
      frameProcessor.stop();
    }
    frameProcessor = processor;
  }
}
608 |
/**
 * This runnable controls access to the underlying receiver, calling it to process frames when
 * available from the camera. This is designed to run detection on frames as fast as possible
 * (i.e., without unnecessary context switching or waiting on the next frame).
 *
 * <p>While detection is running on a frame, new frames may be received from the camera. As these
 * frames come in, the most recent frame is held onto as pending. As soon as detection and its
 * associated processing is done for the previous frame, detection on the mostly recently received
 * frame will immediately start on the same thread.
 */
private class FrameProcessingRunnable implements Runnable {

  // This lock guards all of the member variables below.
  private final Object lock = new Object();
  // Whether the processing loop should keep running; toggled via setActive().
  private boolean active = true;

  // These pending variables hold the state associated with the new frame awaiting processing.
  private ByteBuffer pendingFrameData;

  FrameProcessingRunnable() {
  }

  /**
   * Releases the underlying receiver. This is only safe to do after the associated thread has
   * completed, which is managed in camera source's release method above.
   */
  @SuppressLint("Assert")
  void release() {
    assert (processingThread.getState() == State.TERMINATED);
  }

  /**
   * Marks the runnable as active/not active. Signals any blocked threads to continue.
   */
  void setActive(boolean active) {
    synchronized (lock) {
      this.active = active;
      lock.notifyAll();
    }
  }

  /**
   * Sets the frame data received from the camera. This adds the previous unused frame buffer (if
   * present) back to the camera, and keeps a pending reference to the frame data for future use.
   */
  @SuppressWarnings("ByteBufferBackingArray")
  void setNextFrame(byte[] data, Camera camera) {
    synchronized (lock) {
      if (pendingFrameData != null) {
        // The previous pending frame was never processed; return its buffer to the camera
        // so it can be refilled.
        camera.addCallbackBuffer(pendingFrameData.array());
        pendingFrameData = null;
      }

      if (!bytesToByteBuffer.containsKey(data)) {
        Log.d(
            TAG,
            "Skipping frame. Could not find ByteBuffer associated with the image "
                + "data from the camera.");
        return;
      }

      pendingFrameData = bytesToByteBuffer.get(data);

      // Notify the processor thread if it is waiting on the next frame (see below).
      lock.notifyAll();
    }
  }

  /**
   * As long as the processing thread is active, this executes detection on frames continuously.
   * The next pending frame is either immediately available or hasn't been received yet. Once it
   * is available, we transfer the frame info to local variables and run detection on that frame.
   * It immediately loops back for the next frame without pausing.
   *
   * <p>If detection takes longer than the time in between new frames from the camera, this will
   * mean that this loop will run without ever waiting on a frame, avoiding any context switching
   * or frame acquisition time latency.
   *
   * <p>If you find that this is using more CPU than you'd like, you should probably decrease the
   * FPS setting above to allow for some idle time in between frames.
   */
  @SuppressLint("InlinedApi")
  @SuppressWarnings({"GuardedBy", "ByteBufferBackingArray"})
  @Override
  public void run() {
    ByteBuffer data;

    while (true) {
      synchronized (lock) {
        while (active && (pendingFrameData == null)) {
          try {
            // Wait for the next frame to be received from the camera, since we
            // don't have it yet.
            lock.wait();
          } catch (InterruptedException e) {
            Log.d(TAG, "Frame processing loop terminated.", e);
            return;
          }
        }

        if (!active) {
          // Exit the loop once this camera source is stopped or released. We check
          // this here, immediately after the wait() above, to handle the case where
          // setActive(false) had been called, triggering the termination of this
          // loop.
          return;
        }

        // Hold onto the frame data locally, so that we can use this for detection
        // below. We need to clear pendingFrameData to ensure that this buffer isn't
        // recycled back to the camera before we are done using that data.
        data = pendingFrameData;
        pendingFrameData = null;
      }

      // The code below needs to run outside of synchronization, because this will allow
      // the camera to add pending frame(s) while we are running detection on the current
      // frame.

      try {
        synchronized (processorLock) {
          frameProcessor.processByteBuffer(
              data,
              new FrameMetadata.Builder()
                  .setWidth(previewSize.getWidth())
                  .setHeight(previewSize.getHeight())
                  .setRotation(rotationDegrees)
                  .build(),
              graphicOverlay);
        }
      } catch (Exception t) {
        Log.e(TAG, "Exception thrown from receiver.", t);
      } finally {
        // Always return the buffer to the camera, even when detection failed, so the
        // camera does not run out of preview buffers.
        camera.addCallbackBuffer(data.array());
      }
    }
  }
}
747 |
/**
 * Cleans up graphicOverlay; child classes can do their cleanups as well.
 */
private void cleanScreen() {
  graphicOverlay.clear();
}
754 | }
755 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/CameraSourcePreview.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.content.Context;
20 | import android.content.res.Configuration;
21 | import android.util.AttributeSet;
22 | import android.util.Log;
23 | import android.view.SurfaceHolder;
24 | import android.view.SurfaceView;
25 | import android.view.ViewGroup;
26 |
27 | import com.google.android.gms.common.images.Size;
28 | import com.mobed.mlkit.vision.datacollector.preference.PreferenceUtils;
29 |
30 | import java.io.IOException;
31 |
/**
 * Preview the camera image in the screen. Hosts a SurfaceView that the camera renders into and
 * defers starting the camera until both a start has been requested and the surface exists.
 */
public class CameraSourcePreview extends ViewGroup {
  private static final String TAG = "MOBED_MIDemoApp:Preview";

  private final Context context;
  private final SurfaceView surfaceView;
  // True once start() has been requested but the camera has not actually started yet.
  private boolean startRequested;
  // True while the SurfaceView's surface exists (between surfaceCreated and surfaceDestroyed).
  private boolean surfaceAvailable;
  private CameraSource cameraSource;

  private GraphicOverlay overlay;

  public CameraSourcePreview(Context context, AttributeSet attrs) {
    super(context, attrs);
    this.context = context;
    startRequested = false;
    surfaceAvailable = false;

    surfaceView = new SurfaceView(context);
    surfaceView.getHolder().addCallback(new SurfaceCallback());
    addView(surfaceView);
  }

  /**
   * Requests that the given camera source be started; the actual start is deferred until the
   * preview surface is available (see startIfReady()). A null source stops any current source.
   */
  private void start(CameraSource cameraSource) throws IOException {
    if (cameraSource == null) {
      stop();
    }

    this.cameraSource = cameraSource;

    if (this.cameraSource != null) {
      startRequested = true;
      startIfReady();
    }
  }

  /** Starts the camera source and associates the overlay that detection graphics draw into. */
  public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
    this.overlay = overlay;
    start(cameraSource);
  }

  /** Stops the underlying camera source, if any; it can be started again later. */
  public void stop() {
    if (cameraSource != null) {
      cameraSource.stop();
    }
  }

  /** Completely releases the camera source and this view's preview surface. */
  public void release() {
    if (cameraSource != null) {
      cameraSource.release();
      cameraSource = null;
    }
    surfaceView.getHolder().getSurface().release();
  }

  /**
   * Starts the camera once both a start has been requested and the surface is available, then
   * configures the overlay with the preview's size and mirroring so graphics line up.
   */
  private void startIfReady() throws IOException, SecurityException {
    if (startRequested && surfaceAvailable) {
      if (PreferenceUtils.isCameraLiveViewportEnabled(context)) {
        // Live viewport enabled: render camera frames into this view's surface.
        cameraSource.start(surfaceView.getHolder());
      } else {
        // No live viewport: frames are processed without being drawn to this surface.
        cameraSource.start();
      }
      requestLayout();

      if (overlay != null) {
        Size size = cameraSource.getPreviewSize();
        int min = Math.min(size.getWidth(), size.getHeight());
        int max = Math.max(size.getWidth(), size.getHeight());
        boolean isImageFlipped = cameraSource.getCameraFacing() == CameraSource.CAMERA_FACING_FRONT;
        if (isPortraitMode()) {
          // Swap width and height sizes when in portrait, since it will be rotated by 90 degrees.
          // The camera preview and the image being processed have the same size.
          overlay.setImageSourceInfo(min, max, isImageFlipped);
        } else {
          overlay.setImageSourceInfo(max, min, isImageFlipped);
        }
        overlay.clear();
      }
      startRequested = false;
    }
  }

  // Tracks the surface lifecycle so the camera is only started while a surface exists.
  private class SurfaceCallback implements SurfaceHolder.Callback {
    @Override
    public void surfaceCreated(SurfaceHolder surface) {
      surfaceAvailable = true;
      try {
        startIfReady();
      } catch (IOException e) {
        Log.e(TAG, "Could not start camera source.", e);
      }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surface) {
      surfaceAvailable = false;
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }
  }

  @Override
  protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
    // Fallback dimensions used until the camera reports its actual preview size.
    int width = 320;
    int height = 240;
    if (cameraSource != null) {
      Size size = cameraSource.getPreviewSize();
      if (size != null) {
        width = size.getWidth();
        height = size.getHeight();
      }
    }

    // Swap width and height sizes when in portrait, since it will be rotated 90 degrees
    if (isPortraitMode()) {
      int tmp = width;
      width = height;
      height = tmp;
    }

    final int layoutWidth = right - left;
    final int layoutHeight = bottom - top;

    // Computes height and width for potentially doing fit width.
    int childWidth = layoutWidth;
    int childHeight = (int) (((float) layoutWidth / (float) width) * height);

    // If height is too tall using fit width, does fit height instead.
    if (childHeight > layoutHeight) {
      childHeight = layoutHeight;
      childWidth = (int) (((float) layoutHeight / (float) height) * width);
    }

    for (int i = 0; i < getChildCount(); ++i) {
      getChildAt(i).layout(0, 0, childWidth, childHeight);
      Log.d(TAG, "Assigned view: " + i);
    }

    // A layout pass may be the first moment when both the surface and the start request
    // are ready, so retry starting here.
    try {
      startIfReady();
    } catch (IOException e) {
      Log.e(TAG, "Could not start camera source.", e);
    }
  }

  /** Returns true when the device configuration reports portrait orientation. */
  private boolean isPortraitMode() {
    int orientation = context.getResources().getConfiguration().orientation;
    if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
      return false;
    }
    if (orientation == Configuration.ORIENTATION_PORTRAIT) {
      return true;
    }

    Log.d(TAG, "isPortraitMode returning false by default");
    return false;
  }
}
194 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/FrameMetadata.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
/**
 * Immutable description of a single camera frame: its pixel dimensions and the rotation needed
 * to display it upright. Instances are created via {@link Builder}.
 */
public class FrameMetadata {

  private final int width;
  private final int height;
  private final int rotation;

  private FrameMetadata(int width, int height, int rotation) {
    this.width = width;
    this.height = height;
    this.rotation = rotation;
  }

  /** Frame width in pixels. */
  public int getWidth() {
    return width;
  }

  /** Frame height in pixels. */
  public int getHeight() {
    return height;
  }

  /** Rotation applied to the frame, in degrees. */
  public int getRotation() {
    return rotation;
  }

  /**
   * Builder of {@link FrameMetadata}. Unset fields default to 0.
   */
  public static class Builder {

    private int width;
    private int height;
    private int rotation;

    public Builder setWidth(int width) {
      this.width = width;
      return this;
    }

    public Builder setHeight(int height) {
      this.height = height;
      return this;
    }

    public Builder setRotation(int rotation) {
      this.rotation = rotation;
      return this;
    }

    public FrameMetadata build() {
      return new FrameMetadata(width, height, rotation);
    }
  }
}
75 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/GraphicOverlay.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.content.Context;
20 | import android.graphics.Canvas;
21 | import android.graphics.Matrix;
22 | import android.util.AttributeSet;
23 | import android.util.Log;
24 | import android.view.View;
25 |
26 | import com.google.common.base.Preconditions;
27 |
28 | import java.util.ArrayList;
29 | import java.util.List;
30 |
31 | /**
32 | * A view which renders a series of custom graphics to be overlayed on top of an associated preview
33 | * (i.e., the camera preview). The creator can add graphics objects, update the objects, and remove
34 | * them, triggering the appropriate drawing and invalidation within the view.
 *
 * <p>Supports scaling and mirroring of the graphics relative the camera's preview properties. The
 * idea is that detection items are expressed in terms of an image size, but need to be scaled up
 * to the full view size, and also mirrored in the case of the front-facing camera.
 *
 * <p>Associated {@link Graphic} items should use the following methods to convert to view
 * coordinates for the graphics that are drawn:
 *
 * <ol>
 *   <li>{@link Graphic#scale(float)} adjusts the size of the supplied value from the image scale
 *       to the view scale.
 *   <li>{@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the
 *       coordinate from the image's coordinate system to the view coordinate system.
 * </ol>
 */
50 | public class GraphicOverlay extends View {
51 | private String TAG = "MOBED_GraphicOverlay";
52 | private final Object lock = new Object();
53 | private final List graphics = new ArrayList<>();
54 | // Matrix for transforming from image coordinates to overlay view coordinates.
55 | private final Matrix transformationMatrix = new Matrix();
56 |
57 | private int imageWidth;
58 | private int imageHeight;
59 | // The factor of overlay View size to image size. Anything in the image coordinates need to be
60 | // scaled by this amount to fit with the area of overlay View.
61 | public float scaleFactor = 1.0f;
62 | // The number of horizontal pixels needed to be cropped on each side to fit the image with the
63 | // area of overlay View after scaling.
64 | public float postScaleWidthOffset;
65 | // The number of vertical pixels needed to be cropped on each side to fit the image with the
66 | // area of overlay View after scaling.
67 | public float postScaleHeightOffset;
68 | public boolean isImageFlipped;
69 | private boolean needUpdateTransformation = true;
70 |
71 |
72 | /**
73 | * Base class for a custom graphics object to be rendered within the graphic overlay. Subclass
74 | * this and implement the {@link Graphic#draw(Canvas)} method to define the graphics element. Add
75 | * instances to the overlay using {@link GraphicOverlay#add(Graphic)}.
76 | */
77 | public abstract static class Graphic {
78 | private GraphicOverlay overlay;
79 |
80 | public Graphic(GraphicOverlay overlay) {
81 | this.overlay = overlay;
82 | }
83 |
84 | /**
85 | * Draw the graphic on the supplied canvas. Drawing should use the following methods to convert
86 | * to view coordinates for the graphics that are drawn:
87 | *
88 | *
89 | *
{@link Graphic#scale(float)} adjusts the size of the supplied value from the image
90 | * scale to the view scale.
91 | *
{@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the
92 | * coordinate from the image's coordinate system to the view coordinate system.
93 | *
94 | *
95 | * @param canvas drawing canvas
96 | */
97 | public abstract void draw(Canvas canvas);
98 |
99 | /**
100 | * Adjusts the supplied value from the image scale to the view scale.
101 | */
102 | public float scale(float imagePixel) {
103 | return imagePixel * overlay.scaleFactor;
104 | }
105 |
106 | /**
107 | * Returns the application context of the app.
108 | */
109 | public Context getApplicationContext() {
110 | return overlay.getContext().getApplicationContext();
111 | }
112 |
113 | public boolean isImageFlipped() {
114 | return overlay.isImageFlipped;
115 | }
116 |
117 | /**
118 | * Adjusts the x coordinate from the image's coordinate system to the view coordinate system.
119 | */
120 | public float translateX(float x) {
121 | if (overlay.isImageFlipped) {
122 | return overlay.getWidth() - (scale(x) - overlay.postScaleWidthOffset);
123 | } else {
124 | return scale(x) - overlay.postScaleWidthOffset;
125 | }
126 | }
127 |
128 | /**
129 | * Adjusts the y coordinate from the image's coordinate system to the view coordinate system.
130 | */
131 | public float translateY(float y) {
132 | return scale(y) - overlay.postScaleHeightOffset;
133 | }
134 |
135 | /**
136 | * Returns a {@link Matrix} for transforming from image coordinates to overlay view coordinates.
137 | */
138 | public Matrix getTransformationMatrix() {
139 | return overlay.transformationMatrix;
140 | }
141 |
142 | public void postInvalidate() {
143 | overlay.postInvalidate();
144 | }
145 | }
146 |
147 | public GraphicOverlay(Context context, AttributeSet attrs) {
148 | super(context, attrs);
149 | addOnLayoutChangeListener(
150 | (view, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) ->
151 | needUpdateTransformation = true);
152 | }
153 |
154 | /**
155 | * Removes all graphics from the overlay.
156 | */
157 | public void clear() {
158 | synchronized (lock) {
159 | graphics.clear();
160 | }
161 | postInvalidate();
162 | }
163 |
164 | /**
165 | * Adds a graphic to the overlay.
166 | */
167 | public void add(Graphic graphic) {
168 | synchronized (lock) {
169 | graphics.add(graphic);
170 | }
171 | }
172 |
173 | /**
174 | * Removes a graphic from the overlay.
175 | */
176 | public void remove(Graphic graphic) {
177 | synchronized (lock) {
178 | graphics.remove(graphic);
179 | }
180 | postInvalidate();
181 | }
182 |
183 | /**
184 | * Sets the source information of the image being processed by detectors, including size and
185 | * whether it is flipped, which informs how to transform image coordinates later.
186 | *
187 | * @param imageWidth the width of the image sent to ML Kit detectors
188 | * @param imageHeight the height of the image sent to ML Kit detectors
189 | * @param isFlipped whether the image is flipped. Should set it to true when the image is from the
190 | * front camera.
191 | */
192 | public void setImageSourceInfo(int imageWidth, int imageHeight, boolean isFlipped) {
193 | Preconditions.checkState(imageWidth > 0, "image width must be positive");
194 | Preconditions.checkState(imageHeight > 0, "image height must be positive");
195 | synchronized (lock) {
196 | this.imageWidth = imageWidth;
197 | this.imageHeight = imageHeight;
198 | this.isImageFlipped = isFlipped;
199 | Log.d(TAG,"isImageFlipped: "+isImageFlipped);
200 | needUpdateTransformation = true;
201 | }
202 | postInvalidate();
203 | }
204 |
205 | public int getImageWidth() {
206 | return imageWidth;
207 | }
208 |
209 | public int getImageHeight() {
210 | return imageHeight;
211 | }
212 |
213 | private void updateTransformationIfNeeded() {
214 | if (!needUpdateTransformation || imageWidth <= 0 || imageHeight <= 0) {
215 | return;
216 | }
217 | float viewAspectRatio = (float) getWidth() / getHeight();
218 | float imageAspectRatio = (float) imageWidth / imageHeight;
219 | postScaleWidthOffset = 0;
220 | postScaleHeightOffset = 0;
221 | if (viewAspectRatio > imageAspectRatio) {
222 | // The image needs to be vertically cropped to be displayed in this view.
223 | scaleFactor = (float) getWidth() / imageWidth;
224 | postScaleHeightOffset = ((float) getWidth() / imageAspectRatio - getHeight()) / 2;
225 | } else {
226 | // The image needs to be horizontally cropped to be displayed in this view.
227 | scaleFactor = (float) getHeight() / imageHeight;
228 | postScaleWidthOffset = ((float) getHeight() * imageAspectRatio - getWidth()) / 2;
229 | }
230 |
231 | transformationMatrix.reset();
232 | transformationMatrix.setScale(scaleFactor, scaleFactor);
233 | transformationMatrix.postTranslate(-postScaleWidthOffset, -postScaleHeightOffset);
234 |
235 | if (isImageFlipped) {
236 | transformationMatrix.postScale(-1f, 1f, getWidth() / 2f, getHeight() / 2f);
237 | }
238 |
239 | needUpdateTransformation = false;
240 | }
241 |
242 | public float getScaleFactor() {
243 | return scaleFactor;
244 | }
245 |
246 | /**
247 | * Draws the overlay with its associated graphic objects.
248 | */
249 | @Override
250 | protected void onDraw(Canvas canvas) {
251 | super.onDraw(canvas);
252 |
253 | synchronized (lock) {
254 | updateTransformationIfNeeded();
255 |
256 | for (Graphic graphic : graphics) {
257 | graphic.draw(canvas);
258 | }
259 | }
260 | }
261 | }
262 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/InferenceInfoGraphic.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.mobed.mlkit.vision.datacollector;
17 |
18 | import android.graphics.Canvas;
19 | import android.graphics.Color;
20 | import android.graphics.Paint;
21 | import android.util.Log;
22 |
23 | import androidx.annotation.Nullable;
24 |
25 | /**
26 | * Graphic instance for rendering inference info (latency, FPS, resolution) in an overlay view.
27 | */
28 | public class InferenceInfoGraphic extends GraphicOverlay.Graphic {
29 |
30 | private static final int TEXT_COLOR = Color.WHITE;
31 | private static final float TEXT_SIZE = 60.0f;
32 |
33 | private final Paint textPaint;
34 | private final GraphicOverlay overlay;
35 |
36 | public static double getLatency() {
37 | return latency;
38 | }
39 |
40 | // Only valid when a stream of input images is being processed. Null for single image mode.
41 | @Nullable
42 | private static Integer framesPerSecond = null;
43 | private static double latency;
44 |
45 | @Nullable
46 | public static Integer getFramesPerSecond() {
47 | return framesPerSecond;
48 | }
49 |
50 | public InferenceInfoGraphic(
51 | GraphicOverlay overlay, double latency, @Nullable Integer framesPerSecond) {
52 | super(overlay);
53 | this.overlay = overlay;
54 | this.latency = latency;
55 | this.framesPerSecond = framesPerSecond;
56 | textPaint = new Paint();
57 | textPaint.setColor(TEXT_COLOR);
58 | textPaint.setTextSize(TEXT_SIZE);
59 | postInvalidate();
60 | }
61 |
62 | @Override
63 | public synchronized void draw(Canvas canvas) {
64 | float x = TEXT_SIZE * 0.5f;
65 | float y = TEXT_SIZE * 1.5f;
66 | // Log.d("MOBED_INFO","InputImage size: " + overlay.getImageWidth() + "x" + overlay.getImageHeight());
67 | // Log.d("MOBED_INFO","FPS: " + framesPerSecond + ", latency: " + latency + " ms");
68 | // Log.d("MOBED_INFO","Latency: " + latency + " ms");
69 |
70 | // canvas.drawText(
71 | // "InputImage size: " + overlay.getImageWidth() + "x" + overlay.getImageHeight(),
72 | // x,
73 | // y,
74 | // textPaint);
75 | //
76 | // // Draw FPS (if valid) and inference latency
77 | // if (framesPerSecond != null) {
78 | // canvas.drawText(
79 | // "FPS: " + framesPerSecond + ", latency: " + latency + " ms", x, y + TEXT_SIZE, textPaint);
80 | // } else {
81 | // canvas.drawText("Latency: " + latency + " ms", x, y + TEXT_SIZE, textPaint);
82 | // }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/LivePreviewActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.content.Context;
20 | import android.content.SharedPreferences;
21 | import android.content.pm.PackageInfo;
22 | import android.content.pm.PackageManager;
23 | import android.hardware.Sensor;
24 | import android.hardware.SensorEvent;
25 | import android.hardware.SensorEventListener;
26 | import android.hardware.SensorManager;
27 | import android.os.Bundle;
28 |
29 | import androidx.core.app.ActivityCompat;
30 | import androidx.core.app.ActivityCompat.OnRequestPermissionsResultCallback;
31 | import androidx.core.content.ContextCompat;
32 | import androidx.appcompat.app.AppCompatActivity;
33 |
34 | import android.os.Environment;
35 | import android.util.Log;
36 | import android.view.View;
37 | import android.widget.AdapterView;
38 | import android.widget.AdapterView.OnItemSelectedListener;
39 | import android.widget.CompoundButton;
40 | import android.widget.Toast;
41 |
42 | import com.google.android.gms.common.annotation.KeepName;
43 | import com.mobed.mlkit.vision.datacollector.R;
44 | import com.mobed.mlkit.vision.datacollector.facedetector.FaceDetectorProcessor;
45 | import com.mobed.mlkit.vision.datacollector.preference.PreferenceUtils;
46 | import com.google.mlkit.vision.face.FaceDetectorOptions;
47 |
48 | import org.tensorflow.lite.Interpreter;
49 |
50 | import java.io.File;
51 | import java.io.IOException;
52 | import java.util.ArrayList;
53 | import java.util.List;
54 |
55 | /**
56 | * Live preview demo for ML Kit APIs.
57 | */
58 | @KeepName
59 | public final class LivePreviewActivity extends AppCompatActivity
60 | implements OnRequestPermissionsResultCallback,
61 | OnItemSelectedListener,
62 | CompoundButton.OnCheckedChangeListener {
63 | private static final String FACE_DETECTION = "Face Detection";
64 | private static final String TAG = "MOBED_LivePreview";
65 | private static final int PERMISSION_REQUESTS = 2;
66 |
67 | private CameraSource cameraSource = null;
68 | private CameraSourcePreview preview;
69 | private GraphicOverlay graphicOverlay;
70 | private String selectedModel = FACE_DETECTION;
71 | private static SharedPreferences sf;
72 | private static int count;
73 | private SensorManager mSensorManager;
74 | private Sensor mGyroSensor = null;
75 | private Sensor mAccelerometer = null;
76 | private Sensor mRotationVector = null;
77 |
78 | private static double gyroX;
79 | private static double gyroY;
80 | private static double gyroZ;
81 |
82 | private static double accX;
83 | private static double accY;
84 | private static double accZ;
85 |
86 | private static float pitch;
87 | private static float roll;
88 |
89 | //private static final float RADIAN_TO_DEGREE= (float) -57.2958;
90 | private static final int RADIAN_TO_DEGREE= -57;
91 |
92 | @Override
93 | protected void onCreate(Bundle savedInstanceState) {
94 | super.onCreate(savedInstanceState);
95 | Log.d(TAG, "onCreate");
96 |
97 | setContentView(R.layout.activity_vision_live_preview);
98 |
99 | preview = findViewById(R.id.preview);
100 | if (preview == null) {
101 | Log.d(TAG, "Preview is null");
102 | }
103 | graphicOverlay = findViewById(R.id.graphic_overlay);
104 | if (graphicOverlay == null) {
105 | Log.d(TAG, "graphicOverlay is null");
106 | }
107 |
108 | List options = new ArrayList<>();
109 | options.add(FACE_DETECTION);
110 |
111 | if (allPermissionsGranted()) {
112 | createCameraSource(selectedModel);
113 | } else {
114 | getRuntimePermissions();
115 | }
116 |
117 | sf = getPreferences(Context.MODE_PRIVATE);
118 | //sf.edit().remove("count").commit();
119 | count = sf.getInt("count",0);
120 |
121 | createDirectories();
122 |
123 | mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
124 | mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
125 | mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
126 | mRotationVector = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
127 | }
128 |
129 | public static int getCount(){
130 | count = sf.getInt("count",0);
131 | return count;
132 | }
133 |
134 |
135 | public static SharedPreferences getSf() {
136 | return sf;
137 | }
138 |
139 | public static int addCount(){
140 | count+=1;
141 | SharedPreferences.Editor editor = sf.edit();
142 | editor.putInt("count",count);
143 | editor.commit();
144 | return count;
145 | }
146 |
147 | @Override
148 | public synchronized void onItemSelected(AdapterView> parent, View view, int pos, long id) {
149 | // An item was selected. You can retrieve the selected item using
150 | // parent.getItemAtPosition(pos)
151 | selectedModel = parent.getItemAtPosition(pos).toString();
152 | Log.d(TAG, "Selected model: " + selectedModel);
153 | preview.stop();
154 | if (allPermissionsGranted()) {
155 | createCameraSource(selectedModel);
156 | startCameraSource();
157 | } else {
158 | getRuntimePermissions();
159 | }
160 | }
161 |
162 | @Override
163 | public void onNothingSelected(AdapterView> parent) {
164 | // Do nothing.
165 | }
166 |
167 | @Override
168 | public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
169 | Log.d(TAG, "Set facing");
170 | if (cameraSource != null) {
171 | if (isChecked) {
172 | cameraSource.setFacing(CameraSource.CAMERA_FACING_FRONT);
173 | } else {
174 | cameraSource.setFacing(CameraSource.CAMERA_FACING_BACK);
175 | }
176 | }
177 | preview.stop();
178 | startCameraSource();
179 | }
180 |
181 | private void createCameraSource(String model) {
182 | // If there's no existing cameraSource, create one.
183 | if (cameraSource == null) {
184 | cameraSource = new CameraSource(this, graphicOverlay);
185 | }
186 |
187 | try {
188 | switch (model) {
189 | case FACE_DETECTION:
190 | Log.i(TAG, "Using Face Detector Processor");
191 | FaceDetectorOptions faceDetectorOptions =
192 | PreferenceUtils.getFaceDetectorOptionsForLivePreview(this);
193 |
194 | cameraSource.setMachineLearningFrameProcessor(
195 | new FaceDetectorProcessor(this, faceDetectorOptions));
196 | break;
197 | default:
198 | Log.e(TAG, "Unknown model: " + model);
199 | }
200 | } catch (Exception e) {
201 | Log.e(TAG, "Can not create image processor: " + model, e);
202 | Toast.makeText(
203 | getApplicationContext(),
204 | "Can not create image processor: " + e.getMessage(),
205 | Toast.LENGTH_LONG)
206 | .show();
207 | }
208 | }
209 |
210 | /**
211 | * Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
212 | * (e.g., because onResume was called before the camera source was created), this will be called
213 | * again when the camera source is created.
214 | */
215 | private void startCameraSource() {
216 | if (cameraSource != null) {
217 | try {
218 | if (preview == null) {
219 | Log.d(TAG, "resume: Preview is null");
220 | }
221 | if (graphicOverlay == null) {
222 | Log.d(TAG, "resume: graphOverlay is null");
223 | }
224 | preview.start(cameraSource, graphicOverlay);
225 | } catch (IOException e) {
226 | Log.e(TAG, "Unable to start camera source.", e);
227 | cameraSource.release();
228 | cameraSource = null;
229 | }
230 | }
231 | }
232 |
233 | @Override
234 | public void onResume() {
235 | super.onResume();
236 | Log.d(TAG, "onResume");
237 | createCameraSource(selectedModel);
238 | startCameraSource();
239 | createDirectories();
240 | mSensorManager.registerListener(gyroListener, mGyroSensor, SensorManager.SENSOR_DELAY_GAME);
241 | mSensorManager.registerListener(acceleroListener, mAccelerometer, SensorManager.SENSOR_DELAY_GAME);
242 | mSensorManager.registerListener(rotationListener, mRotationVector, SensorManager.SENSOR_DELAY_GAME);
243 | }
244 |
245 | /**
246 | * Stops the camera.
247 | */
248 | @Override
249 | protected void onPause() {
250 | super.onPause();
251 | preview.stop();
252 | mSensorManager.unregisterListener(gyroListener);
253 | mSensorManager.unregisterListener(acceleroListener);
254 | mSensorManager.unregisterListener(rotationListener);
255 | }
256 |
257 | @Override
258 | public void onDestroy() {
259 | super.onDestroy();
260 | if (cameraSource != null) {
261 | cameraSource.release();
262 | }
263 | }
264 |
265 | private String[] getRequiredPermissions() {
266 | try {
267 | PackageInfo info =
268 | this.getPackageManager()
269 | .getPackageInfo(this.getPackageName(), PackageManager.GET_PERMISSIONS);
270 | String[] ps = info.requestedPermissions;
271 | if (ps != null && ps.length > 0) {
272 | return ps;
273 | } else {
274 | return new String[0];
275 | }
276 | } catch (Exception e) {
277 | return new String[0];
278 | }
279 | }
280 |
281 | private boolean allPermissionsGranted() {
282 | for (String permission : getRequiredPermissions()) {
283 | if (!isPermissionGranted(this, permission)) {
284 | return false;
285 | }
286 | }
287 | return true;
288 | }
289 |
290 | private void getRuntimePermissions() {
291 | List allNeededPermissions = new ArrayList<>();
292 | for (String permission : getRequiredPermissions()) {
293 | if (!isPermissionGranted(this, permission)) {
294 | allNeededPermissions.add(permission);
295 | }
296 | }
297 |
298 | if (!allNeededPermissions.isEmpty()) {
299 | ActivityCompat.requestPermissions(
300 | this, allNeededPermissions.toArray(new String[0]), PERMISSION_REQUESTS);
301 | }
302 | }
303 |
304 | @Override
305 | public void onRequestPermissionsResult(
306 | int requestCode, String[] permissions, int[] grantResults) {
307 | Log.i(TAG, "Permission granted!");
308 | if (allPermissionsGranted()) {
309 | createCameraSource(selectedModel);
310 | }
311 | super.onRequestPermissionsResult(requestCode, permissions, grantResults);
312 | }
313 |
314 | private static boolean isPermissionGranted(Context context, String permission) {
315 | if (ContextCompat.checkSelfPermission(context, permission)
316 | == PackageManager.PERMISSION_GRANTED) {
317 | Log.i(TAG, "Permission granted: " + permission);
318 | return true;
319 | }
320 | Log.i(TAG, "Permission NOT granted: " + permission);
321 | return false;
322 | }
323 |
324 | public boolean dir_exists(String dir_path) {
325 | boolean ret = false;
326 | File dir = new File(dir_path);
327 | if(dir.exists() && dir.isDirectory())
328 | ret = true;
329 | return ret;
330 | }
331 |
332 |
333 | public SensorEventListener gyroListener = new SensorEventListener() {
334 | public void onAccuracyChanged(Sensor mGyroSensor, int acc) {
335 | }
336 |
337 | public void onSensorChanged(SensorEvent event) {
338 | gyroX = event.values[0];
339 | gyroY = event.values[1];
340 | gyroZ = event.values[2];
341 | //SLog.d("MOBED","Gyro: "+gyroX+ " "+gyroY+" "+gyroZ+"rad/s");
342 | }
343 | };
344 | public SensorEventListener acceleroListener = new SensorEventListener() {
345 | public void onAccuracyChanged(Sensor mGyroSensor, int acc) {
346 | }
347 |
348 | public void onSensorChanged(SensorEvent event) {
349 | accX = event.values[0];
350 | accY = event.values[1];
351 | accZ = event.values[2];
352 | //Log.d("MOBED","Accelerometer: "+accX+ " "+accY+" "+accZ+"m/s^2");
353 | }
354 | };
355 |
356 | //Copied from https://rosia.tistory.com/128
357 | public SensorEventListener rotationListener = new SensorEventListener() {
358 | public void onAccuracyChanged(Sensor mRotationVector, int acc) {
359 | }
360 |
361 | public void onSensorChanged(SensorEvent event) {
362 | if(event.values.length>4) {
363 | //Log.d(TAG,"Rotation Vector event.values[0]: "+event.values[0]+" event.values[1]: "+event.values[1]+" event.values[2]: "+event.values[2]+" event.values[3]: "+event.values[3]);
364 | checkOrientation(event.values);
365 | }
366 | }
367 | };
368 |
369 | private void checkOrientation(float[] rotationVector) {
370 | float[] rotationMatrix = new float[9];
371 | SensorManager.getRotationMatrixFromVector(rotationMatrix, rotationVector);
372 |
373 | final int worldAxisForDeviceAxisX = SensorManager.AXIS_X;
374 | final int worldAxisForDeviceAxisY = SensorManager.AXIS_Z;
375 |
376 |
377 |
378 | float[] adjustedRotationMatrix = new float[9];
379 | SensorManager.remapCoordinateSystem(rotationMatrix, worldAxisForDeviceAxisX,
380 | worldAxisForDeviceAxisY, adjustedRotationMatrix);
381 |
382 | // Transform rotation matrix into azimuth/pitch/roll
383 | float[] orientation = new float[3];
384 | SensorManager.getOrientation(adjustedRotationMatrix, orientation);
385 |
386 | // Convert radians to degrees
387 | pitch = orientation[1] * RADIAN_TO_DEGREE;
388 | roll = orientation[2] * RADIAN_TO_DEGREE;
389 | //Log.d(TAG,"Rotation Vector Pitch: "+pitch+" Roll: "+roll);
390 | }
391 |
392 | public static String getGyroData(){
393 | return gyroX+ ","+gyroY+","+gyroZ;
394 | }
395 | public static String getAcceleroData(){
396 | return accX+ ","+accY+","+accZ;
397 | }
398 | public static String getOrientation(){
399 | return pitch+ ","+roll;
400 | }
401 |
402 |
403 | private final void createDirectories(){
404 | //MOBED
405 | String dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp";
406 | if (!dir_exists(dir_path)){
407 | File directory = new File(dir_path);
408 | if(!directory.mkdirs()){
409 | Log.e(TAG, "Cannot create Directory "+dir_path);
410 | }
411 | }
412 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/lefteye";
413 | if (!dir_exists(dir_path)){
414 | File directory = new File(dir_path);
415 | if(!directory.mkdirs()){
416 | Log.e(TAG, "Cannot create Directory "+dir_path);
417 | }
418 | }
419 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/righteye";
420 | if (!dir_exists(dir_path)){
421 | File directory = new File(dir_path);
422 | if(!directory.mkdirs()){
423 | Log.e(TAG, "Cannot create Directory "+dir_path);
424 | }
425 | }
426 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/face";
427 | if (!dir_exists(dir_path)){
428 | File directory = new File(dir_path);
429 | if(!directory.mkdirs()){
430 | Log.e(TAG, "Cannot create Directory "+dir_path);
431 | }
432 | }
433 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/facegrid";
434 | if (!dir_exists(dir_path)){
435 | File directory = new File(dir_path);
436 | if(!directory.mkdirs()){
437 | Log.e(TAG, "Cannot create Directory "+dir_path);
438 | }
439 | }
440 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/lefteyegrid";
441 | if (!dir_exists(dir_path)){
442 | File directory = new File(dir_path);
443 | if(!directory.mkdirs()){
444 | Log.e(TAG, "Cannot create Directory "+dir_path);
445 | }
446 | }
447 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/righteyegrid";
448 | if (!dir_exists(dir_path)){
449 | File directory = new File(dir_path);
450 | if(!directory.mkdirs()){
451 | Log.e(TAG, "Cannot create Directory "+dir_path);
452 | }
453 | }
454 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp";
455 | if (!dir_exists(dir_path)){
456 | File directory = new File(dir_path);
457 | if(!directory.mkdirs()){
458 | Log.e(TAG, "Cannot create Directory "+dir_path);
459 | }
460 | }
461 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp/lefteye";
462 | if (!dir_exists(dir_path)){
463 | File directory = new File(dir_path);
464 | if(!directory.mkdirs()){
465 | Log.e(TAG, "Cannot create Directory "+dir_path);
466 | }
467 | }
468 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp/righteye";
469 | if (!dir_exists(dir_path)){
470 | File directory = new File(dir_path);
471 | if(!directory.mkdirs()){
472 | Log.e(TAG, "Cannot create Directory "+dir_path);
473 | }
474 | }
475 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp/face";
476 | if (!dir_exists(dir_path)){
477 | File directory = new File(dir_path);
478 | if(!directory.mkdirs()){
479 | Log.e(TAG, "Cannot create Directory "+dir_path);
480 | }
481 | }
482 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp/facegrid";
483 | if (!dir_exists(dir_path)){
484 | File directory = new File(dir_path);
485 | if(!directory.mkdirs()){
486 | Log.e(TAG, "Cannot create Directory "+dir_path);
487 | }
488 | }
489 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp/lefteyegrid";
490 | if (!dir_exists(dir_path)){
491 | File directory = new File(dir_path);
492 | if(!directory.mkdirs()){
493 | Log.e(TAG, "Cannot create Directory "+dir_path);
494 | }
495 | }
496 | dir_path = Environment.getExternalStorageDirectory() + "/CaptureApp/temp/righteyegrid";
497 | if (!dir_exists(dir_path)){
498 | File directory = new File(dir_path);
499 | if(!directory.mkdirs()){
500 | Log.e(TAG, "Cannot create Directory "+dir_path);
501 | }
502 | }
503 | }
504 | }
505 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/ScopedExecutor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import androidx.annotation.NonNull;
20 |
21 | import java.util.concurrent.Executor;
22 | import java.util.concurrent.atomic.AtomicBoolean;
23 |
24 | /**
25 | * Wraps an existing executor to provide a {@link #shutdown} method that allows subsequent
26 | * cancellation of submitted runnables.
27 | */
28 | public class ScopedExecutor implements Executor {
29 |
30 | private final Executor executor;
31 | private final AtomicBoolean shutdown = new AtomicBoolean();
32 |
33 | public ScopedExecutor(@NonNull Executor executor) {
34 | this.executor = executor;
35 | }
36 |
37 | @Override
38 | public void execute(@NonNull Runnable command) {
39 | // Return early if this object has been shut down.
40 | if (shutdown.get()) {
41 | return;
42 | }
43 | executor.execute(
44 | () -> {
45 | // Check again in case it has been shut down in the mean time.
46 | if (shutdown.get()) {
47 | return;
48 | }
49 | command.run();
50 | });
51 | }
52 |
53 | /**
54 | * After this method is called, no runnables that have been submitted or are subsequently
55 | * submitted will start to execute, turning this executor into a no-op.
56 | *
57 | *
Runnables that have already started to execute will continue.
58 | */
59 | public void shutdown() {
60 | shutdown.set(true);
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/VisionImageProcessor.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.graphics.Bitmap;
20 |
21 | import com.google.mlkit.common.MlKitException;
22 |
23 | import java.nio.ByteBuffer;
24 |
/**
 * An interface to process the images with different vision detectors and custom image models.
 *
 * <p>Implementations receive frames (as Bitmap or raw ByteBuffer data) together with the
 * {@link GraphicOverlay} on which detection results should be rendered.
 */
public interface VisionImageProcessor {

    /**
     * Processes a bitmap image.
     *
     * @param bitmap the frame to analyze
     * @param graphicOverlay overlay on which detection results are drawn
     */
    void processBitmap(Bitmap bitmap, GraphicOverlay graphicOverlay);

    /**
     * Processes ByteBuffer image data, e.g. used for Camera1 live preview case.
     *
     * @param data raw frame bytes from the camera
     * @param frameMetadata size/rotation information describing {@code data}
     * @param graphicOverlay overlay on which detection results are drawn
     * @throws MlKitException if the underlying ML Kit detector fails
     */
    void processByteBuffer(
            ByteBuffer data, FrameMetadata frameMetadata, GraphicOverlay graphicOverlay)
            throws MlKitException;

    /**
     * Stops the underlying machine learning model and release resources.
     */
    void stop();
}
47 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/VisionProcessorBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector;
18 |
19 | import android.app.ActivityManager;
20 | import android.app.ActivityManager.MemoryInfo;
21 | import android.content.Context;
22 | import android.graphics.Bitmap;
23 | import android.os.SystemClock;
24 | import android.util.Log;
25 | import android.widget.Toast;
26 |
27 | import androidx.annotation.GuardedBy;
28 | import androidx.annotation.NonNull;
29 | import androidx.annotation.Nullable;
30 |
31 | import com.google.android.gms.tasks.Task;
32 | import com.google.android.gms.tasks.TaskExecutors;
33 | import com.google.mlkit.vision.common.InputImage;
34 | import com.mobed.mlkit.vision.datacollector.facedetector.FaceDetectorProcessor;
35 | import com.mobed.mlkit.vision.datacollector.preference.PreferenceUtils;
36 |
37 | import java.nio.ByteBuffer;
38 | import java.util.Timer;
39 | import java.util.TimerTask;
40 |
41 | /**
42 | * Abstract base class for vision frame processors. Subclasses need to implement {@link
43 | * #onSuccess(Object, GraphicOverlay)} to define what they want to do with the detection results and
44 | * {@link #detectInImage(InputImage)} to specify the detector object.
45 | *
46 | * @param <T> The type of the detected feature.
47 | */
48 | public abstract class VisionProcessorBase<T> implements VisionImageProcessor {
49 |
50 | protected static final String MANUAL_TESTING_LOG = "MOBED_LogTagForTest";
51 | private static final String TAG = "VisionProcessorBase";
52 |
53 | private final ActivityManager activityManager;
54 | // Fires once a second to roll frameProcessedInOneSecondInterval into framesPerSecond.
55 | private final Timer fpsTimer = new Timer();
56 | // Runs result/failure callbacks on the main thread; scoped so stop() cancels pending work.
57 | private final ScopedExecutor executor;
58 |
59 | // Whether this processor is already shut down
60 | private boolean isShutdown;
61 |
62 | // Used to calculate latency, running in the same thread, no sync needed.
63 | private int numRuns = 0;
64 | private long totalRunMs = 0;
65 | private long maxRunMs = 0;
66 | private long minRunMs = Long.MAX_VALUE;
67 |
68 | // Frame count that have been processed so far in an one second interval to calculate FPS.
69 | private int frameProcessedInOneSecondInterval = 0;
70 | private int framesPerSecond = 0;
71 |
72 | // To keep the latest images and its metadata.
73 | @GuardedBy("this")
74 | private ByteBuffer latestImage;
75 | @GuardedBy("this")
76 | private FrameMetadata latestImageMetaData;
77 | // To keep the images and metadata in process.
78 | @GuardedBy("this")
79 | private ByteBuffer processingImage;
80 | @GuardedBy("this")
81 | private FrameMetadata processingMetaData;
82 |
83 | protected VisionProcessorBase(Context context) {
84 | activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
85 | executor = new ScopedExecutor(TaskExecutors.MAIN_THREAD);
86 | fpsTimer.scheduleAtFixedRate(
87 | new TimerTask() {
88 | @Override
89 | public void run() {
90 | framesPerSecond = frameProcessedInOneSecondInterval;
91 | frameProcessedInOneSecondInterval = 0;
92 | }
93 | },
94 | /* delay= */ 0,
95 | /* period= */ 1000);
96 | }
97 |
98 | // -----------------Code for processing single still image----------------------------------------
99 | @Override
100 | public void processBitmap(Bitmap bitmap, final GraphicOverlay graphicOverlay) {
101 | requestDetectInImage(
102 | InputImage.fromBitmap(bitmap, 0),
103 | graphicOverlay,
104 | /* originalCameraImage= */ null,
105 | /* shouldShowFps= */ false);
106 | }
107 |
108 | // -----------------Code for processing live preview frame from Camera1 API-----------------------
109 | @Override
110 | public synchronized void processByteBuffer(
111 | ByteBuffer data, final FrameMetadata frameMetadata, final GraphicOverlay graphicOverlay) {
112 | latestImage = data;
113 | latestImageMetaData = frameMetadata;
114 | // Only kick off processing when no frame is in flight; otherwise the in-flight frame's
115 | // success callback will pick up the latest frame via processLatestImage.
116 | if (processingImage == null && processingMetaData == null) {
117 | processLatestImage(graphicOverlay);
118 | }
119 | }
120 |
121 | // Promotes the most recently received frame to "processing" and runs detection on it.
122 | // Intermediate frames that arrived while a frame was in flight are dropped.
123 | private synchronized void processLatestImage(final GraphicOverlay graphicOverlay) {
124 | processingImage = latestImage;
125 | processingMetaData = latestImageMetaData;
126 | latestImage = null;
127 | latestImageMetaData = null;
128 | if (processingImage != null && processingMetaData != null && !isShutdown) {
129 | processImage(processingImage, processingMetaData, graphicOverlay);
130 | }
131 | }
132 |
133 | private void processImage(
134 | ByteBuffer data, final FrameMetadata frameMetadata, final GraphicOverlay graphicOverlay) {
135 | // If live viewport is on (that is the underneath surface view takes care of the camera preview
136 | // drawing), skip the unnecessary bitmap creation that used for the manual preview drawing.
137 | Bitmap bitmap =
138 | PreferenceUtils.isCameraLiveViewportEnabled(graphicOverlay.getContext())
139 | ? null
140 | : BitmapUtils.getBitmap(data, frameMetadata);
141 |
142 | requestDetectInImage(
143 | InputImage.fromByteBuffer(
144 | data,
145 | frameMetadata.getWidth(),
146 | frameMetadata.getHeight(),
147 | frameMetadata.getRotation(),
148 | InputImage.IMAGE_FORMAT_NV21),
149 | graphicOverlay,
150 | bitmap,
151 | /* shouldShowFps= */ true)
152 | .addOnSuccessListener(executor, results -> processLatestImage(graphicOverlay));
153 | }
154 |
155 | // -----------------Common processing logic-------------------------------------------------------
156 | /**
157 | * Runs the subclass detector on {@code image}, records latency/FPS statistics, and renders the
158 | * result graphics onto the overlay. Success and failure callbacks run on the main thread.
159 | */
160 | private Task<T> requestDetectInImage(
161 | final InputImage image,
162 | final GraphicOverlay graphicOverlay,
163 | @Nullable final Bitmap originalCameraImage,
164 | boolean shouldShowFps) {
165 | final long startMs = SystemClock.elapsedRealtime();
166 | return detectInImage(image)
167 | .addOnSuccessListener(
168 | executor,
169 | results -> {
170 | if (originalCameraImage != null) {
171 | FaceDetectorProcessor.image = originalCameraImage;
172 | }
173 | long currentLatencyMs = SystemClock.elapsedRealtime() - startMs;
174 | numRuns++;
175 | frameProcessedInOneSecondInterval++;
176 | totalRunMs += currentLatencyMs;
177 | maxRunMs = Math.max(currentLatencyMs, maxRunMs);
178 | minRunMs = Math.min(currentLatencyMs, minRunMs);
179 |
180 | // Only log inference info once per second. When frameProcessedInOneSecondInterval is
181 | // equal to 1, it means this is the first frame processed during the current second.
182 | if (frameProcessedInOneSecondInterval == 1) {
183 | Log.d(TAG, "Max latency is: " + maxRunMs);
184 | Log.d(TAG, "Min latency is: " + minRunMs);
185 | Log.d(TAG, "Num of Runs: " + numRuns + ", Avg latency is: " + totalRunMs / numRuns);
186 | MemoryInfo mi = new MemoryInfo();
187 | activityManager.getMemoryInfo(mi);
188 | long availableMegs = mi.availMem / 0x100000L;
189 | Log.d(TAG, "Memory available in system: " + availableMegs + " MB");
190 | }
191 |
192 | graphicOverlay.clear();
193 | if (originalCameraImage != null) {
194 | graphicOverlay.add(new CameraImageGraphic(graphicOverlay, originalCameraImage));
195 | }
196 | VisionProcessorBase.this.onSuccess(results, graphicOverlay);
197 | graphicOverlay.add(
198 | new InferenceInfoGraphic(
199 | graphicOverlay, currentLatencyMs, shouldShowFps ? framesPerSecond : null));
200 | graphicOverlay.postInvalidate();
201 | })
202 | .addOnFailureListener(
203 | executor,
204 | e -> {
205 | graphicOverlay.clear();
206 | graphicOverlay.postInvalidate();
207 | String error = "Failed to process. Error: " + e.getLocalizedMessage();
208 | Toast.makeText(
209 | graphicOverlay.getContext(),
210 | error + "\nCause: " + e.getCause(),
211 | Toast.LENGTH_SHORT)
212 | .show();
213 | Log.d(TAG, error);
214 | e.printStackTrace();
215 | VisionProcessorBase.this.onFailure(e);
216 | });
217 | }
218 |
219 | @Override
220 | public void stop() {
221 | executor.shutdown();
222 | isShutdown = true;
223 | numRuns = 0;
224 | totalRunMs = 0;
225 | fpsTimer.cancel();
226 | }
227 |
228 | /** Runs the ML Kit detector on the given image; implemented by subclasses. */
229 | protected abstract Task<T> detectInImage(InputImage image);
230 |
231 | /** Called on the main thread with the detection results for one frame. */
232 | protected abstract void onSuccess(@NonNull T results, @NonNull GraphicOverlay graphicOverlay);
233 |
234 | /** Called on the main thread when detection fails for a frame. */
235 | protected abstract void onFailure(@NonNull Exception e);
236 | }
224 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/facedetector/FaceGraphic.java:
--------------------------------------------------------------------------------
1 | package com.mobed.mlkit.vision.datacollector.facedetector;
2 | /*
3 | * Copyright 2020 Google LLC. All rights reserved.
4 | *
5 | * Licensed under the Apache License, Version 2.0 (the "License");
6 | * you may not use this file except in compliance with the License.
7 | * You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | import android.graphics.Canvas;
19 | import android.graphics.Color;
20 | import android.graphics.Paint;
21 | import android.util.DisplayMetrics;
22 |
23 | import com.mobed.mlkit.vision.datacollector.GraphicOverlay;
24 | import com.mobed.mlkit.vision.datacollector.GraphicOverlay.Graphic;
25 | import com.google.mlkit.vision.face.Face;
26 |
27 | /**
28 | * Graphic instance rendered onto the graphic overlay view. Currently fills the whole canvas with
29 | * a semi-transparent red rectangle (eye-position/gaze-point drawing is not implemented here).
30 | */
31 | public class FaceGraphic extends Graphic {
32 | private String TAG = "MOBED_FaceGraphic";
33 |
34 | // Semi-transparent red fill used to tint the whole overlay.
35 | private final Paint rectColor;
36 |
37 | FaceGraphic(GraphicOverlay overlay) {
38 | super(overlay);
39 |
40 | rectColor = new Paint();
41 | rectColor.setARGB(128, 255, 0, 0);
42 | }
43 |
44 | /**
45 | * Fills the entire canvas with the semi-transparent red paint.
46 | */
47 | @Override
48 | public void draw(Canvas canvas) {
49 | //Log.d(TAG, "Canvas Width: "+canvas.getWidth()+" Height: "+ canvas.getHeight());
50 | int width = canvas.getWidth();
51 | int height = canvas.getHeight();
52 | canvas.drawRect(0, 0, width, height, rectColor);
53 | }
54 | }
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/preference/LivePreviewPreferenceFragment.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector.preference;
18 |
19 | import android.hardware.Camera;
20 | import android.os.Bundle;
21 | import android.preference.EditTextPreference;
22 | import android.preference.ListPreference;
23 | import android.preference.PreferenceCategory;
24 | import android.preference.PreferenceFragment;
25 | import android.widget.Toast;
26 |
27 | import androidx.annotation.StringRes;
28 |
29 | import com.mobed.mlkit.vision.datacollector.CameraSource;
30 | import com.mobed.mlkit.vision.datacollector.CameraSource.SizePair;
31 | import com.mobed.mlkit.vision.datacollector.R;
32 |
33 | import java.util.HashMap;
34 | import java.util.List;
35 | import java.util.Map;
36 |
37 | /**
38 | * Configures live preview demo settings.
39 | */
40 | public class LivePreviewPreferenceFragment extends PreferenceFragment {
41 |
42 | // Set by subclasses before onCreate to switch between Camera1 and CameraX preference sets.
43 | protected boolean isCameraXSetting;
44 |
45 | @Override
46 | public void onCreate(Bundle savedInstanceState) {
47 | super.onCreate(savedInstanceState);
48 |
49 | addPreferencesFromResource(R.xml.preference_live_preview);
50 | setUpCameraPreferences();
51 | setUpFaceDetectionPreferences();
52 | }
53 |
54 | /** Shows either the Camera1 preview-size preferences or the CameraX analysis-size preference. */
55 | private void setUpCameraPreferences() {
56 | PreferenceCategory cameraPreference =
57 | (PreferenceCategory) findPreference(getString(R.string.pref_category_key_camera));
58 |
59 | if (isCameraXSetting) {
60 | cameraPreference.removePreference(
61 | findPreference(getString(R.string.pref_key_rear_camera_preview_size)));
62 | cameraPreference.removePreference(
63 | findPreference(getString(R.string.pref_key_front_camera_preview_size)));
64 | setUpCameraXTargetAnalysisSizePreference();
65 | } else {
66 | cameraPreference.removePreference(
67 | findPreference(getString(R.string.pref_key_camerax_target_analysis_size)));
68 | setUpCameraPreviewSizePreference(
69 | R.string.pref_key_rear_camera_preview_size,
70 | R.string.pref_key_rear_camera_picture_size,
71 | CameraSource.CAMERA_FACING_BACK);
72 | setUpCameraPreviewSizePreference(
73 | R.string.pref_key_front_camera_preview_size,
74 | R.string.pref_key_front_camera_picture_size,
75 | CameraSource.CAMERA_FACING_FRONT);
76 | }
77 | }
78 |
79 | /**
80 | * Populates a preview-size ListPreference with the sizes the given camera actually supports,
81 | * and keeps the matching picture size persisted alongside the chosen preview size.
82 | * Hides the preference entirely when the camera cannot be opened.
83 | */
84 | private void setUpCameraPreviewSizePreference(
85 | @StringRes int previewSizePrefKeyId, @StringRes int pictureSizePrefKeyId, int cameraId) {
86 | ListPreference previewSizePreference =
87 | (ListPreference) findPreference(getString(previewSizePrefKeyId));
88 |
89 | Camera camera = null;
90 | try {
91 | camera = Camera.open(cameraId);
92 |
93 | List<SizePair> previewSizeList = CameraSource.generateValidPreviewSizeList(camera);
94 | String[] previewSizeStringValues = new String[previewSizeList.size()];
95 | Map<String, String> previewToPictureSizeStringMap = new HashMap<>();
96 | for (int i = 0; i < previewSizeList.size(); i++) {
97 | SizePair sizePair = previewSizeList.get(i);
98 | previewSizeStringValues[i] = sizePair.preview.toString();
99 | if (sizePair.picture != null) {
100 | previewToPictureSizeStringMap.put(
101 | sizePair.preview.toString(), sizePair.picture.toString());
102 | }
103 | }
104 | previewSizePreference.setEntries(previewSizeStringValues);
105 | previewSizePreference.setEntryValues(previewSizeStringValues);
106 |
107 | if (previewSizePreference.getEntry() == null) {
108 | // First time of opening the Settings page.
109 | SizePair sizePair =
110 | CameraSource.selectSizePair(
111 | camera,
112 | CameraSource.DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH,
113 | CameraSource.DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT);
114 | String previewSizeString = sizePair.preview.toString();
115 | previewSizePreference.setValue(previewSizeString);
116 | previewSizePreference.setSummary(previewSizeString);
117 | PreferenceUtils.saveString(
118 | getActivity(),
119 | pictureSizePrefKeyId,
120 | sizePair.picture != null ? sizePair.picture.toString() : null);
121 | } else {
122 | previewSizePreference.setSummary(previewSizePreference.getEntry());
123 | }
124 |
125 | previewSizePreference.setOnPreferenceChangeListener(
126 | (preference, newValue) -> {
127 | String newPreviewSizeStringValue = (String) newValue;
128 | previewSizePreference.setSummary(newPreviewSizeStringValue);
129 | PreferenceUtils.saveString(
130 | getActivity(),
131 | pictureSizePrefKeyId,
132 | previewToPictureSizeStringMap.get(newPreviewSizeStringValue));
133 | return true;
134 | });
135 |
136 | } catch (Exception e) {
137 | // If there's no camera for the given camera id, hide the corresponding preference.
138 | ((PreferenceCategory) findPreference(getString(R.string.pref_category_key_camera)))
139 | .removePreference(previewSizePreference);
140 | } finally {
141 | if (camera != null) {
142 | camera.release();
143 | }
144 | }
145 | }
146 |
147 | /** Fills the CameraX target-analysis-size preference with a fixed list of square sizes. */
148 | private void setUpCameraXTargetAnalysisSizePreference() {
149 | ListPreference pref =
150 | (ListPreference) findPreference(getString(R.string.pref_key_camerax_target_analysis_size));
151 | String[] entries = new String[]{
152 | "2000x2000",
153 | "1600x1600",
154 | "1200x1200",
155 | "1000x1000",
156 | "800x800",
157 | "600x600",
158 | "400x400",
159 | "200x200",
160 | "100x100",
161 | };
162 | pref.setEntries(entries);
163 | pref.setEntryValues(entries);
164 | pref.setSummary(pref.getEntry() == null ? "Default" : pref.getEntry());
165 | pref.setOnPreferenceChangeListener(
166 | (preference, newValue) -> {
167 | String newStringValue = (String) newValue;
168 | pref.setSummary(newStringValue);
169 | PreferenceUtils.saveString(
170 | getActivity(),
171 | R.string.pref_key_camerax_target_analysis_size,
172 | newStringValue);
173 | return true;
174 | });
175 | }
176 |
177 | /** Wires up all face-detection list preferences and validates the min-face-size input. */
178 | private void setUpFaceDetectionPreferences() {
179 | setUpListPreference(R.string.pref_key_live_preview_face_detection_landmark_mode);
180 | setUpListPreference(R.string.pref_key_live_preview_face_detection_contour_mode);
181 | setUpListPreference(R.string.pref_key_live_preview_face_detection_classification_mode);
182 | setUpListPreference(R.string.pref_key_live_preview_face_detection_performance_mode);
183 |
184 | EditTextPreference minFaceSizePreference =
185 | (EditTextPreference)
186 | findPreference(getString(R.string.pref_key_live_preview_face_detection_min_face_size));
187 | minFaceSizePreference.setSummary(minFaceSizePreference.getText());
188 | minFaceSizePreference.setOnPreferenceChangeListener(
189 | (preference, newValue) -> {
190 | try {
191 | // Only accept a parseable float in [0, 1]; anything else is rejected with a toast.
192 | float minFaceSize = Float.parseFloat((String) newValue);
193 | if (minFaceSize >= 0.0f && minFaceSize <= 1.0f) {
194 | minFaceSizePreference.setSummary((String) newValue);
195 | return true;
196 | }
197 | } catch (NumberFormatException e) {
198 | // Fall through intentionally.
199 | }
200 |
201 | Toast.makeText(
202 | getActivity(), R.string.pref_toast_invalid_min_face_size, Toast.LENGTH_LONG)
203 | .show();
204 | return false;
205 | });
206 | }
207 |
208 | /** Keeps a ListPreference's summary in sync with its currently selected entry. */
209 | private void setUpListPreference(@StringRes int listPreferenceKeyId) {
210 | ListPreference listPreference = (ListPreference) findPreference(getString(listPreferenceKeyId));
211 | listPreference.setSummary(listPreference.getEntry());
212 | listPreference.setOnPreferenceChangeListener(
213 | (preference, newValue) -> {
214 | int index = listPreference.findIndexOfValue((String) newValue);
215 | listPreference.setSummary(listPreference.getEntries()[index]);
216 | return true;
217 | });
218 | }
219 | }
209 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/preference/PreferenceUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector.preference;
18 |
19 | import android.content.Context;
20 | import android.content.SharedPreferences;
21 | import android.os.Build.VERSION_CODES;
22 | import android.preference.PreferenceManager;
23 |
24 | import androidx.annotation.Nullable;
25 | import androidx.annotation.RequiresApi;
26 | import androidx.annotation.StringRes;
27 |
28 | import com.google.android.gms.common.images.Size;
29 | import com.google.common.base.Preconditions;
30 | import com.mobed.mlkit.vision.datacollector.CameraSource;
31 | import com.mobed.mlkit.vision.datacollector.CameraSource.SizePair;
32 | import com.mobed.mlkit.vision.datacollector.R;
33 | import com.google.mlkit.vision.face.FaceDetectorOptions;
34 |
35 | /**
36 | * Utility class to retrieve shared preferences.
37 | */
38 | public class PreferenceUtils {
39 |
40 | /** Persists {@code value} in the default shared preferences under the given key resource. */
41 | static void saveString(Context context, @StringRes int prefKeyId, @Nullable String value) {
42 | SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(context).edit();
43 | editor.putString(context.getString(prefKeyId), value);
44 | editor.apply();
45 | }
46 |
47 | /**
48 | * Returns the saved preview/picture size pair for the given camera facing, or null when no
49 | * usable value has been stored yet.
50 | */
51 | @Nullable
52 | public static SizePair getCameraPreviewSizePair(Context context, int cameraId) {
53 | Preconditions.checkArgument(
54 | cameraId == CameraSource.CAMERA_FACING_BACK
55 | || cameraId == CameraSource.CAMERA_FACING_FRONT);
56 | boolean rearFacing = cameraId == CameraSource.CAMERA_FACING_BACK;
57 | String previewSizePrefKey =
58 | context.getString(
59 | rearFacing
60 | ? R.string.pref_key_rear_camera_preview_size
61 | : R.string.pref_key_front_camera_preview_size);
62 | String pictureSizePrefKey =
63 | context.getString(
64 | rearFacing
65 | ? R.string.pref_key_rear_camera_picture_size
66 | : R.string.pref_key_front_camera_picture_size);
67 |
68 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
69 | try {
70 | // Size.parseSize throws on null/malformed input; any failure means "not configured".
71 | Size previewSize = Size.parseSize(prefs.getString(previewSizePrefKey, null));
72 | Size pictureSize = Size.parseSize(prefs.getString(pictureSizePrefKey, null));
73 | return new SizePair(previewSize, pictureSize);
74 | } catch (Exception e) {
75 | return null;
76 | }
77 | }
78 |
79 | /** Returns the saved CameraX target analysis size, or null when unset or unparseable. */
80 | @RequiresApi(VERSION_CODES.LOLLIPOP)
81 | @Nullable
82 | public static android.util.Size getCameraXTargetAnalysisSize(Context context) {
83 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
84 | String stored =
85 | prefs.getString(context.getString(R.string.pref_key_camerax_target_analysis_size), null);
86 | try {
87 | return android.util.Size.parseSize(stored);
88 | } catch (Exception e) {
89 | return null;
90 | }
91 | }
92 |
93 | /** Builds FaceDetectorOptions for the live preview from the user's saved preferences. */
94 | public static FaceDetectorOptions getFaceDetectorOptionsForLivePreview(Context context) {
95 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
96 | boolean trackingEnabled =
97 | prefs.getBoolean(
98 | context.getString(R.string.pref_key_live_preview_face_detection_face_tracking), false);
99 | float minFaceSize =
100 | Float.parseFloat(
101 | prefs.getString(
102 | context.getString(R.string.pref_key_live_preview_face_detection_min_face_size),
103 | "0.1"));
104 |
105 | FaceDetectorOptions.Builder builder =
106 | new FaceDetectorOptions.Builder()
107 | .setLandmarkMode(
108 | getModeTypePreferenceValue(
109 | context,
110 | R.string.pref_key_live_preview_face_detection_landmark_mode,
111 | FaceDetectorOptions.LANDMARK_MODE_ALL))
112 | .setContourMode(
113 | getModeTypePreferenceValue(
114 | context,
115 | R.string.pref_key_live_preview_face_detection_contour_mode,
116 | FaceDetectorOptions.CONTOUR_MODE_ALL))
117 | .setClassificationMode(
118 | getModeTypePreferenceValue(
119 | context,
120 | R.string.pref_key_live_preview_face_detection_classification_mode,
121 | FaceDetectorOptions.CLASSIFICATION_MODE_ALL))
122 | .setPerformanceMode(
123 | getModeTypePreferenceValue(
124 | context,
125 | R.string.pref_key_live_preview_face_detection_performance_mode,
126 | FaceDetectorOptions.PERFORMANCE_MODE_FAST))
127 | .setMinFaceSize(minFaceSize);
128 | if (trackingEnabled) {
129 | builder.enableTracking();
130 | }
131 | return builder.build();
132 | }
133 |
134 | /**
135 | * Mode type preference is backed by {@link android.preference.ListPreference} which only support
136 | * storing its entry value as string type, so we need to retrieve as string and then convert to
137 | * integer.
138 | */
139 | private static int getModeTypePreferenceValue(
140 | Context context, @StringRes int prefKeyResId, int defaultValue) {
141 | SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
142 | String stored = prefs.getString(context.getString(prefKeyResId), String.valueOf(defaultValue));
143 | return Integer.parseInt(stored);
144 | }
145 |
146 | /** Whether the live-viewport option is enabled (defaults to false). */
147 | public static boolean isCameraLiveViewportEnabled(Context context) {
148 | return PreferenceManager.getDefaultSharedPreferences(context)
149 | .getBoolean(context.getString(R.string.pref_key_camera_live_viewport), false);
150 | }
151 | }
147 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/java/com/mobed/mlkit/vision/datacollector/preference/SettingsActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC. All rights reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.mobed.mlkit.vision.datacollector.preference;
18 |
19 | import android.os.Bundle;
20 | import android.preference.PreferenceFragment;
21 |
22 | import androidx.appcompat.app.ActionBar;
23 | import androidx.appcompat.app.AppCompatActivity;
24 |
25 | import com.mobed.mlkit.vision.datacollector.R;
26 |
27 | /**
28 | * Hosts the preference fragment to configure settings for a demo activity that specified by the
29 | * {@link LaunchSource}.
30 | */
31 | public class SettingsActivity extends AppCompatActivity {
32 |
33 | public static final String EXTRA_LAUNCH_SOURCE = "extra_launch_source";
34 |
35 | /**
36 | * Specifies where this activity is launched from.
37 | */
38 | public enum LaunchSource {
39 | LIVE_PREVIEW(R.string.pref_screen_title_live_preview, LivePreviewPreferenceFragment.class);
40 |
41 | // Action-bar title shown for this settings screen.
42 | private final int titleResId;
43 | // Fragment class instantiated reflectively in onCreate; must have a no-arg constructor.
44 | private final Class<? extends PreferenceFragment> prefFragmentClass;
45 |
46 | LaunchSource(int titleResId, Class<? extends PreferenceFragment> prefFragmentClass) {
47 | this.titleResId = titleResId;
48 | this.prefFragmentClass = prefFragmentClass;
49 | }
50 | }
51 |
52 | @Override
53 | protected void onCreate(Bundle savedInstanceState) {
54 | super.onCreate(savedInstanceState);
55 |
56 | setContentView(R.layout.activity_settings);
57 |
58 | LaunchSource launchSource =
59 | (LaunchSource) getIntent().getSerializableExtra(EXTRA_LAUNCH_SOURCE);
60 | ActionBar actionBar = getSupportActionBar();
61 | if (actionBar != null) {
62 | actionBar.setTitle(launchSource.titleResId);
63 | }
64 |
65 | try {
66 | getFragmentManager()
67 | .beginTransaction()
68 | .replace(
69 | R.id.settings_container,
70 | launchSource.prefFragmentClass.getDeclaredConstructor().newInstance())
71 | .commit();
72 | } catch (Exception e) {
73 | // Reflection failures here indicate a programming error, so fail fast.
74 | throw new RuntimeException(e);
75 | }
76 | }
77 | }
75 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/drawable-hdpi/mobed_icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joonb14/MLKitGazeDataCollectingButton/bc902739e4a2a70c25716c8073edc0d1678a23eb/vision-quickstart/app/src/main/res/drawable-hdpi/mobed_icon.png
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/layout-land/activity_vision_live_preview.xml:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
14 |
15 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/layout/activity_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/layout/activity_vision_live_preview.xml:
--------------------------------------------------------------------------------
1 |
2 |
10 |
11 |
17 |
18 |
25 |
26 |
27 |
28 |
37 |
46 |
54 |
55 |
56 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joonb14/MLKitGazeDataCollectingButton/bc902739e4a2a70c25716c8073edc0d1678a23eb/vision-quickstart/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joonb14/MLKitGazeDataCollectingButton/bc902739e4a2a70c25716c8073edc0d1678a23eb/vision-quickstart/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joonb14/MLKitGazeDataCollectingButton/bc902739e4a2a70c25716c8073edc0d1678a23eb/vision-quickstart/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joonb14/MLKitGazeDataCollectingButton/bc902739e4a2a70c25716c8073edc0d1678a23eb/vision-quickstart/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/values/arrays.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | @string/pref_entries_face_detector_landmark_mode_no_landmarks
6 | @string/pref_entries_face_detector_landmark_mode_all_landmarks
7 |
8 |
9 |
10 | @string/pref_entry_values_face_detector_landmark_mode_no_landmarks
11 | @string/pref_entry_values_face_detector_landmark_mode_all_landmarks
12 |
13 |
14 |
15 | @string/pref_entries_face_detector_contour_mode_no_contours
16 | @string/pref_entries_face_detector_contour_mode_all_contours
17 |
18 |
19 |
20 | @string/pref_entry_values_face_detector_contour_mode_no_contours
21 | @string/pref_entry_values_face_detector_contour_mode_all_contours
22 |
23 |
24 |
25 | @string/pref_entries_face_detector_classification_mode_no_classifications
26 | @string/pref_entries_face_detector_classification_mode_all_classifications
27 |
28 |
29 |
30 | @string/pref_entry_values_face_detector_classification_mode_no_classifications
31 | @string/pref_entry_values_face_detector_classification_mode_all_classifications
32 |
33 |
34 |
35 | @string/pref_entries_face_detector_performance_mode_fast
36 | @string/pref_entries_face_detector_performance_mode_accurate
37 |
38 |
39 |
40 | @string/pref_entry_values_face_detector_performance_mode_fast
41 | @string/pref_entry_values_face_detector_performance_mode_accurate
42 |
43 |
44 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #4CAF50
4 | #388E3C
5 | #7C4DFF
6 |
7 | #78909C
8 | #E6E6E6
9 | #689F38
10 | #BFBFBF
11 | #FFFFFF
12 | #4286f4
13 | #f44242
14 |
15 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 16dp
4 | 16dp
5 | 10dp
6 |
7 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | MLKit-DataCollector
4 | Run the ML Kit quickstart written in Java
5 | Run the ML Kit quickstart written in Kotlin
6 | OK
7 | Access to the camera is needed for detection
8 | This application cannot run because it does not have the camera permission. The application will now exit.
9 | Face detector dependencies cannot be downloaded due to low device storage
10 | Front
11 | Back
12 | Vision detectors demo with live camera preview
13 | Vision detectors demo with a still image
14 | Vision detectors demo with live preview using CameraX. Note that CameraX is only supported on API 21+
15 | Download error
16 | Start over
17 | Settings
18 | Select image
19 |
20 |
21 | Live preview settings
22 | Still image settings
23 | CameraX live preview settings
24 | Face Detection
25 | Object Detection / Custom Object Detection
26 | AutoML Image Labeling
27 |
28 |
29 | pckc
30 | Camera
31 | rcpvs
32 | rcpts
33 | fcpvs
34 | fcpts
35 | ctas
36 | clv
37 | Rear camera preview size
38 | Front camera preview size
39 | CameraX target analysis resolution
40 | Enable live viewport
41 | Do not block camera preview drawing on detection
42 |
43 |
44 | Enable multiple objects
45 | lpodemo
46 | siodemo
47 |
48 |
49 | Enable classification
50 | lpodec
51 | siodec
52 |
53 |
54 | Landmark mode
55 | lpfdlm
56 | No landmarks
57 | All landmarks
58 |
59 | 1
60 | 2
61 |
62 |
63 | Contour mode
64 | lpfdcm
65 | No contours
66 | All contours
67 |
68 | 1
69 | 2
70 |
71 |
72 | Classification mode
73 | lpfdcfm
74 | No classifications
75 | All classifications
76 |
77 | 1
78 | 2
79 |
80 |
81 | Performance mode
82 | lpfdpm
83 | Fast
84 | Accurate
85 |
86 | 1
87 | 2
88 |
89 |
90 | Face tracking
91 | lpfdft
92 |
93 |
94 | Minimum face size
95 | lpfdmfs
96 | Proportion of the head width to the image width, and the valid value range is [0.0, 1.0]
97 | Minimum face size must be a float value and in the range [0.0, 1.0]
98 | Count:
99 | FPS: / Latency:
100 |
101 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/xml/preference_live_preview.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
8 |
9 |
13 |
14 |
18 |
19 |
23 |
24 |
30 |
31 |
32 |
33 |
35 |
36 |
41 |
42 |
47 |
48 |
49 |
50 |
52 |
53 |
60 |
61 |
68 |
69 |
76 |
77 |
84 |
85 |
90 |
91 |
97 |
98 |
99 |
100 |
--------------------------------------------------------------------------------
/vision-quickstart/app/src/main/res/xml/preference_still_image.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
6 |
7 |
12 |
13 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/vision-quickstart/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 |
5 | repositories {
6 | mavenLocal()
7 | google()
8 | jcenter()
9 | }
10 | dependencies {
11 | classpath 'com.android.tools.build:gradle:3.6.3'
12 | classpath 'com.google.gms:google-services:4.3.3'
13 | classpath 'org.jetbrains.kotlin:kotlin-gradle-plugin:1.3.72'
14 |
15 | // NOTE: Do not place your application dependencies here; they belong
16 | // in the individual module build.gradle files
17 | }
18 | }
19 |
20 | allprojects {
21 | repositories {
22 | mavenLocal()
23 | google()
24 | jcenter()
25 | }
26 | }
27 |
28 | task clean(type: Delete) {
29 | delete rootProject.buildDir
30 | }
31 |
--------------------------------------------------------------------------------
/vision-quickstart/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 | # IDE (e.g. Android Studio) users:
3 | # Gradle settings configured through the IDE *will override*
4 | # any settings specified in this file.
5 | # For more details on how to configure your build environment visit
6 | # http://www.gradle.org/docs/current/userguide/build_environment.html
7 | # Specifies the JVM arguments used for the daemon process.
8 | # The setting is particularly useful for tweaking memory settings.
9 | org.gradle.jvmargs=-Xmx1536m
10 | # When configured, Gradle will run in incubating parallel mode.
11 | # This option should only be used with decoupled projects. More details, visit
12 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
13 | # org.gradle.parallel=true
14 | # AndroidX package structure to make it clearer which packages are bundled with the
15 | # Android operating system, and which are packaged with your app's APK
16 | # https://developer.android.com/topic/libraries/support-library/androidx-rn
17 | android.useAndroidX=true
18 | # Automatically convert third-party libraries to use AndroidX
19 | android.enableJetifier=true
20 |
21 |
--------------------------------------------------------------------------------
/vision-quickstart/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/joonb14/MLKitGazeDataCollectingButton/bc902739e4a2a70c25716c8073edc0d1678a23eb/vision-quickstart/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/vision-quickstart/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Tue May 26 20:17:39 PDT 2020
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
7 |
--------------------------------------------------------------------------------
/vision-quickstart/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/vision-quickstart/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/vision-quickstart/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name='ML Kit Vision Quickstart'
2 | include ':app'
3 |
--------------------------------------------------------------------------------